Compare commits
1145 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0c517e5351 | ||
|
|
5fe435048b | ||
|
|
a722bfd099 | ||
|
|
f3d99a5fdb | ||
|
|
79de0989d5 | ||
|
|
ca334770b7 | ||
|
|
1071357505 | ||
|
|
f32dfe0278 | ||
|
|
278cedf3d0 | ||
|
|
45a6b5a436 | ||
|
|
611707a3d1 | ||
|
|
b4d20d9b9a | ||
|
|
ecc4553e67 | ||
|
|
ef790ca6f4 | ||
|
|
2d88638da7 | ||
|
|
91ba0bd0af | ||
|
|
0e2e5f3413 | ||
|
|
7a99dcf693 | ||
|
|
4e78ca5d82 | ||
|
|
83de38e56f | ||
|
|
f4be2e4fe7 | ||
|
|
16b0f7f9ee | ||
|
|
27721aef71 | ||
|
|
329a317fdf | ||
|
|
daad634894 | ||
|
|
4444925dea | ||
|
|
9c1ae96d33 | ||
|
|
b1b6d50af6 | ||
|
|
4c697ab50e | ||
|
|
7450088674 | ||
|
|
b141671d90 | ||
|
|
2ab2d9127d | ||
|
|
278453451e | ||
|
|
91ee7972d2 | ||
|
|
d1f59a6590 | ||
|
|
cdecf8904e | ||
|
|
3d16266c69 | ||
|
|
191676b011 | ||
|
|
ea07b261ad | ||
|
|
e86f737320 | ||
|
|
9a8562c624 | ||
|
|
145c41f462 | ||
|
|
1d38367e79 | ||
|
|
f58c2d0a7b | ||
|
|
ca7a6fe1f1 | ||
|
|
95042f73c7 | ||
|
|
678bcb171a | ||
|
|
8da7e505c0 | ||
|
|
72ce4405d5 | ||
|
|
d8e3d91a79 | ||
|
|
edaaedae36 | ||
|
|
da8246d8c3 | ||
|
|
5243ae80b4 | ||
|
|
3aca576a0d | ||
|
|
0bb9d91eae | ||
|
|
8825d6b15f | ||
|
|
1f73ca21bf | ||
|
|
2db0854eef | ||
|
|
f66e589312 | ||
|
|
5c9ad3068b | ||
|
|
d07b786da6 | ||
|
|
da5d32ed89 | ||
|
|
55dadea98e | ||
|
|
77fbbe95ff | ||
|
|
1aeb95396b | ||
|
|
48dfbbebc6 | ||
|
|
ccf3a9f3b2 | ||
|
|
c0cb97bd42 | ||
|
|
8efb97ef4e | ||
|
|
d8cda7fc1b | ||
|
|
ee9f1e7b70 | ||
|
|
92dd70098c | ||
|
|
4bea4c69a4 | ||
|
|
7734325b71 | ||
|
|
186ae844bc | ||
|
|
c9bdf1c184 | ||
|
|
13ffe468df | ||
|
|
a090cf7a10 | ||
|
|
b7250477b5 | ||
|
|
dfd16c5187 | ||
|
|
4afd6b78af | ||
|
|
398f6e5b0c | ||
|
|
d7f7d839f8 | ||
|
|
49a843dcdd | ||
|
|
ec045e81f2 | ||
|
|
d8a7828cb5 | ||
|
|
e32cb12ad7 | ||
|
|
ee2847cfea | ||
|
|
22e00a7080 | ||
|
|
cbe567069f | ||
|
|
53baed0389 | ||
|
|
39cb9589c8 | ||
|
|
bde03f3574 | ||
|
|
8f31d150fd | ||
|
|
453dbbb031 | ||
|
|
421754fff6 | ||
|
|
05ec5feacf | ||
|
|
639d9b27c8 | ||
|
|
2c99d027f3 | ||
|
|
4f176682dc | ||
|
|
13ef41bd42 | ||
|
|
3c6ba80323 | ||
|
|
fcfa8dfac2 | ||
|
|
5b73e9aee6 | ||
|
|
9ead264300 | ||
|
|
a617eda321 | ||
|
|
c913fa65b2 | ||
|
|
497c8c84e5 | ||
|
|
c58a94d497 | ||
|
|
6a853d1fa2 | ||
|
|
c30c58e564 | ||
|
|
3b7a4c6b6b | ||
|
|
978cdf2514 | ||
|
|
33609616aa | ||
|
|
006f6c998d | ||
|
|
05fd69eae4 | ||
|
|
b6b8719efa | ||
|
|
7e8b9549a1 | ||
|
|
032f78e0f5 | ||
|
|
4059bc9ec6 | ||
|
|
b85cd0925a | ||
|
|
f20254217f | ||
|
|
9424b763ca | ||
|
|
08ae3f8771 | ||
|
|
68f0cf419b | ||
|
|
26b12512b1 | ||
|
|
b98afadd5c | ||
|
|
499bd552a1 | ||
|
|
0090e27699 | ||
|
|
e568b3000e | ||
|
|
7ae8b46ea7 | ||
|
|
ffb903841b | ||
|
|
72ee904e67 | ||
|
|
222e1968d8 | ||
|
|
1df517afd3 | ||
|
|
cc4cea1a41 | ||
|
|
e8868d7ebf | ||
|
|
7d9a9033f9 | ||
|
|
87322d7732 | ||
|
|
08c3d6e84b | ||
|
|
b50325c3a3 | ||
|
|
12cdcf7681 | ||
|
|
34192349be | ||
|
|
153d0bb12a | ||
|
|
20092dadad | ||
|
|
6844f8f2bf | ||
|
|
58f2c6a5fc | ||
|
|
5f10d86f04 | ||
|
|
1120e823ed | ||
|
|
301b384a02 | ||
|
|
e4a26164de | ||
|
|
56d3e8893f | ||
|
|
99336908f0 | ||
|
|
090325af35 | ||
|
|
485be6c3fd | ||
|
|
faa9d36c34 | ||
|
|
ea8e108cdf | ||
|
|
100f5422f6 | ||
|
|
15c716e53b | ||
|
|
75e77c5e54 | ||
|
|
de4fdc07e0 | ||
|
|
51edb2fa14 | ||
|
|
bd995089a8 | ||
|
|
a90dd2ad1e | ||
|
|
cd44151e16 | ||
|
|
5715ba1a9a | ||
|
|
6f4a1c1751 | ||
|
|
2a1b1eb1a4 | ||
|
|
20c597b1d7 | ||
|
|
9dc1989507 | ||
|
|
681cb1b978 | ||
|
|
332a9fac5a | ||
|
|
d118f4a3f0 | ||
|
|
7620cd02f0 | ||
|
|
fdfd7bd82a | ||
|
|
01c17e10cc | ||
|
|
bed03b301b | ||
|
|
ef48762da5 | ||
|
|
7978f3f0e6 | ||
|
|
8d5fad72bf | ||
|
|
04db521851 | ||
|
|
26d27be161 | ||
|
|
1259d06302 | ||
|
|
4db3f366ef | ||
|
|
1c52c5b673 | ||
|
|
a6a885d4f4 | ||
|
|
b11066cef1 | ||
|
|
1941b0c3ed | ||
|
|
9a47fc747f | ||
|
|
a425d3a55b | ||
|
|
f03a0f6e73 | ||
|
|
74d5724092 | ||
|
|
3df21fcaa3 | ||
|
|
fbb5de6740 | ||
|
|
f7f9096c6e | ||
|
|
d5a8e1725d | ||
|
|
34413747d5 | ||
|
|
18de626919 | ||
|
|
bef192084f | ||
|
|
54eef16bfb | ||
|
|
7be49dba69 | ||
|
|
218a6af62a | ||
|
|
f0315d5c70 | ||
|
|
f82e04201b | ||
|
|
f41231f017 | ||
|
|
ae1fb76d13 | ||
|
|
fa0023223f | ||
|
|
1c80fd17fd | ||
|
|
48cb347198 | ||
|
|
ffd583ed11 | ||
|
|
97bbca3aef | ||
|
|
06fd92fd27 | ||
|
|
c0e05a7572 | ||
|
|
2abe6eec84 | ||
|
|
0b6e73840a | ||
|
|
1707fe8990 | ||
|
|
9335b0779c | ||
|
|
277b521fad | ||
|
|
af1b634d6d | ||
|
|
cc19008961 | ||
|
|
be304e37b4 | ||
|
|
0a34a4a7ad | ||
|
|
898564c8d8 | ||
|
|
708638b97f | ||
|
|
458e857956 | ||
|
|
ac62bcb7ba | ||
|
|
6b1c50b051 | ||
|
|
d4a5376f73 | ||
|
|
71b34aa3bd | ||
|
|
6b4d8b18e0 | ||
|
|
66b2013d23 | ||
|
|
dc86993c84 | ||
|
|
00a5c13001 | ||
|
|
0e34923114 | ||
|
|
011164bc32 | ||
|
|
0a43ce9ced | ||
|
|
d925fb38ce | ||
|
|
3dc617277f | ||
|
|
096af09fc4 | ||
|
|
f97f9b857b | ||
|
|
5c0829b052 | ||
|
|
aa999b34e2 | ||
|
|
0a06c291e2 | ||
|
|
4bbaf5f89c | ||
|
|
f88e070455 | ||
|
|
5c980c31be | ||
|
|
9eee37bc68 | ||
|
|
a4927477fb | ||
|
|
d0a6c6a2f3 | ||
|
|
92757c5d8c | ||
|
|
0cc7765f2b | ||
|
|
ee1ef4ff56 | ||
|
|
b5eed5e043 | ||
|
|
62a253f571 | ||
|
|
080a23dd8c | ||
|
|
c90129957e | ||
|
|
0fa717fe11 | ||
|
|
e72766a5bf | ||
|
|
104a684514 | ||
|
|
5a809d7e31 | ||
|
|
ce3f6837e9 | ||
|
|
3ffd2a745b | ||
|
|
6b9c07b809 | ||
|
|
d4e2722586 | ||
|
|
1343767295 | ||
|
|
4b4bfc052f | ||
|
|
f7539eb931 | ||
|
|
efcfecca10 | ||
|
|
6a3735822d | ||
|
|
5bbcc7f2f7 | ||
|
|
400f1d37bf | ||
|
|
985b774378 | ||
|
|
14cbcb4af6 | ||
|
|
18ce86407d | ||
|
|
d0ee203265 | ||
|
|
fc26fe0ac0 | ||
|
|
0e0cbe3517 | ||
|
|
8e2cb6d416 | ||
|
|
73a6e68e03 | ||
|
|
48f9cb09af | ||
|
|
1c83f489d1 | ||
|
|
f6d78a0044 | ||
|
|
e60a7df9a2 | ||
|
|
feaf2da834 | ||
|
|
bf8703deae | ||
|
|
1997b7b2d9 | ||
|
|
666b938550 | ||
|
|
4e8f546502 | ||
|
|
5e9f3586cd | ||
|
|
69ef26dab0 | ||
|
|
163231d307 | ||
|
|
e530750fc6 | ||
|
|
c3997c9f26 | ||
|
|
6d7defa79e | ||
|
|
bc232582df | ||
|
|
286affea38 | ||
|
|
5f5c9e2eb3 | ||
|
|
de5eaf1c2c | ||
|
|
6f3755684e | ||
|
|
6950daca9a | ||
|
|
b4de83e348 | ||
|
|
83a1a32a5e | ||
|
|
3cea4804f8 | ||
|
|
8ce32003d7 | ||
|
|
d3191490d9 | ||
|
|
f6d5ba56b1 | ||
|
|
998ca64c1e | ||
|
|
eaa33744a6 | ||
|
|
c0a47ca999 | ||
|
|
22bfab840d | ||
|
|
88b7f8ac1e | ||
|
|
6fbe4404f5 | ||
|
|
d5e340d0f6 | ||
|
|
94954eeba3 | ||
|
|
b2911b2eba | ||
|
|
c398f22e76 | ||
|
|
063f6c1d5a | ||
|
|
2ca691d3b8 | ||
|
|
f2086b3a90 | ||
|
|
bb15b744c8 | ||
|
|
f1e99de59a | ||
|
|
e0999c7ba4 | ||
|
|
89c5aac9ed | ||
|
|
4a7c9a6050 | ||
|
|
e94ce3102e | ||
|
|
0225faddbb | ||
|
|
fb10d3a5be | ||
|
|
0613e3ab12 | ||
|
|
b21edde1bc | ||
|
|
1953a8ecb7 | ||
|
|
c84b592543 | ||
|
|
53f905b88b | ||
|
|
d057a5076c | ||
|
|
bcb9c6ccb0 | ||
|
|
96f86adfb8 | ||
|
|
de89f75707 | ||
|
|
b2307d911e | ||
|
|
35a558ec01 | ||
|
|
2e97c0a5fb | ||
|
|
7d9575b7fd | ||
|
|
321e0ced2a | ||
|
|
a697eb8530 | ||
|
|
4b37c1963b | ||
|
|
8f2687e390 | ||
|
|
c0f799a807 | ||
|
|
abc5bd98b4 | ||
|
|
a51893c849 | ||
|
|
79e218d00a | ||
|
|
9ae20a6bec | ||
|
|
2f739ff0b3 | ||
|
|
e0a8f4df0d | ||
|
|
347d7c07ef | ||
|
|
42e3cf821b | ||
|
|
574f1be067 | ||
|
|
cbde0e0286 | ||
|
|
11012c6be1 | ||
|
|
432bd83188 | ||
|
|
f8adfa9873 | ||
|
|
1efd226f75 | ||
|
|
ba1bb95935 | ||
|
|
f5e740f2ec | ||
|
|
98be7b227f | ||
|
|
f07cfd4f51 | ||
|
|
029d81122b | ||
|
|
26eb7ecdb5 | ||
|
|
3cf3ea4e20 | ||
|
|
84a2937d31 | ||
|
|
1d1f3bde96 | ||
|
|
329d81ec64 | ||
|
|
c725f50f07 | ||
|
|
463560c929 | ||
|
|
c7412deb77 | ||
|
|
9ed6c78466 | ||
|
|
9a1bd9637c | ||
|
|
87fd18bee1 | ||
|
|
f3dced3199 | ||
|
|
b36989e8e4 | ||
|
|
95c45b90a8 | ||
|
|
0bdc132dcf | ||
|
|
e450391d9b | ||
|
|
38f4bf4e28 | ||
|
|
109b0ea78b | ||
|
|
5061feb7bc | ||
|
|
52480e0bc4 | ||
|
|
109dd45b56 | ||
|
|
c8da513d97 | ||
|
|
06e7e40b15 | ||
|
|
f3d6fcb52b | ||
|
|
2decae6586 | ||
|
|
6766041328 | ||
|
|
2d96cec464 | ||
|
|
843d229c3d | ||
|
|
3d5bcd9d75 | ||
|
|
b391dd6a3f | ||
|
|
c126cf2bc1 | ||
|
|
fcb5beb617 | ||
|
|
dad141726c | ||
|
|
48637188a7 | ||
|
|
5d063e8449 | ||
|
|
2b8a03e93c | ||
|
|
213665cea2 | ||
|
|
d230431227 | ||
|
|
2040c9fe41 | ||
|
|
b49821a9f4 | ||
|
|
1701b0afed | ||
|
|
7785431152 | ||
|
|
b325233b2d | ||
|
|
3baf29a6b7 | ||
|
|
2510216f21 | ||
|
|
e4d7ae9718 | ||
|
|
596fabda5d | ||
|
|
e95f34ae13 | ||
|
|
169cb9424f | ||
|
|
536576518e | ||
|
|
b034e972b0 | ||
|
|
1bf36b6461 | ||
|
|
58ab269d3d | ||
|
|
7cbb73be7a | ||
|
|
cdbf81c51c | ||
|
|
cc54368658 | ||
|
|
d81e4dbe99 | ||
|
|
4fd075aafa | ||
|
|
a789649d97 | ||
|
|
1817d014ef | ||
|
|
8f1f0f5475 | ||
|
|
b406a2430d | ||
|
|
431edb0e67 | ||
|
|
5b96944940 | ||
|
|
8a6aaf4e2d | ||
|
|
b30c4275ef | ||
|
|
7f7ec625c8 | ||
|
|
010f1f2bd1 | ||
|
|
ce32089cc4 | ||
|
|
f0ee2e14fb | ||
|
|
9b8a490061 | ||
|
|
f4e8ab27fa | ||
|
|
4da49c8d59 | ||
|
|
3960093231 | ||
|
|
3003bdd507 | ||
|
|
7909b30b4b | ||
|
|
915382f2c7 | ||
|
|
905a34188d | ||
|
|
c907d690b7 | ||
|
|
261cab8450 | ||
|
|
7c2137fcda | ||
|
|
2c3cb7f516 | ||
|
|
dd4d903f69 | ||
|
|
a823b8f70c | ||
|
|
3d7aa7a4b9 | ||
|
|
3e8bff03e7 | ||
|
|
584082a1df | ||
|
|
49644ce18a | ||
|
|
7432ef9e19 | ||
|
|
ca40e39da4 | ||
|
|
7e271129c7 | ||
|
|
b5f95a351c | ||
|
|
5e10befe28 | ||
|
|
98ebb095cc | ||
|
|
c3b5b47b22 | ||
|
|
3c133c36bd | ||
|
|
9ceb2e6084 | ||
|
|
2b322b638e | ||
|
|
0d298d743a | ||
|
|
ec5d80585d | ||
|
|
88e71e12b1 | ||
|
|
63bb72caab | ||
|
|
240c7913e9 | ||
|
|
582629a996 | ||
|
|
939dd17910 | ||
|
|
d58b1a7de7 | ||
|
|
a938895e5e | ||
|
|
9d9111e4d8 | ||
|
|
79dbfce36f | ||
|
|
e7f162e5e5 | ||
|
|
f23d10f000 | ||
|
|
67dd86988b | ||
|
|
e16ab324f4 | ||
|
|
317926f379 | ||
|
|
fe61ca2c97 | ||
|
|
2940686fba | ||
|
|
2ef2a561ef | ||
|
|
7ae31def4f | ||
|
|
8f038d7d26 | ||
|
|
b9a2652013 | ||
|
|
dce4166bc8 | ||
|
|
3c5f509bc7 | ||
|
|
182cea3385 | ||
|
|
0ba1ba55bd | ||
|
|
45440eec1d | ||
|
|
49fad14920 | ||
|
|
0f1e31643d | ||
|
|
c17c291b1c | ||
|
|
666641f990 | ||
|
|
268f99f8ac | ||
|
|
216f048466 | ||
|
|
ed8d5f1a80 | ||
|
|
27a1ef25a5 | ||
|
|
ecbc8165d5 | ||
|
|
128594584e | ||
|
|
8b798013c1 | ||
|
|
8c19c2c2e9 | ||
|
|
6f0fee4c43 | ||
|
|
a5b4a7caad | ||
|
|
c1b9db19c6 | ||
|
|
40f88faf37 | ||
|
|
81e6228ab3 | ||
|
|
157951343b | ||
|
|
9325eff6fc | ||
|
|
8c8f366e0f | ||
|
|
542221a38d | ||
|
|
aa7faaaa72 | ||
|
|
c96db31006 | ||
|
|
c66de5931c | ||
|
|
fdec13ef81 | ||
|
|
9aee44f363 | ||
|
|
2caa2d5b32 | ||
|
|
66c7f44bea | ||
|
|
99e4a79cb8 | ||
|
|
f7f0df60ec | ||
|
|
e012262301 | ||
|
|
5676155e4e | ||
|
|
d98bfa5bed | ||
|
|
7ccac8053b | ||
|
|
40cf46fe7d | ||
|
|
f5c05e1283 | ||
|
|
330e47f0b7 | ||
|
|
1e9378b429 | ||
|
|
af58fb5fa3 | ||
|
|
3d6fb2383a | ||
|
|
2407798d2e | ||
|
|
f9194bd28c | ||
|
|
816f020cc3 | ||
|
|
9191aa32df | ||
|
|
0a6395507e | ||
|
|
eff68c601c | ||
|
|
3f5540f35b | ||
|
|
e5f5030e9c | ||
|
|
df39d37ca9 | ||
|
|
bea81d0449 | ||
|
|
d261845fc6 | ||
|
|
eac0b295d2 | ||
|
|
f4eefcea13 | ||
|
|
268715711f | ||
|
|
c5f70b4401 | ||
|
|
912caf84a6 | ||
|
|
448fa4d35f | ||
|
|
cd8d4c357d | ||
|
|
bdcc6f861d | ||
|
|
d9d6b7b151 | ||
|
|
4517692c20 | ||
|
|
609b9e3369 | ||
|
|
be8ca110f4 | ||
|
|
f1da37dd12 | ||
|
|
ecca51dbdd | ||
|
|
d19015579c | ||
|
|
6179ca5668 | ||
|
|
6c70db31bd | ||
|
|
b0790d7010 | ||
|
|
eb8158673f | ||
|
|
409f17980f | ||
|
|
654ef06682 | ||
|
|
bca95a4972 | ||
|
|
4df065d8d5 | ||
|
|
a358477cda | ||
|
|
44d6db8c47 | ||
|
|
49e627d2fd | ||
|
|
c3a8d93eb4 | ||
|
|
7a648c6465 | ||
|
|
5c1c5b5b0d | ||
|
|
811da4bac5 | ||
|
|
23b2fbef45 | ||
|
|
f1b52b495a | ||
|
|
0bff3891bd | ||
|
|
a7b1658ee1 | ||
|
|
c274bbcddf | ||
|
|
57f32e5360 | ||
|
|
b4ecf8e28e | ||
|
|
06cfba8c7e | ||
|
|
5603834282 | ||
|
|
33134d4529 | ||
|
|
bf57b6e4a2 | ||
|
|
b63f87a5b5 | ||
|
|
68e3612d36 | ||
|
|
616a826b8a | ||
|
|
09d62d76b2 | ||
|
|
deab7794d6 | ||
|
|
bc892059a1 | ||
|
|
6d0fdc7510 | ||
|
|
dd1d4b86d2 | ||
|
|
65eeea9453 | ||
|
|
b2b202586c | ||
|
|
49341260dc | ||
|
|
dfcef81001 | ||
|
|
a834a6c874 | ||
|
|
ad5188a280 | ||
|
|
53c2a0c724 | ||
|
|
3a8cc31f1b | ||
|
|
0f6b452d17 | ||
|
|
e994bcf737 | ||
|
|
5432700b0d | ||
|
|
7b7534c952 | ||
|
|
b4570545ef | ||
|
|
6478db13e6 | ||
|
|
1d7ddcc10d | ||
|
|
30508c6c2c | ||
|
|
18f43c5757 | ||
|
|
52498efd14 | ||
|
|
40cb721d16 | ||
|
|
1c2699b16e | ||
|
|
d98a016087 | ||
|
|
bc5b6db031 | ||
|
|
4a4f252ad8 | ||
|
|
c81dd1a478 | ||
|
|
31016156be | ||
|
|
dad352f05e | ||
|
|
95e94618d8 | ||
|
|
045a401cd7 | ||
|
|
d5d7e2edbc | ||
|
|
64e1a6ec7e | ||
|
|
2221b425ad | ||
|
|
86b2ccae94 | ||
|
|
0bd67a54ab | ||
|
|
306f254218 | ||
|
|
449add88a6 | ||
|
|
941ba8d689 | ||
|
|
813335f8eb | ||
|
|
f0cef2f42f | ||
|
|
e767eb38f4 | ||
|
|
71cbef4c13 | ||
|
|
834ad1ef84 | ||
|
|
753e6661bc | ||
|
|
1ce19f5444 | ||
|
|
0de1230a1a | ||
|
|
5ff304324d | ||
|
|
37f7ef41f2 | ||
|
|
4022284059 | ||
|
|
dde6a2eb7d | ||
|
|
1083ed4e40 | ||
|
|
c111825b1e | ||
|
|
e36c22d29a | ||
|
|
c192931015 | ||
|
|
ae895a4aec | ||
|
|
301ad7e07d | ||
|
|
cc93616019 | ||
|
|
493d9875d4 | ||
|
|
3fb8d05f0d | ||
|
|
88066563d3 | ||
|
|
9e3590352c | ||
|
|
86ad52639f | ||
|
|
c00be946a5 | ||
|
|
322aeeb552 | ||
|
|
501d4cafa9 | ||
|
|
e556fb3e3a | ||
|
|
bb930297d6 | ||
|
|
83f10167e5 | ||
|
|
d47bb21389 | ||
|
|
4dedff00b8 | ||
|
|
2414dad656 | ||
|
|
8f98cb4860 | ||
|
|
6e96b7e00a | ||
|
|
04acce4916 | ||
|
|
1b7a304149 | ||
|
|
00287b27ab | ||
|
|
8f18b7fd6c | ||
|
|
dde7771dc6 | ||
|
|
4165184e42 | ||
|
|
f3c8211ba4 | ||
|
|
e4953a756a | ||
|
|
471ac63a3a | ||
|
|
f358eda5c5 | ||
|
|
035130ecdc | ||
|
|
908fc8573a | ||
|
|
ca0e86757b | ||
|
|
d4153607c9 | ||
|
|
99d0a0845d | ||
|
|
5fae5a9ee0 | ||
|
|
b5a75be1db | ||
|
|
93ad5433e4 | ||
|
|
eb5e0e0b9b | ||
|
|
5ead8de0bb | ||
|
|
dc90f58391 | ||
|
|
ca43c71cf5 | ||
|
|
931a311c48 | ||
|
|
9f9d7da1ce | ||
|
|
d00e8d3b0f | ||
|
|
d4124bae0c | ||
|
|
c062eb751e | ||
|
|
e273a594ba | ||
|
|
ebb724e687 | ||
|
|
58eb2d6f63 | ||
|
|
b14bf8a96d | ||
|
|
01987f1b51 | ||
|
|
0a35358e8d | ||
|
|
5bacb85c33 | ||
|
|
6933ac523f | ||
|
|
ba9120b417 | ||
|
|
3e71f5810f | ||
|
|
296a0edae4 | ||
|
|
cdf5602dfb | ||
|
|
e214f719c9 | ||
|
|
08fbcf5158 | ||
|
|
10ca515ac5 | ||
|
|
e59a14852b | ||
|
|
c696b4f2f2 | ||
|
|
553153ba92 | ||
|
|
9d2bcf807e | ||
|
|
422ac9befe | ||
|
|
793f641af6 | ||
|
|
0ea5f5d584 | ||
|
|
c024b846c3 | ||
|
|
37b3fde4e1 | ||
|
|
e89ef5de25 | ||
|
|
06cac44d02 | ||
|
|
488fe28ad3 | ||
|
|
50f474ae92 | ||
|
|
78ca2ffaba | ||
|
|
911f5bc78e | ||
|
|
b227427916 | ||
|
|
b5f77fd6e7 | ||
|
|
4fe966f534 | ||
|
|
bcce0838dd | ||
|
|
76e43bcb89 | ||
|
|
c666be32f4 | ||
|
|
2d850795d8 | ||
|
|
784982718e | ||
|
|
2f8d263c9c | ||
|
|
b214163af3 | ||
|
|
a4fe1000c2 | ||
|
|
9a275fa4ed | ||
|
|
f2c83f51de | ||
|
|
fb76b72787 | ||
|
|
bec6c4511c | ||
|
|
77b9988d05 | ||
|
|
3e49f93816 | ||
|
|
32f6932faf | ||
|
|
db76e1d65f | ||
|
|
0136ba504b | ||
|
|
459e026f16 | ||
|
|
b03a723c3e | ||
|
|
7562636151 | ||
|
|
3acc65ca0d | ||
|
|
fde0f4ca0a | ||
|
|
73cab2af2d | ||
|
|
94d2198b30 | ||
|
|
88a67c8703 | ||
|
|
ccf9b1291e | ||
|
|
47dae716ae | ||
|
|
ea26e1c72f | ||
|
|
e6d79f0673 | ||
|
|
bf7002d0ae | ||
|
|
cbae145da5 | ||
|
|
a728502988 | ||
|
|
9419f74d13 | ||
|
|
50523c1566 | ||
|
|
e9ef3e270d | ||
|
|
9078f7beef | ||
|
|
ac2c5abb09 | ||
|
|
a02235eb2b | ||
|
|
7658761940 | ||
|
|
3f03f076cf | ||
|
|
141e6de88f | ||
|
|
88bbfe5961 | ||
|
|
d60569b6d6 | ||
|
|
91165e80ba | ||
|
|
a15f9552eb | ||
|
|
501d225f93 | ||
|
|
4942f244da | ||
|
|
1be5c9af56 | ||
|
|
fab7abb85b | ||
|
|
818d383f2e | ||
|
|
5fffa32630 | ||
|
|
19d5feb483 | ||
|
|
199fc6be94 | ||
|
|
865729c033 | ||
|
|
6aa9071e24 | ||
|
|
6db3fc2eea | ||
|
|
65ffaaa67c | ||
|
|
440467e304 | ||
|
|
07f2c9c1c6 | ||
|
|
74422dd000 | ||
|
|
78d663bbb4 | ||
|
|
6cc2fa5306 | ||
|
|
db0a58ea04 | ||
|
|
eba1e69e64 | ||
|
|
a4e7877033 | ||
|
|
c62260ab02 | ||
|
|
b58550bb79 | ||
|
|
d02c7df75c | ||
|
|
6dbebf4806 | ||
|
|
1019660f6a | ||
|
|
bfd11060ec | ||
|
|
7b6dccf5ef | ||
|
|
d76eccad1c | ||
|
|
6f0ac7ae45 | ||
|
|
8f17ed1eb9 | ||
|
|
9737e4a24d | ||
|
|
805c17565b | ||
|
|
8e9d1cdd18 | ||
|
|
1d3300fb34 | ||
|
|
26d93cf3be | ||
|
|
4e3183ee65 | ||
|
|
8370ec58c1 | ||
|
|
0d1bcd3c13 | ||
|
|
a456b968af | ||
|
|
2b5562e376 | ||
|
|
3a242dc296 | ||
|
|
5aff9b6fdb | ||
|
|
35b3216fee | ||
|
|
fac6fe0c2e | ||
|
|
6f8020e30d | ||
|
|
efbc2a5715 | ||
|
|
0bee3901b6 | ||
|
|
7106c68032 | ||
|
|
be707536bd | ||
|
|
d18eebf97d | ||
|
|
60cc7afc72 | ||
|
|
b7949d2e69 | ||
|
|
48175d5b8e | ||
|
|
cb8fd6597d | ||
|
|
4754ac2bd1 | ||
|
|
3cca77e748 | ||
|
|
4ec1aaabe6 | ||
|
|
0baacbef98 | ||
|
|
70bc70ec97 | ||
|
|
be2b59431f | ||
|
|
c43193b17d | ||
|
|
2147af0e6a | ||
|
|
d8261b3359 | ||
|
|
ed2524cbbb | ||
|
|
76a4ae2aae | ||
|
|
2666e70706 | ||
|
|
19cf66be0f | ||
|
|
f3d1bd25ad | ||
|
|
3ed6d4bc7a | ||
|
|
1ecb26a3fb | ||
|
|
8b26ddcd2c | ||
|
|
47786dcb8c | ||
|
|
5d91d6a885 | ||
|
|
559f7c2683 | ||
|
|
3b4da70c85 | ||
|
|
95199bd325 | ||
|
|
a8887b211e | ||
|
|
15488fbfd0 | ||
|
|
9a758fc3dc | ||
|
|
edc9c3f01c | ||
|
|
83d769251d | ||
|
|
bd66333147 | ||
|
|
2228678520 | ||
|
|
9a042118f9 | ||
|
|
787ee454ff | ||
|
|
32a4587bd3 | ||
|
|
474050ba6b | ||
|
|
34657fd675 | ||
|
|
18747db17f | ||
|
|
dccea9434a | ||
|
|
85bf92ad2f | ||
|
|
40b07572a9 | ||
|
|
94afd4f1e2 | ||
|
|
1218944680 | ||
|
|
9147e3a0bd | ||
|
|
ed25212654 | ||
|
|
ce5fe61e67 | ||
|
|
9e9266b92a | ||
|
|
e329d72e8b | ||
|
|
92ec3fc060 | ||
|
|
3afb3a905c | ||
|
|
b3e6f04b30 | ||
|
|
0a29f51862 | ||
|
|
7149d407cd | ||
|
|
fbb17df916 | ||
|
|
34b317da7a | ||
|
|
c161829803 | ||
|
|
eda69dc881 | ||
|
|
500e5c41ff | ||
|
|
7c2ae129d7 | ||
|
|
c49afa7caa | ||
|
|
534c157809 | ||
|
|
cae4d3fae3 | ||
|
|
f59500c809 | ||
|
|
b75bf255ba | ||
|
|
1588242876 | ||
|
|
00eff651e6 | ||
|
|
a83d6a691a | ||
|
|
5065f2cb80 | ||
|
|
a4b36b041a | ||
|
|
c360d9fa18 | ||
|
|
78258eb9cb | ||
|
|
71e5b5cf72 | ||
|
|
4b0229584e | ||
|
|
b2df8297e1 | ||
|
|
95048b14fd | ||
|
|
3360791a31 | ||
|
|
4c65ecbe89 | ||
|
|
65a56a1da0 | ||
|
|
645f9b2ee2 | ||
|
|
117157f02c | ||
|
|
acab197a45 | ||
|
|
feed7cd556 | ||
|
|
c79b70fffd | ||
|
|
42a5709202 | ||
|
|
806f1ec0a3 | ||
|
|
ccee85a05e | ||
|
|
a27fb173dd | ||
|
|
e4885badfc | ||
|
|
62c488aff6 | ||
|
|
afcb5fe3cf | ||
|
|
d38bed1334 | ||
|
|
f6a9d5b038 | ||
|
|
863258f23d | ||
|
|
3b76fa3f92 | ||
|
|
023a42fa07 | ||
|
|
537515432c | ||
|
|
f93783052b | ||
|
|
991bc1a1ce | ||
|
|
1b5c557c44 | ||
|
|
5794faef6c | ||
|
|
ec01c436ea | ||
|
|
9bb5568d8e | ||
|
|
c25a107c04 | ||
|
|
8f152aac69 | ||
|
|
1cd5de697e | ||
|
|
ce8c812669 | ||
|
|
2c3ac053d8 | ||
|
|
ca3bb6a540 | ||
|
|
0932f095e1 | ||
|
|
3d3807ce41 | ||
|
|
30834eb8ff | ||
|
|
1c4e74920a | ||
|
|
e6efff426a | ||
|
|
cacf60fdd9 | ||
|
|
352f94ff2b | ||
|
|
47530d274f | ||
|
|
be13ec822f | ||
|
|
c6da0cc9a2 | ||
|
|
07623f9883 | ||
|
|
6e0d334a0c | ||
|
|
e514b99c57 | ||
|
|
cffdaefe2f | ||
|
|
9de4ca61e8 | ||
|
|
d7919c45a3 | ||
|
|
d24826a58e | ||
|
|
3dfadcc397 | ||
|
|
98d677dc0b | ||
|
|
4f28225188 | ||
|
|
8059039ef4 | ||
|
|
682f3fdc3e | ||
|
|
6f7aaba7fa | ||
|
|
ea7a1012b9 | ||
|
|
f24373699e | ||
|
|
f74e15840d | ||
|
|
c2c8a27545 | ||
|
|
0979f3c5c3 | ||
|
|
6f2fd1e2da | ||
|
|
597effc856 | ||
|
|
30b3510fbd | ||
|
|
0cf9b11c3e | ||
|
|
0388ce3e5b | ||
|
|
1ea6a14437 | ||
|
|
e8a073d538 | ||
|
|
c8ac686b22 | ||
|
|
81cfd13b4a | ||
|
|
73a3abe535 | ||
|
|
9006dd12f3 | ||
|
|
2bcbb89175 | ||
|
|
39daddef34 | ||
|
|
ff4538612e | ||
|
|
1782d00cc5 | ||
|
|
d4f468208e | ||
|
|
c60b708b9c | ||
|
|
3dee012415 | ||
|
|
a1d1fb962b | ||
|
|
24e02a6c5f | ||
|
|
4f352391ae | ||
|
|
4667e0bf88 | ||
|
|
9544e6c757 | ||
|
|
83b5e1a49d | ||
|
|
4f287b5ecd | ||
|
|
4c346be367 | ||
|
|
c87ac6f1ad | ||
|
|
02dc395880 | ||
|
|
63a3e0b325 | ||
|
|
01ae5688d7 | ||
|
|
ec8b10c85b | ||
|
|
19ff8339be | ||
|
|
a440b712de | ||
|
|
3653045922 | ||
|
|
0d7438e398 | ||
|
|
81b17bec69 | ||
|
|
e43c446c38 | ||
|
|
0cb442c6e0 | ||
|
|
17ea079fcd | ||
|
|
2270aefdee | ||
|
|
30828bcbe0 | ||
|
|
4667adc64d | ||
|
|
de779d453c | ||
|
|
fe959b30c6 | ||
|
|
3fcbb17a15 | ||
|
|
cdfde1d91f | ||
|
|
64a5b24e12 | ||
|
|
7d29bd216d | ||
|
|
1abd7cc2a0 | ||
|
|
78608d92b4 | ||
|
|
54cbacf4f4 | ||
|
|
b43aae84ab | ||
|
|
9d9789953b | ||
|
|
614eb930d3 | ||
|
|
62094a2098 | ||
|
|
84ed805fd7 | ||
|
|
82a158e803 | ||
|
|
1527ae1472 | ||
|
|
5d1e86fdc3 | ||
|
|
a6d2a390f0 | ||
|
|
a47ec7c77d | ||
|
|
ee1404d99e | ||
|
|
2caba558d7 | ||
|
|
5e4119f6a9 | ||
|
|
6b46b43367 | ||
|
|
460f10f46a | ||
|
|
37c6201a5a | ||
|
|
4184988c0c | ||
|
|
3264015dac | ||
|
|
d1c785d1d0 | ||
|
|
abd63df75b | ||
|
|
57f35292d5 | ||
|
|
7bd6f4a4ea | ||
|
|
35de8e6ad5 | ||
|
|
1549edfd55 | ||
|
|
f33cf6cc2e | ||
|
|
3a8cffe3ce | ||
|
|
58066443de | ||
|
|
20566b6571 | ||
|
|
af52544792 | ||
|
|
594445f6dd | ||
|
|
c3e86f0f21 | ||
|
|
d80102a7a4 | ||
|
|
e886c38b45 | ||
|
|
33a5823801 | ||
|
|
b220d15fff | ||
|
|
6fb856ee70 | ||
|
|
3922588716 | ||
|
|
940b12a908 | ||
|
|
db5349881d | ||
|
|
0edfe83a23 | ||
|
|
7abc6440f6 | ||
|
|
168ce2111d | ||
|
|
617d0bfee7 | ||
|
|
0d0da0d623 | ||
|
|
dc657f2eb0 | ||
|
|
ef2b4a7536 | ||
|
|
e41d75c374 | ||
|
|
eb55f5655f | ||
|
|
9f72bf7745 | ||
|
|
f34202a82a | ||
|
|
f8c8161a3e | ||
|
|
63aafd3133 | ||
|
|
e2303235cd | ||
|
|
1771d18a21 | ||
|
|
d02a0b2213 | ||
|
|
6cbbd0c515 | ||
|
|
cae26f4f70 | ||
|
|
f328d1461f | ||
|
|
d3e9799279 | ||
|
|
14eefe1f5d | ||
|
|
6c2cc4cf50 | ||
|
|
440f4729ac | ||
|
|
e1c6042189 | ||
|
|
55a057307c | ||
|
|
8a5176e593 | ||
|
|
a757778fba | ||
|
|
1259911275 | ||
|
|
4e4035a867 | ||
|
|
9bd87e78dc | ||
|
|
12a1c4ccbf | ||
|
|
888e17cb91 | ||
|
|
0fa48cea2a | ||
|
|
9b7d393d5d | ||
|
|
cc702cbdfa | ||
|
|
e8ddb0c427 | ||
|
|
feb677e656 | ||
|
|
c19dd81ecf | ||
|
|
01a50a3a98 | ||
|
|
8920a32c75 | ||
|
|
4773d0bb7f | ||
|
|
20a6c5e7b7 | ||
|
|
0826e0f96b | ||
|
|
bd374f4c36 | ||
|
|
bac8849f2d | ||
|
|
ad87c3c87d | ||
|
|
4c7eb34290 | ||
|
|
a1fd9f7310 | ||
|
|
9433908218 | ||
|
|
8f1c4cd9c4 | ||
|
|
85b210ebf6 | ||
|
|
af7ffa3878 | ||
|
|
96a84d16a6 | ||
|
|
0737ea30e8 | ||
|
|
1807811903 | ||
|
|
a33dce1948 | ||
|
|
ef4dc1b49e | ||
|
|
0f886be109 | ||
|
|
73a597855f | ||
|
|
21ec2dfa68 | ||
|
|
296d1d1b61 | ||
|
|
af536dfefb | ||
|
|
7d7c5207d7 | ||
|
|
52f0a3dfb9 | ||
|
|
081f11af40 | ||
|
|
9f0f458a02 | ||
|
|
e74c716588 | ||
|
|
89bd0791dc | ||
|
|
a591614a39 | ||
|
|
0085c8351e | ||
|
|
4df7f92a56 | ||
|
|
1841cefbd8 | ||
|
|
f13ae930a5 | ||
|
|
dbe233f0ae | ||
|
|
17e5f6d76b | ||
|
|
f52d167da3 | ||
|
|
63aa7128a7 | ||
|
|
d30a652fc1 | ||
|
|
1cd0684f62 | ||
|
|
bc97a13a62 | ||
|
|
db98eed392 | ||
|
|
76115d6a81 | ||
|
|
9e2fd52434 | ||
|
|
7ca12c3dc4 | ||
|
|
3dffa09977 | ||
|
|
7c313eed33 | ||
|
|
383cf7f4d5 | ||
|
|
4babf0d102 | ||
|
|
df4567f9e4 | ||
|
|
f4d09d46f4 | ||
|
|
9d3242c13e | ||
|
|
08c6a21bbb | ||
|
|
7d18be9928 | ||
|
|
c52070d554 | ||
|
|
23d6589a73 | ||
|
|
3ae73e7df5 | ||
|
|
8f3f60d249 | ||
|
|
16664789d2 | ||
|
|
f97aae1fd8 | ||
|
|
7e1bbecc9f | ||
|
|
376a56df26 | ||
|
|
e75ea257e8 | ||
|
|
7680bf7c0d |
9
.build-config.json
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"qpdf": {
|
||||||
|
"version": "10.6.3"
|
||||||
|
},
|
||||||
|
"jbig2enc": {
|
||||||
|
"version": "0.29",
|
||||||
|
"git_tag": "0.29"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -17,3 +17,5 @@
|
|||||||
**/htmlcov
|
**/htmlcov
|
||||||
/src/.pytest_cache
|
/src/.pytest_cache
|
||||||
.idea
|
.idea
|
||||||
|
.venv/
|
||||||
|
.vscode/
|
||||||
|
|||||||
@@ -18,14 +18,20 @@ max_line_length = off
|
|||||||
indent_size = 4
|
indent_size = 4
|
||||||
indent_style = space
|
indent_style = space
|
||||||
|
|
||||||
[*.yml]
|
[*.{yml,yaml}]
|
||||||
indent_style = space
|
indent_style = space
|
||||||
|
|
||||||
[*.rst]
|
[*.rst]
|
||||||
indent_style = space
|
indent_style = space
|
||||||
|
|
||||||
|
[*.md]
|
||||||
|
indent_style = space
|
||||||
|
|
||||||
# Tests don't get a line width restriction. It's still a good idea to follow
|
# Tests don't get a line width restriction. It's still a good idea to follow
|
||||||
# the 79 character rule, but in the interests of clarity, tests often need to
|
# the 79 character rule, but in the interests of clarity, tests often need to
|
||||||
# violate it.
|
# violate it.
|
||||||
[**/test_*.py]
|
[**/test_*.py]
|
||||||
max_line_length = off
|
max_line_length = off
|
||||||
|
|
||||||
|
[Dockerfile*]
|
||||||
|
indent_style = space
|
||||||
|
|||||||
2
.env
@@ -1,2 +1,2 @@
|
|||||||
COMPOSE_PROJECT_NAME=paperless
|
COMPOSE_PROJECT_NAME=paperless
|
||||||
export PROMPT="(pipenv-projectname)$P$G"
|
export PROMPT="(pipenv-projectname)$P$G"
|
||||||
|
|||||||
88
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
name: Bug report
|
||||||
|
description: Something is not working
|
||||||
|
title: "[BUG] Concise description of the issue"
|
||||||
|
labels: ["bug", "unconfirmed"]
|
||||||
|
body:
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
Have a question? 👉 [Start a new discussion](https://github.com/paperless-ngx/paperless-ngx/discussions/new) or [ask in chat](https://matrix.to/#/#paperless:adnidor.de).
|
||||||
|
|
||||||
|
Before opening an issue, please double check:
|
||||||
|
|
||||||
|
- [The troubleshooting documentation](https://paperless-ngx.readthedocs.io/en/latest/troubleshooting.html).
|
||||||
|
- [The installation instructions](https://paperless-ngx.readthedocs.io/en/latest/setup.html#installation).
|
||||||
|
- [Existing issues and discussions](https://github.com/paperless-ngx/paperless-ngx/search?q=&type=issues).
|
||||||
|
|
||||||
|
If you encounter issues while installing or configuring Paperless-ngx, please post in the ["Support" section of the discussions](https://github.com/paperless-ngx/paperless-ngx/discussions/new?category=support).
|
||||||
|
- type: textarea
|
||||||
|
id: description
|
||||||
|
attributes:
|
||||||
|
label: Description
|
||||||
|
description: A clear and concise description of what the bug is. If applicable, add screenshots to help explain your problem.
|
||||||
|
placeholder: |
|
||||||
|
Currently Paperless does not work when...
|
||||||
|
|
||||||
|
[Screenshot if applicable]
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
id: reproduction
|
||||||
|
attributes:
|
||||||
|
label: Steps to reproduce
|
||||||
|
description: Steps to reproduce the behavior.
|
||||||
|
placeholder: |
|
||||||
|
1. Go to '...'
|
||||||
|
2. Click on '....'
|
||||||
|
3. See error
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: textarea
|
||||||
|
id: logs
|
||||||
|
attributes:
|
||||||
|
label: Webserver logs
|
||||||
|
description: If available, post any logs from the web server related to your issue.
|
||||||
|
render: bash
|
||||||
|
- type: input
|
||||||
|
id: version
|
||||||
|
attributes:
|
||||||
|
label: Paperless-ngx version
|
||||||
|
placeholder: e.g. 1.6.0
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: input
|
||||||
|
id: host-os
|
||||||
|
attributes:
|
||||||
|
label: Host OS
|
||||||
|
description: Host OS of the machine running paperless-ngx. Please add the architecture (uname -m) if applicable.
|
||||||
|
placeholder: e.g. Archlinux / Ubuntu 20.04 / Raspberry Pi `arm64`
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: dropdown
|
||||||
|
id: install-method
|
||||||
|
attributes:
|
||||||
|
label: Installation method
|
||||||
|
options:
|
||||||
|
- Docker - official image
|
||||||
|
- Docker - linuxserver.io image
|
||||||
|
- Bare metal
|
||||||
|
- Other (please describe above)
|
||||||
|
description: Note there are significant differences from the official image and linuxserver.io, please check if your issue is specific to the third-party image.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
- type: input
|
||||||
|
id: browser
|
||||||
|
attributes:
|
||||||
|
label: Browser
|
||||||
|
description: Which browser you are using, if relevant.
|
||||||
|
placeholder: e.g. Chrome, Safari
|
||||||
|
- type: input
|
||||||
|
id: config-changes
|
||||||
|
attributes:
|
||||||
|
label: Configuration changes
|
||||||
|
description: Any configuration changes you made in `docker-compose.yml`, `docker-compose.env` or `paperless.conf`.
|
||||||
|
- type: input
|
||||||
|
id: other
|
||||||
|
attributes:
|
||||||
|
label: Other
|
||||||
|
description: Any other relevant details.
|
||||||
48
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -1,48 +0,0 @@
|
|||||||
---
|
|
||||||
name: Bug report
|
|
||||||
about: Something is not working
|
|
||||||
title: "[BUG] Concise description of the issue"
|
|
||||||
labels: ''
|
|
||||||
assignees: ''
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
<!---
|
|
||||||
=> Before opening an issue, please check the documentation and see if it helps you resolve your issue: https://paperless-ngx.readthedocs.io/en/latest/troubleshooting.html
|
|
||||||
=> Please also make sure that you followed the installation instructions.
|
|
||||||
=> Please search the issues and look for similar issues before opening a bug report.
|
|
||||||
|
|
||||||
=> If you would like to submit a feature request please submit one under https://github.com/paperless-ngx/paperless-ngx/discussions/categories/feature-requests
|
|
||||||
|
|
||||||
=> If you encounter issues while installing of configuring Paperless-ngx, please post that in the "Support" section of the discussions. Remember that Paperless successfully runs on a variety of different systems. If paperless does not start, it's probably an issue with your system, and not an issue of paperless.
|
|
||||||
|
|
||||||
=> Don't remove the [BUG] prefix from the title.
|
|
||||||
-->
|
|
||||||
|
|
||||||
**Describe the bug**
|
|
||||||
A clear and concise description of what the bug is.
|
|
||||||
|
|
||||||
**To Reproduce**
|
|
||||||
Steps to reproduce the behavior:
|
|
||||||
1. Go to '...'
|
|
||||||
2. Click on '....'
|
|
||||||
3. Scroll down to '....'
|
|
||||||
4. See error
|
|
||||||
|
|
||||||
**Expected behavior**
|
|
||||||
A clear and concise description of what you expected to happen.
|
|
||||||
|
|
||||||
**Screenshots**
|
|
||||||
If applicable, add screenshots to help explain your problem.
|
|
||||||
|
|
||||||
**Webserver logs**
|
|
||||||
```
|
|
||||||
If available, post any logs from the web server related to your issue.
|
|
||||||
```
|
|
||||||
|
|
||||||
**Relevant information**
|
|
||||||
- Host OS of the machine running paperless: [e.g. Archlinux / Ubuntu 20.04]
|
|
||||||
- Browser [e.g. chrome, safari]
|
|
||||||
- Version [e.g. 1.0.0]
|
|
||||||
- Installation method: [docker / bare metal]
|
|
||||||
- Any configuration changes you made in `docker-compose.yml`, `docker-compose.env` or `paperless.conf`.
|
|
||||||
11
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
blank_issues_enabled: false
|
||||||
|
contact_links:
|
||||||
|
- name: 🤔 Questions and Help
|
||||||
|
url: https://github.com/paperless-ngx/paperless-ngx/discussions
|
||||||
|
about: This issue tracker is not for support questions. Please refer to our Discussions.
|
||||||
|
- name: 💬 Chat
|
||||||
|
url: https://matrix.to/#/#paperless:adnidor.de
|
||||||
|
about: Want to discuss Paperless-ngx with others? Check out our chat.
|
||||||
|
- name: 🚀 Feature Request
|
||||||
|
url: https://github.com/paperless-ngx/paperless-ngx/discussions/new?category=feature-requests
|
||||||
|
about: Remember to search for existing feature requests and "up-vote" any you like
|
||||||
20
.github/ISSUE_TEMPLATE/other.md
vendored
@@ -1,20 +0,0 @@
|
|||||||
---
|
|
||||||
name: Other
|
|
||||||
about: Anything that is not a feature request or bug.
|
|
||||||
title: "[Other] Title of your issue"
|
|
||||||
labels: ''
|
|
||||||
assignees: ''
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
<!--
|
|
||||||
|
|
||||||
=> Discussions, Feedback and other suggestions belong in the "Discussion" section and not on the issue tracker.
|
|
||||||
|
|
||||||
=> If you would like to submit a feature request please submit one under https://github.com/paperless-ngx/paperless-ngx/discussions/categories/feature-requests
|
|
||||||
|
|
||||||
=> If you encounter issues while installing of configuring Paperless-ngx, please post that in the "Support" section of the discussions. Remember that Paperless successfully runs on a variety of different systems. If paperless does not start, it's probably is an issue with your system, and not an issue of paperless.
|
|
||||||
|
|
||||||
=> Don't remove the [Other] prefix from the title.
|
|
||||||
|
|
||||||
-->
|
|
||||||
32
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
<!--
|
||||||
|
Note: All PRs with code changes should be targeted to the `dev` branch, pure documentation changes can target `main`
|
||||||
|
-->
|
||||||
|
|
||||||
|
## Proposed change
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Please include a summary of the change and which issue is fixed (if any) and any relevant motivation / context. List any dependencies that are required for this change. If appropriate, please include an explanation of how your proposed change can be tested. Screenshots and / or videos can also be helpful if appropriate.
|
||||||
|
-->
|
||||||
|
|
||||||
|
Fixes # (issue)
|
||||||
|
|
||||||
|
## Type of change
|
||||||
|
|
||||||
|
<!--
|
||||||
|
What type of change does your PR introduce to Paperless-ngx?
|
||||||
|
NOTE: Please check only one box!
|
||||||
|
-->
|
||||||
|
|
||||||
|
- [ ] Bug fix (non-breaking change which fixes an issue)
|
||||||
|
- [ ] New feature (non-breaking change which adds functionality)
|
||||||
|
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
|
||||||
|
- [ ] Other (please explain)
|
||||||
|
|
||||||
|
## Checklist:
|
||||||
|
|
||||||
|
- [ ] I have read & agree with the [contributing guidelines](https://github.com/paperless-ngx/paperless-ngx/blob/main/CONTRIBUTING.md).
|
||||||
|
- [ ] If applicable, I have tested my code for new features & regressions on both mobile & desktop devices, using the latest version of major browsers.
|
||||||
|
- [ ] If applicable, I have checked that all tests pass, see [documentation](https://paperless-ngx.readthedocs.io/en/latest/extending.html#back-end-development).
|
||||||
|
- [ ] I have run all `pre-commit` hooks, see [documentation](https://paperless-ngx.readthedocs.io/en/latest/extending.html#code-formatting-with-pre-commit-hooks).
|
||||||
|
- [ ] I have made corresponding changes to the documentation as needed.
|
||||||
|
- [ ] I have checked my modifications for any breaking changes.
|
||||||
32
.github/dependabot.yml
vendored
@@ -6,11 +6,17 @@ updates:
|
|||||||
# Enable version updates for npm
|
# Enable version updates for npm
|
||||||
- package-ecosystem: "npm"
|
- package-ecosystem: "npm"
|
||||||
target-branch: "dev"
|
target-branch: "dev"
|
||||||
# Look for `package.json` and `lock` files in the `root` directory
|
# Look for `package.json` and `lock` files in the `/src-ui` directory
|
||||||
directory: "/src-ui"
|
directory: "/src-ui"
|
||||||
# Check the npm registry for updates every week
|
# Check the npm registry for updates every month
|
||||||
schedule:
|
schedule:
|
||||||
interval: "weekly"
|
interval: "monthly"
|
||||||
|
labels:
|
||||||
|
- "frontend"
|
||||||
|
- "dependencies"
|
||||||
|
# Add reviewers
|
||||||
|
reviewers:
|
||||||
|
- "paperless-ngx/frontend"
|
||||||
|
|
||||||
# Enable version updates for Python
|
# Enable version updates for Python
|
||||||
- package-ecosystem: "pip"
|
- package-ecosystem: "pip"
|
||||||
@@ -20,3 +26,23 @@ updates:
|
|||||||
# Check for updates once a week
|
# Check for updates once a week
|
||||||
schedule:
|
schedule:
|
||||||
interval: "weekly"
|
interval: "weekly"
|
||||||
|
labels:
|
||||||
|
- "backend"
|
||||||
|
- "dependencies"
|
||||||
|
# Add reviewers
|
||||||
|
reviewers:
|
||||||
|
- "paperless-ngx/backend"
|
||||||
|
|
||||||
|
# Enable updates for Github Actions
|
||||||
|
- package-ecosystem: "github-actions"
|
||||||
|
target-branch: "dev"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
# Check for updates to GitHub Actions every month
|
||||||
|
interval: "monthly"
|
||||||
|
labels:
|
||||||
|
- "ci-cd"
|
||||||
|
- "dependencies"
|
||||||
|
# Add reviewers
|
||||||
|
reviewers:
|
||||||
|
- "paperless-ngx/ci-cd"
|
||||||
|
|||||||
37
.github/release-drafter.yml
vendored
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
categories:
|
||||||
|
- title: 'Breaking Changes'
|
||||||
|
labels:
|
||||||
|
- 'breaking-change'
|
||||||
|
- title: 'Features'
|
||||||
|
labels:
|
||||||
|
- 'enhancement'
|
||||||
|
- title: 'Bug Fixes'
|
||||||
|
labels:
|
||||||
|
- 'bug'
|
||||||
|
- title: 'Documentation'
|
||||||
|
label: 'documentation'
|
||||||
|
- title: 'Maintenance'
|
||||||
|
labels:
|
||||||
|
- 'chore'
|
||||||
|
- 'deployment'
|
||||||
|
- 'translation'
|
||||||
|
- title: 'Dependencies'
|
||||||
|
collapse-after: 3
|
||||||
|
label: 'dependencies'
|
||||||
|
include-labels:
|
||||||
|
- 'enhancement'
|
||||||
|
- 'bug'
|
||||||
|
- 'chore'
|
||||||
|
- 'deployment'
|
||||||
|
- 'translation'
|
||||||
|
- 'dependencies'
|
||||||
|
replacers: # Changes "Feature: Update checker" to "Update checker"
|
||||||
|
- search: '/Feature:|Feat:|\[feature\]/gi'
|
||||||
|
replace: ''
|
||||||
|
category-template: '### $TITLE'
|
||||||
|
change-template: '- $TITLE [@$AUTHOR](https://github.com/$AUTHOR) ([#$NUMBER]($URL))'
|
||||||
|
change-title-escapes: '\<*_&#@'
|
||||||
|
template: |
|
||||||
|
## paperless-ngx $RESOLVED_VERSION
|
||||||
|
|
||||||
|
$CHANGES
|
||||||
254
.github/scripts/cleanup-tags.py
vendored
Normal file
@@ -0,0 +1,254 @@
|
|||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from argparse import ArgumentParser
|
||||||
|
from typing import Final
|
||||||
|
from typing import List
|
||||||
|
from urllib.parse import quote
|
||||||
|
|
||||||
|
import requests
|
||||||
|
from common import get_log_level
|
||||||
|
|
||||||
|
logger = logging.getLogger("cleanup-tags")
|
||||||
|
|
||||||
|
|
||||||
|
class GithubContainerRegistry:
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
session: requests.Session,
|
||||||
|
token: str,
|
||||||
|
owner_or_org: str,
|
||||||
|
):
|
||||||
|
self._session: requests.Session = session
|
||||||
|
self._token = token
|
||||||
|
self._owner_or_org = owner_or_org
|
||||||
|
# https://docs.github.com/en/rest/branches/branches
|
||||||
|
self._BRANCHES_ENDPOINT = "https://api.github.com/repos/{OWNER}/{REPO}/branches"
|
||||||
|
if self._owner_or_org == "paperless-ngx":
|
||||||
|
# https://docs.github.com/en/rest/packages#get-all-package-versions-for-a-package-owned-by-an-organization
|
||||||
|
self._PACKAGES_VERSIONS_ENDPOINT = "https://api.github.com/orgs/{ORG}/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions"
|
||||||
|
# https://docs.github.com/en/rest/packages#delete-package-version-for-an-organization
|
||||||
|
self._PACKAGE_VERSION_DELETE_ENDPOINT = "https://api.github.com/orgs/{ORG}/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions/{PACKAGE_VERSION_ID}"
|
||||||
|
else:
|
||||||
|
# https://docs.github.com/en/rest/packages#get-all-package-versions-for-a-package-owned-by-the-authenticated-user
|
||||||
|
self._PACKAGES_VERSIONS_ENDPOINT = "https://api.github.com/user/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions"
|
||||||
|
# https://docs.github.com/en/rest/packages#delete-a-package-version-for-the-authenticated-user
|
||||||
|
self._PACKAGE_VERSION_DELETE_ENDPOINT = "https://api.github.com/user/packages/{PACKAGE_TYPE}/{PACKAGE_NAME}/versions/{PACKAGE_VERSION_ID}"
|
||||||
|
|
||||||
|
def __enter__(self):
|
||||||
|
self._session.headers.update(
|
||||||
|
{
|
||||||
|
"Accept": "application/vnd.github.v3+json",
|
||||||
|
"Authorization": f"token {self._token}",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||||
|
if "Accept" in self._session.headers:
|
||||||
|
del self._session.headers["Accept"]
|
||||||
|
if "Authorization" in self._session.headers:
|
||||||
|
del self._session.headers["Authorization"]
|
||||||
|
|
||||||
|
def _read_all_pages(self, endpoint):
|
||||||
|
internal_data = []
|
||||||
|
|
||||||
|
while True:
|
||||||
|
resp = self._session.get(endpoint)
|
||||||
|
if resp.status_code == 200:
|
||||||
|
internal_data += resp.json()
|
||||||
|
if "next" in resp.links:
|
||||||
|
endpoint = resp.links["next"]["url"]
|
||||||
|
else:
|
||||||
|
logger.debug("Exiting pagination loop")
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
logger.warning(f"Request to {endpoint} return HTTP {resp.status_code}")
|
||||||
|
break
|
||||||
|
|
||||||
|
return internal_data
|
||||||
|
|
||||||
|
def get_branches(self, repo: str):
|
||||||
|
endpoint = self._BRANCHES_ENDPOINT.format(OWNER=self._owner_or_org, REPO=repo)
|
||||||
|
internal_data = self._read_all_pages(endpoint)
|
||||||
|
return internal_data
|
||||||
|
|
||||||
|
def filter_branches_by_name_pattern(self, branch_data, pattern: str):
|
||||||
|
matches = {}
|
||||||
|
|
||||||
|
for branch in branch_data:
|
||||||
|
if branch["name"].startswith(pattern):
|
||||||
|
matches[branch["name"]] = branch
|
||||||
|
|
||||||
|
return matches
|
||||||
|
|
||||||
|
def get_package_versions(
|
||||||
|
self,
|
||||||
|
package_name: str,
|
||||||
|
package_type: str = "container",
|
||||||
|
) -> List:
|
||||||
|
package_name = quote(package_name, safe="")
|
||||||
|
endpoint = self._PACKAGES_VERSIONS_ENDPOINT.format(
|
||||||
|
ORG=self._owner_or_org,
|
||||||
|
PACKAGE_TYPE=package_type,
|
||||||
|
PACKAGE_NAME=package_name,
|
||||||
|
)
|
||||||
|
|
||||||
|
internal_data = self._read_all_pages(endpoint)
|
||||||
|
|
||||||
|
return internal_data
|
||||||
|
|
||||||
|
def filter_packages_by_tag_pattern(self, package_data, pattern: str):
|
||||||
|
matches = {}
|
||||||
|
|
||||||
|
for package in package_data:
|
||||||
|
if "metadata" in package and "container" in package["metadata"]:
|
||||||
|
container_metadata = package["metadata"]["container"]
|
||||||
|
if "tags" in container_metadata:
|
||||||
|
container_tags = container_metadata["tags"]
|
||||||
|
for tag in container_tags:
|
||||||
|
if tag.startswith(pattern):
|
||||||
|
matches[tag] = package
|
||||||
|
break
|
||||||
|
|
||||||
|
return matches
|
||||||
|
|
||||||
|
def filter_packages_untagged(self, package_data):
|
||||||
|
matches = {}
|
||||||
|
|
||||||
|
for package in package_data:
|
||||||
|
if "metadata" in package and "container" in package["metadata"]:
|
||||||
|
container_metadata = package["metadata"]["container"]
|
||||||
|
if "tags" in container_metadata:
|
||||||
|
container_tags = container_metadata["tags"]
|
||||||
|
if not len(container_tags):
|
||||||
|
matches[package["name"]] = package
|
||||||
|
|
||||||
|
return matches
|
||||||
|
|
||||||
|
def delete_package_version(self, package_name, package_data):
|
||||||
|
package_name = quote(package_name, safe="")
|
||||||
|
endpoint = self._PACKAGE_VERSION_DELETE_ENDPOINT.format(
|
||||||
|
ORG=self._owner_or_org,
|
||||||
|
PACKAGE_TYPE=package_data["metadata"]["package_type"],
|
||||||
|
PACKAGE_NAME=package_name,
|
||||||
|
PACKAGE_VERSION_ID=package_data["id"],
|
||||||
|
)
|
||||||
|
resp = self._session.delete(endpoint)
|
||||||
|
if resp.status_code != 204:
|
||||||
|
logger.warning(
|
||||||
|
f"Request to delete {endpoint} returned HTTP {resp.status_code}",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _main():
|
||||||
|
parser = ArgumentParser(
|
||||||
|
description="Using the GitHub API locate and optionally delete container"
|
||||||
|
" tags which no longer have an associated feature branch",
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--delete",
|
||||||
|
action="store_true",
|
||||||
|
default=False,
|
||||||
|
help="If provided, actually delete the container tags",
|
||||||
|
)
|
||||||
|
|
||||||
|
# TODO There's a lot of untagged images, do those need to stay for anything?
|
||||||
|
parser.add_argument(
|
||||||
|
"--untagged",
|
||||||
|
action="store_true",
|
||||||
|
default=False,
|
||||||
|
help="If provided, delete untagged containers as well",
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"--loglevel",
|
||||||
|
default="info",
|
||||||
|
help="Configures the logging level",
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
logging.basicConfig(
|
||||||
|
level=get_log_level(args),
|
||||||
|
datefmt="%Y-%m-%d %H:%M:%S",
|
||||||
|
format="%(asctime)s %(levelname)-8s %(message)s",
|
||||||
|
)
|
||||||
|
|
||||||
|
repo_owner: Final[str] = os.environ["GITHUB_REPOSITORY_OWNER"]
|
||||||
|
repo: Final[str] = os.environ["GITHUB_REPOSITORY"]
|
||||||
|
gh_token: Final[str] = os.environ["GITHUB_TOKEN"]
|
||||||
|
|
||||||
|
with requests.session() as sess:
|
||||||
|
with GithubContainerRegistry(sess, gh_token, repo_owner) as gh_api:
|
||||||
|
all_branches = gh_api.get_branches("paperless-ngx")
|
||||||
|
logger.info(f"Located {len(all_branches)} branches of {repo_owner}/{repo} ")
|
||||||
|
|
||||||
|
feature_branches = gh_api.filter_branches_by_name_pattern(
|
||||||
|
all_branches,
|
||||||
|
"feature-",
|
||||||
|
)
|
||||||
|
logger.info(f"Located {len(feature_branches)} feature branches")
|
||||||
|
|
||||||
|
for package_name in ["paperless-ngx", "paperless-ngx/builder/cache/app"]:
|
||||||
|
|
||||||
|
all_package_versions = gh_api.get_package_versions(package_name)
|
||||||
|
logger.info(
|
||||||
|
f"Located {len(all_package_versions)} versions of package {package_name}",
|
||||||
|
)
|
||||||
|
|
||||||
|
packages_tagged_feature = gh_api.filter_packages_by_tag_pattern(
|
||||||
|
all_package_versions,
|
||||||
|
"feature-",
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
f'Located {len(packages_tagged_feature)} versions of package {package_name} tagged "feature-"',
|
||||||
|
)
|
||||||
|
|
||||||
|
untagged_packages = gh_api.filter_packages_untagged(
|
||||||
|
all_package_versions,
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
f"Located {len(untagged_packages)} untagged versions of package {package_name}",
|
||||||
|
)
|
||||||
|
|
||||||
|
to_delete = list(
|
||||||
|
set(packages_tagged_feature.keys()) - set(feature_branches.keys()),
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
f"Located {len(to_delete)} versions of package {package_name} to delete",
|
||||||
|
)
|
||||||
|
|
||||||
|
for tag_to_delete in to_delete:
|
||||||
|
package_version_info = packages_tagged_feature[tag_to_delete]
|
||||||
|
|
||||||
|
if args.delete:
|
||||||
|
logger.info(
|
||||||
|
f"Deleting {tag_to_delete} (id {package_version_info['id']})",
|
||||||
|
)
|
||||||
|
gh_api.delete_package_version(
|
||||||
|
package_name,
|
||||||
|
package_version_info,
|
||||||
|
)
|
||||||
|
|
||||||
|
else:
|
||||||
|
logger.info(
|
||||||
|
f"Would delete {tag_to_delete} (id {package_version_info['id']})",
|
||||||
|
)
|
||||||
|
|
||||||
|
if args.untagged:
|
||||||
|
logger.info(f"Deleting untagged packages of {package_name}")
|
||||||
|
for to_delete_name in untagged_packages:
|
||||||
|
to_delete_version = untagged_packages[to_delete_name]
|
||||||
|
logger.info(f"Deleting id {to_delete_version['id']}")
|
||||||
|
if args.delete:
|
||||||
|
gh_api.delete_package_version(
|
||||||
|
package_name,
|
||||||
|
to_delete_version,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.info("Leaving untagged images untouched")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
_main()
|
||||||
44
.github/scripts/common.py
vendored
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import logging
|
||||||
|
from argparse import ArgumentError
|
||||||
|
|
||||||
|
|
||||||
|
def get_image_tag(
|
||||||
|
repo_name: str,
|
||||||
|
pkg_name: str,
|
||||||
|
pkg_version: str,
|
||||||
|
) -> str:
|
||||||
|
"""
|
||||||
|
Returns a string representing the normal image for a given package
|
||||||
|
"""
|
||||||
|
return f"ghcr.io/{repo_name.lower()}/builder/{pkg_name}:{pkg_version}"
|
||||||
|
|
||||||
|
|
||||||
|
def get_cache_image_tag(
|
||||||
|
repo_name: str,
|
||||||
|
pkg_name: str,
|
||||||
|
pkg_version: str,
|
||||||
|
branch_name: str,
|
||||||
|
) -> str:
|
||||||
|
"""
|
||||||
|
Returns a string representing the expected image cache tag for a given package
|
||||||
|
|
||||||
|
Registry type caching is utilized for the builder images, to allow fast
|
||||||
|
rebuilds, generally almost instant for the same version
|
||||||
|
"""
|
||||||
|
return f"ghcr.io/{repo_name.lower()}/builder/cache/{pkg_name}:{pkg_version}"
|
||||||
|
|
||||||
|
|
||||||
|
def get_log_level(args) -> int:
|
||||||
|
levels = {
|
||||||
|
"critical": logging.CRITICAL,
|
||||||
|
"error": logging.ERROR,
|
||||||
|
"warn": logging.WARNING,
|
||||||
|
"warning": logging.WARNING,
|
||||||
|
"info": logging.INFO,
|
||||||
|
"debug": logging.DEBUG,
|
||||||
|
}
|
||||||
|
level = levels.get(args.loglevel.lower())
|
||||||
|
if level is None:
|
||||||
|
level = logging.INFO
|
||||||
|
return level
|
||||||
92
.github/scripts/get-build-json.py
vendored
Executable file
@@ -0,0 +1,92 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
This is a helper script for the mutli-stage Docker image builder.
|
||||||
|
It provides a single point of configuration for package version control.
|
||||||
|
The output JSON object is used by the CI workflow to determine what versions
|
||||||
|
to build and pull into the final Docker image.
|
||||||
|
|
||||||
|
Python package information is obtained from the Pipfile.lock. As this is
|
||||||
|
kept updated by dependabot, it usually will need no further configuration.
|
||||||
|
The sole exception currently is pikepdf, which has a dependency on qpdf,
|
||||||
|
and is configured here to use the latest version of qpdf built by the workflow.
|
||||||
|
|
||||||
|
Other package version information is configured directly below, generally by
|
||||||
|
setting the version and Git information, if any.
|
||||||
|
|
||||||
|
"""
|
||||||
|
import argparse
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Final
|
||||||
|
|
||||||
|
from common import get_cache_image_tag
|
||||||
|
from common import get_image_tag
|
||||||
|
|
||||||
|
|
||||||
|
def _main():
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description="Generate a JSON object of information required to build the given package, based on the Pipfile.lock",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"package",
|
||||||
|
help="The name of the package to generate JSON for",
|
||||||
|
)
|
||||||
|
|
||||||
|
PIPFILE_LOCK_PATH: Final[Path] = Path("Pipfile.lock")
|
||||||
|
BUILD_CONFIG_PATH: Final[Path] = Path(".build-config.json")
|
||||||
|
|
||||||
|
# Read the main config file
|
||||||
|
build_json: Final = json.loads(BUILD_CONFIG_PATH.read_text())
|
||||||
|
|
||||||
|
# Read Pipfile.lock file
|
||||||
|
pipfile_data: Final = json.loads(PIPFILE_LOCK_PATH.read_text())
|
||||||
|
|
||||||
|
args: Final = parser.parse_args()
|
||||||
|
|
||||||
|
# Read from environment variables set by GitHub Actions
|
||||||
|
repo_name: Final[str] = os.environ["GITHUB_REPOSITORY"]
|
||||||
|
branch_name: Final[str] = os.environ["GITHUB_REF_NAME"]
|
||||||
|
|
||||||
|
# Default output values
|
||||||
|
version = None
|
||||||
|
extra_config = {}
|
||||||
|
|
||||||
|
if args.package in pipfile_data["default"]:
|
||||||
|
# Read the version from Pipfile.lock
|
||||||
|
pkg_data = pipfile_data["default"][args.package]
|
||||||
|
pkg_version = pkg_data["version"].split("==")[-1]
|
||||||
|
version = pkg_version
|
||||||
|
|
||||||
|
# Any extra/special values needed
|
||||||
|
if args.package == "pikepdf":
|
||||||
|
extra_config["qpdf_version"] = build_json["qpdf"]["version"]
|
||||||
|
|
||||||
|
elif args.package in build_json:
|
||||||
|
version = build_json[args.package]["version"]
|
||||||
|
|
||||||
|
else:
|
||||||
|
raise NotImplementedError(args.package)
|
||||||
|
|
||||||
|
# The JSON object we'll output
|
||||||
|
output = {
|
||||||
|
"name": args.package,
|
||||||
|
"version": version,
|
||||||
|
"image_tag": get_image_tag(repo_name, args.package, version),
|
||||||
|
"cache_tag": get_cache_image_tag(
|
||||||
|
repo_name,
|
||||||
|
args.package,
|
||||||
|
version,
|
||||||
|
branch_name,
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
|
# Add anything special a package may need
|
||||||
|
output.update(extra_config)
|
||||||
|
|
||||||
|
# Output the JSON info to stdout
|
||||||
|
print(json.dumps(output))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
_main()
|
||||||
15
.github/stale.yml
vendored
@@ -1,18 +1,23 @@
|
|||||||
# Number of days of inactivity before an issue becomes stale
|
# Number of days of inactivity before an issue becomes stale
|
||||||
daysUntilStale: 30
|
daysUntilStale: 30
|
||||||
|
|
||||||
# Number of days of inactivity before a stale issue is closed
|
# Number of days of inactivity before a stale issue is closed
|
||||||
daysUntilClose: 7
|
daysUntilClose: 7
|
||||||
# Issues with these labels will never be considered stale
|
|
||||||
exemptLabels:
|
# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled)
|
||||||
- pinned
|
onlyLabels: [cant-reproduce]
|
||||||
- security
|
|
||||||
- fixpending
|
|
||||||
# Label to use when marking an issue as stale
|
# Label to use when marking an issue as stale
|
||||||
staleLabel: stale
|
staleLabel: stale
|
||||||
|
|
||||||
# Comment to post when marking an issue as stale. Set to `false` to disable
|
# Comment to post when marking an issue as stale. Set to `false` to disable
|
||||||
markComment: >
|
markComment: >
|
||||||
This issue has been automatically marked as stale because it has not had
|
This issue has been automatically marked as stale because it has not had
|
||||||
recent activity. It will be closed if no further activity occurs. Thank you
|
recent activity. It will be closed if no further activity occurs. Thank you
|
||||||
for your contributions.
|
for your contributions.
|
||||||
|
|
||||||
# Comment to post when closing a stale issue. Set to `false` to disable
|
# Comment to post when closing a stale issue. Set to `false` to disable
|
||||||
closeComment: false
|
closeComment: false
|
||||||
|
|
||||||
|
# See https://github.com/marketplace/stale for more info on the app
|
||||||
|
# and https://github.com/probot/stale for the configuration docs
|
||||||
|
|||||||
379
.github/workflows/ci.yml
vendored
@@ -3,8 +3,10 @@ name: ci
|
|||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
tags:
|
tags:
|
||||||
- ngx-*
|
# https://semver.org/#spec-item-2
|
||||||
- beta-*
|
- 'v[0-9]+.[0-9]+.[0-9]+'
|
||||||
|
# https://semver.org/#spec-item-9
|
||||||
|
- 'v[0-9]+.[0-9]+.[0-9]+-beta.rc[0-9]+'
|
||||||
branches-ignore:
|
branches-ignore:
|
||||||
- 'translations**'
|
- 'translations**'
|
||||||
pull_request:
|
pull_request:
|
||||||
@@ -13,215 +15,237 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
documentation:
|
documentation:
|
||||||
|
name: "Build Documentation"
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-20.04
|
||||||
steps:
|
steps:
|
||||||
-
|
-
|
||||||
name: Checkout
|
name: Checkout
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
|
-
|
||||||
|
name: Install pipenv
|
||||||
|
run: pipx install pipenv
|
||||||
-
|
-
|
||||||
name: Set up Python
|
name: Set up Python
|
||||||
uses: actions/setup-python@v2
|
uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: 3.9
|
python-version: 3.9
|
||||||
-
|
cache: "pipenv"
|
||||||
name: Get pip cache dir
|
cache-dependency-path: 'Pipfile.lock'
|
||||||
id: pip-cache
|
|
||||||
run: |
|
|
||||||
echo "::set-output name=dir::$(pip cache dir)"
|
|
||||||
-
|
|
||||||
name: Persistent Github pip cache
|
|
||||||
uses: actions/cache@v2
|
|
||||||
with:
|
|
||||||
path: ${{ steps.pip-cache.outputs.dir }}
|
|
||||||
key: ${{ runner.os }}-pip3.8}
|
|
||||||
-
|
-
|
||||||
name: Install dependencies
|
name: Install dependencies
|
||||||
run: |
|
run: |
|
||||||
pip install --upgrade pipenv
|
pipenv sync --dev
|
||||||
pipenv install --system --dev --ignore-pipfile
|
|
||||||
-
|
-
|
||||||
name: Make documentation
|
name: Make documentation
|
||||||
run: |
|
run: |
|
||||||
cd docs/
|
cd docs/
|
||||||
make html
|
pipenv run make html
|
||||||
-
|
-
|
||||||
name: Upload artifact
|
name: Upload artifact
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: documentation
|
name: documentation
|
||||||
path: docs/_build/html/
|
path: docs/_build/html/
|
||||||
|
|
||||||
codestyle:
|
ci-backend:
|
||||||
runs-on: ubuntu-20.04
|
uses: ./.github/workflows/reusable-ci-backend.yml
|
||||||
steps:
|
|
||||||
-
|
|
||||||
name: Checkout
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
-
|
|
||||||
name: Set up Python
|
|
||||||
uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: 3.9
|
|
||||||
-
|
|
||||||
name: Get pip cache dir
|
|
||||||
id: pip-cache
|
|
||||||
run: |
|
|
||||||
echo "::set-output name=dir::$(pip cache dir)"
|
|
||||||
-
|
|
||||||
name: Persistent Github pip cache
|
|
||||||
uses: actions/cache@v2
|
|
||||||
with:
|
|
||||||
path: ${{ steps.pip-cache.outputs.dir }}
|
|
||||||
key: ${{ runner.os }}-pip${{ matrix.python-version }}
|
|
||||||
-
|
|
||||||
name: Install dependencies
|
|
||||||
run: |
|
|
||||||
pip install --upgrade pipenv
|
|
||||||
pipenv install --system --dev --ignore-pipfile
|
|
||||||
-
|
|
||||||
name: Codestyle
|
|
||||||
run: |
|
|
||||||
cd src/
|
|
||||||
pycodestyle --max-line-length=88 --ignore=E121,E123,E126,E226,E24,E704,W503,W504,E203
|
|
||||||
codeformatting:
|
|
||||||
runs-on: ubuntu-20.04
|
|
||||||
steps:
|
|
||||||
-
|
|
||||||
name: Checkout
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
-
|
|
||||||
name: Run black
|
|
||||||
uses: psf/black@stable
|
|
||||||
with:
|
|
||||||
options: "--check --diff"
|
|
||||||
version: "22.1.0"
|
|
||||||
|
|
||||||
tests:
|
ci-frontend:
|
||||||
|
uses: ./.github/workflows/reusable-ci-frontend.yml
|
||||||
|
|
||||||
|
prepare-docker-build:
|
||||||
|
name: Prepare Docker Pipeline Data
|
||||||
|
if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || contains(github.ref, 'beta.rc') || startsWith(github.ref, 'refs/tags/v'))
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-20.04
|
||||||
strategy:
|
# If the push triggered the installer library workflow, wait for it to
|
||||||
matrix:
|
# complete here. This ensures the required versions for the final
|
||||||
python-version: ['3.8', '3.9']
|
# image have been built, while not waiting at all if the versions haven't changed
|
||||||
fail-fast: false
|
concurrency:
|
||||||
|
group: build-installer-library
|
||||||
|
cancel-in-progress: false
|
||||||
|
needs:
|
||||||
|
- documentation
|
||||||
|
- ci-backend
|
||||||
|
- ci-frontend
|
||||||
steps:
|
steps:
|
||||||
|
-
|
||||||
|
name: Set ghcr repository name
|
||||||
|
id: set-ghcr-repository
|
||||||
|
run: |
|
||||||
|
ghcr_name=$(echo "${GITHUB_REPOSITORY}" | awk '{ print tolower($0) }')
|
||||||
|
echo ::set-output name=repository::${ghcr_name}
|
||||||
-
|
-
|
||||||
name: Checkout
|
name: Checkout
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
-
|
-
|
||||||
name: Set up Python
|
name: Set up Python
|
||||||
uses: actions/setup-python@v2
|
uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: "${{ matrix.python-version }}"
|
python-version: "3.9"
|
||||||
-
|
-
|
||||||
name: Get pip cache dir
|
name: Setup qpdf image
|
||||||
id: pip-cache
|
id: qpdf-setup
|
||||||
run: |
|
run: |
|
||||||
echo "::set-output name=dir::$(pip cache dir)"
|
build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py qpdf)
|
||||||
|
|
||||||
|
echo ${build_json}
|
||||||
|
|
||||||
|
echo ::set-output name=qpdf-json::${build_json}
|
||||||
-
|
-
|
||||||
name: Persistent Github pip cache
|
name: Setup psycopg2 image
|
||||||
uses: actions/cache@v2
|
id: psycopg2-setup
|
||||||
with:
|
|
||||||
path: ${{ steps.pip-cache.outputs.dir }}
|
|
||||||
key: ${{ runner.os }}-pip${{ matrix.python-version }}
|
|
||||||
-
|
|
||||||
name: Install dependencies
|
|
||||||
run: |
|
run: |
|
||||||
sudo apt-get update -qq
|
build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py psycopg2)
|
||||||
sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript optipng
|
|
||||||
pip install --upgrade pipenv
|
echo ${build_json}
|
||||||
pipenv install --system --dev --ignore-pipfile
|
|
||||||
|
echo ::set-output name=psycopg2-json::${build_json}
|
||||||
-
|
-
|
||||||
name: Tests
|
name: Setup pikepdf image
|
||||||
|
id: pikepdf-setup
|
||||||
run: |
|
run: |
|
||||||
cd src/
|
build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py pikepdf)
|
||||||
pytest
|
|
||||||
|
echo ${build_json}
|
||||||
|
|
||||||
|
echo ::set-output name=pikepdf-json::${build_json}
|
||||||
-
|
-
|
||||||
name: Publish coverage results
|
name: Setup jbig2enc image
|
||||||
if: matrix.python-version == '3.9'
|
id: jbig2enc-setup
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
# https://github.com/coveralls-clients/coveralls-python/issues/251
|
|
||||||
run: |
|
run: |
|
||||||
cd src/
|
build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py jbig2enc)
|
||||||
coveralls --service=github
|
|
||||||
|
echo ${build_json}
|
||||||
|
|
||||||
|
echo ::set-output name=jbig2enc-json::${build_json}
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
|
||||||
|
ghcr-repository: ${{ steps.set-ghcr-repository.outputs.repository }}
|
||||||
|
|
||||||
|
qpdf-json: ${{ steps.qpdf-setup.outputs.qpdf-json }}
|
||||||
|
|
||||||
|
pikepdf-json: ${{ steps.pikepdf-setup.outputs.pikepdf-json }}
|
||||||
|
|
||||||
|
psycopg2-json: ${{ steps.psycopg2-setup.outputs.psycopg2-json }}
|
||||||
|
|
||||||
|
jbig2enc-json: ${{ steps.jbig2enc-setup.outputs.jbig2enc-json}}
|
||||||
|
|
||||||
# build and push image to docker hub.
|
# build and push image to docker hub.
|
||||||
build-docker-image:
|
build-docker-image:
|
||||||
if: github.event_name == 'push' && (startsWith(github.ref, 'refs/heads/feature-') || github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/beta' || startsWith(github.ref, 'refs/tags/ngx-') || startsWith(github.ref, 'refs/tags/beta-'))
|
runs-on: ubuntu-20.04
|
||||||
runs-on: ubuntu-latest
|
concurrency:
|
||||||
needs: [tests, codeformatting, codestyle]
|
group: ${{ github.workflow }}-build-docker-image-${{ github.ref_name }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
needs:
|
||||||
|
- prepare-docker-build
|
||||||
steps:
|
steps:
|
||||||
-
|
-
|
||||||
name: Prepare
|
name: Check pushing to Docker Hub
|
||||||
id: prepare
|
id: docker-hub
|
||||||
|
# Only push to Dockerhub from the main repo AND the ref is either:
|
||||||
|
# main
|
||||||
|
# dev
|
||||||
|
# beta
|
||||||
|
# a tag
|
||||||
|
# Otherwise forks would require a Docker Hub account and secrets setup
|
||||||
run: |
|
run: |
|
||||||
IMAGE_NAME=ghcr.io/${{ github.repository }}
|
if [[ ${{ needs.prepare-docker-build.outputs.ghcr-repository }} == "paperless-ngx/paperless-ngx" && ( ${{ github.ref_name }} == "main" || ${{ github.ref_name }} == "dev" || ${{ github.ref_name }} == "beta" || ${{ startsWith(github.ref, 'refs/tags/v') }} == "true" ) ]] ; then
|
||||||
if [[ $GITHUB_REF == refs/tags/ngx-* ]]; then
|
echo "Enabling DockerHub image push"
|
||||||
TAGS=${IMAGE_NAME}:${GITHUB_REF#refs/tags/ngx-},${IMAGE_NAME}:latest
|
echo ::set-output name=enable::"true"
|
||||||
INSPECT_TAG=${IMAGE_NAME}:latest
|
|
||||||
elif [[ $GITHUB_REF == refs/tags/beta-* ]]; then
|
|
||||||
TAGS=${IMAGE_NAME}:beta
|
|
||||||
INSPECT_TAG=${TAGS}
|
|
||||||
elif [[ $GITHUB_REF == refs/heads/* ]]; then
|
|
||||||
TAGS=${IMAGE_NAME}:${GITHUB_REF#refs/heads/}
|
|
||||||
INSPECT_TAG=${TAGS}
|
|
||||||
else
|
else
|
||||||
exit 1
|
echo "Not pushing to DockerHub"
|
||||||
|
echo ::set-output name=enable::"false"
|
||||||
fi
|
fi
|
||||||
echo ::set-output name=tags::${TAGS}
|
-
|
||||||
echo ::set-output name=inspect_tag::${INSPECT_TAG}
|
name: Gather Docker metadata
|
||||||
|
id: docker-meta
|
||||||
|
uses: docker/metadata-action@v4
|
||||||
|
with:
|
||||||
|
images: |
|
||||||
|
ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}
|
||||||
|
name=paperlessngx/paperless-ngx,enable=${{ steps.docker-hub.outputs.enable }}
|
||||||
|
tags: |
|
||||||
|
# Tag branches with branch name
|
||||||
|
type=ref,event=branch
|
||||||
|
# Process semver tags
|
||||||
|
# For a tag x.y.z or vX.Y.Z, output an x.y.z and x.y image tag
|
||||||
|
type=semver,pattern={{version}}
|
||||||
|
type=semver,pattern={{major}}.{{minor}}
|
||||||
-
|
-
|
||||||
name: Checkout
|
name: Checkout
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
-
|
-
|
||||||
name: Set up Docker Buildx
|
name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v1
|
uses: docker/setup-buildx-action@v2
|
||||||
-
|
-
|
||||||
name: Set up QEMU
|
name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v1
|
uses: docker/setup-qemu-action@v2
|
||||||
-
|
-
|
||||||
name: Login to Github Container Registry
|
name: Login to Github Container Registry
|
||||||
uses: docker/login-action@v1
|
uses: docker/login-action@v2
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
-
|
||||||
|
name: Login to Docker Hub
|
||||||
|
uses: docker/login-action@v2
|
||||||
|
# Don't attempt to login is not pushing to Docker Hub
|
||||||
|
if: steps.docker-hub.outputs.enable == 'true'
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
-
|
-
|
||||||
name: Build and push
|
name: Build and push
|
||||||
uses: docker/build-push-action@v2
|
uses: docker/build-push-action@v3
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
file: ./Dockerfile
|
file: ./Dockerfile
|
||||||
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
||||||
push: true
|
push: ${{ github.event_name != 'pull_request' }}
|
||||||
tags: ${{ steps.prepare.outputs.tags }}
|
tags: ${{ steps.docker-meta.outputs.tags }}
|
||||||
cache-from: type=gha
|
labels: ${{ steps.docker-meta.outputs.labels }}
|
||||||
cache-to: type=gha,mode=max
|
build-args: |
|
||||||
|
JBIG2ENC_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.jbig2enc-json).version }}
|
||||||
|
QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
|
||||||
|
PIKEPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).version }}
|
||||||
|
PSYCOPG2_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).version }}
|
||||||
|
# Get cache layers from this branch, then dev, then main
|
||||||
|
# This allows new branches to get at least some cache benefits, generally from dev
|
||||||
|
cache-from: |
|
||||||
|
type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
|
||||||
|
type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:dev
|
||||||
|
type=registry,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:main
|
||||||
|
cache-to: |
|
||||||
|
type=registry,mode=max,ref=ghcr.io/${{ needs.prepare-docker-build.outputs.ghcr-repository }}/builder/cache/app:${{ github.ref_name }}
|
||||||
-
|
-
|
||||||
name: Inspect image
|
name: Inspect image
|
||||||
run: |
|
run: |
|
||||||
docker buildx imagetools inspect ${{ steps.prepare.outputs.inspect_tag }}
|
docker buildx imagetools inspect ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
|
||||||
-
|
-
|
||||||
name: Export frontend artifact from docker
|
name: Export frontend artifact from docker
|
||||||
run: |
|
run: |
|
||||||
docker run -d --name frontend-extract ${{ steps.prepare.outputs.inspect_tag }}
|
docker create --name frontend-extract ${{ fromJSON(steps.docker-meta.outputs.json).tags[0] }}
|
||||||
docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
|
docker cp frontend-extract:/usr/src/paperless/src/documents/static/frontend src/documents/static/frontend/
|
||||||
-
|
-
|
||||||
name: Upload frontend artifact
|
name: Upload frontend artifact
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: frontend-compiled
|
name: frontend-compiled
|
||||||
path: src/documents/static/frontend/
|
path: src/documents/static/frontend/
|
||||||
|
|
||||||
build-release:
|
build-release:
|
||||||
needs: [build-docker-image, documentation, tests, codeformatting, codestyle]
|
needs:
|
||||||
|
- build-docker-image
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-20.04
|
||||||
steps:
|
steps:
|
||||||
-
|
-
|
||||||
name: Checkout
|
name: Checkout
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v3
|
||||||
-
|
-
|
||||||
name: Set up Python
|
name: Set up Python
|
||||||
uses: actions/setup-python@v2
|
uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: 3.9
|
python-version: 3.9
|
||||||
-
|
-
|
||||||
@@ -233,13 +257,13 @@ jobs:
|
|||||||
pip3 install -r requirements.txt
|
pip3 install -r requirements.txt
|
||||||
-
|
-
|
||||||
name: Download frontend artifact
|
name: Download frontend artifact
|
||||||
uses: actions/download-artifact@v2
|
uses: actions/download-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: frontend-compiled
|
name: frontend-compiled
|
||||||
path: src/documents/static/frontend/
|
path: src/documents/static/frontend/
|
||||||
-
|
-
|
||||||
name: Download documentation artifact
|
name: Download documentation artifact
|
||||||
uses: actions/download-artifact@v2
|
uses: actions/download-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: documentation
|
name: documentation
|
||||||
path: docs/_build/html/
|
path: docs/_build/html/
|
||||||
@@ -274,19 +298,24 @@ jobs:
|
|||||||
tar -cJf paperless-ngx.tar.xz paperless-ngx/
|
tar -cJf paperless-ngx.tar.xz paperless-ngx/
|
||||||
-
|
-
|
||||||
name: Upload release artifact
|
name: Upload release artifact
|
||||||
uses: actions/upload-artifact@v2
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: release
|
name: release
|
||||||
path: dist/paperless-ngx.tar.xz
|
path: dist/paperless-ngx.tar.xz
|
||||||
|
|
||||||
publish-release:
|
publish-release:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-20.04
|
||||||
needs: build-release
|
outputs:
|
||||||
if: contains(github.ref, 'refs/tags/ngx-') || contains(github.ref, 'refs/tags/beta-')
|
prerelease: ${{ steps.get_version.outputs.prerelease }}
|
||||||
|
changelog: ${{ steps.create-release.outputs.body }}
|
||||||
|
version: ${{ steps.get_version.outputs.version }}
|
||||||
|
needs:
|
||||||
|
- build-release
|
||||||
|
if: github.ref_type == 'tag' && (startsWith(github.ref_name, 'v') || contains(github.ref_name, '-beta.rc'))
|
||||||
steps:
|
steps:
|
||||||
-
|
-
|
||||||
name: Download release artifact
|
name: Download release artifact
|
||||||
uses: actions/download-artifact@v2
|
uses: actions/download-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: release
|
name: release
|
||||||
path: ./
|
path: ./
|
||||||
@@ -294,27 +323,24 @@ jobs:
|
|||||||
name: Get version
|
name: Get version
|
||||||
id: get_version
|
id: get_version
|
||||||
run: |
|
run: |
|
||||||
if [[ $GITHUB_REF == refs/tags/ngx-* ]]; then
|
echo ::set-output name=version::${{ github.ref_name }}
|
||||||
echo ::set-output name=version::${GITHUB_REF#refs/tags/ngx-}
|
if [[ ${{ contains(github.ref_name, '-beta.rc') }} == 'true' ]]; then
|
||||||
echo ::set-output name=prerelease::false
|
|
||||||
echo ::set-output name=body::"For a complete list of changes, see the changelog at https://paperless-ngx.readthedocs.io/en/latest/changelog.html"
|
|
||||||
elif [[ $GITHUB_REF == refs/tags/beta-* ]]; then
|
|
||||||
echo ::set-output name=version::${GITHUB_REF#refs/tags/beta-}
|
|
||||||
echo ::set-output name=prerelease::true
|
echo ::set-output name=prerelease::true
|
||||||
echo ::set-output name=body::"For a complete list of changes, see the changelog at https://github.com/paperless-ngx/paperless-ngx/blob/beta/docs/changelog.rst"
|
else
|
||||||
|
echo ::set-output name=prerelease::false
|
||||||
fi
|
fi
|
||||||
-
|
-
|
||||||
name: Create release
|
name: Create Release and Changelog
|
||||||
id: create_release
|
id: create-release
|
||||||
uses: actions/create-release@v1
|
uses: release-drafter/release-drafter@v5
|
||||||
|
with:
|
||||||
|
name: Paperless-ngx ${{ steps.get_version.outputs.version }}
|
||||||
|
tag: ${{ steps.get_version.outputs.version }}
|
||||||
|
version: ${{ steps.get_version.outputs.version }}
|
||||||
|
prerelease: ${{ steps.get_version.outputs.prerelease }}
|
||||||
|
publish: true # ensures release is not marked as draft
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
with:
|
|
||||||
tag_name: ngx-${{ steps.get_version.outputs.version }}
|
|
||||||
release_name: Paperless-ngx ${{ steps.get_version.outputs.version }}
|
|
||||||
draft: false
|
|
||||||
prerelease: ${{ steps.get_version.outputs.prerelease }}
|
|
||||||
body: ${{ steps.get_version.outputs.body }}
|
|
||||||
-
|
-
|
||||||
name: Upload release archive
|
name: Upload release archive
|
||||||
id: upload-release-asset
|
id: upload-release-asset
|
||||||
@@ -322,7 +348,54 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
with:
|
with:
|
||||||
upload_url: ${{ steps.create_release.outputs.upload_url }} # This pulls from the CREATE RELEASE step above, referencing it's ID to get its outputs object, which include a `upload_url`. See this blog post for more info: https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
|
upload_url: ${{ steps.create-release.outputs.upload_url }}
|
||||||
asset_path: ./paperless-ngx.tar.xz
|
asset_path: ./paperless-ngx.tar.xz
|
||||||
asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
|
asset_name: paperless-ngx-${{ steps.get_version.outputs.version }}.tar.xz
|
||||||
asset_content_type: application/x-xz
|
asset_content_type: application/x-xz
|
||||||
|
|
||||||
|
append-changelog:
|
||||||
|
runs-on: ubuntu-20.04
|
||||||
|
needs:
|
||||||
|
- publish-release
|
||||||
|
if: needs.publish-release.outputs.prerelease == 'false'
|
||||||
|
steps:
|
||||||
|
-
|
||||||
|
name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: main
|
||||||
|
-
|
||||||
|
name: Append Changelog to docs
|
||||||
|
id: append-Changelog
|
||||||
|
working-directory: docs
|
||||||
|
run: |
|
||||||
|
git branch ${{ needs.publish-release.outputs.version }}-changelog
|
||||||
|
git checkout ${{ needs.publish-release.outputs.version }}-changelog
|
||||||
|
echo -e "# Changelog\n\n${{ needs.publish-release.outputs.changelog }}\n" > changelog-new.md
|
||||||
|
CURRENT_CHANGELOG=`tail --lines +2 changelog.md`
|
||||||
|
echo -e "$CURRENT_CHANGELOG" >> changelog-new.md
|
||||||
|
mv changelog-new.md changelog.md
|
||||||
|
git config --global user.name "github-actions"
|
||||||
|
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||||
|
git commit -am "Changelog ${{ steps.get_version.outputs.version }} - GHA"
|
||||||
|
git push origin ${{ needs.publish-release.outputs.version }}-changelog
|
||||||
|
-
|
||||||
|
name: Create Pull Request
|
||||||
|
uses: actions/github-script@v6
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const { repo, owner } = context.repo;
|
||||||
|
const result = await github.rest.pulls.create({
|
||||||
|
title: '[Documentation] Add ${{ needs.publish-release.outputs.version }} changelog',
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
head: '${{ needs.publish-release.outputs.version }}-changelog',
|
||||||
|
base: 'main',
|
||||||
|
body: 'This PR is auto-generated by CI.'
|
||||||
|
});
|
||||||
|
github.rest.issues.addLabels({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
issue_number: result.data.number,
|
||||||
|
labels: ['documentation']
|
||||||
|
});
|
||||||
|
|||||||
48
.github/workflows/cleanup-tags.yml
vendored
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
name: Cleanup Image Tags
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 0 * * SAT'
|
||||||
|
delete:
|
||||||
|
pull_request:
|
||||||
|
types:
|
||||||
|
- closed
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- ".github/workflows/cleanup-tags.yml"
|
||||||
|
- ".github/scripts/cleanup-tags.py"
|
||||||
|
- ".github/scripts/common.py"
|
||||||
|
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
cleanup:
|
||||||
|
name: Cleanup Image Tags
|
||||||
|
runs-on: ubuntu-20.04
|
||||||
|
permissions:
|
||||||
|
packages: write
|
||||||
|
steps:
|
||||||
|
-
|
||||||
|
name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
-
|
||||||
|
name: Login to Github Container Registry
|
||||||
|
uses: docker/login-action@v1
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{ github.actor }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
-
|
||||||
|
name: Set up Python
|
||||||
|
uses: actions/setup-python@v3
|
||||||
|
with:
|
||||||
|
python-version: "3.9"
|
||||||
|
-
|
||||||
|
name: Install requests
|
||||||
|
run: |
|
||||||
|
python -m pip install requests
|
||||||
|
-
|
||||||
|
name: Cleanup feature tags
|
||||||
|
run: |
|
||||||
|
python ${GITHUB_WORKSPACE}/.github/scripts/cleanup-tags.py --loglevel info --delete
|
||||||
4
.github/workflows/codeql-analysis.yml
vendored
@@ -42,7 +42,7 @@ jobs:
|
|||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v1
|
uses: github/codeql-action/init@v2
|
||||||
with:
|
with:
|
||||||
languages: ${{ matrix.language }}
|
languages: ${{ matrix.language }}
|
||||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||||
@@ -51,4 +51,4 @@ jobs:
|
|||||||
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v1
|
uses: github/codeql-action/analyze@v2
|
||||||
|
|||||||
147
.github/workflows/installer-library.yml
vendored
Normal file
@@ -0,0 +1,147 @@
|
|||||||
|
# This workflow will run to update the installer library of
|
||||||
|
# Docker images. These are the images which provide updated wheels
|
||||||
|
# .deb installation packages or maybe just some compiled library
|
||||||
|
|
||||||
|
name: Build Image Library
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
# Must match one of these branches AND one of the paths
|
||||||
|
# to be triggered
|
||||||
|
branches:
|
||||||
|
- "main"
|
||||||
|
- "dev"
|
||||||
|
- "library-*"
|
||||||
|
- "feature-*"
|
||||||
|
paths:
|
||||||
|
# Trigger the workflow if a Dockerfile changed
|
||||||
|
- "docker-builders/**"
|
||||||
|
# Trigger if a package was updated
|
||||||
|
- ".build-config.json"
|
||||||
|
- "Pipfile.lock"
|
||||||
|
# Also trigger on workflow changes related to the library
|
||||||
|
- ".github/workflows/installer-library.yml"
|
||||||
|
- ".github/workflows/reusable-workflow-builder.yml"
|
||||||
|
- ".github/scripts/**"
|
||||||
|
|
||||||
|
# Set a workflow level concurrency group so primary workflow
|
||||||
|
# can wait for this to complete if needed
|
||||||
|
# DO NOT CHANGE without updating main workflow group
|
||||||
|
concurrency:
|
||||||
|
group: build-installer-library
|
||||||
|
cancel-in-progress: false
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
prepare-docker-build:
|
||||||
|
name: Prepare Docker Image Version Data
|
||||||
|
runs-on: ubuntu-20.04
|
||||||
|
steps:
|
||||||
|
-
|
||||||
|
name: Set ghcr repository name
|
||||||
|
id: set-ghcr-repository
|
||||||
|
run: |
|
||||||
|
ghcr_name=$(echo "${GITHUB_REPOSITORY}" | awk '{ print tolower($0) }')
|
||||||
|
echo ::set-output name=repository::${ghcr_name}
|
||||||
|
-
|
||||||
|
name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
-
|
||||||
|
name: Set up Python
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.9"
|
||||||
|
-
|
||||||
|
name: Setup qpdf image
|
||||||
|
id: qpdf-setup
|
||||||
|
run: |
|
||||||
|
build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py qpdf)
|
||||||
|
|
||||||
|
echo ${build_json}
|
||||||
|
|
||||||
|
echo ::set-output name=qpdf-json::${build_json}
|
||||||
|
-
|
||||||
|
name: Setup psycopg2 image
|
||||||
|
id: psycopg2-setup
|
||||||
|
run: |
|
||||||
|
build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py psycopg2)
|
||||||
|
|
||||||
|
echo ${build_json}
|
||||||
|
|
||||||
|
echo ::set-output name=psycopg2-json::${build_json}
|
||||||
|
-
|
||||||
|
name: Setup pikepdf image
|
||||||
|
id: pikepdf-setup
|
||||||
|
run: |
|
||||||
|
build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py pikepdf)
|
||||||
|
|
||||||
|
echo ${build_json}
|
||||||
|
|
||||||
|
echo ::set-output name=pikepdf-json::${build_json}
|
||||||
|
-
|
||||||
|
name: Setup jbig2enc image
|
||||||
|
id: jbig2enc-setup
|
||||||
|
run: |
|
||||||
|
build_json=$(python ${GITHUB_WORKSPACE}/.github/scripts/get-build-json.py jbig2enc)
|
||||||
|
|
||||||
|
echo ${build_json}
|
||||||
|
|
||||||
|
echo ::set-output name=jbig2enc-json::${build_json}
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
|
||||||
|
ghcr-repository: ${{ steps.set-ghcr-repository.outputs.repository }}
|
||||||
|
|
||||||
|
qpdf-json: ${{ steps.qpdf-setup.outputs.qpdf-json }}
|
||||||
|
|
||||||
|
pikepdf-json: ${{ steps.pikepdf-setup.outputs.pikepdf-json }}
|
||||||
|
|
||||||
|
psycopg2-json: ${{ steps.psycopg2-setup.outputs.psycopg2-json }}
|
||||||
|
|
||||||
|
jbig2enc-json: ${{ steps.jbig2enc-setup.outputs.jbig2enc-json}}
|
||||||
|
|
||||||
|
build-qpdf-debs:
|
||||||
|
name: qpdf
|
||||||
|
needs:
|
||||||
|
- prepare-docker-build
|
||||||
|
uses: ./.github/workflows/reusable-workflow-builder.yml
|
||||||
|
with:
|
||||||
|
dockerfile: ./docker-builders/Dockerfile.qpdf
|
||||||
|
build-json: ${{ needs.prepare-docker-build.outputs.qpdf-json }}
|
||||||
|
build-args: |
|
||||||
|
QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
|
||||||
|
|
||||||
|
build-jbig2enc:
|
||||||
|
name: jbig2enc
|
||||||
|
needs:
|
||||||
|
- prepare-docker-build
|
||||||
|
uses: ./.github/workflows/reusable-workflow-builder.yml
|
||||||
|
with:
|
||||||
|
dockerfile: ./docker-builders/Dockerfile.jbig2enc
|
||||||
|
build-json: ${{ needs.prepare-docker-build.outputs.jbig2enc-json }}
|
||||||
|
build-args: |
|
||||||
|
JBIG2ENC_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.jbig2enc-json).version }}
|
||||||
|
|
||||||
|
build-psycopg2-wheel:
|
||||||
|
name: psycopg2
|
||||||
|
needs:
|
||||||
|
- prepare-docker-build
|
||||||
|
uses: ./.github/workflows/reusable-workflow-builder.yml
|
||||||
|
with:
|
||||||
|
dockerfile: ./docker-builders/Dockerfile.psycopg2
|
||||||
|
build-json: ${{ needs.prepare-docker-build.outputs.psycopg2-json }}
|
||||||
|
build-args: |
|
||||||
|
PSYCOPG2_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.psycopg2-json).version }}
|
||||||
|
|
||||||
|
build-pikepdf-wheel:
|
||||||
|
name: pikepdf
|
||||||
|
needs:
|
||||||
|
- prepare-docker-build
|
||||||
|
- build-qpdf-debs
|
||||||
|
uses: ./.github/workflows/reusable-workflow-builder.yml
|
||||||
|
with:
|
||||||
|
dockerfile: ./docker-builders/Dockerfile.pikepdf
|
||||||
|
build-json: ${{ needs.prepare-docker-build.outputs.pikepdf-json }}
|
||||||
|
build-args: |
|
||||||
|
REPO=${{ needs.prepare-docker-build.outputs.ghcr-repository }}
|
||||||
|
QPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.qpdf-json).version }}
|
||||||
|
PIKEPDF_VERSION=${{ fromJSON(needs.prepare-docker-build.outputs.pikepdf-json).version }}
|
||||||
47
.github/workflows/project-actions.yml
vendored
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
name: Project Automations
|
||||||
|
|
||||||
|
on:
|
||||||
|
issues:
|
||||||
|
types:
|
||||||
|
- opened
|
||||||
|
- reopened
|
||||||
|
pull_request_target: #_target allows access to secrets
|
||||||
|
types:
|
||||||
|
- opened
|
||||||
|
- reopened
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- dev
|
||||||
|
|
||||||
|
env:
|
||||||
|
todo: Todo
|
||||||
|
done: Done
|
||||||
|
in_progress: In Progress
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
issue_opened_or_reopened:
|
||||||
|
name: issue_opened_or_reopened
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'issues' && (github.event.action == 'opened' || github.event.action == 'reopened')
|
||||||
|
steps:
|
||||||
|
- name: Set issue status to ${{ env.todo }}
|
||||||
|
uses: leonsteinhaeuser/project-beta-automations@v1.2.1
|
||||||
|
with:
|
||||||
|
gh_token: ${{ secrets.GH_TOKEN }}
|
||||||
|
organization: paperless-ngx
|
||||||
|
project_id: 2
|
||||||
|
resource_node_id: ${{ github.event.issue.node_id }}
|
||||||
|
status_value: ${{ env.todo }} # Target status
|
||||||
|
pr_opened_or_reopened:
|
||||||
|
name: pr_opened_or_reopened
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'pull_request_target' && (github.event.action == 'opened' || github.event.action == 'reopened')
|
||||||
|
steps:
|
||||||
|
- name: Set PR status to ${{ env.in_progress }}
|
||||||
|
uses: leonsteinhaeuser/project-beta-automations@v1.2.1
|
||||||
|
with:
|
||||||
|
gh_token: ${{ secrets.GH_TOKEN }}
|
||||||
|
organization: paperless-ngx
|
||||||
|
project_id: 2
|
||||||
|
resource_node_id: ${{ github.event.pull_request.node_id }}
|
||||||
|
status_value: ${{ env.in_progress }} # Target status
|
||||||
129
.github/workflows/reusable-ci-backend.yml
vendored
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
name: Backend CI Jobs
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
|
||||||
|
code-checks-backend:
|
||||||
|
name: "Code Style Checks"
|
||||||
|
runs-on: ubuntu-20.04
|
||||||
|
steps:
|
||||||
|
-
|
||||||
|
name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
-
|
||||||
|
name: Install checkers
|
||||||
|
run: |
|
||||||
|
pipx install reorder-python-imports
|
||||||
|
pipx install yesqa
|
||||||
|
pipx install add-trailing-comma
|
||||||
|
pipx install flake8
|
||||||
|
-
|
||||||
|
name: Run reorder-python-imports
|
||||||
|
run: |
|
||||||
|
find src/ -type f -name '*.py' ! -path "*/migrations/*" | xargs reorder-python-imports
|
||||||
|
-
|
||||||
|
name: Run yesqa
|
||||||
|
run: |
|
||||||
|
find src/ -type f -name '*.py' ! -path "*/migrations/*" | xargs yesqa
|
||||||
|
-
|
||||||
|
name: Run add-trailing-comma
|
||||||
|
run: |
|
||||||
|
find src/ -type f -name '*.py' ! -path "*/migrations/*" | xargs add-trailing-comma
|
||||||
|
# black is placed after add-trailing-comma because it may format differently
|
||||||
|
# if a trailing comma is added
|
||||||
|
-
|
||||||
|
name: Run black
|
||||||
|
uses: psf/black@stable
|
||||||
|
with:
|
||||||
|
options: "--check --diff"
|
||||||
|
version: "22.3.0"
|
||||||
|
-
|
||||||
|
name: Run flake8 checks
|
||||||
|
run: |
|
||||||
|
cd src/
|
||||||
|
flake8 --max-line-length=88 --ignore=E203,W503
|
||||||
|
|
||||||
|
tests-backend:
|
||||||
|
name: "Tests (${{ matrix.python-version }})"
|
||||||
|
runs-on: ubuntu-20.04
|
||||||
|
needs:
|
||||||
|
- code-checks-backend
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
python-version: ['3.8', '3.9', '3.10']
|
||||||
|
fail-fast: false
|
||||||
|
steps:
|
||||||
|
-
|
||||||
|
name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
fetch-depth: 2
|
||||||
|
-
|
||||||
|
name: Install pipenv
|
||||||
|
run: pipx install pipenv
|
||||||
|
-
|
||||||
|
name: Set up Python
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "${{ matrix.python-version }}"
|
||||||
|
cache: "pipenv"
|
||||||
|
cache-dependency-path: 'Pipfile.lock'
|
||||||
|
-
|
||||||
|
name: Install system dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update -qq
|
||||||
|
sudo apt-get install -qq --no-install-recommends unpaper tesseract-ocr imagemagick ghostscript libzbar0 poppler-utils
|
||||||
|
-
|
||||||
|
name: Install Python dependencies
|
||||||
|
run: |
|
||||||
|
pipenv sync --dev
|
||||||
|
-
|
||||||
|
name: Tests
|
||||||
|
run: |
|
||||||
|
cd src/
|
||||||
|
pipenv run pytest
|
||||||
|
-
|
||||||
|
name: Get changed files
|
||||||
|
id: changed-files-specific
|
||||||
|
uses: tj-actions/changed-files@v23.1
|
||||||
|
with:
|
||||||
|
files: |
|
||||||
|
src/**
|
||||||
|
-
|
||||||
|
name: List all changed files
|
||||||
|
run: |
|
||||||
|
for file in ${{ steps.changed-files-specific.outputs.all_changed_files }}; do
|
||||||
|
echo "${file} was changed"
|
||||||
|
done
|
||||||
|
-
|
||||||
|
name: Publish coverage results
|
||||||
|
if: matrix.python-version == '3.9' && steps.changed-files-specific.outputs.any_changed == 'true'
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
# https://github.com/coveralls-clients/coveralls-python/issues/251
|
||||||
|
run: |
|
||||||
|
cd src/
|
||||||
|
pipenv run coveralls --service=github
|
||||||
|
|
||||||
|
dockerfile-lint:
|
||||||
|
name: "Lint ${{ matrix.dockerfile }}"
|
||||||
|
runs-on: ubuntu-20.04
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
dockerfile:
|
||||||
|
- Dockerfile
|
||||||
|
- docker-builders/Dockerfile.qpdf
|
||||||
|
- docker-builders/Dockerfile.jbig2enc
|
||||||
|
- docker-builders/Dockerfile.psycopg2
|
||||||
|
- docker-builders/Dockerfile.pikepdf
|
||||||
|
fail-fast: false
|
||||||
|
steps:
|
||||||
|
-
|
||||||
|
name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
-
|
||||||
|
uses: hadolint/hadolint-action@v2.1.0
|
||||||
|
with:
|
||||||
|
dockerfile: ${{ matrix.dockerfile }}
|
||||||
42
.github/workflows/reusable-ci-frontend.yml
vendored
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
name: Frontend CI Jobs
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
|
||||||
|
code-checks-frontend:
|
||||||
|
name: "Code Style Checks"
|
||||||
|
runs-on: ubuntu-20.04
|
||||||
|
steps:
|
||||||
|
-
|
||||||
|
name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: '16'
|
||||||
|
-
|
||||||
|
name: Install prettier
|
||||||
|
run: |
|
||||||
|
npm install prettier
|
||||||
|
-
|
||||||
|
name: Run prettier
|
||||||
|
run:
|
||||||
|
npx prettier --check --ignore-path Pipfile.lock **/*.js **/*.ts *.md **/*.md
|
||||||
|
tests-frontend:
|
||||||
|
name: "Tests"
|
||||||
|
runs-on: ubuntu-20.04
|
||||||
|
needs:
|
||||||
|
- code-checks-frontend
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
node-version: [16.x]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- name: Use Node.js ${{ matrix.node-version }}
|
||||||
|
uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: ${{ matrix.node-version }}
|
||||||
|
- run: cd src-ui && npm ci
|
||||||
|
- run: cd src-ui && npm run test
|
||||||
|
- run: cd src-ui && npm run e2e:ci
|
||||||
53
.github/workflows/reusable-workflow-builder.yml
vendored
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
name: Reusable Image Builder
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
dockerfile:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
build-json:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
build-args:
|
||||||
|
required: false
|
||||||
|
default: ""
|
||||||
|
type: string
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ fromJSON(inputs.build-json).name }}-${{ fromJSON(inputs.build-json).version }}
|
||||||
|
cancel-in-progress: false
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-image:
|
||||||
|
name: Build ${{ fromJSON(inputs.build-json).name }} @ ${{ fromJSON(inputs.build-json).version }}
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
-
|
||||||
|
name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
-
|
||||||
|
name: Login to Github Container Registry
|
||||||
|
uses: docker/login-action@v2
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{ github.actor }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
-
|
||||||
|
name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v2
|
||||||
|
-
|
||||||
|
name: Set up QEMU
|
||||||
|
uses: docker/setup-qemu-action@v2
|
||||||
|
-
|
||||||
|
name: Build ${{ fromJSON(inputs.build-json).name }}
|
||||||
|
uses: docker/build-push-action@v3
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: ${{ inputs.dockerfile }}
|
||||||
|
tags: ${{ fromJSON(inputs.build-json).image_tag }}
|
||||||
|
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
||||||
|
build-args: ${{ inputs.build-args }}
|
||||||
|
push: true
|
||||||
|
cache-from: type=registry,ref=${{ fromJSON(inputs.build-json).cache_tag }}
|
||||||
|
cache-to: type=registry,mode=max,ref=${{ fromJSON(inputs.build-json).cache_tag }}
|
||||||
11
.gitignore
vendored
@@ -61,10 +61,16 @@ target/
|
|||||||
# PyCharm
|
# PyCharm
|
||||||
.idea
|
.idea
|
||||||
|
|
||||||
|
# VS Code
|
||||||
|
.vscode
|
||||||
|
/src-ui/.vscode
|
||||||
|
/docs/.vscode
|
||||||
|
|
||||||
# Other stuff that doesn't belong
|
# Other stuff that doesn't belong
|
||||||
.virtualenv
|
.virtualenv
|
||||||
virtualenv
|
virtualenv
|
||||||
/venv
|
/venv
|
||||||
|
.venv/
|
||||||
/docker-compose.env
|
/docker-compose.env
|
||||||
/docker-compose.yml
|
/docker-compose.yml
|
||||||
|
|
||||||
@@ -81,8 +87,9 @@ scripts/nuke
|
|||||||
/paperless.conf
|
/paperless.conf
|
||||||
/consume/
|
/consume/
|
||||||
/export/
|
/export/
|
||||||
/src-ui/.vscode
|
|
||||||
|
|
||||||
# this is where the compiled frontend is moved to.
|
# this is where the compiled frontend is moved to.
|
||||||
/src/documents/static/frontend/
|
/src/documents/static/frontend/
|
||||||
/docs/.vscode/settings.json
|
|
||||||
|
# mac os
|
||||||
|
.DS_Store
|
||||||
|
|||||||
8
.hadolint.yml
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
failure-threshold: warning
|
||||||
|
ignored:
|
||||||
|
# https://github.com/hadolint/hadolint/wiki/DL3008
|
||||||
|
- DL3008
|
||||||
|
# https://github.com/hadolint/hadolint/wiki/DL3013
|
||||||
|
- DL3013
|
||||||
|
# https://github.com/hadolint/hadolint/wiki/DL3003
|
||||||
|
- DL3003
|
||||||
87
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
# This file configures pre-commit hooks.
|
||||||
|
# See https://pre-commit.com/ for general information
|
||||||
|
# See https://pre-commit.com/hooks.html for a listing of possible hooks
|
||||||
|
|
||||||
|
repos:
|
||||||
|
# General hooks
|
||||||
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
|
rev: v4.3.0
|
||||||
|
hooks:
|
||||||
|
- id: check-docstring-first
|
||||||
|
- id: check-json
|
||||||
|
exclude: "tsconfig.*json"
|
||||||
|
- id: check-yaml
|
||||||
|
- id: check-toml
|
||||||
|
- id: check-executables-have-shebangs
|
||||||
|
- id: end-of-file-fixer
|
||||||
|
exclude_types:
|
||||||
|
- svg
|
||||||
|
- pofile
|
||||||
|
exclude: "(^LICENSE$)"
|
||||||
|
- id: mixed-line-ending
|
||||||
|
args:
|
||||||
|
- "--fix=lf"
|
||||||
|
- id: trailing-whitespace
|
||||||
|
exclude_types:
|
||||||
|
- svg
|
||||||
|
- id: check-case-conflict
|
||||||
|
- id: detect-private-key
|
||||||
|
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||||
|
rev: "v2.7.1"
|
||||||
|
hooks:
|
||||||
|
- id: prettier
|
||||||
|
types_or:
|
||||||
|
- javascript
|
||||||
|
- ts
|
||||||
|
- markdown
|
||||||
|
exclude: "(^Pipfile\\.lock$)"
|
||||||
|
# Python hooks
|
||||||
|
- repo: https://github.com/asottile/reorder_python_imports
|
||||||
|
rev: v3.8.1
|
||||||
|
hooks:
|
||||||
|
- id: reorder-python-imports
|
||||||
|
exclude: "(migrations)"
|
||||||
|
- repo: https://github.com/asottile/yesqa
|
||||||
|
rev: "v1.3.0"
|
||||||
|
hooks:
|
||||||
|
- id: yesqa
|
||||||
|
exclude: "(migrations)"
|
||||||
|
- repo: https://github.com/asottile/add-trailing-comma
|
||||||
|
rev: "v2.2.3"
|
||||||
|
hooks:
|
||||||
|
- id: add-trailing-comma
|
||||||
|
exclude: "(migrations)"
|
||||||
|
- repo: https://gitlab.com/pycqa/flake8
|
||||||
|
rev: 3.9.2
|
||||||
|
hooks:
|
||||||
|
- id: flake8
|
||||||
|
files: ^src/
|
||||||
|
args:
|
||||||
|
- "--config=./src/setup.cfg"
|
||||||
|
- repo: https://github.com/psf/black
|
||||||
|
rev: 22.6.0
|
||||||
|
hooks:
|
||||||
|
- id: black
|
||||||
|
- repo: https://github.com/asottile/pyupgrade
|
||||||
|
rev: v2.37.1
|
||||||
|
hooks:
|
||||||
|
- id: pyupgrade
|
||||||
|
exclude: "(migrations)"
|
||||||
|
args:
|
||||||
|
- "--py38-plus"
|
||||||
|
# Dockerfile hooks
|
||||||
|
- repo: https://github.com/AleksaC/hadolint-py
|
||||||
|
rev: v2.10.0
|
||||||
|
hooks:
|
||||||
|
- id: hadolint
|
||||||
|
# Shell script hooks
|
||||||
|
- repo: https://github.com/lovesegfault/beautysh
|
||||||
|
rev: v6.2.1
|
||||||
|
hooks:
|
||||||
|
- id: beautysh
|
||||||
|
args:
|
||||||
|
- "--tab"
|
||||||
|
- repo: https://github.com/shellcheck-py/shellcheck-py
|
||||||
|
rev: "v0.8.0.4"
|
||||||
|
hooks:
|
||||||
|
- id: shellcheck
|
||||||
4
.prettierrc
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# https://prettier.io/docs/en/options.html#semicolons
|
||||||
|
semi: false
|
||||||
|
# https://prettier.io/docs/en/options.html#quotes
|
||||||
|
singleQuote: true
|
||||||
10
CODEOWNERS
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
/.github/workflows/ @paperless-ngx/ci-cd
|
||||||
|
/docker/ @paperless-ngx/ci-cd
|
||||||
|
/scripts/ @paperless-ngx/ci-cd
|
||||||
|
|
||||||
|
/src-ui/ @paperless-ngx/frontend
|
||||||
|
|
||||||
|
/src/ @paperless-ngx/backend
|
||||||
|
Pipfile* @paperless-ngx/backend
|
||||||
|
*.py @paperless-ngx/backend
|
||||||
|
requirements.txt @paperless-ngx/backend
|
||||||
128
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
# Contributor Covenant Code of Conduct
|
||||||
|
|
||||||
|
## Our Pledge
|
||||||
|
|
||||||
|
We as members, contributors, and leaders pledge to make participation in our
|
||||||
|
community a harassment-free experience for everyone, regardless of age, body
|
||||||
|
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||||
|
identity and expression, level of experience, education, socio-economic status,
|
||||||
|
nationality, personal appearance, race, religion, or sexual identity
|
||||||
|
and orientation.
|
||||||
|
|
||||||
|
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||||
|
diverse, inclusive, and healthy community.
|
||||||
|
|
||||||
|
## Our Standards
|
||||||
|
|
||||||
|
Examples of behavior that contributes to a positive environment for our
|
||||||
|
community include:
|
||||||
|
|
||||||
|
- Demonstrating empathy and kindness toward other people
|
||||||
|
- Being respectful of differing opinions, viewpoints, and experiences
|
||||||
|
- Giving and gracefully accepting constructive feedback
|
||||||
|
- Accepting responsibility and apologizing to those affected by our mistakes,
|
||||||
|
and learning from the experience
|
||||||
|
- Focusing on what is best not just for us as individuals, but for the
|
||||||
|
overall community
|
||||||
|
|
||||||
|
Examples of unacceptable behavior include:
|
||||||
|
|
||||||
|
- The use of sexualized language or imagery, and sexual attention or
|
||||||
|
advances of any kind
|
||||||
|
- Trolling, insulting or derogatory comments, and personal or political attacks
|
||||||
|
- Public or private harassment
|
||||||
|
- Publishing others' private information, such as a physical or email
|
||||||
|
address, without their explicit permission
|
||||||
|
- Other conduct which could reasonably be considered inappropriate in a
|
||||||
|
professional setting
|
||||||
|
|
||||||
|
## Enforcement Responsibilities
|
||||||
|
|
||||||
|
Community leaders are responsible for clarifying and enforcing our standards of
|
||||||
|
acceptable behavior and will take appropriate and fair corrective action in
|
||||||
|
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||||
|
or harmful.
|
||||||
|
|
||||||
|
Community leaders have the right and responsibility to remove, edit, or reject
|
||||||
|
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||||
|
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
||||||
|
decisions when appropriate.
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
This Code of Conduct applies within all community spaces, and also applies when
|
||||||
|
an individual is officially representing the community in public spaces.
|
||||||
|
Examples of representing our community include using an official e-mail address,
|
||||||
|
posting via an official social media account, or acting as an appointed
|
||||||
|
representative at an online or offline event.
|
||||||
|
|
||||||
|
## Enforcement
|
||||||
|
|
||||||
|
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||||
|
reported to the community leaders responsible for enforcement at
|
||||||
|
hello@paperless-ngx.com.
|
||||||
|
All complaints will be reviewed and investigated promptly and fairly.
|
||||||
|
|
||||||
|
All community leaders are obligated to respect the privacy and security of the
|
||||||
|
reporter of any incident.
|
||||||
|
|
||||||
|
## Enforcement Guidelines
|
||||||
|
|
||||||
|
Community leaders will follow these Community Impact Guidelines in determining
|
||||||
|
the consequences for any action they deem in violation of this Code of Conduct:
|
||||||
|
|
||||||
|
### 1. Correction
|
||||||
|
|
||||||
|
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||||
|
unprofessional or unwelcome in the community.
|
||||||
|
|
||||||
|
**Consequence**: A private, written warning from community leaders, providing
|
||||||
|
clarity around the nature of the violation and an explanation of why the
|
||||||
|
behavior was inappropriate. A public apology may be requested.
|
||||||
|
|
||||||
|
### 2. Warning
|
||||||
|
|
||||||
|
**Community Impact**: A violation through a single incident or series
|
||||||
|
of actions.
|
||||||
|
|
||||||
|
**Consequence**: A warning with consequences for continued behavior. No
|
||||||
|
interaction with the people involved, including unsolicited interaction with
|
||||||
|
those enforcing the Code of Conduct, for a specified period of time. This
|
||||||
|
includes avoiding interactions in community spaces as well as external channels
|
||||||
|
like social media. Violating these terms may lead to a temporary or
|
||||||
|
permanent ban.
|
||||||
|
|
||||||
|
### 3. Temporary Ban
|
||||||
|
|
||||||
|
**Community Impact**: A serious violation of community standards, including
|
||||||
|
sustained inappropriate behavior.
|
||||||
|
|
||||||
|
**Consequence**: A temporary ban from any sort of interaction or public
|
||||||
|
communication with the community for a specified period of time. No public or
|
||||||
|
private interaction with the people involved, including unsolicited interaction
|
||||||
|
with those enforcing the Code of Conduct, is allowed during this period.
|
||||||
|
Violating these terms may lead to a permanent ban.
|
||||||
|
|
||||||
|
### 4. Permanent Ban
|
||||||
|
|
||||||
|
**Community Impact**: Demonstrating a pattern of violation of community
|
||||||
|
standards, including sustained inappropriate behavior, harassment of an
|
||||||
|
individual, or aggression toward or disparagement of classes of individuals.
|
||||||
|
|
||||||
|
**Consequence**: A permanent ban from any sort of public interaction within
|
||||||
|
the community.
|
||||||
|
|
||||||
|
## Attribution
|
||||||
|
|
||||||
|
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||||
|
version 2.0, available at
|
||||||
|
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
|
||||||
|
|
||||||
|
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||||
|
enforcement ladder](https://github.com/mozilla/diversity).
|
||||||
|
|
||||||
|
[homepage]: https://www.contributor-covenant.org
|
||||||
|
|
||||||
|
For answers to common questions about this code of conduct, see the FAQ at
|
||||||
|
https://www.contributor-covenant.org/faq. Translations are available at
|
||||||
|
https://www.contributor-covenant.org/translations.
|
||||||
@@ -4,10 +4,10 @@ If you feel like contributing to the project, please do! Bug fixes and improveme
|
|||||||
|
|
||||||
If you want to implement something big:
|
If you want to implement something big:
|
||||||
|
|
||||||
* Please start a discussion about that in the issues! Maybe something similar is already in development and we can make it happen together.
|
- Please start a discussion about that in the issues! Maybe something similar is already in development and we can make it happen together.
|
||||||
* When making additions to the project, consider if the majority of users will benefit from your change. If not, you're probably better of forking the project.
|
- When making additions to the project, consider if the majority of users will benefit from your change. If not, you're probably better of forking the project.
|
||||||
* Also consider if your change will get in the way of other users. A good change is a change that enhances the experience of some users who want that change and does not affect users who do not care about the change.
|
- Also consider if your change will get in the way of other users. A good change is a change that enhances the experience of some users who want that change and does not affect users who do not care about the change.
|
||||||
* Please see the [paperless-ngx merge process](#merging-prs) below.
|
- Please see the [paperless-ngx merge process](#merging-prs) below.
|
||||||
|
|
||||||
## Python
|
## Python
|
||||||
|
|
||||||
@@ -15,7 +15,7 @@ Paperless supports python 3.8 and 3.9. We format Python code with [Black](https:
|
|||||||
|
|
||||||
## Branches
|
## Branches
|
||||||
|
|
||||||
`master` always reflects the latest release. Apart from changes to the documentation or readme, absolutely no functional changes on this branch in between releases.
|
`main` always reflects the latest release. Apart from changes to the documentation or readme, absolutely no functional changes on this branch in between releases.
|
||||||
|
|
||||||
`dev` contains all changes that will be part of the next release. Use this branch to start making your changes.
|
`dev` contains all changes that will be part of the next release. Use this branch to start making your changes.
|
||||||
|
|
||||||
@@ -27,6 +27,8 @@ Please format and test your code! I know it's a hassle, but it makes sure that y
|
|||||||
|
|
||||||
To test your code, execute `pytest` in the src/ directory. This also generates a html coverage report, which you can use to see if you missed anything important during testing.
|
To test your code, execute `pytest` in the src/ directory. This also generates a html coverage report, which you can use to see if you missed anything important during testing.
|
||||||
|
|
||||||
|
Before you can run `pytest`, ensure to [properly set up your local environment](https://paperless-ngx.readthedocs.io/en/latest/extending.html#initial-setup-and-first-start).
|
||||||
|
|
||||||
## More info:
|
## More info:
|
||||||
|
|
||||||
... is available in the documentation. https://paperless-ngx.readthedocs.io/en/latest/extending.html
|
... is available in the documentation. https://paperless-ngx.readthedocs.io/en/latest/extending.html
|
||||||
@@ -41,9 +43,9 @@ PRs deemed `non-trivial` will go through a stricter review process before being
|
|||||||
|
|
||||||
Examples of `non-trivial` PRs might include:
|
Examples of `non-trivial` PRs might include:
|
||||||
|
|
||||||
* Additional features
|
- Additional features
|
||||||
* Large changes to many distinct files
|
- Large changes to many distinct files
|
||||||
* Breaking or depreciation of existing features
|
- Breaking or depreciation of existing features
|
||||||
|
|
||||||
Our community review process for `non-trivial` PRs is the following:
|
Our community review process for `non-trivial` PRs is the following:
|
||||||
|
|
||||||
@@ -75,21 +77,56 @@ If a language has already been added, and you would like to contribute new trans
|
|||||||
If you would like the project to be translated to another language, first head over to https://crwd.in/paperless-ngx to check if that language has already been enabled for translation.
|
If you would like the project to be translated to another language, first head over to https://crwd.in/paperless-ngx to check if that language has already been enabled for translation.
|
||||||
If not, please request the language to be added by creating an issue on GitHub. The issue should contain:
|
If not, please request the language to be added by creating an issue on GitHub. The issue should contain:
|
||||||
|
|
||||||
* English name of the language (the localized name can be added on Crowdin).
|
- English name of the language (the localized name can be added on Crowdin).
|
||||||
* ISO language code. A list of those can be found here: https://support.crowdin.com/enterprise/language-codes/
|
- ISO language code. A list of those can be found here: https://support.crowdin.com/enterprise/language-codes/
|
||||||
* Date format commonly used for the language, e.g. dd/mm/yyyy, mm/dd/yyyy, etc.
|
- Date format commonly used for the language, e.g. dd/mm/yyyy, mm/dd/yyyy, etc.
|
||||||
|
|
||||||
After the language has been added and some translations have been made on Crowdin, the language needs to be enabled in the code.
|
After the language has been added and some translations have been made on Crowdin, the language needs to be enabled in the code.
|
||||||
Note that there is no need to manually add a .po of .xlf file as those will be automatically generated and imported from Crowdin.
|
Note that there is no need to manually add a .po of .xlf file as those will be automatically generated and imported from Crowdin.
|
||||||
The following files need to be changed:
|
The following files need to be changed:
|
||||||
|
|
||||||
* src-ui/angular.json (under the _projects/paperless-ui/i18n/locales_ JSON key)
|
- src-ui/angular.json (under the _projects/paperless-ui/i18n/locales_ JSON key)
|
||||||
* src/paperless/settings.py (in the _LANGUAGES_ array)
|
- src/paperless/settings.py (in the _LANGUAGES_ array)
|
||||||
* src-ui/src/app/services/settings.service.ts (inside the _getLanguageOptions_ method)
|
- src-ui/src/app/services/settings.service.ts (inside the _getLanguageOptions_ method)
|
||||||
* src-ui/src/app/app.module.ts (import locale from _angular/common/locales_ and call _registerLocaleData_)
|
- src-ui/src/app/app.module.ts (import locale from _angular/common/locales_ and call _registerLocaleData_)
|
||||||
|
|
||||||
Please add the language in the correct order, alphabetically by locale.
|
Please add the language in the correct order, alphabetically by locale.
|
||||||
Note that _en-us_ needs to stay on top of the list, as it is the default project language
|
Note that _en-us_ needs to stay on top of the list, as it is the default project language
|
||||||
|
|
||||||
If you are familiar with Git, feel free to send a Pull Request with those changes.
|
If you are familiar with Git, feel free to send a Pull Request with those changes.
|
||||||
If not, let us know in the issue you created for the language, so that another developer can make these changes.
|
If not, let us know in the issue you created for the language, so that another developer can make these changes.
|
||||||
|
|
||||||
|
# Organization Structure & Membership
|
||||||
|
|
||||||
|
Paperless-ngx is a community project. We do our best to delegate permission and responsibility among a team of people to ensure the longevity of the project.
|
||||||
|
|
||||||
|
## Structure
|
||||||
|
|
||||||
|
As of writing, there are 21 members in paperless-ngx. 4 of these people have complete administrative privileges to the repo:
|
||||||
|
|
||||||
|
- [@shamoon](https://github.com/shamoon)
|
||||||
|
- [@bauerj](https://github.com/bauerj)
|
||||||
|
- [@qcasey](https://github.com/qcasey)
|
||||||
|
- [@FrankStrieter](https://github.com/FrankStrieter)
|
||||||
|
|
||||||
|
There are 5 teams collaborating on specific tasks within paperless-ngx:
|
||||||
|
|
||||||
|
- @paperless-ngx/backend (Python / django)
|
||||||
|
- @paperless-ngx/frontend (JavaScript / Typescript)
|
||||||
|
- @paperless-ngx/ci-cd (GitHub Actions / Deployment)
|
||||||
|
- @paperless-ngx/issues (Issue triage)
|
||||||
|
- @paperless-ngx/test (General testing for larger PRs)
|
||||||
|
|
||||||
|
## Permissions
|
||||||
|
|
||||||
|
All team members are notified when mentioned or assigned to a relevant issue or pull request. Additionally, each team has slightly different access to paperless-ngx:
|
||||||
|
|
||||||
|
- The **test** team has no special permissions.
|
||||||
|
- The **issues** team has `triage` access. This means they can organize issues and pull requests.
|
||||||
|
- The **backend**, **frontend**, and **ci-cd** teams have `write` access. This means they can approve PRs and push code, containers, releases, and more.
|
||||||
|
|
||||||
|
## Joining
|
||||||
|
|
||||||
|
We are not overly strict with inviting people to the organization. If you have read the [team permissions](#permissions) and think having additional access would enhance your contributions, please reach out to an [admin](#structure) of the team.
|
||||||
|
|
||||||
|
The admins occasionally invite contributors directly if we believe having them on a team will accelerate their work.
|
||||||
|
|||||||
345
Dockerfile
@@ -1,134 +1,231 @@
|
|||||||
FROM node:16 AS compile-frontend
|
# syntax=docker/dockerfile:1.4
|
||||||
|
|
||||||
COPY . /src
|
# Pull the installer images from the library
|
||||||
|
# These are all built previously
|
||||||
|
# They provide either a .deb or .whl
|
||||||
|
|
||||||
|
ARG JBIG2ENC_VERSION
|
||||||
|
ARG QPDF_VERSION
|
||||||
|
ARG PIKEPDF_VERSION
|
||||||
|
ARG PSYCOPG2_VERSION
|
||||||
|
|
||||||
|
FROM ghcr.io/paperless-ngx/paperless-ngx/builder/jbig2enc:${JBIG2ENC_VERSION} as jbig2enc-builder
|
||||||
|
FROM ghcr.io/paperless-ngx/paperless-ngx/builder/qpdf:${QPDF_VERSION} as qpdf-builder
|
||||||
|
FROM ghcr.io/paperless-ngx/paperless-ngx/builder/pikepdf:${PIKEPDF_VERSION} as pikepdf-builder
|
||||||
|
FROM ghcr.io/paperless-ngx/paperless-ngx/builder/psycopg2:${PSYCOPG2_VERSION} as psycopg2-builder
|
||||||
|
|
||||||
|
FROM --platform=$BUILDPLATFORM node:16-bullseye-slim AS compile-frontend
|
||||||
|
|
||||||
|
# This stage compiles the frontend
|
||||||
|
# This stage runs once for the native platform, as the outputs are not
|
||||||
|
# dependent on target arch
|
||||||
|
# Inputs: None
|
||||||
|
|
||||||
|
COPY ./src-ui /src/src-ui
|
||||||
|
|
||||||
WORKDIR /src/src-ui
|
WORKDIR /src/src-ui
|
||||||
RUN npm update npm -g && npm install
|
RUN set -eux \
|
||||||
RUN ./node_modules/.bin/ng build --configuration production
|
&& npm update npm -g \
|
||||||
|
&& npm ci --omit=optional
|
||||||
|
RUN set -eux \
|
||||||
|
&& ./node_modules/.bin/ng build --configuration production
|
||||||
|
|
||||||
|
FROM python:3.9-slim-bullseye as main-app
|
||||||
FROM ubuntu:20.04 AS jbig2enc
|
|
||||||
|
|
||||||
WORKDIR /usr/src/jbig2enc
|
|
||||||
|
|
||||||
RUN apt-get update && apt-get install -y --no-install-recommends build-essential automake libtool libleptonica-dev zlib1g-dev git ca-certificates
|
|
||||||
|
|
||||||
RUN git clone https://github.com/agl/jbig2enc .
|
|
||||||
RUN ./autogen.sh
|
|
||||||
RUN ./configure && make
|
|
||||||
|
|
||||||
|
|
||||||
FROM python:3.9-slim-bullseye
|
|
||||||
|
|
||||||
# Binary dependencies
|
|
||||||
RUN apt-get update \
|
|
||||||
&& apt-get -y --no-install-recommends install \
|
|
||||||
# Basic dependencies
|
|
||||||
curl \
|
|
||||||
gnupg \
|
|
||||||
imagemagick \
|
|
||||||
gettext \
|
|
||||||
tzdata \
|
|
||||||
gosu \
|
|
||||||
# fonts for text file thumbnail generation
|
|
||||||
fonts-liberation \
|
|
||||||
# for Numpy
|
|
||||||
libatlas-base-dev \
|
|
||||||
libxslt1-dev \
|
|
||||||
# thumbnail size reduction
|
|
||||||
optipng \
|
|
||||||
libxml2 \
|
|
||||||
pngquant \
|
|
||||||
unpaper \
|
|
||||||
zlib1g \
|
|
||||||
ghostscript \
|
|
||||||
icc-profiles-free \
|
|
||||||
# Mime type detection
|
|
||||||
file \
|
|
||||||
libmagic-dev \
|
|
||||||
media-types \
|
|
||||||
# OCRmyPDF dependencies
|
|
||||||
liblept5 \
|
|
||||||
tesseract-ocr \
|
|
||||||
tesseract-ocr-eng \
|
|
||||||
tesseract-ocr-deu \
|
|
||||||
tesseract-ocr-fra \
|
|
||||||
tesseract-ocr-ita \
|
|
||||||
tesseract-ocr-spa \
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
# copy jbig2enc
|
|
||||||
COPY --from=jbig2enc /usr/src/jbig2enc/src/.libs/libjbig2enc* /usr/local/lib/
|
|
||||||
COPY --from=jbig2enc /usr/src/jbig2enc/src/jbig2 /usr/local/bin/
|
|
||||||
COPY --from=jbig2enc /usr/src/jbig2enc/src/*.h /usr/local/include/
|
|
||||||
|
|
||||||
WORKDIR /usr/src/paperless/src/
|
|
||||||
|
|
||||||
COPY requirements.txt ../
|
|
||||||
|
|
||||||
# Python dependencies
|
|
||||||
RUN apt-get update \
|
|
||||||
&& apt-get -y --no-install-recommends install \
|
|
||||||
build-essential \
|
|
||||||
libpq-dev \
|
|
||||||
git \
|
|
||||||
zlib1g-dev \
|
|
||||||
libjpeg62-turbo-dev \
|
|
||||||
&& if [ "$(uname -m)" = "armv7l" ] || [ "$(uname -m)" = "aarch64" ]; \
|
|
||||||
then echo "Building qpdf" \
|
|
||||||
&& mkdir -p /usr/src/qpdf \
|
|
||||||
&& cd /usr/src/qpdf \
|
|
||||||
&& git clone https://github.com/qpdf/qpdf.git . \
|
|
||||||
&& git checkout --quiet release-qpdf-10.6.2 \
|
|
||||||
&& ./configure \
|
|
||||||
&& make \
|
|
||||||
&& make install \
|
|
||||||
&& cd /usr/src/paperless/src/ \
|
|
||||||
&& rm -rf /usr/src/qpdf; \
|
|
||||||
else \
|
|
||||||
echo "Skipping qpdf build because pikepdf binary wheels are available."; \
|
|
||||||
fi \
|
|
||||||
&& python3 -m pip install --upgrade pip wheel \
|
|
||||||
&& python3 -m pip install --default-timeout=1000 --upgrade --no-cache-dir supervisor \
|
|
||||||
&& python3 -m pip install --default-timeout=1000 --no-cache-dir -r ../requirements.txt \
|
|
||||||
&& apt-get -y purge build-essential git zlib1g-dev libjpeg62-turbo-dev \
|
|
||||||
&& apt-get -y autoremove --purge \
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
# setup docker-specific things
|
|
||||||
COPY docker/ ./docker/
|
|
||||||
|
|
||||||
RUN cd docker \
|
|
||||||
&& cp imagemagick-policy.xml /etc/ImageMagick-6/policy.xml \
|
|
||||||
&& mkdir /var/log/supervisord /var/run/supervisord \
|
|
||||||
&& cp supervisord.conf /etc/supervisord.conf \
|
|
||||||
&& cp docker-entrypoint.sh /sbin/docker-entrypoint.sh \
|
|
||||||
&& cp docker-prepare.sh /sbin/docker-prepare.sh \
|
|
||||||
&& chmod 755 /sbin/docker-entrypoint.sh \
|
|
||||||
&& chmod +x install_management_commands.sh \
|
|
||||||
&& ./install_management_commands.sh \
|
|
||||||
&& cd .. \
|
|
||||||
&& rm docker -rf
|
|
||||||
|
|
||||||
COPY gunicorn.conf.py ../
|
|
||||||
|
|
||||||
# copy app
|
|
||||||
COPY --from=compile-frontend /src/src/ ./
|
|
||||||
|
|
||||||
# add users, setup scripts
|
|
||||||
RUN addgroup --gid 1000 paperless \
|
|
||||||
&& useradd --uid 1000 --gid paperless --home-dir /usr/src/paperless paperless \
|
|
||||||
&& chown -R paperless:paperless ../ \
|
|
||||||
&& gosu paperless python3 manage.py collectstatic --clear --no-input \
|
|
||||||
&& gosu paperless python3 manage.py compilemessages
|
|
||||||
|
|
||||||
VOLUME ["/usr/src/paperless/data", "/usr/src/paperless/media", "/usr/src/paperless/consume", "/usr/src/paperless/export"]
|
|
||||||
ENTRYPOINT ["/sbin/docker-entrypoint.sh"]
|
|
||||||
EXPOSE 8000
|
|
||||||
CMD ["/usr/local/bin/supervisord", "-c", "/etc/supervisord.conf"]
|
|
||||||
|
|
||||||
LABEL org.opencontainers.image.authors="paperless-ngx team <hello@paperless-ngx.com>"
|
LABEL org.opencontainers.image.authors="paperless-ngx team <hello@paperless-ngx.com>"
|
||||||
LABEL org.opencontainers.image.documentation="https://paperless-ngx.readthedocs.io/en/latest/"
|
LABEL org.opencontainers.image.documentation="https://paperless-ngx.readthedocs.io/en/latest/"
|
||||||
LABEL org.opencontainers.image.source="https://github.com/paperless-ngx/paperless-ngx"
|
LABEL org.opencontainers.image.source="https://github.com/paperless-ngx/paperless-ngx"
|
||||||
LABEL org.opencontainers.image.url="https://github.com/paperless-ngx/paperless-ngx"
|
LABEL org.opencontainers.image.url="https://github.com/paperless-ngx/paperless-ngx"
|
||||||
LABEL org.opencontainers.image.licenses="GPL-3.0-only"
|
LABEL org.opencontainers.image.licenses="GPL-3.0-only"
|
||||||
|
|
||||||
|
ARG DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
|
#
|
||||||
|
# Begin installation and configuration
|
||||||
|
# Order the steps below from least often changed to most
|
||||||
|
#
|
||||||
|
|
||||||
|
# copy jbig2enc
|
||||||
|
# Basically will never change again
|
||||||
|
COPY --from=jbig2enc-builder /usr/src/jbig2enc/src/.libs/libjbig2enc* /usr/local/lib/
|
||||||
|
COPY --from=jbig2enc-builder /usr/src/jbig2enc/src/jbig2 /usr/local/bin/
|
||||||
|
COPY --from=jbig2enc-builder /usr/src/jbig2enc/src/*.h /usr/local/include/
|
||||||
|
|
||||||
|
# Packages need for running
|
||||||
|
ARG RUNTIME_PACKAGES="\
|
||||||
|
curl \
|
||||||
|
file \
|
||||||
|
# fonts for text file thumbnail generation
|
||||||
|
fonts-liberation \
|
||||||
|
gettext \
|
||||||
|
ghostscript \
|
||||||
|
gnupg \
|
||||||
|
gosu \
|
||||||
|
icc-profiles-free \
|
||||||
|
imagemagick \
|
||||||
|
media-types \
|
||||||
|
liblept5 \
|
||||||
|
libpq5 \
|
||||||
|
libxml2 \
|
||||||
|
liblcms2-2 \
|
||||||
|
libtiff5 \
|
||||||
|
libxslt1.1 \
|
||||||
|
libfreetype6 \
|
||||||
|
libwebp6 \
|
||||||
|
libopenjp2-7 \
|
||||||
|
libimagequant0 \
|
||||||
|
libraqm0 \
|
||||||
|
libgnutls30 \
|
||||||
|
libjpeg62-turbo \
|
||||||
|
python3 \
|
||||||
|
python3-pip \
|
||||||
|
python3-setuptools \
|
||||||
|
postgresql-client \
|
||||||
|
# For Numpy
|
||||||
|
libatlas3-base \
|
||||||
|
# OCRmyPDF dependencies
|
||||||
|
tesseract-ocr \
|
||||||
|
tesseract-ocr-eng \
|
||||||
|
tesseract-ocr-deu \
|
||||||
|
tesseract-ocr-fra \
|
||||||
|
tesseract-ocr-ita \
|
||||||
|
tesseract-ocr-spa \
|
||||||
|
# Suggested for OCRmyPDF
|
||||||
|
pngquant \
|
||||||
|
# Suggested for pikepdf
|
||||||
|
jbig2dec \
|
||||||
|
tzdata \
|
||||||
|
unpaper \
|
||||||
|
# Mime type detection
|
||||||
|
zlib1g \
|
||||||
|
# Barcode splitter
|
||||||
|
libzbar0 \
|
||||||
|
poppler-utils"
|
||||||
|
|
||||||
|
# Install basic runtime packages.
|
||||||
|
# These change very infrequently
|
||||||
|
RUN set -eux \
|
||||||
|
echo "Installing system packages" \
|
||||||
|
&& apt-get update \
|
||||||
|
&& apt-get install --yes --quiet --no-install-recommends ${RUNTIME_PACKAGES} \
|
||||||
|
&& rm -rf /var/lib/apt/lists/* \
|
||||||
|
&& echo "Installing supervisor" \
|
||||||
|
&& python3 -m pip install --default-timeout=1000 --upgrade --no-cache-dir supervisor==4.2.4
|
||||||
|
|
||||||
|
# Copy gunicorn config
|
||||||
|
# Changes very infrequently
|
||||||
|
WORKDIR /usr/src/paperless/
|
||||||
|
|
||||||
|
COPY gunicorn.conf.py .
|
||||||
|
|
||||||
|
# setup docker-specific things
|
||||||
|
# Use mounts to avoid copying installer files into the image
|
||||||
|
# These change sometimes, but rarely
|
||||||
|
WORKDIR /usr/src/paperless/src/docker/
|
||||||
|
|
||||||
|
COPY [ \
|
||||||
|
"docker/imagemagick-policy.xml", \
|
||||||
|
"docker/supervisord.conf", \
|
||||||
|
"docker/docker-entrypoint.sh", \
|
||||||
|
"docker/docker-prepare.sh", \
|
||||||
|
"docker/paperless_cmd.sh", \
|
||||||
|
"docker/wait-for-redis.py", \
|
||||||
|
"docker/management_script.sh", \
|
||||||
|
"docker/install_management_commands.sh", \
|
||||||
|
"/usr/src/paperless/src/docker/" \
|
||||||
|
]
|
||||||
|
|
||||||
|
RUN set -eux \
|
||||||
|
&& echo "Configuring ImageMagick" \
|
||||||
|
&& mv imagemagick-policy.xml /etc/ImageMagick-6/policy.xml \
|
||||||
|
&& echo "Configuring supervisord" \
|
||||||
|
&& mkdir /var/log/supervisord /var/run/supervisord \
|
||||||
|
&& mv supervisord.conf /etc/supervisord.conf \
|
||||||
|
&& echo "Setting up Docker scripts" \
|
||||||
|
&& mv docker-entrypoint.sh /sbin/docker-entrypoint.sh \
|
||||||
|
&& chmod 755 /sbin/docker-entrypoint.sh \
|
||||||
|
&& mv docker-prepare.sh /sbin/docker-prepare.sh \
|
||||||
|
&& chmod 755 /sbin/docker-prepare.sh \
|
||||||
|
&& mv wait-for-redis.py /sbin/wait-for-redis.py \
|
||||||
|
&& chmod 755 /sbin/wait-for-redis.py \
|
||||||
|
&& mv paperless_cmd.sh /usr/local/bin/paperless_cmd.sh \
|
||||||
|
&& chmod 755 /usr/local/bin/paperless_cmd.sh \
|
||||||
|
&& echo "Installing managment commands" \
|
||||||
|
&& chmod +x install_management_commands.sh \
|
||||||
|
&& ./install_management_commands.sh
|
||||||
|
|
||||||
|
# Install the built packages from the installer library images
|
||||||
|
# Use mounts to avoid copying installer files into the image
|
||||||
|
# These change sometimes
|
||||||
|
RUN --mount=type=bind,from=qpdf-builder,target=/qpdf \
|
||||||
|
--mount=type=bind,from=psycopg2-builder,target=/psycopg2 \
|
||||||
|
--mount=type=bind,from=pikepdf-builder,target=/pikepdf \
|
||||||
|
set -eux \
|
||||||
|
&& echo "Installing qpdf" \
|
||||||
|
&& apt-get install --yes --no-install-recommends /qpdf/usr/src/qpdf/libqpdf28_*.deb \
|
||||||
|
&& apt-get install --yes --no-install-recommends /qpdf/usr/src/qpdf/qpdf_*.deb \
|
||||||
|
&& echo "Installing pikepdf and dependencies" \
|
||||||
|
&& python3 -m pip install --no-cache-dir /pikepdf/usr/src/wheels/pyparsing*.whl \
|
||||||
|
&& python3 -m pip install --no-cache-dir /pikepdf/usr/src/wheels/packaging*.whl \
|
||||||
|
&& python3 -m pip install --no-cache-dir /pikepdf/usr/src/wheels/lxml*.whl \
|
||||||
|
&& python3 -m pip install --no-cache-dir /pikepdf/usr/src/wheels/Pillow*.whl \
|
||||||
|
&& python3 -m pip install --no-cache-dir /pikepdf/usr/src/wheels/pikepdf*.whl \
|
||||||
|
&& python3 -m pip list \
|
||||||
|
&& echo "Installing psycopg2" \
|
||||||
|
&& python3 -m pip install --no-cache-dir /psycopg2/usr/src/wheels/psycopg2*.whl \
|
||||||
|
&& python3 -m pip list
|
||||||
|
|
||||||
|
# Python dependencies
|
||||||
|
# Change pretty frequently
|
||||||
|
COPY requirements.txt ../
|
||||||
|
|
||||||
|
# Packages needed only for building a few quick Python
|
||||||
|
# dependencies
|
||||||
|
ARG BUILD_PACKAGES="\
|
||||||
|
build-essential \
|
||||||
|
git \
|
||||||
|
python3-dev"
|
||||||
|
|
||||||
|
RUN set -eux \
|
||||||
|
&& echo "Installing build system packages" \
|
||||||
|
&& apt-get update \
|
||||||
|
&& apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
|
||||||
|
&& python3 -m pip install --no-cache-dir --upgrade wheel \
|
||||||
|
&& echo "Installing Python requirements" \
|
||||||
|
&& python3 -m pip install --default-timeout=1000 --no-cache-dir -r ../requirements.txt \
|
||||||
|
&& echo "Cleaning up image" \
|
||||||
|
&& apt-get -y purge ${BUILD_PACKAGES} \
|
||||||
|
&& apt-get -y autoremove --purge \
|
||||||
|
&& apt-get clean --yes \
|
||||||
|
&& rm -rf /var/lib/apt/lists/* \
|
||||||
|
&& rm -rf /tmp/* \
|
||||||
|
&& rm -rf /var/tmp/* \
|
||||||
|
&& rm -rf /var/cache/apt/archives/* \
|
||||||
|
&& truncate -s 0 /var/log/*log
|
||||||
|
|
||||||
|
WORKDIR /usr/src/paperless/src/
|
||||||
|
|
||||||
|
# copy backend
|
||||||
|
COPY ./src ./
|
||||||
|
|
||||||
|
# copy frontend
|
||||||
|
COPY --from=compile-frontend /src/src/documents/static/frontend/ ./documents/static/frontend/
|
||||||
|
|
||||||
|
# add users, setup scripts
|
||||||
|
RUN set -eux \
|
||||||
|
&& addgroup --gid 1000 paperless \
|
||||||
|
&& useradd --uid 1000 --gid paperless --home-dir /usr/src/paperless paperless \
|
||||||
|
&& chown -R paperless:paperless ../ \
|
||||||
|
&& gosu paperless python3 manage.py collectstatic --clear --no-input \
|
||||||
|
&& gosu paperless python3 manage.py compilemessages
|
||||||
|
|
||||||
|
VOLUME ["/usr/src/paperless/data", \
|
||||||
|
"/usr/src/paperless/media", \
|
||||||
|
"/usr/src/paperless/consume", \
|
||||||
|
"/usr/src/paperless/export"]
|
||||||
|
|
||||||
|
ENTRYPOINT ["/sbin/docker-entrypoint.sh"]
|
||||||
|
|
||||||
|
EXPOSE 8000
|
||||||
|
|
||||||
|
CMD ["/usr/local/bin/paperless_cmd.sh"]
|
||||||
|
|||||||
42
Pipfile
@@ -9,35 +9,33 @@ verify_ssl = true
|
|||||||
name = "piwheels"
|
name = "piwheels"
|
||||||
|
|
||||||
[packages]
|
[packages]
|
||||||
dateparser = "~=1.1.0"
|
dateparser = "~=1.1"
|
||||||
django = "~=3.2"
|
django = "~=4.0"
|
||||||
django-cors-headers = "*"
|
django-cors-headers = "*"
|
||||||
django-extensions = "*"
|
django-extensions = "*"
|
||||||
django-filter = "~=21.1"
|
django-filter = "~=22.1"
|
||||||
django-q = "~=1.3.4"
|
django-q = {editable = true, ref = "paperless-main", git = "https://github.com/paperless-ngx/django-q.git"}
|
||||||
djangorestframework = "~=3.13.1"
|
djangorestframework = "~=3.13"
|
||||||
filelock = "*"
|
filelock = "*"
|
||||||
fuzzywuzzy = {extras = ["speedup"], version = "*"}
|
fuzzywuzzy = {extras = ["speedup"], version = "*"}
|
||||||
gunicorn = "*"
|
gunicorn = "*"
|
||||||
imap-tools = "*"
|
imap-tools = "*"
|
||||||
langdetect = "*"
|
langdetect = "*"
|
||||||
numpy = "~=1.22.0"
|
|
||||||
pathvalidate = "*"
|
pathvalidate = "*"
|
||||||
pillow = "~=9.0"
|
pillow = "~=9.2"
|
||||||
pikepdf = "~=5.0"
|
pikepdf = "~=5.1"
|
||||||
python-gnupg = "*"
|
python-gnupg = "*"
|
||||||
python-dotenv = "*"
|
python-dotenv = "*"
|
||||||
python-dateutil = "*"
|
python-dateutil = "*"
|
||||||
python-magic = "*"
|
python-magic = "*"
|
||||||
psycopg2-binary = "*"
|
psycopg2 = "*"
|
||||||
redis = "*"
|
redis = "*"
|
||||||
# Pinned because aarch64 wheels and updates cause warnings when loading the classifier model.
|
scikit-learn="~=1.1"
|
||||||
scikit-learn="==0.24.0"
|
whitenoise = "~=6.2.0"
|
||||||
whitenoise = "~=6.0.0"
|
watchdog = "~=2.1.9"
|
||||||
watchdog = "~=2.1.0"
|
|
||||||
whoosh="~=2.7.4"
|
whoosh="~=2.7.4"
|
||||||
inotifyrecursive = "~=0.3.4"
|
inotifyrecursive = "~=0.3"
|
||||||
ocrmypdf = "~=13.4.0"
|
ocrmypdf = "~=13.4"
|
||||||
tqdm = "*"
|
tqdm = "*"
|
||||||
tika = "*"
|
tika = "*"
|
||||||
# TODO: This will sadly also install daphne+dependencies,
|
# TODO: This will sadly also install daphne+dependencies,
|
||||||
@@ -46,11 +44,12 @@ channels = "~=3.0"
|
|||||||
channels-redis = "*"
|
channels-redis = "*"
|
||||||
uvicorn = {extras = ["standard"], version = "*"}
|
uvicorn = {extras = ["standard"], version = "*"}
|
||||||
concurrent-log-handler = "*"
|
concurrent-log-handler = "*"
|
||||||
# uvloop 0.15+ incompatible with python 3.6
|
|
||||||
uvloop = "~=0.16"
|
|
||||||
cryptography = "~=36.0.1"
|
|
||||||
"pdfminer.six" = "*"
|
"pdfminer.six" = "*"
|
||||||
"backports.zoneinfo" = "*"
|
"backports.zoneinfo" = {version = "*", markers = "python_version < '3.9'"}
|
||||||
|
"importlib-resources" = {version = "*", markers = "python_version < '3.9'"}
|
||||||
|
zipp = {version = "*", markers = "python_version < '3.9'"}
|
||||||
|
pyzbar = "*"
|
||||||
|
pdf2image = "*"
|
||||||
|
|
||||||
[dev-packages]
|
[dev-packages]
|
||||||
coveralls = "*"
|
coveralls = "*"
|
||||||
@@ -62,7 +61,10 @@ pytest-django = "*"
|
|||||||
pytest-env = "*"
|
pytest-env = "*"
|
||||||
pytest-sugar = "*"
|
pytest-sugar = "*"
|
||||||
pytest-xdist = "*"
|
pytest-xdist = "*"
|
||||||
sphinx = "~=3.4.2"
|
sphinx = "~=5.0.2"
|
||||||
sphinx_rtd_theme = "*"
|
sphinx_rtd_theme = "*"
|
||||||
tox = "*"
|
tox = "*"
|
||||||
black = "*"
|
black = "*"
|
||||||
|
pre-commit = "*"
|
||||||
|
sphinx-autobuild = "*"
|
||||||
|
myst-parser = "*"
|
||||||
|
|||||||
2137
Pipfile.lock
generated
73
README.md
@@ -10,23 +10,23 @@
|
|||||||
</p>
|
</p>
|
||||||
|
|
||||||
<!-- omit in toc -->
|
<!-- omit in toc -->
|
||||||
|
|
||||||
# Paperless-ngx
|
# Paperless-ngx
|
||||||
|
|
||||||
Paperless-ngx is a document management system that transforms your physical documents into a searchable online archive so you can keep, well, *less paper*.
|
Paperless-ngx is a document management system that transforms your physical documents into a searchable online archive so you can keep, well, _less paper_.
|
||||||
|
|
||||||
Paperless-ngx forked from [paperless-ng](https://github.com/jonaswinkler/paperless-ng) to continue the great work and distribute responsibility of supporting and advancing the project among a team of people. [Consider joining us!](#community-support) Discussion of this transition can be found in issues
|
Paperless-ngx forked from [paperless-ng](https://github.com/jonaswinkler/paperless-ng) to continue the great work and distribute responsibility of supporting and advancing the project among a team of people. [Consider joining us!](#community-support) Discussion of this transition can be found in issues
|
||||||
[#1599](https://github.com/jonaswinkler/paperless-ng/issues/1599) and [#1632](https://github.com/jonaswinkler/paperless-ng/issues/1632).
|
[#1599](https://github.com/jonaswinkler/paperless-ng/issues/1599) and [#1632](https://github.com/jonaswinkler/paperless-ng/issues/1632).
|
||||||
|
|
||||||
A demo is available at [demo.paperless-ngx.com](https://demo.paperless-ngx.com) using login `demo` / `demo`. *Note: demo content is reset frequently and confidential information should not be uploaded.*
|
A demo is available at [demo.paperless-ngx.com](https://demo.paperless-ngx.com) using login `demo` / `demo`. _Note: demo content is reset frequently and confidential information should not be uploaded._
|
||||||
|
|
||||||
|
|
||||||
- [Features](#features)
|
- [Features](#features)
|
||||||
- [Getting started](#getting-started)
|
- [Getting started](#getting-started)
|
||||||
- [Contributing](#contributing)
|
- [Contributing](#contributing)
|
||||||
- [Community Support](#community-support)
|
- [Community Support](#community-support)
|
||||||
- [Translation](#translation)
|
- [Translation](#translation)
|
||||||
- [Feature Requests](#feature-requests)
|
- [Feature Requests](#feature-requests)
|
||||||
- [Bugs](#bugs)
|
- [Bugs](#bugs)
|
||||||
- [Affiliated Projects](#affiliated-projects)
|
- [Affiliated Projects](#affiliated-projects)
|
||||||
- [Important Note](#important-note)
|
- [Important Note](#important-note)
|
||||||
|
|
||||||
@@ -35,28 +35,28 @@ A demo is available at [demo.paperless-ngx.com](https://demo.paperless-ngx.com)
|
|||||||

|

|
||||||

|

|
||||||
|
|
||||||
* Organize and index your scanned documents with tags, correspondents, types, and more.
|
- Organize and index your scanned documents with tags, correspondents, types, and more.
|
||||||
* Performs OCR on your documents, adds selectable text to image only documents and adds tags, correspondents and document types to your documents.
|
- Performs OCR on your documents, adds selectable text to image only documents and adds tags, correspondents and document types to your documents.
|
||||||
* Supports PDF documents, images, plain text files, and Office documents (Word, Excel, Powerpoint, and LibreOffice equivalents).
|
- Supports PDF documents, images, plain text files, and Office documents (Word, Excel, Powerpoint, and LibreOffice equivalents).
|
||||||
* Office document support is optional and provided by Apache Tika (see [configuration](https://paperless-ngx.readthedocs.io/en/latest/configuration.html#tika-settings))
|
- Office document support is optional and provided by Apache Tika (see [configuration](https://paperless-ngx.readthedocs.io/en/latest/configuration.html#tika-settings))
|
||||||
* Paperless stores your documents plain on disk. Filenames and folders are managed by paperless and their format can be configured freely.
|
- Paperless stores your documents plain on disk. Filenames and folders are managed by paperless and their format can be configured freely.
|
||||||
* Single page application front end.
|
- Single page application front end.
|
||||||
* Includes a dashboard that shows basic statistics and has document upload.
|
- Includes a dashboard that shows basic statistics and has document upload.
|
||||||
* Filtering by tags, correspondents, types, and more.
|
- Filtering by tags, correspondents, types, and more.
|
||||||
* Customizable views can be saved and displayed on the dashboard.
|
- Customizable views can be saved and displayed on the dashboard.
|
||||||
* Full text search helps you find what you need.
|
- Full text search helps you find what you need.
|
||||||
* Auto completion suggests relevant words from your documents.
|
- Auto completion suggests relevant words from your documents.
|
||||||
* Results are sorted by relevance to your search query.
|
- Results are sorted by relevance to your search query.
|
||||||
* Highlighting shows you which parts of the document matched the query.
|
- Highlighting shows you which parts of the document matched the query.
|
||||||
* Searching for similar documents ("More like this")
|
- Searching for similar documents ("More like this")
|
||||||
* Email processing: Paperless adds documents from your email accounts.
|
- Email processing: Paperless adds documents from your email accounts.
|
||||||
* Configure multiple accounts and filters for each account.
|
- Configure multiple accounts and filters for each account.
|
||||||
* When adding documents from mail, paperless can move these mail to a new folder, mark them as read, flag them as important or delete them.
|
- When adding documents from mail, paperless can move these mail to a new folder, mark them as read, flag them as important or delete them.
|
||||||
* Machine learning powered document matching.
|
- Machine learning powered document matching.
|
||||||
* Paperless-ngx learns from your documents and will be able to automatically assign tags, correspondents and types to documents once you've stored a few documents in paperless.
|
- Paperless-ngx learns from your documents and will be able to automatically assign tags, correspondents and types to documents once you've stored a few documents in paperless.
|
||||||
* Optimized for multi core systems: Paperless-ngx consumes multiple documents in parallel.
|
- Optimized for multi core systems: Paperless-ngx consumes multiple documents in parallel.
|
||||||
* The integrated sanity checker makes sure that your document archive is in good health.
|
- The integrated sanity checker makes sure that your document archive is in good health.
|
||||||
* [More screenshots are available in the documentation](https://paperless-ngx.readthedocs.io/en/latest/screenshots.html).
|
- [More screenshots are available in the documentation](https://paperless-ngx.readthedocs.io/en/latest/screenshots.html).
|
||||||
|
|
||||||
# Getting started
|
# Getting started
|
||||||
|
|
||||||
@@ -65,7 +65,7 @@ The easiest way to deploy paperless is docker-compose. The files in the [`/docke
|
|||||||
If you'd like to jump right in, you can configure a docker-compose environment with our install script:
|
If you'd like to jump right in, you can configure a docker-compose environment with our install script:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
bash -c "$(curl -L https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/master/install-paperless-ngx.sh)"
|
bash -c "$(curl -L https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"
|
||||||
```
|
```
|
||||||
|
|
||||||
Alternatively, you can install the dependencies and setup apache and a database server yourself. The [documentation](https://paperless-ngx.readthedocs.io/en/latest/setup.html#installation) has a step by step guide on how to do it.
|
Alternatively, you can install the dependencies and setup apache and a database server yourself. The [documentation](https://paperless-ngx.readthedocs.io/en/latest/setup.html#installation) has a step by step guide on how to do it.
|
||||||
@@ -73,6 +73,7 @@ Alternatively, you can install the dependencies and setup apache and a database
|
|||||||
Migrating from Paperless-ng is easy, just drop in the new docker image! See the [documentation on migrating](https://paperless-ngx.readthedocs.io/en/latest/setup.html#migrating-from-paperless-ng) for more details.
|
Migrating from Paperless-ng is easy, just drop in the new docker image! See the [documentation on migrating](https://paperless-ngx.readthedocs.io/en/latest/setup.html#migrating-from-paperless-ng) for more details.
|
||||||
|
|
||||||
<!-- omit in toc -->
|
<!-- omit in toc -->
|
||||||
|
|
||||||
### Documentation
|
### Documentation
|
||||||
|
|
||||||
The documentation for Paperless-ngx is available on [ReadTheDocs](https://paperless-ngx.readthedocs.io/).
|
The documentation for Paperless-ngx is available on [ReadTheDocs](https://paperless-ngx.readthedocs.io/).
|
||||||
@@ -101,18 +102,18 @@ For bugs please [open an issue](https://github.com/paperless-ngx/paperless-ngx/i
|
|||||||
|
|
||||||
Paperless has been around a while now, and people are starting to build stuff on top of it. If you're one of those people, we can add your project to this list:
|
Paperless has been around a while now, and people are starting to build stuff on top of it. If you're one of those people, we can add your project to this list:
|
||||||
|
|
||||||
* [Paperless App](https://github.com/bauerj/paperless_app): An Android/iOS app for Paperless-ngx. Also works with the original Paperless and Paperless-ngx.
|
- [Paperless App](https://github.com/bauerj/paperless_app): An Android/iOS app for Paperless-ngx. Also works with the original Paperless and Paperless-ng.
|
||||||
* [Paperless Share](https://github.com/qcasey/paperless_share). Share any files from your Android application with paperless. Very simple, but works with all of the mobile scanning apps out there that allow you to share scanned documents.
|
- [Paperless Share](https://github.com/qcasey/paperless_share). Share any files from your Android application with paperless. Very simple, but works with all of the mobile scanning apps out there that allow you to share scanned documents.
|
||||||
* [Scan to Paperless](https://github.com/sbrunner/scan-to-paperless): Scan and prepare (crop, deskew, OCR, ...) your documents for Paperless.
|
- [Scan to Paperless](https://github.com/sbrunner/scan-to-paperless): Scan and prepare (crop, deskew, OCR, ...) your documents for Paperless.
|
||||||
|
|
||||||
These projects also exist, but their status and compatibility with paperless-ngx is unknown.
|
These projects also exist, but their status and compatibility with paperless-ngx is unknown.
|
||||||
|
|
||||||
* [paperless-cli](https://github.com/stgarf/paperless-cli): A golang command line binary to interact with a Paperless instance.
|
- [paperless-cli](https://github.com/stgarf/paperless-cli): A golang command line binary to interact with a Paperless instance.
|
||||||
|
|
||||||
This project also exists, but needs updates to be compatible with paperless-ngx.
|
This project also exists, but needs updates to be compatible with paperless-ngx.
|
||||||
|
|
||||||
* [Paperless Desktop](https://github.com/thomasbrueggemann/paperless-desktop): A desktop UI for your Paperless installation. Runs on Mac, Linux, and Windows.
|
- [Paperless Desktop](https://github.com/thomasbrueggemann/paperless-desktop): A desktop UI for your Paperless installation. Runs on Mac, Linux, and Windows.
|
||||||
Known issues on Mac: (Could not load reminders and documents)
|
Known issues on Mac: (Could not load reminders and documents)
|
||||||
|
|
||||||
# Important Note
|
# Important Note
|
||||||
|
|
||||||
|
|||||||
43
build-docker-image.sh
Executable file
@@ -0,0 +1,43 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
# Helper script for building the Docker image locally.
|
||||||
|
# Parses and provides the nessecary versions of other images to Docker
|
||||||
|
# before passing in the rest of script args.
|
||||||
|
|
||||||
|
# First Argument: The Dockerfile to build
|
||||||
|
# Other Arguments: Additional arguments to docker build
|
||||||
|
|
||||||
|
# Example Usage:
|
||||||
|
# ./build-docker-image.sh Dockerfile -t paperless-ngx:my-awesome-feature
|
||||||
|
|
||||||
|
set -eux
|
||||||
|
|
||||||
|
if ! command -v jq; then
|
||||||
|
echo "jq required"
|
||||||
|
exit 1
|
||||||
|
elif [ ! -f "$1" ]; then
|
||||||
|
echo "$1 is not a file, please provide the Dockerfile"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Parse what we can from Pipfile.lock
|
||||||
|
pikepdf_version=$(jq ".default.pikepdf.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
|
||||||
|
psycopg2_version=$(jq ".default.psycopg2.version" Pipfile.lock | sed 's/=//g' | sed 's/"//g')
|
||||||
|
# Read this from the other config file
|
||||||
|
qpdf_version=$(jq ".qpdf.version" .build-config.json | sed 's/"//g')
|
||||||
|
jbig2enc_version=$(jq ".jbig2enc.version" .build-config.json | sed 's/"//g')
|
||||||
|
# Get the branch name (used for caching)
|
||||||
|
branch_name=$(git rev-parse --abbrev-ref HEAD)
|
||||||
|
|
||||||
|
# https://docs.docker.com/develop/develop-images/build_enhancements/
|
||||||
|
# Required to use cache-from
|
||||||
|
export DOCKER_BUILDKIT=1
|
||||||
|
|
||||||
|
docker build --file "$1" \
|
||||||
|
--progress=plain \
|
||||||
|
--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/app:"${branch_name}" \
|
||||||
|
--cache-from ghcr.io/paperless-ngx/paperless-ngx/builder/cache/app:dev \
|
||||||
|
--build-arg JBIG2ENC_VERSION="${jbig2enc_version}" \
|
||||||
|
--build-arg QPDF_VERSION="${qpdf_version}" \
|
||||||
|
--build-arg PIKEPDF_VERSION="${pikepdf_version}" \
|
||||||
|
--build-arg PSYCOPG2_VERSION="${psycopg2_version}" "${@:2}" .
|
||||||
14
docker-builders/Dockerfile.frontend
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
# This Dockerfile compiles the frontend
|
||||||
|
# Inputs: None
|
||||||
|
|
||||||
|
FROM node:16-bullseye-slim AS compile-frontend
|
||||||
|
|
||||||
|
COPY ./src /src/src
|
||||||
|
COPY ./src-ui /src/src-ui
|
||||||
|
|
||||||
|
WORKDIR /src/src-ui
|
||||||
|
RUN set -eux \
|
||||||
|
&& npm update npm -g \
|
||||||
|
&& npm ci --omit=optional
|
||||||
|
RUN set -eux \
|
||||||
|
&& ./node_modules/.bin/ng build --configuration production
|
||||||
39
docker-builders/Dockerfile.jbig2enc
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
# This Dockerfile compiles the jbig2enc library
|
||||||
|
# Inputs:
|
||||||
|
# - JBIG2ENC_VERSION - the Git tag to checkout and build
|
||||||
|
|
||||||
|
FROM debian:bullseye-slim as main
|
||||||
|
|
||||||
|
LABEL org.opencontainers.image.description="A intermediate image with jbig2enc built"
|
||||||
|
|
||||||
|
ARG DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
|
ARG BUILD_PACKAGES="\
|
||||||
|
build-essential \
|
||||||
|
automake \
|
||||||
|
libtool \
|
||||||
|
libleptonica-dev \
|
||||||
|
zlib1g-dev \
|
||||||
|
git \
|
||||||
|
ca-certificates"
|
||||||
|
|
||||||
|
WORKDIR /usr/src/jbig2enc
|
||||||
|
|
||||||
|
# As this is an base image for a multi-stage final image
|
||||||
|
# the added size of the install is basically irrelevant
|
||||||
|
RUN apt-get update --quiet \
|
||||||
|
&& apt-get install --yes --quiet --no-install-recommends ${BUILD_PACKAGES} \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Layers after this point change according to required version
|
||||||
|
# For better caching, seperate the basic installs from
|
||||||
|
# the building
|
||||||
|
|
||||||
|
ARG JBIG2ENC_VERSION
|
||||||
|
|
||||||
|
RUN set -eux \
|
||||||
|
&& git clone --quiet --branch $JBIG2ENC_VERSION https://github.com/agl/jbig2enc .
|
||||||
|
RUN set -eux \
|
||||||
|
&& ./autogen.sh
|
||||||
|
RUN set -eux \
|
||||||
|
&& ./configure && make
|
||||||
88
docker-builders/Dockerfile.pikepdf
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
# This Dockerfile builds the pikepdf wheel
|
||||||
|
# Inputs:
|
||||||
|
# - REPO - Docker repository to pull qpdf from
|
||||||
|
# - QPDF_VERSION - The image qpdf version to copy .deb files from
|
||||||
|
# - PIKEPDF_VERSION - Version of pikepdf to build wheel for
|
||||||
|
|
||||||
|
# Default to pulling from the main repo registry when manually building
|
||||||
|
ARG REPO="paperless-ngx/paperless-ngx"
|
||||||
|
|
||||||
|
ARG QPDF_VERSION
|
||||||
|
FROM ghcr.io/${REPO}/builder/qpdf:${QPDF_VERSION} as qpdf-builder
|
||||||
|
|
||||||
|
# This does nothing, except provide a name for a copy below
|
||||||
|
|
||||||
|
FROM python:3.9-slim-bullseye as main
|
||||||
|
|
||||||
|
LABEL org.opencontainers.image.description="A intermediate image with pikepdf wheel built"
|
||||||
|
|
||||||
|
ARG DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
|
ARG BUILD_PACKAGES="\
|
||||||
|
build-essential \
|
||||||
|
python3-dev \
|
||||||
|
python3-pip \
|
||||||
|
# qpdf requirement - https://github.com/qpdf/qpdf#crypto-providers
|
||||||
|
libgnutls28-dev \
|
||||||
|
# lxml requrements - https://lxml.de/installation.html
|
||||||
|
libxml2-dev \
|
||||||
|
libxslt1-dev \
|
||||||
|
# Pillow requirements - https://pillow.readthedocs.io/en/stable/installation.html#external-libraries
|
||||||
|
# JPEG functionality
|
||||||
|
libjpeg62-turbo-dev \
|
||||||
|
# conpressed PNG
|
||||||
|
zlib1g-dev \
|
||||||
|
# compressed TIFF
|
||||||
|
libtiff-dev \
|
||||||
|
# type related services
|
||||||
|
libfreetype-dev \
|
||||||
|
# color management
|
||||||
|
liblcms2-dev \
|
||||||
|
# WebP format
|
||||||
|
libwebp-dev \
|
||||||
|
# JPEG 2000
|
||||||
|
libopenjp2-7-dev \
|
||||||
|
# improved color quantization
|
||||||
|
libimagequant-dev \
|
||||||
|
# complex text layout support
|
||||||
|
libraqm-dev"
|
||||||
|
|
||||||
|
WORKDIR /usr/src
|
||||||
|
|
||||||
|
COPY --from=qpdf-builder /usr/src/qpdf/*.deb ./
|
||||||
|
|
||||||
|
# As this is an base image for a multi-stage final image
|
||||||
|
# the added size of the install is basically irrelevant
|
||||||
|
|
||||||
|
RUN set -eux \
|
||||||
|
&& apt-get update --quiet \
|
||||||
|
&& apt-get install --yes --quiet --no-install-recommends $BUILD_PACKAGES \
|
||||||
|
&& dpkg --install libqpdf28_*.deb \
|
||||||
|
&& dpkg --install libqpdf-dev_*.deb \
|
||||||
|
&& python3 -m pip install --no-cache-dir --upgrade \
|
||||||
|
pip \
|
||||||
|
wheel \
|
||||||
|
# https://pikepdf.readthedocs.io/en/latest/installation.html#requirements
|
||||||
|
pybind11 \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Layers after this point change according to required version
|
||||||
|
# For better caching, seperate the basic installs from
|
||||||
|
# the building
|
||||||
|
|
||||||
|
ARG PIKEPDF_VERSION
|
||||||
|
|
||||||
|
RUN set -eux \
|
||||||
|
&& echo "Building pikepdf wheel ${PIKEPDF_VERSION}" \
|
||||||
|
&& mkdir wheels \
|
||||||
|
&& python3 -m pip wheel \
|
||||||
|
# Build the package at the required version
|
||||||
|
pikepdf==${PIKEPDF_VERSION} \
|
||||||
|
# Output the *.whl into this directory
|
||||||
|
--wheel-dir wheels \
|
||||||
|
# Do not use a binary packge for the package being built
|
||||||
|
--no-binary=pikepdf \
|
||||||
|
# Do use binary packages for dependencies
|
||||||
|
--prefer-binary \
|
||||||
|
--no-cache-dir \
|
||||||
|
&& ls -ahl wheels
|
||||||
49
docker-builders/Dockerfile.psycopg2
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
# This Dockerfile builds the psycopg2 wheel
|
||||||
|
# Inputs:
|
||||||
|
# - PSYCOPG2_VERSION - Version to build
|
||||||
|
|
||||||
|
FROM python:3.9-slim-bullseye as main
|
||||||
|
|
||||||
|
LABEL org.opencontainers.image.description="A intermediate image with psycopg2 wheel built"
|
||||||
|
|
||||||
|
ARG DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
|
ARG BUILD_PACKAGES="\
|
||||||
|
build-essential \
|
||||||
|
python3-dev \
|
||||||
|
python3-pip \
|
||||||
|
# https://www.psycopg.org/docs/install.html#prerequisites
|
||||||
|
libpq-dev"
|
||||||
|
|
||||||
|
WORKDIR /usr/src
|
||||||
|
|
||||||
|
# As this is an base image for a multi-stage final image
|
||||||
|
# the added size of the install is basically irrelevant
|
||||||
|
|
||||||
|
RUN set -eux \
|
||||||
|
&& apt-get update --quiet \
|
||||||
|
&& apt-get install --yes --quiet --no-install-recommends $BUILD_PACKAGES \
|
||||||
|
&& rm -rf /var/lib/apt/lists/* \
|
||||||
|
&& python3 -m pip install --no-cache-dir --upgrade pip wheel
|
||||||
|
|
||||||
|
# Layers after this point change according to required version
|
||||||
|
# For better caching, seperate the basic installs from
|
||||||
|
# the building
|
||||||
|
|
||||||
|
ARG PSYCOPG2_VERSION
|
||||||
|
|
||||||
|
RUN set -eux \
|
||||||
|
&& echo "Building psycopg2 wheel ${PSYCOPG2_VERSION}" \
|
||||||
|
&& cd /usr/src \
|
||||||
|
&& mkdir wheels \
|
||||||
|
&& python3 -m pip wheel \
|
||||||
|
# Build the package at the required version
|
||||||
|
psycopg2==${PSYCOPG2_VERSION} \
|
||||||
|
# Output the *.whl into this directory
|
||||||
|
--wheel-dir wheels \
|
||||||
|
# Do not use a binary packge for the package being built
|
||||||
|
--no-binary=psycopg2 \
|
||||||
|
# Do use binary packages for dependencies
|
||||||
|
--prefer-binary \
|
||||||
|
--no-cache-dir \
|
||||||
|
&& ls -ahl wheels/
|
||||||
53
docker-builders/Dockerfile.qpdf
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
FROM debian:bullseye-slim as main
|
||||||
|
|
||||||
|
LABEL org.opencontainers.image.description="A intermediate image with qpdf built"
|
||||||
|
|
||||||
|
ARG DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
|
ARG BUILD_PACKAGES="\
|
||||||
|
build-essential \
|
||||||
|
debhelper \
|
||||||
|
debian-keyring \
|
||||||
|
devscripts \
|
||||||
|
equivs \
|
||||||
|
libtool \
|
||||||
|
# https://qpdf.readthedocs.io/en/stable/installation.html#system-requirements
|
||||||
|
libjpeg62-turbo-dev \
|
||||||
|
libgnutls28-dev \
|
||||||
|
packaging-dev \
|
||||||
|
zlib1g-dev"
|
||||||
|
|
||||||
|
WORKDIR /usr/src
|
||||||
|
|
||||||
|
# As this is an base image for a multi-stage final image
|
||||||
|
# the added size of the install is basically irrelevant
|
||||||
|
|
||||||
|
RUN set -eux \
|
||||||
|
&& apt-get update --quiet \
|
||||||
|
&& apt-get install --yes --quiet --no-install-recommends $BUILD_PACKAGES \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Layers after this point change according to required version
|
||||||
|
# For better caching, seperate the basic installs from
|
||||||
|
# the building
|
||||||
|
|
||||||
|
# This must match to pikepdf's minimum at least
|
||||||
|
ARG QPDF_VERSION
|
||||||
|
|
||||||
|
# In order to get the required version of qpdf, it is backported from bookwork
|
||||||
|
# and then built from source
|
||||||
|
RUN set -eux \
|
||||||
|
&& echo "Building qpdf" \
|
||||||
|
&& echo "deb-src http://deb.debian.org/debian/ bookworm main" > /etc/apt/sources.list.d/bookworm-src.list \
|
||||||
|
&& apt-get update \
|
||||||
|
&& mkdir qpdf \
|
||||||
|
&& cd qpdf \
|
||||||
|
&& apt-get source --yes --quiet qpdf=${QPDF_VERSION}-1/bookworm \
|
||||||
|
&& rm -rf /var/lib/apt/lists/* \
|
||||||
|
&& cd qpdf-$QPDF_VERSION \
|
||||||
|
# We don't need to build the tests (also don't run them)
|
||||||
|
&& rm -rf libtests \
|
||||||
|
&& DEBEMAIL=hello@paperless-ngx.com debchange --bpo \
|
||||||
|
&& export DEB_BUILD_OPTIONS="terse nocheck nodoc parallel=2" \
|
||||||
|
&& dpkg-buildpackage --build=binary --unsigned-source --unsigned-changes \
|
||||||
|
&& ls -ahl ../*.deb
|
||||||
@@ -22,6 +22,10 @@
|
|||||||
# Docker setup does not use the configuration file.
|
# Docker setup does not use the configuration file.
|
||||||
# A few commonly adjusted settings are provided below.
|
# A few commonly adjusted settings are provided below.
|
||||||
|
|
||||||
|
# This is required if you will be exposing Paperless-ngx on a public domain
|
||||||
|
# (if doing so please consider security measures such as reverse proxy)
|
||||||
|
#PAPERLESS_URL=https://paperless.example.com
|
||||||
|
|
||||||
# Adjust this key if you plan to make paperless available publicly. It should
|
# Adjust this key if you plan to make paperless available publicly. It should
|
||||||
# be a very long sequence of random characters. You don't need to remember it.
|
# be a very long sequence of random characters. You don't need to remember it.
|
||||||
#PAPERLESS_SECRET_KEY=change-me
|
#PAPERLESS_SECRET_KEY=change-me
|
||||||
|
|||||||
@@ -31,13 +31,13 @@
|
|||||||
version: "3.4"
|
version: "3.4"
|
||||||
services:
|
services:
|
||||||
broker:
|
broker:
|
||||||
image: redis:6.0
|
image: docker.io/library/redis:6.0
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- redisdata:/data
|
- redisdata:/data
|
||||||
|
|
||||||
db:
|
db:
|
||||||
image: postgres:13
|
image: docker.io/library/postgres:13
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- pgdata:/var/lib/postgresql/data
|
- pgdata:/var/lib/postgresql/data
|
||||||
@@ -55,7 +55,7 @@ services:
|
|||||||
ports:
|
ports:
|
||||||
- 8010:8000
|
- 8010:8000
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD", "curl", "-f", "http://localhost:8000"]
|
test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 5
|
retries: 5
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# docker-compose file for running paperless from the Docker Hub.
|
# docker-compose file for running paperless from the docker container registry.
|
||||||
# This file contains everything paperless needs to run.
|
# This file contains everything paperless needs to run.
|
||||||
# Paperless supports amd64, arm and arm64 hardware.
|
# Paperless supports amd64, arm and arm64 hardware.
|
||||||
#
|
#
|
||||||
@@ -33,13 +33,13 @@
|
|||||||
version: "3.4"
|
version: "3.4"
|
||||||
services:
|
services:
|
||||||
broker:
|
broker:
|
||||||
image: redis:6.0
|
image: docker.io/library/redis:6.0
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- redisdata:/data
|
- redisdata:/data
|
||||||
|
|
||||||
db:
|
db:
|
||||||
image: postgres:13
|
image: docker.io/library/postgres:13
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- pgdata:/var/lib/postgresql/data
|
- pgdata:/var/lib/postgresql/data
|
||||||
@@ -59,7 +59,7 @@ services:
|
|||||||
ports:
|
ports:
|
||||||
- 8000:8000
|
- 8000:8000
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD", "curl", "-f", "http://localhost:8000"]
|
test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 5
|
retries: 5
|
||||||
@@ -77,13 +77,14 @@ services:
|
|||||||
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
|
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
|
||||||
|
|
||||||
gotenberg:
|
gotenberg:
|
||||||
image: gotenberg/gotenberg:7
|
image: docker.io/gotenberg/gotenberg:7.4
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
environment:
|
command:
|
||||||
CHROMIUM_DISABLE_ROUTES: 1
|
- "gotenberg"
|
||||||
|
- "--chromium-disable-routes=true"
|
||||||
|
|
||||||
tika:
|
tika:
|
||||||
image: apache/tika
|
image: ghcr.io/paperless-ngx/tika:latest
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
|
|||||||
@@ -29,13 +29,13 @@
|
|||||||
version: "3.4"
|
version: "3.4"
|
||||||
services:
|
services:
|
||||||
broker:
|
broker:
|
||||||
image: redis:6.0
|
image: docker.io/library/redis:6.0
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- redisdata:/data
|
- redisdata:/data
|
||||||
|
|
||||||
db:
|
db:
|
||||||
image: postgres:13
|
image: docker.io/library/postgres:13
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- pgdata:/var/lib/postgresql/data
|
- pgdata:/var/lib/postgresql/data
|
||||||
@@ -53,7 +53,7 @@ services:
|
|||||||
ports:
|
ports:
|
||||||
- 8000:8000
|
- 8000:8000
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD", "curl", "-f", "http://localhost:8000"]
|
test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 5
|
retries: 5
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
# docker-compose file for running paperless from the Docker Hub.
|
# docker-compose file for running paperless from the docker container registry.
|
||||||
# This file contains everything paperless needs to run.
|
# This file contains everything paperless needs to run.
|
||||||
# Paperless supports amd64, arm and arm64 hardware.
|
# Paperless supports amd64, arm and arm64 hardware.
|
||||||
#
|
|
||||||
# All compose files of paperless configure paperless in the following way:
|
# All compose files of paperless configure paperless in the following way:
|
||||||
#
|
#
|
||||||
# - Paperless is (re)started on system boot, if it was running before shutdown.
|
# - Paperless is (re)started on system boot, if it was running before shutdown.
|
||||||
@@ -34,7 +33,7 @@
|
|||||||
version: "3.4"
|
version: "3.4"
|
||||||
services:
|
services:
|
||||||
broker:
|
broker:
|
||||||
image: redis:6.0
|
image: docker.io/library/redis:6.0
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- redisdata:/data
|
- redisdata:/data
|
||||||
@@ -49,7 +48,7 @@ services:
|
|||||||
ports:
|
ports:
|
||||||
- 8000:8000
|
- 8000:8000
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD", "curl", "-f", "http://localhost:8000"]
|
test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 5
|
retries: 5
|
||||||
@@ -66,13 +65,14 @@ services:
|
|||||||
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
|
PAPERLESS_TIKA_ENDPOINT: http://tika:9998
|
||||||
|
|
||||||
gotenberg:
|
gotenberg:
|
||||||
image: gotenberg/gotenberg:7
|
image: docker.io/gotenberg/gotenberg:7.4
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
environment:
|
command:
|
||||||
CHROMIUM_DISABLE_ROUTES: 1
|
- "gotenberg"
|
||||||
|
- "--chromium-disable-routes=true"
|
||||||
|
|
||||||
tika:
|
tika:
|
||||||
image: apache/tika
|
image: ghcr.io/paperless-ngx/tika:latest
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
|
|||||||
@@ -26,7 +26,7 @@
|
|||||||
version: "3.4"
|
version: "3.4"
|
||||||
services:
|
services:
|
||||||
broker:
|
broker:
|
||||||
image: redis:6.0
|
image: docker.io/library/redis:6.0
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- redisdata:/data
|
- redisdata:/data
|
||||||
@@ -39,7 +39,7 @@ services:
|
|||||||
ports:
|
ports:
|
||||||
- 8000:8000
|
- 8000:8000
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD", "curl", "-f", "http://localhost:8000"]
|
test: ["CMD", "curl", "-fs", "-S", "--max-time", "2", "http://localhost:8000"]
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 5
|
retries: 5
|
||||||
|
|||||||
@@ -1,7 +1,38 @@
|
|||||||
#!/bin/bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
|
# Adapted from:
|
||||||
|
# https://github.com/docker-library/postgres/blob/master/docker-entrypoint.sh
|
||||||
|
# usage: file_env VAR
|
||||||
|
# ie: file_env 'XYZ_DB_PASSWORD' will allow for "$XYZ_DB_PASSWORD_FILE" to
|
||||||
|
# fill in the value of "$XYZ_DB_PASSWORD" from a file, especially for Docker's
|
||||||
|
# secrets feature
|
||||||
|
file_env() {
|
||||||
|
local var="$1"
|
||||||
|
local fileVar="${var}_FILE"
|
||||||
|
|
||||||
|
# Basic validation
|
||||||
|
if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
|
||||||
|
echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Only export var if the _FILE exists
|
||||||
|
if [ "${!fileVar:-}" ]; then
|
||||||
|
# And the file exists
|
||||||
|
if [[ -f ${!fileVar} ]]; then
|
||||||
|
echo "Setting ${var} from file"
|
||||||
|
val="$(< "${!fileVar}")"
|
||||||
|
export "$var"="$val"
|
||||||
|
else
|
||||||
|
echo "File ${!fileVar} doesn't exist"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
# Source: https://github.com/sameersbn/docker-gitlab/
|
# Source: https://github.com/sameersbn/docker-gitlab/
|
||||||
map_uidgid() {
|
map_uidgid() {
|
||||||
USERMAP_ORIG_UID=$(id -u paperless)
|
USERMAP_ORIG_UID=$(id -u paperless)
|
||||||
@@ -10,31 +41,61 @@ map_uidgid() {
|
|||||||
USERMAP_NEW_GID=${USERMAP_GID:-${USERMAP_ORIG_GID:-$USERMAP_NEW_UID}}
|
USERMAP_NEW_GID=${USERMAP_GID:-${USERMAP_ORIG_GID:-$USERMAP_NEW_UID}}
|
||||||
if [[ ${USERMAP_NEW_UID} != "${USERMAP_ORIG_UID}" || ${USERMAP_NEW_GID} != "${USERMAP_ORIG_GID}" ]]; then
|
if [[ ${USERMAP_NEW_UID} != "${USERMAP_ORIG_UID}" || ${USERMAP_NEW_GID} != "${USERMAP_ORIG_GID}" ]]; then
|
||||||
echo "Mapping UID and GID for paperless:paperless to $USERMAP_NEW_UID:$USERMAP_NEW_GID"
|
echo "Mapping UID and GID for paperless:paperless to $USERMAP_NEW_UID:$USERMAP_NEW_GID"
|
||||||
usermod -u "${USERMAP_NEW_UID}" paperless
|
usermod -o -u "${USERMAP_NEW_UID}" paperless
|
||||||
groupmod -o -g "${USERMAP_NEW_GID}" paperless
|
groupmod -o -g "${USERMAP_NEW_GID}" paperless
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
|
map_folders() {
|
||||||
|
# Export these so they can be used in docker-prepare.sh
|
||||||
|
export DATA_DIR="${PAPERLESS_DATA_DIR:-/usr/src/paperless/data}"
|
||||||
|
export MEDIA_ROOT_DIR="${PAPERLESS_MEDIA_ROOT:-/usr/src/paperless/media}"
|
||||||
|
}
|
||||||
|
|
||||||
initialize() {
|
initialize() {
|
||||||
|
|
||||||
|
# Setup environment from secrets before anything else
|
||||||
|
for env_var in \
|
||||||
|
PAPERLESS_DBUSER \
|
||||||
|
PAPERLESS_DBPASS \
|
||||||
|
PAPERLESS_SECRET_KEY \
|
||||||
|
PAPERLESS_AUTO_LOGIN_USERNAME \
|
||||||
|
PAPERLESS_ADMIN_USER \
|
||||||
|
PAPERLESS_ADMIN_MAIL \
|
||||||
|
PAPERLESS_ADMIN_PASSWORD; do
|
||||||
|
# Check for a version of this var with _FILE appended
|
||||||
|
# and convert the contents to the env var value
|
||||||
|
file_env ${env_var}
|
||||||
|
done
|
||||||
|
|
||||||
|
# Change the user and group IDs if needed
|
||||||
map_uidgid
|
map_uidgid
|
||||||
|
|
||||||
for dir in export data data/index media media/documents media/documents/originals media/documents/thumbnails; do
|
# Check for overrides of certain folders
|
||||||
if [[ ! -d "../$dir" ]]; then
|
map_folders
|
||||||
echo "Creating directory ../$dir"
|
|
||||||
mkdir ../$dir
|
local export_dir="/usr/src/paperless/export"
|
||||||
|
|
||||||
|
for dir in "${export_dir}" "${DATA_DIR}" "${DATA_DIR}/index" "${MEDIA_ROOT_DIR}" "${MEDIA_ROOT_DIR}/documents" "${MEDIA_ROOT_DIR}/documents/originals" "${MEDIA_ROOT_DIR}/documents/thumbnails"; do
|
||||||
|
if [[ ! -d "${dir}" ]]; then
|
||||||
|
echo "Creating directory ${dir}"
|
||||||
|
mkdir "${dir}"
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
|
|
||||||
echo "Creating directory /tmp/paperless"
|
local tmp_dir="/tmp/paperless"
|
||||||
mkdir -p /tmp/paperless
|
echo "Creating directory ${tmp_dir}"
|
||||||
|
mkdir -p "${tmp_dir}"
|
||||||
|
|
||||||
set +e
|
set +e
|
||||||
echo "Adjusting permissions of paperless files. This may take a while."
|
echo "Adjusting permissions of paperless files. This may take a while."
|
||||||
chown -R paperless:paperless /tmp/paperless
|
chown -R paperless:paperless ${tmp_dir}
|
||||||
find .. -not \( -user paperless -and -group paperless \) -exec chown paperless:paperless {} +
|
for dir in "${export_dir}" "${DATA_DIR}" "${MEDIA_ROOT_DIR}"; do
|
||||||
|
find "${dir}" -not \( -user paperless -and -group paperless \) -exec chown paperless:paperless {} +
|
||||||
|
done
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
gosu paperless /sbin/docker-prepare.sh
|
${gosu_cmd[@]} /sbin/docker-prepare.sh
|
||||||
}
|
}
|
||||||
|
|
||||||
install_languages() {
|
install_languages() {
|
||||||
@@ -56,12 +117,12 @@ install_languages() {
|
|||||||
# continue
|
# continue
|
||||||
#fi
|
#fi
|
||||||
|
|
||||||
if dpkg -s $pkg &>/dev/null; then
|
if dpkg -s "$pkg" &>/dev/null; then
|
||||||
echo "Package $pkg already installed!"
|
echo "Package $pkg already installed!"
|
||||||
continue
|
continue
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if ! apt-cache show $pkg &>/dev/null; then
|
if ! apt-cache show "$pkg" &>/dev/null; then
|
||||||
echo "Package $pkg not found! :("
|
echo "Package $pkg not found! :("
|
||||||
continue
|
continue
|
||||||
fi
|
fi
|
||||||
@@ -76,8 +137,13 @@ install_languages() {
|
|||||||
|
|
||||||
echo "Paperless-ngx docker container starting..."
|
echo "Paperless-ngx docker container starting..."
|
||||||
|
|
||||||
|
gosu_cmd=(gosu paperless)
|
||||||
|
if [ $(id -u) == $(id -u paperless) ]; then
|
||||||
|
gosu_cmd=()
|
||||||
|
fi
|
||||||
|
|
||||||
# Install additional languages if specified
|
# Install additional languages if specified
|
||||||
if [[ ! -z "$PAPERLESS_OCR_LANGUAGES" ]]; then
|
if [[ -n "$PAPERLESS_OCR_LANGUAGES" ]]; then
|
||||||
install_languages "$PAPERLESS_OCR_LANGUAGES"
|
install_languages "$PAPERLESS_OCR_LANGUAGES"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -85,7 +151,7 @@ initialize
|
|||||||
|
|
||||||
if [[ "$1" != "/"* ]]; then
|
if [[ "$1" != "/"* ]]; then
|
||||||
echo Executing management command "$@"
|
echo Executing management command "$@"
|
||||||
exec gosu paperless python3 manage.py "$@"
|
exec ${gosu_cmd[@]} python3 manage.py "$@"
|
||||||
else
|
else
|
||||||
echo Executing "$@"
|
echo Executing "$@"
|
||||||
exec "$@"
|
exec "$@"
|
||||||
|
|||||||
@@ -1,19 +1,19 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
wait_for_postgres() {
|
wait_for_postgres() {
|
||||||
attempt_num=1
|
local attempt_num=1
|
||||||
max_attempts=5
|
local max_attempts=5
|
||||||
|
|
||||||
echo "Waiting for PostgreSQL to start..."
|
echo "Waiting for PostgreSQL to start..."
|
||||||
|
|
||||||
host="${PAPERLESS_DBHOST}"
|
local host="${PAPERLESS_DBHOST:-localhost}"
|
||||||
port="${PAPERLESS_DBPORT}"
|
local port="${PAPERLESS_DBPORT:-5432}"
|
||||||
|
|
||||||
if [[ -z $port ]]; then
|
# Disable warning, host and port can't have spaces
|
||||||
port="5432"
|
# shellcheck disable=SC2086
|
||||||
fi
|
while [ ! "$(pg_isready -h ${host} -p ${port})" ]; do
|
||||||
|
|
||||||
while ! </dev/tcp/$host/$port; do
|
|
||||||
|
|
||||||
if [ $attempt_num -eq $max_attempts ]; then
|
if [ $attempt_num -eq $max_attempts ]; then
|
||||||
echo "Unable to connect to database."
|
echo "Unable to connect to database."
|
||||||
@@ -23,11 +23,19 @@ wait_for_postgres() {
|
|||||||
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
attempt_num=$(expr "$attempt_num" + 1)
|
attempt_num=$(("$attempt_num" + 1))
|
||||||
sleep 5
|
sleep 5
|
||||||
done
|
done
|
||||||
}
|
}
|
||||||
|
|
||||||
|
wait_for_redis() {
|
||||||
|
# We use a Python script to send the Redis ping
|
||||||
|
# instead of installing redis-tools just for 1 thing
|
||||||
|
if ! python3 /sbin/wait-for-redis.py; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
migrations() {
|
migrations() {
|
||||||
(
|
(
|
||||||
# flock is in place to prevent multiple containers from doing migrations
|
# flock is in place to prevent multiple containers from doing migrations
|
||||||
@@ -36,17 +44,18 @@ migrations() {
|
|||||||
flock 200
|
flock 200
|
||||||
echo "Apply database migrations..."
|
echo "Apply database migrations..."
|
||||||
python3 manage.py migrate
|
python3 manage.py migrate
|
||||||
) 200>/usr/src/paperless/data/migration_lock
|
) 200>"${DATA_DIR}/migration_lock"
|
||||||
}
|
}
|
||||||
|
|
||||||
search_index() {
|
search_index() {
|
||||||
index_version=1
|
|
||||||
index_version_file=/usr/src/paperless/data/.index_version
|
|
||||||
|
|
||||||
if [[ (! -f "$index_version_file") || $(<$index_version_file) != "$index_version" ]]; then
|
local index_version=1
|
||||||
|
local index_version_file=${DATA_DIR}/.index_version
|
||||||
|
|
||||||
|
if [[ (! -f "${index_version_file}") || $(<"${index_version_file}") != "$index_version" ]]; then
|
||||||
echo "Search index out of date. Updating..."
|
echo "Search index out of date. Updating..."
|
||||||
python3 manage.py document_index reindex
|
python3 manage.py document_index reindex --no-progress-bar
|
||||||
echo $index_version | tee $index_version_file >/dev/null
|
echo ${index_version} | tee "${index_version_file}" >/dev/null
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -61,6 +70,8 @@ do_work() {
|
|||||||
wait_for_postgres
|
wait_for_postgres
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
wait_for_redis
|
||||||
|
|
||||||
migrations
|
migrations
|
||||||
|
|
||||||
search_index
|
search_index
|
||||||
|
|||||||
@@ -1,4 +1,19 @@
|
|||||||
for command in document_archiver document_exporter document_importer mail_fetcher document_create_classifier document_index document_renamer document_retagger document_thumbnails document_sanity_checker manage_superuser;
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
for command in decrypt_documents \
|
||||||
|
document_archiver \
|
||||||
|
document_exporter \
|
||||||
|
document_importer \
|
||||||
|
mail_fetcher \
|
||||||
|
document_create_classifier \
|
||||||
|
document_index \
|
||||||
|
document_renamer \
|
||||||
|
document_retagger \
|
||||||
|
document_thumbnails \
|
||||||
|
document_sanity_checker \
|
||||||
|
manage_superuser;
|
||||||
do
|
do
|
||||||
echo "installing $command..."
|
echo "installing $command..."
|
||||||
sed "s/management_command/$command/g" management_script.sh > /usr/local/bin/$command
|
sed "s/management_command/$command/g" management_script.sh > /usr/local/bin/$command
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
#!/bin/bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
@@ -6,10 +6,10 @@ cd /usr/src/paperless/src/
|
|||||||
|
|
||||||
if [[ $(id -u) == 0 ]] ;
|
if [[ $(id -u) == 0 ]] ;
|
||||||
then
|
then
|
||||||
gosu paperless python3 manage.py management_command "$@"
|
gosu paperless python3 manage.py management_command "$@"
|
||||||
elif [[ $(id -un) == "paperless" ]] ;
|
elif [[ $(id -un) == "paperless" ]] ;
|
||||||
then
|
then
|
||||||
python3 manage.py management_command "$@"
|
python3 manage.py management_command "$@"
|
||||||
else
|
else
|
||||||
echo "Unknown user."
|
echo "Unknown user."
|
||||||
fi
|
fi
|
||||||
|
|||||||
15
docker/paperless_cmd.sh
Executable file
@@ -0,0 +1,15 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
rootless_args=()
|
||||||
|
if [ $(id -u) == $(id -u paperless) ]; then
|
||||||
|
rootless_args=(
|
||||||
|
--user
|
||||||
|
paperless
|
||||||
|
--logfile
|
||||||
|
supervisord.log
|
||||||
|
--pidfile
|
||||||
|
supervisord.pid
|
||||||
|
)
|
||||||
|
fi
|
||||||
|
|
||||||
|
/usr/local/bin/supervisord -c /etc/supervisord.conf ${rootless_args[@]}
|
||||||
@@ -28,6 +28,7 @@ stderr_logfile_maxbytes=0
|
|||||||
[program:scheduler]
|
[program:scheduler]
|
||||||
command=python3 manage.py qcluster
|
command=python3 manage.py qcluster
|
||||||
user=paperless
|
user=paperless
|
||||||
|
stopasgroup = true
|
||||||
|
|
||||||
stdout_logfile=/dev/stdout
|
stdout_logfile=/dev/stdout
|
||||||
stdout_logfile_maxbytes=0
|
stdout_logfile_maxbytes=0
|
||||||
|
|||||||
44
docker/wait-for-redis.py
Executable file
@@ -0,0 +1,44 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Simple script which attempts to ping the Redis broker as set in the environment for
|
||||||
|
a certain number of times, waiting a little bit in between
|
||||||
|
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
from typing import Final
|
||||||
|
|
||||||
|
from redis import Redis
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
|
||||||
|
MAX_RETRY_COUNT: Final[int] = 5
|
||||||
|
RETRY_SLEEP_SECONDS: Final[int] = 5
|
||||||
|
|
||||||
|
REDIS_URL: Final[str] = os.getenv("PAPERLESS_REDIS", "redis://localhost:6379")
|
||||||
|
|
||||||
|
print(f"Waiting for Redis: {REDIS_URL}", flush=True)
|
||||||
|
|
||||||
|
attempt = 0
|
||||||
|
with Redis.from_url(url=REDIS_URL) as client:
|
||||||
|
while attempt < MAX_RETRY_COUNT:
|
||||||
|
try:
|
||||||
|
client.ping()
|
||||||
|
break
|
||||||
|
except Exception as e:
|
||||||
|
print(
|
||||||
|
f"Redis ping #{attempt} failed.\n"
|
||||||
|
f"Error: {str(e)}.\n"
|
||||||
|
f"Waiting {RETRY_SLEEP_SECONDS}s",
|
||||||
|
flush=True,
|
||||||
|
)
|
||||||
|
time.sleep(RETRY_SLEEP_SECONDS)
|
||||||
|
attempt += 1
|
||||||
|
|
||||||
|
if attempt >= MAX_RETRY_COUNT:
|
||||||
|
print(f"Failed to connect to: {REDIS_URL}")
|
||||||
|
sys.exit(os.EX_UNAVAILABLE)
|
||||||
|
else:
|
||||||
|
print(f"Connected to Redis broker: {REDIS_URL}")
|
||||||
|
sys.exit(os.EX_OK)
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
FROM python:3.5.1
|
|
||||||
MAINTAINER Pit Kleyersburg <pitkley@googlemail.com>
|
|
||||||
|
|
||||||
# Install Sphinx and Pygments
|
|
||||||
RUN pip install Sphinx Pygments
|
|
||||||
|
|
||||||
# Setup directories, copy data
|
|
||||||
RUN mkdir /build
|
|
||||||
COPY . /build
|
|
||||||
WORKDIR /build/docs
|
|
||||||
|
|
||||||
# Build documentation
|
|
||||||
RUN make html
|
|
||||||
|
|
||||||
# Start webserver
|
|
||||||
WORKDIR /build/docs/_build/html
|
|
||||||
EXPOSE 8000/tcp
|
|
||||||
CMD ["python3", "-m", "http.server"]
|
|
||||||
@@ -24,6 +24,7 @@ I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
|||||||
help:
|
help:
|
||||||
@echo "Please use \`make <target>' where <target> is one of"
|
@echo "Please use \`make <target>' where <target> is one of"
|
||||||
@echo " html to make standalone HTML files"
|
@echo " html to make standalone HTML files"
|
||||||
|
@echo " livehtml to preview changes with live reload in your browser"
|
||||||
@echo " dirhtml to make HTML files named index.html in directories"
|
@echo " dirhtml to make HTML files named index.html in directories"
|
||||||
@echo " singlehtml to make a single large HTML file"
|
@echo " singlehtml to make a single large HTML file"
|
||||||
@echo " pickle to make pickle files"
|
@echo " pickle to make pickle files"
|
||||||
@@ -54,6 +55,9 @@ html:
|
|||||||
@echo
|
@echo
|
||||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||||
|
|
||||||
|
livehtml:
|
||||||
|
sphinx-autobuild "./" "$(BUILDDIR)" $(O)
|
||||||
|
|
||||||
dirhtml:
|
dirhtml:
|
||||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||||
@echo
|
@echo
|
||||||
|
|||||||
597
docs/_static/css/custom.css
vendored
Normal file
@@ -0,0 +1,597 @@
|
|||||||
|
/* Variables */
|
||||||
|
:root {
|
||||||
|
--color-text-body: #5c5962;
|
||||||
|
--color-text-body-light: #fcfcfc;
|
||||||
|
--color-text-anchor: #7253ed;
|
||||||
|
--color-text-alt: rgba(0, 0, 0, 0.3);
|
||||||
|
--color-text-title: #27262b;
|
||||||
|
--color-text-code-inline: #e74c3c;
|
||||||
|
--color-text-code-nt: #062873;
|
||||||
|
--color-text-selection: #b19eff;
|
||||||
|
--color-bg-body: #fcfcfc;
|
||||||
|
--color-bg-body-alt: #f3f6f6;
|
||||||
|
--color-bg-side-nav: #f5f6fa;
|
||||||
|
--color-bg-side-nav-hover: #ebedf5;
|
||||||
|
--color-bg-code-block: var(--color-bg-side-nav);
|
||||||
|
--color-border: #eeebee;
|
||||||
|
--color-btn-neutral-bg: #f3f6f6;
|
||||||
|
--color-btn-neutral-bg-hover: #e5ebeb;
|
||||||
|
--color-success-title: #1abc9c;
|
||||||
|
--color-success-body: #dbfaf4;
|
||||||
|
--color-warning-title: #f0b37e;
|
||||||
|
--color-warning-body: #ffedcc;
|
||||||
|
--color-danger-title: #f29f97;
|
||||||
|
--color-danger-body: #fdf3f2;
|
||||||
|
--color-info-title: #6ab0de;
|
||||||
|
--color-info-body: #e7f2fa;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark-mode {
|
||||||
|
--color-text-body: #abb2bf;
|
||||||
|
--color-text-body-light: #9499a2;
|
||||||
|
--color-text-alt: rgba(0255, 255, 255, 0.5);
|
||||||
|
--color-text-title: var(--color-text-anchor);
|
||||||
|
--color-text-code-inline: #abb2bf;
|
||||||
|
--color-text-code-nt: #2063f3;
|
||||||
|
--color-text-selection: #030303;
|
||||||
|
--color-bg-body: #1d1d20 !important;
|
||||||
|
--color-bg-body-alt: #131315;
|
||||||
|
--color-bg-side-nav: #18181a;
|
||||||
|
--color-bg-side-nav-hover: #101216;
|
||||||
|
--color-bg-code-block: #101216;
|
||||||
|
--color-border: #47494f;
|
||||||
|
--color-btn-neutral-bg: #242529;
|
||||||
|
--color-btn-neutral-bg-hover: #101216;
|
||||||
|
--color-success-title: #02120f;
|
||||||
|
--color-success-body: #041b17;
|
||||||
|
--color-warning-title: #1b0e03;
|
||||||
|
--color-warning-body: #371d06;
|
||||||
|
--color-danger-title: #120902;
|
||||||
|
--color-danger-body: #1b0503;
|
||||||
|
--color-info-title: #020608;
|
||||||
|
--color-info-body: #06141e;
|
||||||
|
}
|
||||||
|
|
||||||
|
* {
|
||||||
|
transition: background-color 0.3s ease, border-color 0.3s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Typography */
|
||||||
|
body {
|
||||||
|
font-family: system-ui,-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,sans-serif;
|
||||||
|
font-size: inherit;
|
||||||
|
line-height: 1.4;
|
||||||
|
color: var(--color-text-body);
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content p {
|
||||||
|
word-break: break-word;
|
||||||
|
}
|
||||||
|
|
||||||
|
h1, h2, h3, h4, h5, h6 {
|
||||||
|
font-family: inherit;
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content .toctree-wrapper>p.caption, .rst-content h1, .rst-content h2, .rst-content h3, .rst-content h4, .rst-content h5, .rst-content h6 {
|
||||||
|
padding-top: .5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
p, .main-content-wrap, .rst-content .section ul, .rst-content .toctree-wrapper ul, .rst-content section ul, .wy-plain-list-disc, article ul {
|
||||||
|
line-height: 1.6;
|
||||||
|
}
|
||||||
|
|
||||||
|
pre, .code, .rst-content .linenodiv pre, .rst-content div[class^=highlight] pre, .rst-content pre.literal-block {
|
||||||
|
font-family: "SFMono-Regular", Menlo,Consolas, Monospace;
|
||||||
|
font-size: 0.75em;
|
||||||
|
line-height: 1.8;
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-menu-vertical li.toctree-l3,.wy-menu-vertical li.toctree-l4 {
|
||||||
|
font-size: 1rem
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-versions {
|
||||||
|
font-family: inherit;
|
||||||
|
line-height: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
footer, footer p {
|
||||||
|
font-size: .8rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
footer .rst-footer-buttons {
|
||||||
|
font-size: 1rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (max-width: 400px) {
|
||||||
|
/* break code lines on mobile */
|
||||||
|
pre, code {
|
||||||
|
word-break: break-word;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* Layout */
|
||||||
|
.wy-side-nav-search, .wy-menu-vertical {
|
||||||
|
width: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-nav-side {
|
||||||
|
z-index: 0;
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
background-color: var(--color-bg-side-nav);
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-side-scroll {
|
||||||
|
width: 100%;
|
||||||
|
overflow-y: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (min-width: 66.5rem) {
|
||||||
|
.wy-side-scroll {
|
||||||
|
width:264px
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (min-width: 50rem) {
|
||||||
|
.wy-nav-side {
|
||||||
|
flex-wrap: nowrap;
|
||||||
|
position: fixed;
|
||||||
|
width: 248px;
|
||||||
|
height: 100%;
|
||||||
|
flex-direction: column;
|
||||||
|
border-right: 1px solid var(--color-border);
|
||||||
|
align-items:flex-end
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (min-width: 66.5rem) {
|
||||||
|
.wy-nav-side {
|
||||||
|
width: calc((100% - 1064px) / 2 + 264px);
|
||||||
|
min-width:264px
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (min-width: 50rem) {
|
||||||
|
.wy-nav-content-wrap {
|
||||||
|
position: relative;
|
||||||
|
max-width: 800px;
|
||||||
|
margin-left:248px
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (min-width: 66.5rem) {
|
||||||
|
.wy-nav-content-wrap {
|
||||||
|
margin-left:calc((100% - 1064px) / 2 + 264px)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* Colors */
|
||||||
|
body.wy-body-for-nav,
|
||||||
|
.wy-nav-content {
|
||||||
|
background: var(--color-bg-body);
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-nav-side {
|
||||||
|
border-right: 1px solid var(--color-border);
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-side-nav-search, .wy-nav-top {
|
||||||
|
background: var(--color-bg-side-nav);
|
||||||
|
border-bottom: 1px solid var(--color-border);
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-nav-content-wrap {
|
||||||
|
background: inherit;
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-side-nav-search > a, .wy-nav-top a, .wy-nav-top i {
|
||||||
|
color: var(--color-text-title);
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-side-nav-search > a:hover, .wy-nav-top a:hover {
|
||||||
|
background: transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-side-nav-search > div.version {
|
||||||
|
color: var(--color-text-alt)
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-side-nav-search > div[role="search"] {
|
||||||
|
border-top: 1px solid var(--color-border);
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-menu-vertical li.toctree-l2.current>a, .wy-menu-vertical li.toctree-l2.current li.toctree-l3>a,
|
||||||
|
.wy-menu-vertical li.toctree-l3.current>a, .wy-menu-vertical li.toctree-l3.current li.toctree-l4>a {
|
||||||
|
background: var(--color-bg-side-nav);
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content .highlighted {
|
||||||
|
background: #eedd85;
|
||||||
|
box-shadow: 0 0 0 2px #eedd85;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-side-nav-search input[type=text],
|
||||||
|
html.writer-html5 .rst-content table.docutils th {
|
||||||
|
color: var(--color-text-body);
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td,
|
||||||
|
.wy-table-backed,
|
||||||
|
.wy-table-odd td,
|
||||||
|
.wy-table-striped tr:nth-child(2n-1) td {
|
||||||
|
background-color: var(--color-bg-body-alt);
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content table.docutils,
|
||||||
|
.wy-table-bordered-all,
|
||||||
|
html.writer-html5 .rst-content table.docutils th,
|
||||||
|
.rst-content table.docutils td,
|
||||||
|
.wy-table-bordered-all td,
|
||||||
|
hr {
|
||||||
|
border-color: var(--color-border) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
::selection {
|
||||||
|
background: var(--color-text-selection);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Ridiculous rules are taken from sphinx_rtd */
|
||||||
|
.rst-content .admonition-title,
|
||||||
|
.wy-alert-title {
|
||||||
|
color: var(--color-text-body-light);
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content .hint,
|
||||||
|
.rst-content .important,
|
||||||
|
.rst-content .tip,
|
||||||
|
.rst-content .wy-alert-success,
|
||||||
|
.wy-alert.wy-alert-success {
|
||||||
|
background: var(--color-success-body);
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content .hint .admonition-title,
|
||||||
|
.rst-content .hint .wy-alert-title,
|
||||||
|
.rst-content .important .admonition-title,
|
||||||
|
.rst-content .important .wy-alert-title,
|
||||||
|
.rst-content .tip .admonition-title,
|
||||||
|
.rst-content .tip .wy-alert-title,
|
||||||
|
.rst-content .wy-alert-success .admonition-title,
|
||||||
|
.rst-content .wy-alert-success .wy-alert-title,
|
||||||
|
.wy-alert.wy-alert-success .rst-content .admonition-title,
|
||||||
|
.wy-alert.wy-alert-success .wy-alert-title {
|
||||||
|
background-color: var(--color-success-title);
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content .admonition-todo,
|
||||||
|
.rst-content .attention,
|
||||||
|
.rst-content .caution,
|
||||||
|
.rst-content .warning,
|
||||||
|
.rst-content .wy-alert-warning,
|
||||||
|
.wy-alert.wy-alert-warning {
|
||||||
|
background: var(--color-warning-body);
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content .admonition-todo .admonition-title,
|
||||||
|
.rst-content .admonition-todo .wy-alert-title,
|
||||||
|
.rst-content .attention .admonition-title,
|
||||||
|
.rst-content .attention .wy-alert-title,
|
||||||
|
.rst-content .caution .admonition-title,
|
||||||
|
.rst-content .caution .wy-alert-title,
|
||||||
|
.rst-content .warning .admonition-title,
|
||||||
|
.rst-content .warning .wy-alert-title,
|
||||||
|
.rst-content .wy-alert-warning .admonition-title,
|
||||||
|
.rst-content .wy-alert-warning .wy-alert-title,
|
||||||
|
.rst-content .wy-alert.wy-alert-warning .admonition-title,
|
||||||
|
.wy-alert.wy-alert-warning .rst-content .admonition-title,
|
||||||
|
.wy-alert.wy-alert-warning .wy-alert-title {
|
||||||
|
background: var(--color-warning-title);
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content .danger,
|
||||||
|
.rst-content .error,
|
||||||
|
.rst-content .wy-alert-danger,
|
||||||
|
.wy-alert.wy-alert-danger {
|
||||||
|
background: var(--color-danger-body);
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content .danger .admonition-title,
|
||||||
|
.rst-content .danger .wy-alert-title,
|
||||||
|
.rst-content .error .admonition-title,
|
||||||
|
.rst-content .error .wy-alert-title,
|
||||||
|
.rst-content .wy-alert-danger .admonition-title,
|
||||||
|
.rst-content .wy-alert-danger .wy-alert-title,
|
||||||
|
.wy-alert.wy-alert-danger .rst-content .admonition-title,
|
||||||
|
.wy-alert.wy-alert-danger .wy-alert-title {
|
||||||
|
background: var(--color-danger-title);
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content .note,
|
||||||
|
.rst-content .seealso,
|
||||||
|
.rst-content .wy-alert-info,
|
||||||
|
.wy-alert.wy-alert-info {
|
||||||
|
background: var(--color-info-body);
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content .note .admonition-title,
|
||||||
|
.rst-content .note .wy-alert-title,
|
||||||
|
.rst-content .seealso .admonition-title,
|
||||||
|
.rst-content .seealso .wy-alert-title,
|
||||||
|
.rst-content .wy-alert-info .admonition-title,
|
||||||
|
.rst-content .wy-alert-info .wy-alert-title,
|
||||||
|
.wy-alert.wy-alert-info .rst-content .admonition-title,
|
||||||
|
.wy-alert.wy-alert-info .wy-alert-title {
|
||||||
|
background: var(--color-info-title);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/* Links */
|
||||||
|
a, a:visited,
|
||||||
|
.wy-menu-vertical a,
|
||||||
|
a.icon.icon-home,
|
||||||
|
.wy-menu-vertical li.toctree-l1.current > a.current {
|
||||||
|
color: var(--color-text-anchor);
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
a:hover, .wy-breadcrumbs-aside a {
|
||||||
|
color: var(--color-text-anchor); /* reset */
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-versions a, .rst-versions .rst-current-version {
|
||||||
|
color: #var(--color-text-anchor);
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-nav-content a.reference, .wy-nav-content a:not([class]) {
|
||||||
|
background-image: linear-gradient(var(--color-border) 0%, var(--color-border) 100%);
|
||||||
|
background-repeat: repeat-x;
|
||||||
|
background-position: 0 100%;
|
||||||
|
background-size: 1px 1px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-nav-content a.reference:hover, .wy-nav-content a:not([class]):hover {
|
||||||
|
background-image: linear-gradient(rgba(114,83,237,0.45) 0%, rgba(114,83,237,0.45) 100%);
|
||||||
|
background-size: 1px 1px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-menu-vertical a:hover,
|
||||||
|
.wy-menu-vertical li.current a:hover,
|
||||||
|
.wy-menu-vertical a:active {
|
||||||
|
background: var(--color-bg-side-nav-hover) !important;
|
||||||
|
color: var(--color-text-body);
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-menu-vertical li.toctree-l1.current>a,
|
||||||
|
.wy-menu-vertical li.current>a,
|
||||||
|
.wy-menu-vertical li.on a {
|
||||||
|
background-color: var(--color-bg-side-nav-hover);
|
||||||
|
border: none;
|
||||||
|
font-weight: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-menu-vertical li.current {
|
||||||
|
background-color: inherit;
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-menu-vertical li.current a {
|
||||||
|
border-right: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-menu-vertical li.toctree-l2 a,
|
||||||
|
.wy-menu-vertical li.toctree-l3 a,
|
||||||
|
.wy-menu-vertical li.toctree-l4 a,
|
||||||
|
.wy-menu-vertical li.toctree-l5 a,
|
||||||
|
.wy-menu-vertical li.toctree-l6 a,
|
||||||
|
.wy-menu-vertical li.toctree-l7 a,
|
||||||
|
.wy-menu-vertical li.toctree-l8 a,
|
||||||
|
.wy-menu-vertical li.toctree-l9 a,
|
||||||
|
.wy-menu-vertical li.toctree-l10 a {
|
||||||
|
color: var(--color-text-body);
|
||||||
|
}
|
||||||
|
|
||||||
|
a.image-reference, a.image-reference:hover {
|
||||||
|
background: none !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
a.image-reference img {
|
||||||
|
cursor: zoom-in;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* Code blocks */
|
||||||
|
.rst-content code, .rst-content tt, code {
|
||||||
|
padding: 0.25em;
|
||||||
|
font-weight: 400;
|
||||||
|
background-color: var(--color-bg-code-block);
|
||||||
|
border: 1px solid var(--color-border);
|
||||||
|
border-radius: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content div[class^=highlight], .rst-content pre.literal-block {
|
||||||
|
padding: 0.7rem;
|
||||||
|
margin-top: 0;
|
||||||
|
margin-bottom: 0.75rem;
|
||||||
|
overflow-x: auto;
|
||||||
|
background-color: var(--color-bg-side-nav);
|
||||||
|
border-color: var(--color-border);
|
||||||
|
border-radius: 4px;
|
||||||
|
box-shadow: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content .admonition-title,
|
||||||
|
.rst-content div.admonition,
|
||||||
|
.wy-alert-title {
|
||||||
|
padding: 10px 12px;
|
||||||
|
border-top-left-radius: 4px;
|
||||||
|
border-top-right-radius: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.highlight .go {
|
||||||
|
color: inherit;
|
||||||
|
}
|
||||||
|
|
||||||
|
.highlight .nt {
|
||||||
|
color: var(--color-text-code-nt);
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content code.literal,
|
||||||
|
.rst-content tt.literal,
|
||||||
|
html.writer-html5 .rst-content dl.footnote code {
|
||||||
|
border-color: var(--color-border);
|
||||||
|
background-color: var(--color-border);
|
||||||
|
color: var(--color-text-code-inline)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* Search */
|
||||||
|
.wy-side-nav-search input[type=text] {
|
||||||
|
border: none;
|
||||||
|
border-radius: 0;
|
||||||
|
background-color: transparent;
|
||||||
|
font-family: inherit;
|
||||||
|
font-size: .85rem;
|
||||||
|
box-shadow: none;
|
||||||
|
padding: .7rem 1rem .7rem 2.8rem;
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
#rtd-search-form {
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
|
||||||
|
#rtd-search-form:before {
|
||||||
|
font: normal normal normal 14px/1 FontAwesome;
|
||||||
|
font-size: inherit;
|
||||||
|
text-rendering: auto;
|
||||||
|
-webkit-font-smoothing: antialiased;
|
||||||
|
-moz-osx-font-smoothing: grayscale;
|
||||||
|
content: "\f002";
|
||||||
|
color: var(--color-text-alt);
|
||||||
|
position: absolute;
|
||||||
|
left: 1.5rem;
|
||||||
|
top: .7rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Side nav */
|
||||||
|
.wy-side-nav-search {
|
||||||
|
padding: 1rem 0 0 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-menu-vertical li a button.toctree-expand {
|
||||||
|
float: right;
|
||||||
|
margin-right: -1.5em;
|
||||||
|
padding: 0 .5em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-menu-vertical a,
|
||||||
|
.wy-menu-vertical li.current>a,
|
||||||
|
.wy-menu-vertical li.current li>a {
|
||||||
|
padding-right: 1.5em !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-menu-vertical li.current li>a.current {
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Misc spacing */
|
||||||
|
.rst-content .admonition-title, .wy-alert-title {
|
||||||
|
padding: 10px 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Buttons */
|
||||||
|
/* Base button style shared by all doc buttons (e.g. prev/next nav). */
.btn {
  display: inline-block;
  box-sizing: border-box;
  padding: 0.3em 1em;
  margin: 0;
  font-family: inherit;
  font-size: inherit;
  font-weight: 500;
  line-height: 1.5;
  /* Fixed: was `#var(--color-text-anchor)` — the stray `#` made this
     declaration invalid, so the intended theme color was never applied. */
  color: var(--color-text-anchor);
  text-decoration: none;
  vertical-align: baseline;
  background-color: #f7f7f7;
  border-width: 0;
  border-radius: 4px;
  box-shadow: 0 1px 2px rgba(0,0,0,0.12),0 3px 10px rgba(0,0,0,0.08);
  appearance: none;
}
|
||||||
|
|
||||||
|
.btn:active {
|
||||||
|
padding: 0.3em 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content .btn:focus {
|
||||||
|
outline: 1px solid #ccc;
|
||||||
|
}
|
||||||
|
|
||||||
|
.rst-content .btn-neutral, .rst-content .btn span.fa {
|
||||||
|
color: var(--color-text-body) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-neutral {
|
||||||
|
background-color: var(--color-btn-neutral-bg) !important;
|
||||||
|
color: var(--color-btn-neutral-text) !important;
|
||||||
|
border: 1px solid var(--color-btn-neutral-bg);
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn:hover, .btn-neutral:hover {
|
||||||
|
background-color: var(--color-btn-neutral-bg-hover) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* Icon overrides */
|
||||||
|
.wy-side-nav-search a.icon-home:before {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.fa-minus-square-o:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before {
|
||||||
|
content: "\f106"; /* fa-angle-up */
|
||||||
|
}
|
||||||
|
|
||||||
|
.fa-plus-square-o:before, .wy-menu-vertical li button.toctree-expand:before {
|
||||||
|
content: "\f107"; /* fa-angle-down */
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* Misc */
|
||||||
|
.wy-nav-top {
|
||||||
|
line-height: 36px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Hamburger icon in the mobile top navigation bar. */
.wy-nav-top > i {
  font-size: 24px;
  padding: 8px 0 0 2px;
  /* Fixed: was `color:#var(--color-text-anchor)` — the stray `#` made this
     declaration invalid CSS, so the icon fell back to the inherited color. */
  color: var(--color-text-anchor);
}
|
||||||
|
|
||||||
|
.rst-content table.docutils td,
|
||||||
|
.rst-content table.docutils th,
|
||||||
|
.rst-content table.field-list td,
|
||||||
|
.rst-content table.field-list th,
|
||||||
|
.wy-table td,
|
||||||
|
.wy-table th {
|
||||||
|
padding: 8px 14px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark-mode-toggle {
|
||||||
|
position: absolute;
|
||||||
|
top: 14px;
|
||||||
|
right: 12px;
|
||||||
|
height: 20px;
|
||||||
|
width: 24px;
|
||||||
|
z-index: 10;
|
||||||
|
border: none;
|
||||||
|
background-color: transparent;
|
||||||
|
color: inherit;
|
||||||
|
opacity: 0.7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.wy-nav-content-wrap {
|
||||||
|
z-index: 20;
|
||||||
|
}
|
||||||
14
docs/_static/custom.css
vendored
@@ -1,14 +0,0 @@
|
|||||||
/* override table width restrictions */
|
|
||||||
@media screen and (min-width: 767px) {
|
|
||||||
|
|
||||||
.wy-table-responsive table td {
|
|
||||||
/* !important prevents the common CSS stylesheets from
|
|
||||||
overriding this as on RTD they are loaded after this stylesheet */
|
|
||||||
white-space: normal !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.wy-table-responsive {
|
|
||||||
overflow: visible !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
47
docs/_static/js/darkmode.js
vendored
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
// Module-level references populated by load(): the dark-mode toggle
// <button> and the Font Awesome <i> icon rendered inside it.
let toggleButton
let icon
|
||||||
|
|
||||||
|
/**
 * Entry point, run on DOMContentLoaded: builds the dark-mode toggle
 * button, subscribes to OS color-scheme changes, applies the initial
 * theme, and persists the user's choice to localStorage.
 *
 * Relies on globals defined in the document <head> (see layout.html):
 *   - prefersDarkQuery: MediaQueryList for "(prefers-color-scheme: dark)"
 *   - darkModeState:    boolean, the currently active theme
 */
function load() {
  'use strict'

  // The icon shows the *target* affordance: a sun while dark mode is
  // active, a moon while light mode is active.
  icon = document.createElement('i')
  icon.classList.add('fa', darkModeState ? 'fa-sun-o' : 'fa-moon-o')

  toggleButton = document.createElement('button')
  toggleButton.setAttribute('title', 'Toggle dark mode')
  toggleButton.classList.add('dark-mode-toggle')
  toggleButton.appendChild(icon)
  document.body.prepend(toggleButton)

  // Follow live changes to the OS setting. addListener (rather than
  // addEventListener) is kept deliberately for older Safari support.
  if (prefersDarkQuery) {
    prefersDarkQuery.addListener((evt) => {
      toggleDarkMode(evt.matches)
    })
  }

  // Initial theme comes from localStorage / prefers-color-scheme, already
  // resolved in the document <head> to prevent a flash of the wrong theme.
  if (darkModeState == null) darkModeState = false
  toggleDarkMode(darkModeState)

  // Flip the theme on click and remember the choice across visits.
  toggleButton.addEventListener('click', function () {
    darkModeState = !darkModeState
    toggleDarkMode(darkModeState)
    localStorage.setItem('dark-mode', darkModeState)
  })
}
|
||||||
|
|
||||||
|
/**
 * Apply a theme: toggles the `dark-mode` / `light-mode` classes on
 * <html>, swaps the toggle-button icon accordingly, and records the new
 * state in the module-level darkModeState.
 *
 * @param {boolean} state - true to enable dark mode, false for light mode.
 */
function toggleDarkMode(state) {
  const rootClasses = document.documentElement.classList
  rootClasses.toggle('dark-mode', state)
  rootClasses.toggle('light-mode', !state)

  // Clear both possible icon classes, then set the one matching `state`
  // (sun while dark is active, moon while light is active).
  icon.classList.remove('fa-sun-o', 'fa-moon-o')
  icon.classList.add(state ? 'fa-sun-o' : 'fa-moon-o')

  darkModeState = state
}
|
||||||
|
|
||||||
|
// Defer setup until the document is parsed so document.body exists.
document.addEventListener('DOMContentLoaded', load)
|
||||||
BIN
docs/_static/screenshot.png
vendored
|
Before Width: | Height: | Size: 445 KiB |
BIN
docs/_static/screenshots/bulk-edit.png
vendored
Normal file
|
After Width: | Height: | Size: 661 KiB |
BIN
docs/_static/screenshots/correspondents.png
vendored
|
Before Width: | Height: | Size: 106 KiB After Width: | Height: | Size: 457 KiB |
BIN
docs/_static/screenshots/dashboard.png
vendored
|
Before Width: | Height: | Size: 167 KiB After Width: | Height: | Size: 436 KiB |
BIN
docs/_static/screenshots/documents-filter.png
vendored
|
Before Width: | Height: | Size: 28 KiB After Width: | Height: | Size: 462 KiB |
BIN
docs/_static/screenshots/documents-largecards.png
vendored
|
Before Width: | Height: | Size: 306 KiB After Width: | Height: | Size: 608 KiB |
BIN
docs/_static/screenshots/documents-smallcards-dark.png
vendored
Normal file
|
After Width: | Height: | Size: 698 KiB |
BIN
docs/_static/screenshots/documents-smallcards.png
vendored
|
Before Width: | Height: | Size: 410 KiB After Width: | Height: | Size: 706 KiB |
BIN
docs/_static/screenshots/documents-table.png
vendored
|
Before Width: | Height: | Size: 137 KiB After Width: | Height: | Size: 480 KiB |
BIN
docs/_static/screenshots/editing.png
vendored
|
Before Width: | Height: | Size: 293 KiB After Width: | Height: | Size: 848 KiB |
BIN
docs/_static/screenshots/logs.png
vendored
|
Before Width: | Height: | Size: 260 KiB After Width: | Height: | Size: 703 KiB |
BIN
docs/_static/screenshots/mobile.png
vendored
|
Before Width: | Height: | Size: 158 KiB After Width: | Height: | Size: 388 KiB |
BIN
docs/_static/screenshots/new-tag.png
vendored
|
Before Width: | Height: | Size: 32 KiB After Width: | Height: | Size: 26 KiB |
BIN
docs/_static/screenshots/search-preview.png
vendored
|
Before Width: | Height: | Size: 61 KiB After Width: | Height: | Size: 54 KiB |
BIN
docs/_static/screenshots/search-results.png
vendored
|
Before Width: | Height: | Size: 261 KiB After Width: | Height: | Size: 517 KiB |
13
docs/_templates/layout.html
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
{% extends "!layout.html" %}
|
||||||
|
{% block extrahead %}
|
||||||
|
<script>
|
||||||
|
// MediaQueryList object
|
||||||
|
const prefersDarkQuery = window.matchMedia("(prefers-color-scheme: dark)");
|
||||||
|
const lsDark = localStorage.getItem("dark-mode");
|
||||||
|
let darkModeState = lsDark !== null ? lsDark == "true" : prefersDarkQuery.matches;
|
||||||
|
|
||||||
|
document.documentElement.classList.toggle("dark-mode", darkModeState);
|
||||||
|
document.documentElement.classList.toggle("light-mode", !darkModeState);
|
||||||
|
</script>
|
||||||
|
{{ super() }}
|
||||||
|
{% endblock %}
|
||||||
@@ -117,6 +117,23 @@ Then you can start paperless-ngx with ``-d`` to have it run in the background.
|
|||||||
|
|
||||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
In version 1.7.1 and onwards, the Docker image can now be pinned to a release series.
|
||||||
|
This is often combined with automatic updaters such as Watchtower to allow safer
|
||||||
|
unattended upgrading to new bugfix releases only. It is still recommended to always
|
||||||
|
review release notes before upgrading. To pin your install to a release series, edit
|
||||||
|
the ``docker-compose.yml`` find the line that says
|
||||||
|
|
||||||
|
.. code::
|
||||||
|
|
||||||
|
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||||
|
|
||||||
|
and replace the version with the series you want to track, for example:
|
||||||
|
|
||||||
|
.. code::
|
||||||
|
|
||||||
|
image: ghcr.io/paperless-ngx/paperless-ngx:1.7
|
||||||
|
|
||||||
Bare Metal Route
|
Bare Metal Route
|
||||||
================
|
================
|
||||||
|
|
||||||
@@ -270,6 +287,10 @@ When you use the provided docker compose script, put the export inside the
|
|||||||
``export`` folder in your paperless source directory. Specify ``../export``
|
``export`` folder in your paperless source directory. Specify ``../export``
|
||||||
as the ``source``.
|
as the ``source``.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
Importing from a previous version of Paperless may work, but for best results
|
||||||
|
it is suggested to match the versions.
|
||||||
|
|
||||||
.. _utilities-retagger:
|
.. _utilities-retagger:
|
||||||
|
|
||||||
@@ -369,8 +390,8 @@ the naming scheme.
|
|||||||
|
|
||||||
.. warning::
|
.. warning::
|
||||||
|
|
||||||
Since this command moves you documents around alot, it is advised to to
|
Since this command moves your documents, it is advised to do
|
||||||
a backup before. The renaming logic is robust and will never overwrite
|
a backup beforehand. The renaming logic is robust and will never overwrite
|
||||||
or delete a file, but you can't ever be careful enough.
|
or delete a file, but you can't ever be careful enough.
|
||||||
|
|
||||||
.. code::
|
.. code::
|
||||||
@@ -379,7 +400,7 @@ the naming scheme.
|
|||||||
|
|
||||||
The command takes no arguments and processes all your documents at once.
|
The command takes no arguments and processes all your documents at once.
|
||||||
|
|
||||||
Learn how to use :ref:`Management Utilities<Management utilities>`.
|
Learn how to use :ref:`Management Utilities<utilities-management-commands>`.
|
||||||
|
|
||||||
|
|
||||||
.. _utilities-sanity-checker:
|
.. _utilities-sanity-checker:
|
||||||
|
|||||||
@@ -7,12 +7,12 @@ easier.
|
|||||||
|
|
||||||
.. _advanced-matching:
|
.. _advanced-matching:
|
||||||
|
|
||||||
Matching tags, correspondents and document types
|
Matching tags, correspondents, document types, and storage paths
|
||||||
################################################
|
################################################################
|
||||||
|
|
||||||
Paperless will compare the matching algorithms defined by every tag and
|
Paperless will compare the matching algorithms defined by every tag, correspondent,
|
||||||
correspondent already set in your database to see if they apply to the text in
|
document type, and storage path in your database to see if they apply to the text
|
||||||
a document. In other words, if you defined a tag called ``Home Utility``
|
in a document. In other words, if you define a tag called ``Home Utility``
|
||||||
that had a ``match`` property of ``bc hydro`` and a ``matching_algorithm`` of
|
that had a ``match`` property of ``bc hydro`` and a ``matching_algorithm`` of
|
||||||
``literal``, Paperless will automatically tag your newly-consumed document with
|
``literal``, Paperless will automatically tag your newly-consumed document with
|
||||||
your ``Home Utility`` tag so long as the text ``bc hydro`` appears in the body
|
your ``Home Utility`` tag so long as the text ``bc hydro`` appears in the body
|
||||||
@@ -22,10 +22,10 @@ The matching logic is quite powerful. It supports searching the text of your
|
|||||||
document with different algorithms, and as such, some experimentation may be
|
document with different algorithms, and as such, some experimentation may be
|
||||||
necessary to get things right.
|
necessary to get things right.
|
||||||
|
|
||||||
In order to have a tag, correspondent, or type assigned automatically to newly
|
In order to have a tag, correspondent, document type, or storage path assigned
|
||||||
consumed documents, assign a match and matching algorithm using the web
|
automatically to newly consumed documents, assign a match and matching algorithm
|
||||||
interface. These settings define when to assign correspondents, tags, and types
|
using the web interface. These settings define when to assign tags, correspondents,
|
||||||
to documents.
|
document types, and storage paths to documents.
|
||||||
|
|
||||||
The following algorithms are available:
|
The following algorithms are available:
|
||||||
|
|
||||||
@@ -37,7 +37,7 @@ The following algorithms are available:
|
|||||||
* **Literal:** Matches only if the match appears exactly as provided (i.e. preserve ordering) in the PDF.
|
* **Literal:** Matches only if the match appears exactly as provided (i.e. preserve ordering) in the PDF.
|
||||||
* **Regular expression:** Parses the match as a regular expression and tries to
|
* **Regular expression:** Parses the match as a regular expression and tries to
|
||||||
find a match within the document.
|
find a match within the document.
|
||||||
* **Fuzzy match:** I dont know. Look at the source.
|
* **Fuzzy match:** I don't know. Look at the source.
|
||||||
* **Auto:** Tries to automatically match new documents. This does not require you
|
* **Auto:** Tries to automatically match new documents. This does not require you
|
||||||
to set a match. See the notes below.
|
to set a match. See the notes below.
|
||||||
|
|
||||||
@@ -47,9 +47,9 @@ defining a match text of ``"Bank of America" BofA`` using the *any* algorithm,
|
|||||||
will match documents that contain either "Bank of America" or "BofA", but will
|
will match documents that contain either "Bank of America" or "BofA", but will
|
||||||
not match documents containing "Bank of South America".
|
not match documents containing "Bank of South America".
|
||||||
|
|
||||||
Then just save your tag/correspondent and run another document through the
|
Then just save your tag, correspondent, document type, or storage path and run
|
||||||
consumer. Once complete, you should see the newly-created document,
|
another document through the consumer. Once complete, you should see the
|
||||||
automatically tagged with the appropriate data.
|
newly-created document, automatically tagged with the appropriate data.
|
||||||
|
|
||||||
|
|
||||||
.. _advanced-automatic_matching:
|
.. _advanced-automatic_matching:
|
||||||
@@ -58,9 +58,9 @@ Automatic matching
|
|||||||
==================
|
==================
|
||||||
|
|
||||||
Paperless-ngx comes with a new matching algorithm called *Auto*. This matching
|
Paperless-ngx comes with a new matching algorithm called *Auto*. This matching
|
||||||
algorithm tries to assign tags, correspondents, and document types to your
|
algorithm tries to assign tags, correspondents, document types, and storage paths
|
||||||
documents based on how you have already assigned these on existing documents. It
|
to your documents based on how you have already assigned these on existing documents.
|
||||||
uses a neural network under the hood.
|
It uses a neural network under the hood.
|
||||||
|
|
||||||
If, for example, all your bank statements of your account 123 at the Bank of
|
If, for example, all your bank statements of your account 123 at the Bank of
|
||||||
America are tagged with the tag "bofa_123" and the matching algorithm of this
|
America are tagged with the tag "bofa_123" and the matching algorithm of this
|
||||||
@@ -80,20 +80,21 @@ feature:
|
|||||||
that the neural network only learns from documents which you have correctly
|
that the neural network only learns from documents which you have correctly
|
||||||
tagged before.
|
tagged before.
|
||||||
* The matching algorithm can only work if there is a correlation between the
|
* The matching algorithm can only work if there is a correlation between the
|
||||||
tag, correspondent, or document type and the document itself. Your bank
|
tag, correspondent, document type, or storage path and the document itself.
|
||||||
statements usually contain your bank account number and the name of the bank,
|
Your bank statements usually contain your bank account number and the name
|
||||||
so this works reasonably well, However, tags such as "TODO" cannot be
|
of the bank, so this works reasonably well. However, tags such as "TODO"
|
||||||
automatically assigned.
|
cannot be automatically assigned.
|
||||||
* The matching algorithm needs a reasonable number of documents to identify when
|
* The matching algorithm needs a reasonable number of documents to identify when
|
||||||
to assign tags, correspondents, and types. If one out of a thousand documents
|
to assign tags, correspondents, storage paths, and types. If one out of a
|
||||||
has the correspondent "Very obscure web shop I bought something five years
|
thousand documents has the correspondent "Very obscure web shop I bought
|
||||||
ago", it will probably not assign this correspondent automatically if you buy
|
something five years ago", it will probably not assign this correspondent
|
||||||
something from them again. The more documents, the better.
|
automatically if you buy something from them again. The more documents, the better.
|
||||||
* Paperless also needs a reasonable amount of negative examples to decide when
|
* Paperless also needs a reasonable amount of negative examples to decide when
|
||||||
not to assign a certain tag, correspondent or type. This will usually be the
|
not to assign a certain tag, correspondent, document type, or storage path. This will
|
||||||
case as you start filling up paperless with documents. Example: If all your
|
usually be the case as you start filling up paperless with documents.
|
||||||
documents are either from "Webshop" and "Bank", paperless will assign one of
|
Example: If all your documents are either from "Webshop" and "Bank", paperless
|
||||||
these correspondents to ANY new document, if both are set to automatic matching.
|
will assign one of these correspondents to ANY new document, if both are set
|
||||||
|
to automatic matching.
|
||||||
|
|
||||||
Hooking into the consumption process
|
Hooking into the consumption process
|
||||||
####################################
|
####################################
|
||||||
@@ -120,10 +121,10 @@ Pre-consumption script
|
|||||||
======================
|
======================
|
||||||
|
|
||||||
Executed after the consumer sees a new document in the consumption folder, but
|
Executed after the consumer sees a new document in the consumption folder, but
|
||||||
before any processing of the document is performed. This script receives exactly
|
before any processing of the document is performed. This script can access the
|
||||||
one argument:
|
following relevant environment variables set:
|
||||||
|
|
||||||
* Document file name
|
* ``DOCUMENT_SOURCE_PATH``
|
||||||
|
|
||||||
A simple but common example for this would be creating a simple script like
|
A simple but common example for this would be creating a simple script like
|
||||||
this:
|
this:
|
||||||
@@ -133,7 +134,7 @@ this:
|
|||||||
.. code:: bash
|
.. code:: bash
|
||||||
|
|
||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
pdf2pdfocr.py -i ${1}
|
pdf2pdfocr.py -i ${DOCUMENT_SOURCE_PATH}
|
||||||
|
|
||||||
``/etc/paperless.conf``
|
``/etc/paperless.conf``
|
||||||
|
|
||||||
@@ -156,16 +157,20 @@ Post-consumption script
|
|||||||
=======================
|
=======================
|
||||||
|
|
||||||
Executed after the consumer has successfully processed a document and has moved it
|
Executed after the consumer has successfully processed a document and has moved it
|
||||||
into paperless. It receives the following arguments:
|
into paperless. It receives the following environment variables:
|
||||||
|
|
||||||
* Document id
|
* ``DOCUMENT_ID``
|
||||||
* Generated file name
|
* ``DOCUMENT_FILE_NAME``
|
||||||
* Source path
|
* ``DOCUMENT_CREATED``
|
||||||
* Thumbnail path
|
* ``DOCUMENT_MODIFIED``
|
||||||
* Download URL
|
* ``DOCUMENT_ADDED``
|
||||||
* Thumbnail URL
|
* ``DOCUMENT_SOURCE_PATH``
|
||||||
* Correspondent
|
* ``DOCUMENT_ARCHIVE_PATH``
|
||||||
* Tags
|
* ``DOCUMENT_THUMBNAIL_PATH``
|
||||||
|
* ``DOCUMENT_DOWNLOAD_URL``
|
||||||
|
* ``DOCUMENT_THUMBNAIL_URL``
|
||||||
|
* ``DOCUMENT_CORRESPONDENT``
|
||||||
|
* ``DOCUMENT_TAGS``
|
||||||
|
|
||||||
The script can be in any language, but for a simple shell script
|
The script can be in any language, but for a simple shell script
|
||||||
example, you can take a look at `post-consumption-example.sh`_ in this project.
|
example, you can take a look at `post-consumption-example.sh`_ in this project.
|
||||||
@@ -179,13 +184,14 @@ Assumed you have ``/home/foo/paperless-ngx/scripts/post-consumption-example.sh``
|
|||||||
You can pass that script into the consumer container via a host mount in your ``docker-compose.yml``.
|
You can pass that script into the consumer container via a host mount in your ``docker-compose.yml``.
|
||||||
|
|
||||||
.. code:: bash
|
.. code:: bash
|
||||||
...
|
|
||||||
consumer:
|
...
|
||||||
...
|
consumer:
|
||||||
volumes:
|
...
|
||||||
...
|
volumes:
|
||||||
- /home/paperless-ngx/scripts:/path/in/container/scripts/
|
...
|
||||||
...
|
- /home/paperless-ngx/scripts:/path/in/container/scripts/
|
||||||
|
...
|
||||||
|
|
||||||
Example (docker-compose.yml): ``- /home/foo/paperless-ngx/scripts:/usr/src/paperless/scripts``
|
Example (docker-compose.yml): ``- /home/foo/paperless-ngx/scripts:/usr/src/paperless/scripts``
|
||||||
|
|
||||||
@@ -199,7 +205,7 @@ Troubleshooting:
|
|||||||
- Check your script's permission e.g. in case of permission error ``sudo chmod 755 post-consumption-example.sh``
|
- Check your script's permission e.g. in case of permission error ``sudo chmod 755 post-consumption-example.sh``
|
||||||
- Pipe your scripts's output to a log file e.g. ``echo "${DOCUMENT_ID}" | tee --append /usr/src/paperless/scripts/post-consumption-example.log``
|
- Pipe your script's output to a log file e.g. ``echo "${DOCUMENT_ID}" | tee --append /usr/src/paperless/scripts/post-consumption-example.log``
|
||||||
|
|
||||||
.. _post-consumption-example.sh: https://github.com/jonaswinkler/paperless-ngx/blob/master/scripts/post-consumption-example.sh
|
.. _post-consumption-example.sh: https://github.com/paperless-ngx/paperless-ngx/blob/main/scripts/post-consumption-example.sh
|
||||||
|
|
||||||
.. _advanced-file_name_handling:
|
.. _advanced-file_name_handling:
|
||||||
|
|
||||||
@@ -242,7 +248,7 @@ will create a directory structure as follows:
|
|||||||
last filename a document was stored as. If you do rename a file, paperless will
|
last filename a document was stored as. If you do rename a file, paperless will
|
||||||
report your files as missing and won't be able to find them.
|
report your files as missing and won't be able to find them.
|
||||||
|
|
||||||
Paperless provides the following placeholders withing filenames:
|
Paperless provides the following placeholders within filenames:
|
||||||
|
|
||||||
* ``{asn}``: The archive serial number of the document, or "none".
|
* ``{asn}``: The archive serial number of the document, or "none".
|
||||||
* ``{correspondent}``: The name of the correspondent, or "none".
|
* ``{correspondent}``: The name of the correspondent, or "none".
|
||||||
@@ -267,6 +273,17 @@ If paperless detects that two documents share the same filename, paperless will
|
|||||||
append ``_01``, ``_02``, etc to the filename. This happens if all the placeholders in a filename
|
append ``_01``, ``_02``, etc to the filename. This happens if all the placeholders in a filename
|
||||||
evaluate to the same value.
|
evaluate to the same value.
|
||||||
|
|
||||||
|
.. hint::
|
||||||
|
You can affect how empty placeholders are treated by changing the following setting to
|
||||||
|
`true`.
|
||||||
|
|
||||||
|
.. code::
|
||||||
|
|
||||||
|
PAPERLESS_FILENAME_FORMAT_REMOVE_NONE=True
|
||||||
|
|
||||||
|
Doing this results in all empty placeholders resolving to "" instead of "none" as stated above.
|
||||||
|
Spaces before empty placeholders are removed as well, empty directories are omitted.
|
||||||
|
|
||||||
.. hint::
|
.. hint::
|
||||||
|
|
||||||
Paperless checks the filename of a document whenever it is saved. Therefore,
|
Paperless checks the filename of a document whenever it is saved. Therefore,
|
||||||
@@ -289,3 +306,59 @@ evaluate to the same value.
|
|||||||
|
|
||||||
However, keep in mind that inside docker, if files get stored outside of the
|
However, keep in mind that inside docker, if files get stored outside of the
|
||||||
predefined volumes, they will be lost after a restart of paperless.
|
predefined volumes, they will be lost after a restart of paperless.
|
||||||
|
|
||||||
|
|
||||||
|
Storage paths
|
||||||
|
#############
|
||||||
|
|
||||||
|
One of the best things in Paperless is that you can not only access the documents via the
|
||||||
|
web interface, but also via the file system.
|
||||||
|
|
||||||
|
When a single storage layout is not sufficient for your use case, storage paths come to
|
||||||
|
the rescue. Storage paths allow you to configure more precisely where each document is stored
|
||||||
|
in the file system.
|
||||||
|
|
||||||
|
- Each storage path is a `PAPERLESS_FILENAME_FORMAT` and follows the rules described above
|
||||||
|
- Each document is assigned a storage path using the matching algorithms described above, but
|
||||||
|
can be overwritten at any time
|
||||||
|
|
||||||
|
For example, you could define the following two storage paths:
|
||||||
|
|
||||||
|
1. Normal communications are put into a folder structure sorted by `year/correspondent`
|
||||||
|
2. Communications with insurance companies are stored in a flat structure with longer file names,
|
||||||
|
but containing the full date of the correspondence.
|
||||||
|
|
||||||
|
.. code::
|
||||||
|
|
||||||
|
By Year = {created_year}/{correspondent}/{title}
|
||||||
|
Insurances = Insurances/{correspondent}/{created_year}-{created_month}-{created_day} {title}
|
||||||
|
|
||||||
|
|
||||||
|
If you then map these storage paths to the documents, you might get the following result.
|
||||||
|
For simplicity, `By Year` defines the same structure as in the previous example above.
|
||||||
|
|
||||||
|
.. code:: text
|
||||||
|
|
||||||
|
2019/ # By Year
|
||||||
|
My bank/
|
||||||
|
Statement January.pdf
|
||||||
|
Statement February.pdf
|
||||||
|
|
||||||
|
Insurances/ # Insurances
|
||||||
|
Healthcare 123/
|
||||||
|
2022-01-01 Statement January.pdf
|
||||||
|
2022-02-02 Letter.pdf
|
||||||
|
2022-02-03 Letter.pdf
|
||||||
|
Dental 456/
|
||||||
|
2021-12-01 New Conditions.pdf
|
||||||
|
|
||||||
|
|
||||||
|
.. hint::
|
||||||
|
|
||||||
|
Defining a storage path is optional. If no storage path is defined for a document, the global
|
||||||
|
`PAPERLESS_FILENAME_FORMAT` is applied.
|
||||||
|
|
||||||
|
.. caution::
|
||||||
|
|
||||||
|
If you adjust the format of an existing storage path, old documents don't get relocated automatically.
|
||||||
|
You need to run the :ref:`document renamer <utilities-renamer>` to adjust their pathes.
|
||||||
|
|||||||
@@ -31,7 +31,8 @@ The objects served by the document endpoint contain the following fields:
|
|||||||
* ``tags``: List of IDs of tags assigned to this document, or empty list.
|
* ``tags``: List of IDs of tags assigned to this document, or empty list.
|
||||||
* ``document_type``: Document type of this document, or null.
|
* ``document_type``: Document type of this document, or null.
|
||||||
* ``correspondent``: Correspondent of this document or null.
|
* ``correspondent``: Correspondent of this document or null.
|
||||||
* ``created``: The date at which this document was created.
|
* ``created``: The date time at which this document was created.
|
||||||
|
* ``created_date``: The date (YYYY-MM-DD) at which this document was created. Optional. If also passed with created, this is ignored.
|
||||||
* ``modified``: The date at which this document was last edited in paperless. Read-only.
|
* ``modified``: The date at which this document was last edited in paperless. Read-only.
|
||||||
* ``added``: The date at which this document was added to paperless. Read-only.
|
* ``added``: The date at which this document was added to paperless. Read-only.
|
||||||
* ``archive_serial_number``: The identifier of this document in a physical document archive.
|
* ``archive_serial_number``: The identifier of this document in a physical document archive.
|
||||||
@@ -60,7 +61,7 @@ The endpoints correctly serve the response header fields ``Content-Disposition``
|
|||||||
and ``Content-Type`` to indicate the filename for download and the type of content of
|
and ``Content-Type`` to indicate the filename for download and the type of content of
|
||||||
the document.
|
the document.
|
||||||
|
|
||||||
In order to download or preview the original document when an archied document is available,
|
In order to download or preview the original document when an archived document is available,
|
||||||
supply the query parameter ``original=true``.
|
supply the query parameter ``original=true``.
|
||||||
|
|
||||||
.. hint::
|
.. hint::
|
||||||
@@ -240,11 +241,13 @@ be instructed to consume the document from there.
|
|||||||
The endpoint supports the following optional form fields:
|
The endpoint supports the following optional form fields:
|
||||||
|
|
||||||
* ``title``: Specify a title that the consumer should use for the document.
|
* ``title``: Specify a title that the consumer should use for the document.
|
||||||
|
* ``created``: Specify a DateTime where the document was created (e.g. "2016-04-19" or "2016-04-19 06:15:00+02:00").
|
||||||
* ``correspondent``: Specify the ID of a correspondent that the consumer should use for the document.
|
* ``correspondent``: Specify the ID of a correspondent that the consumer should use for the document.
|
||||||
* ``document_type``: Similar to correspondent.
|
* ``document_type``: Similar to correspondent.
|
||||||
* ``tags``: Similar to correspondent. Specify this multiple times to have multiple tags added
|
* ``tags``: Similar to correspondent. Specify this multiple times to have multiple tags added
|
||||||
to the document.
|
to the document.
|
||||||
|
|
||||||
|
|
||||||
The endpoint will immediately return "OK" if the document consumption process
|
The endpoint will immediately return "OK" if the document consumption process
|
||||||
was started successfully. No additional status information about the consumption
|
was started successfully. No additional status information about the consumption
|
||||||
process itself is available, since that happens in a different process.
|
process itself is available, since that happens in a different process.
|
||||||
|
|||||||
1947
docs/changelog.md
Normal file
1688
docs/changelog.rst
24
docs/conf.py
@@ -2,6 +2,8 @@ import sphinx_rtd_theme
|
|||||||
|
|
||||||
|
|
||||||
__version__ = None
|
__version__ = None
|
||||||
|
__full_version_str__ = None
|
||||||
|
__major_minor_version_str__ = None
|
||||||
exec(open("../src/paperless/version.py").read())
|
exec(open("../src/paperless/version.py").read())
|
||||||
|
|
||||||
|
|
||||||
@@ -12,13 +14,17 @@ extensions = [
|
|||||||
"sphinx.ext.imgmath",
|
"sphinx.ext.imgmath",
|
||||||
"sphinx.ext.viewcode",
|
"sphinx.ext.viewcode",
|
||||||
"sphinx_rtd_theme",
|
"sphinx_rtd_theme",
|
||||||
|
"myst_parser",
|
||||||
]
|
]
|
||||||
|
|
||||||
# Add any paths that contain templates here, relative to this directory.
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
# templates_path = ['_templates']
|
templates_path = ["_templates"]
|
||||||
|
|
||||||
# The suffix of source filenames.
|
# The suffix of source filenames.
|
||||||
source_suffix = ".rst"
|
source_suffix = {
|
||||||
|
".rst": "restructuredtext",
|
||||||
|
".md": "markdown",
|
||||||
|
}
|
||||||
|
|
||||||
# The encoding of source files.
|
# The encoding of source files.
|
||||||
# source_encoding = 'utf-8-sig'
|
# source_encoding = 'utf-8-sig'
|
||||||
@@ -41,9 +47,9 @@ copyright = "2015-2022, Daniel Quinn, Jonas Winkler, and the paperless-ngx team"
|
|||||||
#
|
#
|
||||||
|
|
||||||
# The short X.Y version.
|
# The short X.Y version.
|
||||||
version = ".".join([str(_) for _ in __version__[:2]])
|
version = __major_minor_version_str__
|
||||||
# The full version, including alpha/beta/rc tags.
|
# The full version, including alpha/beta/rc tags.
|
||||||
release = ".".join([str(_) for _ in __version__[:3]])
|
release = __full_version_str__
|
||||||
|
|
||||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||||
# for a list of supported languages.
|
# for a list of supported languages.
|
||||||
@@ -119,6 +125,16 @@ html_theme_path = []
|
|||||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||||
html_static_path = ["_static"]
|
html_static_path = ["_static"]
|
||||||
|
|
||||||
|
# These paths are either relative to html_static_path
|
||||||
|
# or fully qualified paths (eg. https://...)
|
||||||
|
html_css_files = [
|
||||||
|
"css/custom.css",
|
||||||
|
]
|
||||||
|
|
||||||
|
html_js_files = [
|
||||||
|
"js/darkmode.js",
|
||||||
|
]
|
||||||
|
|
||||||
# Add any extra paths that contain custom files (such as robots.txt or
|
# Add any extra paths that contain custom files (such as robots.txt or
|
||||||
# .htaccess) here, relative to this directory. These files are copied
|
# .htaccess) here, relative to this directory. These files are copied
|
||||||
# directly to the root of the documentation.
|
# directly to the root of the documentation.
|
||||||
|
|||||||
@@ -31,7 +31,7 @@ PAPERLESS_REDIS=<url>
|
|||||||
|
|
||||||
PAPERLESS_DBHOST=<hostname>
|
PAPERLESS_DBHOST=<hostname>
|
||||||
By default, sqlite is used as the database backend. This can be changed here.
|
By default, sqlite is used as the database backend. This can be changed here.
|
||||||
Set PAPERLESS_DBHOST and PostgreSQL will be used instead of mysql.
|
Set PAPERLESS_DBHOST and PostgreSQL will be used instead of sqlite.
|
||||||
|
|
||||||
PAPERLESS_DBPORT=<port>
|
PAPERLESS_DBPORT=<port>
|
||||||
Adjust port if necessary.
|
Adjust port if necessary.
|
||||||
@@ -60,6 +60,13 @@ PAPERLESS_DBSSLMODE=<mode>
|
|||||||
|
|
||||||
Default is ``prefer``.
|
Default is ``prefer``.
|
||||||
|
|
||||||
|
PAPERLESS_DB_TIMEOUT=<float>
|
||||||
|
Amount of time for a database connection to wait for the database to unlock.
|
||||||
|
Mostly applicable for an sqlite based installation, consider changing to postgresql
|
||||||
|
if you need to increase this.
|
||||||
|
|
||||||
|
Defaults to unset, keeping the Django defaults.
|
||||||
|
|
||||||
Paths and folders
|
Paths and folders
|
||||||
#################
|
#################
|
||||||
|
|
||||||
@@ -111,6 +118,14 @@ PAPERLESS_FILENAME_FORMAT=<format>
|
|||||||
|
|
||||||
Default is none, which disables this feature.
|
Default is none, which disables this feature.
|
||||||
|
|
||||||
|
PAPERLESS_FILENAME_FORMAT_REMOVE_NONE=<bool>
|
||||||
|
Tells paperless to replace placeholders in `PAPERLESS_FILENAME_FORMAT` that would resolve
|
||||||
|
to 'none' to be omitted from the resulting filename. This also holds true for directory
|
||||||
|
names.
|
||||||
|
See :ref:`advanced-file_name_handling` for details.
|
||||||
|
|
||||||
|
Defaults to `false` which disables this feature.
|
||||||
|
|
||||||
PAPERLESS_LOGGING_DIR=<path>
|
PAPERLESS_LOGGING_DIR=<path>
|
||||||
This is where paperless will store log files.
|
This is where paperless will store log files.
|
||||||
|
|
||||||
@@ -130,6 +145,8 @@ PAPERLESS_LOGROTATE_MAX_BACKUPS=<num>
|
|||||||
|
|
||||||
Defaults to 20.
|
Defaults to 20.
|
||||||
|
|
||||||
|
.. _hosting-and-security:
|
||||||
|
|
||||||
Hosting & Security
|
Hosting & Security
|
||||||
##################
|
##################
|
||||||
|
|
||||||
@@ -142,7 +159,24 @@ PAPERLESS_SECRET_KEY=<key>
|
|||||||
|
|
||||||
Default is listed in the file ``src/paperless/settings.py``.
|
Default is listed in the file ``src/paperless/settings.py``.
|
||||||
|
|
||||||
PAPERLESS_ALLOWED_HOSTS<comma-separated-list>
|
PAPERLESS_URL=<url>
|
||||||
|
This setting can be used to set the three options below (ALLOWED_HOSTS,
|
||||||
|
CORS_ALLOWED_HOSTS and CSRF_TRUSTED_ORIGINS). If the other options are
|
||||||
|
set the values will be combined with this one. Do not include a trailing
|
||||||
|
slash. E.g. https://paperless.domain.com
|
||||||
|
|
||||||
|
Defaults to empty string, leaving the other settings unaffected.
|
||||||
|
|
||||||
|
PAPERLESS_CSRF_TRUSTED_ORIGINS=<comma-separated-list>
|
||||||
|
A list of trusted origins for unsafe requests (e.g. POST). As of Django 4.0
|
||||||
|
this is required to access the Django admin via the web.
|
||||||
|
See https://docs.djangoproject.com/en/4.0/ref/settings/#csrf-trusted-origins
|
||||||
|
|
||||||
|
Can also be set using PAPERLESS_URL (see above).
|
||||||
|
|
||||||
|
Defaults to empty string, which does not add any origins to the trusted list.
|
||||||
|
|
||||||
|
PAPERLESS_ALLOWED_HOSTS=<comma-separated-list>
|
||||||
If you're planning on putting Paperless on the open internet, then you
|
If you're planning on putting Paperless on the open internet, then you
|
||||||
really should set this value to the domain name you're using. Failing to do
|
really should set this value to the domain name you're using. Failing to do
|
||||||
so leaves you open to HTTP host header attacks:
|
so leaves you open to HTTP host header attacks:
|
||||||
@@ -151,12 +185,19 @@ PAPERLESS_ALLOWED_HOSTS<comma-separated-list>
|
|||||||
Just remember that this is a comma-separated list, so "example.com" is fine,
|
Just remember that this is a comma-separated list, so "example.com" is fine,
|
||||||
as is "example.com,www.example.com", but NOT " example.com" or "example.com,"
|
as is "example.com,www.example.com", but NOT " example.com" or "example.com,"
|
||||||
|
|
||||||
|
Can also be set using PAPERLESS_URL (see above).
|
||||||
|
|
||||||
|
If manually set, please remember to include "localhost". Otherwise docker
|
||||||
|
healthcheck will fail.
|
||||||
|
|
||||||
Defaults to "*", which is all hosts.
|
Defaults to "*", which is all hosts.
|
||||||
|
|
||||||
PAPERLESS_CORS_ALLOWED_HOSTS<comma-separated-list>
|
PAPERLESS_CORS_ALLOWED_HOSTS=<comma-separated-list>
|
||||||
You need to add your servers to the list of allowed hosts that can do CORS
|
You need to add your servers to the list of allowed hosts that can do CORS
|
||||||
calls. Set this to your public domain name.
|
calls. Set this to your public domain name.
|
||||||
|
|
||||||
|
Can also be set using PAPERLESS_URL (see above).
|
||||||
|
|
||||||
Defaults to "http://localhost:8000".
|
Defaults to "http://localhost:8000".
|
||||||
|
|
||||||
PAPERLESS_FORCE_SCRIPT_NAME=<path>
|
PAPERLESS_FORCE_SCRIPT_NAME=<path>
|
||||||
@@ -185,7 +226,7 @@ PAPERLESS_AUTO_LOGIN_USERNAME=<username>
|
|||||||
PAPERLESS_ADMIN_USER=<username>
|
PAPERLESS_ADMIN_USER=<username>
|
||||||
If this environment variable is specified, Paperless automatically creates
|
If this environment variable is specified, Paperless automatically creates
|
||||||
a superuser with the provided username at start. This is useful in cases
|
a superuser with the provided username at start. This is useful in cases
|
||||||
where you can not run the `createsuperuser` command seperately, such as Kubernetes
|
where you can not run the `createsuperuser` command separately, such as Kubernetes
|
||||||
or AWS ECS.
|
or AWS ECS.
|
||||||
|
|
||||||
Requires `PAPERLESS_ADMIN_PASSWORD` to be set.
|
Requires `PAPERLESS_ADMIN_PASSWORD` to be set.
|
||||||
@@ -389,6 +430,24 @@ PAPERLESS_OCR_IMAGE_DPI=<num>
|
|||||||
Default is none, which will automatically calculate image DPI so that
|
Default is none, which will automatically calculate image DPI so that
|
||||||
the produced PDF documents are A4 sized.
|
the produced PDF documents are A4 sized.
|
||||||
|
|
||||||
|
PAPERLESS_OCR_MAX_IMAGE_PIXELS=<num>
|
||||||
|
Paperless will raise a warning when OCRing images which are over this limit and
|
||||||
|
will not OCR images which are more than twice this limit. Note this does not
|
||||||
|
prevent the document from being consumed, but could result in missing text content.
|
||||||
|
|
||||||
|
If unset, will default to the value determined by
|
||||||
|
`Pillow <https://pillow.readthedocs.io/en/stable/reference/Image.html#PIL.Image.MAX_IMAGE_PIXELS>`_.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
Increasing this limit could cause Paperless to consume additional resources
|
||||||
|
when consuming a file. Be sure you have sufficient system resources.
|
||||||
|
|
||||||
|
.. caution::
|
||||||
|
|
||||||
|
The limit is intended to prevent malicious files from consuming system resources
|
||||||
|
and causing crashes and other errors. Only increase this value if you are certain
|
||||||
|
your documents are not malicious and you need the text which was not OCRed
|
||||||
|
|
||||||
PAPERLESS_OCR_USER_ARGS=<json>
|
PAPERLESS_OCR_USER_ARGS=<json>
|
||||||
OCRmyPDF offers many more options. Use this parameter to specify any
|
OCRmyPDF offers many more options. Use this parameter to specify any
|
||||||
@@ -439,7 +498,7 @@ PAPERLESS_TIKA_GOTENBERG_ENDPOINT=<url>
|
|||||||
Defaults to "http://localhost:3000".
|
Defaults to "http://localhost:3000".
|
||||||
|
|
||||||
If you run paperless on docker, you can add those services to the docker-compose
|
If you run paperless on docker, you can add those services to the docker-compose
|
||||||
file (see the provided ``docker-compose.tika.yml`` file for reference). The changes
|
file (see the provided ``docker-compose.sqlite-tika.yml`` file for reference). The changes
|
||||||
requires are as follows:
|
requires are as follows:
|
||||||
|
|
||||||
.. code:: yaml
|
.. code:: yaml
|
||||||
@@ -460,19 +519,22 @@ requires are as follows:
|
|||||||
# ...
|
# ...
|
||||||
|
|
||||||
gotenberg:
|
gotenberg:
|
||||||
image: gotenberg/gotenberg:7
|
image: gotenberg/gotenberg:7.4
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
environment:
|
command:
|
||||||
CHROMIUM_DISABLE_ROUTES: 1
|
- "gotenberg"
|
||||||
|
- "--chromium-disable-routes=true"
|
||||||
|
|
||||||
tika:
|
tika:
|
||||||
image: apache/tika
|
image: ghcr.io/paperless-ngx/tika:latest
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
||||||
Add the configuration variables to the environment of the webserver (alternatively
|
Add the configuration variables to the environment of the webserver (alternatively
|
||||||
put the configuration in the ``docker-compose.env`` file) and add the additional
|
put the configuration in the ``docker-compose.env`` file) and add the additional
|
||||||
services below the webserver service. Watch out for indentation.
|
services below the webserver service. Watch out for indentation.
|
||||||
|
|
||||||
|
Make sure to use the correct format `PAPERLESS_TIKA_ENABLED = 1` so python_dotenv can parse the statement correctly.
|
||||||
|
|
||||||
Software tweaks
|
Software tweaks
|
||||||
###############
|
###############
|
||||||
|
|
||||||
@@ -481,6 +543,8 @@ PAPERLESS_TASK_WORKERS=<num>
|
|||||||
maintain the automatic matching algorithm, check emails, consume documents,
|
maintain the automatic matching algorithm, check emails, consume documents,
|
||||||
etc. This variable specifies how many things it will do in parallel.
|
etc. This variable specifies how many things it will do in parallel.
|
||||||
|
|
||||||
|
Defaults to 1
|
||||||
|
|
||||||
|
|
||||||
PAPERLESS_THREADS_PER_WORKER=<num>
|
PAPERLESS_THREADS_PER_WORKER=<num>
|
||||||
Furthermore, paperless uses multiple threads when consuming documents to
|
Furthermore, paperless uses multiple threads when consuming documents to
|
||||||
@@ -528,6 +592,10 @@ PAPERLESS_WORKER_TIMEOUT=<num>
|
|||||||
large documents within the default 1800 seconds. So extending this timeout
|
large documents within the default 1800 seconds. So extending this timeout
|
||||||
may prove to be useful on weak hardware setups.
|
may prove to be useful on weak hardware setups.
|
||||||
|
|
||||||
|
PAPERLESS_WORKER_RETRY=<num>
|
||||||
|
If PAPERLESS_WORKER_TIMEOUT has been configured, the retry time for a task can
|
||||||
|
also be configured. By default, this value will be set to 10s more than the
|
||||||
|
worker timeout. This value should never be set less than the worker timeout.
|
||||||
|
|
||||||
PAPERLESS_TIME_ZONE=<timezone>
|
PAPERLESS_TIME_ZONE=<timezone>
|
||||||
Set the time zone here.
|
Set the time zone here.
|
||||||
@@ -548,6 +616,28 @@ PAPERLESS_CONSUMER_POLLING=<num>
|
|||||||
|
|
||||||
Defaults to 0, which disables polling and uses filesystem notifications.
|
Defaults to 0, which disables polling and uses filesystem notifications.
|
||||||
|
|
||||||
|
PAPERLESS_CONSUMER_POLLING_RETRY_COUNT=<num>
|
||||||
|
If consumer polling is enabled, sets the number of times paperless will check for a
|
||||||
|
file to remain unmodified.
|
||||||
|
|
||||||
|
Defaults to 5.
|
||||||
|
|
||||||
|
PAPERLESS_CONSUMER_POLLING_DELAY=<num>
|
||||||
|
If consumer polling is enabled, sets the delay in seconds between each check (above) paperless
|
||||||
|
will do while waiting for a file to remain unmodified.
|
||||||
|
|
||||||
|
Defaults to 5.
|
||||||
|
|
||||||
|
.. _configuration-inotify:
|
||||||
|
|
||||||
|
PAPERLESS_CONSUMER_INOTIFY_DELAY=<num>
|
||||||
|
Sets the time in seconds the consumer will wait for additional events
|
||||||
|
from inotify before the consumer will consider a file ready and begin consumption.
|
||||||
|
Certain scanners or network setups may generate multiple events for a single file,
|
||||||
|
leading to multiple consumers working on the same file. Configure this to
|
||||||
|
prevent that.
|
||||||
|
|
||||||
|
Defaults to 0.5 seconds.
|
||||||
|
|
||||||
PAPERLESS_CONSUMER_DELETE_DUPLICATES=<bool>
|
PAPERLESS_CONSUMER_DELETE_DUPLICATES=<bool>
|
||||||
When the consumer detects a duplicate document, it will not touch the
|
When the consumer detects a duplicate document, it will not touch the
|
||||||
@@ -576,6 +666,37 @@ PAPERLESS_CONSUMER_SUBDIRS_AS_TAGS=<bool>
|
|||||||
|
|
||||||
Defaults to false.
|
Defaults to false.
|
||||||
|
|
||||||
|
PAPERLESS_CONSUMER_ENABLE_BARCODES=<bool>
|
||||||
|
Enables the scanning and page separation based on detected barcodes.
|
||||||
|
This allows for scanning and adding multiple documents per uploaded
|
||||||
|
file, which are separated by one or multiple barcode pages.
|
||||||
|
|
||||||
|
For ease of use, it is suggested to use a standardized separation page,
|
||||||
|
e.g. `here <https://www.alliancegroup.co.uk/patch-codes.htm>`_.
|
||||||
|
|
||||||
|
If no barcodes are detected in the uploaded file, no page separation
|
||||||
|
will happen.
|
||||||
|
|
||||||
|
The original document will be removed and the separated pages will be
|
||||||
|
saved as pdf.
|
||||||
|
|
||||||
|
Defaults to false.
|
||||||
|
|
||||||
|
PAPERLESS_CONSUMER_BARCODE_TIFF_SUPPORT=<bool>
|
||||||
|
Whether TIFF image files should be scanned for barcodes.
|
||||||
|
This will automatically convert any TIFF image(s) to pdfs for later
|
||||||
|
processing.
|
||||||
|
This only has an effect, if PAPERLESS_CONSUMER_ENABLE_BARCODES has been
|
||||||
|
enabled.
|
||||||
|
|
||||||
|
Defaults to false.
|
||||||
|
|
||||||
|
PAPERLESS_CONSUMER_BARCODE_STRING=PATCHT
|
||||||
|
Defines the string to be detected as a separator barcode.
|
||||||
|
If paperless is used with the PATCH-T separator pages, users
|
||||||
|
shouldn't change this.
|
||||||
|
|
||||||
|
Defaults to "PATCHT"
|
||||||
|
|
||||||
PAPERLESS_CONVERT_MEMORY_LIMIT=<num>
|
PAPERLESS_CONVERT_MEMORY_LIMIT=<num>
|
||||||
On smaller systems, or even in the case of Very Large Documents, the consumer
|
On smaller systems, or even in the case of Very Large Documents, the consumer
|
||||||
@@ -600,13 +721,6 @@ PAPERLESS_CONVERT_TMPDIR=<path>
|
|||||||
|
|
||||||
Default is none, which disables the temporary directory.
|
Default is none, which disables the temporary directory.
|
||||||
|
|
||||||
PAPERLESS_OPTIMIZE_THUMBNAILS=<bool>
|
|
||||||
Use optipng to optimize thumbnails. This usually reduces the size of
|
|
||||||
thumbnails by about 20%, but uses considerable compute time during
|
|
||||||
consumption.
|
|
||||||
|
|
||||||
Defaults to true.
|
|
||||||
|
|
||||||
PAPERLESS_POST_CONSUME_SCRIPT=<filename>
|
PAPERLESS_POST_CONSUME_SCRIPT=<filename>
|
||||||
After a document is consumed, Paperless can trigger an arbitrary script if
|
After a document is consumed, Paperless can trigger an arbitrary script if
|
||||||
you like. This script will be passed a number of arguments for you to work
|
you like. This script will be passed a number of arguments for you to work
|
||||||
@@ -622,6 +736,9 @@ PAPERLESS_FILENAME_DATE_ORDER=<format>
|
|||||||
The filename will be checked first, and if nothing is found, the document
|
The filename will be checked first, and if nothing is found, the document
|
||||||
text will be checked as normal.
|
text will be checked as normal.
|
||||||
|
|
||||||
|
A date in a filename must have some separators (`.`, `-`, `/`, etc)
|
||||||
|
for it to be parsed.
|
||||||
|
|
||||||
Defaults to none, which disables this feature.
|
Defaults to none, which disables this feature.
|
||||||
|
|
||||||
PAPERLESS_THUMBNAIL_FONT_NAME=<filename>
|
PAPERLESS_THUMBNAIL_FONT_NAME=<filename>
|
||||||
@@ -639,10 +756,7 @@ PAPERLESS_IGNORE_DATES=<string>
|
|||||||
this process. This is useful for special dates (like date of birth) that appear
|
this process. This is useful for special dates (like date of birth) that appear
|
||||||
in documents regularly but are very unlikely to be the documents creation date.
|
in documents regularly but are very unlikely to be the documents creation date.
|
||||||
|
|
||||||
You may specify dates in a multitude of formats supported by dateparser (see
|
The date is parsed using the order specified in PAPERLESS_DATE_ORDER
|
||||||
https://dateparser.readthedocs.io/en/latest/#popular-formats) but as the dates
|
|
||||||
need to be comma separated, the options are limited.
|
|
||||||
Example: "2020-12-02,22.04.1999"
|
|
||||||
|
|
||||||
Defaults to an empty string to not ignore any dates.
|
Defaults to an empty string to not ignore any dates.
|
||||||
|
|
||||||
@@ -659,7 +773,7 @@ PAPERLESS_CONSUMER_IGNORE_PATTERNS=<json>
|
|||||||
|
|
||||||
This can be adjusted by configuring a custom json array with patterns to exclude.
|
This can be adjusted by configuring a custom json array with patterns to exclude.
|
||||||
|
|
||||||
Defautls to ``[".DS_STORE/*", "._*", ".stfolder/*"]``.
|
Defaults to ``[".DS_STORE/*", "._*", ".stfolder/*", ".stversions/*", ".localized/*", "desktop.ini"]``.
|
||||||
|
|
||||||
Binaries
|
Binaries
|
||||||
########
|
########
|
||||||
@@ -677,9 +791,6 @@ PAPERLESS_CONVERT_BINARY=<path>
|
|||||||
PAPERLESS_GS_BINARY=<path>
|
PAPERLESS_GS_BINARY=<path>
|
||||||
Defaults to "/usr/bin/gs".
|
Defaults to "/usr/bin/gs".
|
||||||
|
|
||||||
PAPERLESS_OPTIPNG_BINARY=<path>
|
|
||||||
Defaults to "/usr/bin/optipng".
|
|
||||||
|
|
||||||
|
|
||||||
.. _configuration-docker:
|
.. _configuration-docker:
|
||||||
|
|
||||||
@@ -695,9 +806,7 @@ PAPERLESS_WEBSERVER_WORKERS=<num>
|
|||||||
also loads the entire application into memory separately, so increasing this value
|
also loads the entire application into memory separately, so increasing this value
|
||||||
will increase RAM usage.
|
will increase RAM usage.
|
||||||
|
|
||||||
Consider configuring this to 1 on low power devices with limited amount of RAM.
|
Defaults to 1.
|
||||||
|
|
||||||
Defaults to 2.
|
|
||||||
|
|
||||||
PAPERLESS_PORT=<port>
|
PAPERLESS_PORT=<port>
|
||||||
The port number the webserver will listen on inside the container. There are
|
The port number the webserver will listen on inside the container. There are
|
||||||
@@ -752,3 +861,26 @@ PAPERLESS_OCR_LANGUAGES=<list>
|
|||||||
PAPERLESS_OCR_LANGUAGE=tur
|
PAPERLESS_OCR_LANGUAGE=tur
|
||||||
|
|
||||||
Defaults to none, which does not install any additional languages.
|
Defaults to none, which does not install any additional languages.
|
||||||
|
|
||||||
|
|
||||||
|
.. _configuration-update-checking:
|
||||||
|
|
||||||
|
Update Checking
|
||||||
|
###############
|
||||||
|
|
||||||
|
PAPERLESS_ENABLE_UPDATE_CHECK=<bool>
|
||||||
|
Enable (or disable) the automatic check for available updates. This feature is disabled
|
||||||
|
by default but if it is not explicitly set Paperless-ngx will show a message about this.
|
||||||
|
|
||||||
|
If enabled, the feature works by pinging the the Github API for the latest release e.g.
|
||||||
|
https://api.github.com/repos/paperless-ngx/paperless-ngx/releases/latest
|
||||||
|
to determine whether a new version is available.
|
||||||
|
|
||||||
|
Actual updating of the app must still be performed manually.
|
||||||
|
|
||||||
|
Note that for users of thirdy-party containers e.g. linuxserver.io this notification
|
||||||
|
may be 'ahead' of a new release from the third-party maintainers.
|
||||||
|
|
||||||
|
In either case, no tracking data is collected by the app in any way.
|
||||||
|
|
||||||
|
Defaults to none, which disables the feature.
|
||||||
|
|||||||
@@ -1,145 +0,0 @@
|
|||||||
.. _contributing:
|
|
||||||
|
|
||||||
Contributing to Paperless
|
|
||||||
#########################
|
|
||||||
|
|
||||||
.. warning::
|
|
||||||
|
|
||||||
This section is not updated to paperless-ngx yet.
|
|
||||||
|
|
||||||
Maybe you've been using Paperless for a while and want to add a feature or two,
|
|
||||||
or maybe you've come across a bug that you have some ideas how to solve. The
|
|
||||||
beauty of Free software is that you can see what's wrong and help to get it
|
|
||||||
fixed for everyone!
|
|
||||||
|
|
||||||
|
|
||||||
How to Get Your Changes Rolled Into Paperless
|
|
||||||
=============================================
|
|
||||||
|
|
||||||
If you've found a bug, but don't know how to fix it, you can always post an
|
|
||||||
issue on `GitHub`_ in the hopes that someone will have the time to fix it for
|
|
||||||
you. If however you're the one with the time, pull requests are always
|
|
||||||
welcome, you just have to make sure that your code conforms to a few standards:
|
|
||||||
|
|
||||||
Pep8
|
|
||||||
----
|
|
||||||
|
|
||||||
It's the standard for all Python development, so it's `very well documented`_.
|
|
||||||
The short version is:
|
|
||||||
|
|
||||||
* Lines should wrap at 79 characters
|
|
||||||
* Use ``snake_case`` for variables, ``CamelCase`` for classes, and ``ALL_CAPS``
|
|
||||||
for constants.
|
|
||||||
* Space out your operators: ``stuff + 7`` instead of ``stuff+7``
|
|
||||||
* Two empty lines between classes, and functions, but 1 empty line between
|
|
||||||
class methods.
|
|
||||||
|
|
||||||
There's more to it than that, but if you follow those, you'll probably be
|
|
||||||
alright. When you submit your pull request, there's a pep8 checker that'll
|
|
||||||
look at your code to see if anything is off. If it finds anything, it'll
|
|
||||||
complain at you until you fix it.
|
|
||||||
|
|
||||||
|
|
||||||
Additional Style Guides
|
|
||||||
-----------------------
|
|
||||||
|
|
||||||
Where pep8 is ambiguous, I've tried to be a little more specific. These rules
|
|
||||||
aren't hard-and-fast, but if you can conform to them, I'll appreciate it and
|
|
||||||
spend less time trying to conform your PR before merging:
|
|
||||||
|
|
||||||
|
|
||||||
Function calls
|
|
||||||
..............
|
|
||||||
|
|
||||||
If you're calling a function and that necessitates more than one line of code,
|
|
||||||
please format it like this:
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
my_function(
|
|
||||||
argument1,
|
|
||||||
kwarg1="x",
|
|
||||||
kwarg2="y"
|
|
||||||
another_really_long_kwarg="some big value"
|
|
||||||
a_kwarg_calling_another_long_function=another_function(
|
|
||||||
another_arg,
|
|
||||||
another_kwarg="kwarg!"
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
This is all in the interest of code uniformity rather than anything else. If
|
|
||||||
we stick to a style, everything is understandable in the same way.
|
|
||||||
|
|
||||||
|
|
||||||
Quoting Strings
|
|
||||||
...............
|
|
||||||
|
|
||||||
pep8 is a little too open-minded on this for my liking. Python strings should
|
|
||||||
be quoted with double quotes (``"``) except in cases where the resulting string
|
|
||||||
would require too much escaping of a double quote, in which case, a single
|
|
||||||
quoted, or triple-quoted string will do:
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
my_string = "This is my string"
|
|
||||||
problematic_string = 'This is a "string" with "quotes" in it'
|
|
||||||
|
|
||||||
In HTML templates, please use double-quotes for tag attributes, and single
|
|
||||||
quotes for arguments passed to Django template tags:
|
|
||||||
|
|
||||||
.. code:: html
|
|
||||||
|
|
||||||
<div class="stuff">
|
|
||||||
<a href="{% url 'some-url-name' pk='w00t' %}">link this</a>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
This is to keep linters happy they look at an HTML file and see an attribute
|
|
||||||
closing the ``"`` before it should have been.
|
|
||||||
|
|
||||||
--
|
|
||||||
|
|
||||||
That's all there is in terms of guidelines, so I hope it's not too daunting.
|
|
||||||
|
|
||||||
|
|
||||||
Indentation & Spacing
|
|
||||||
.....................
|
|
||||||
|
|
||||||
When it comes to indentation:
|
|
||||||
|
|
||||||
* For Python, the rule is: follow pep8 and use 4 spaces.
|
|
||||||
* For Javascript, CSS, and HTML, please use 1 tab.
|
|
||||||
|
|
||||||
Additionally, Django templates making use of block elements like ``{% if %}``,
|
|
||||||
``{% for %}``, and ``{% block %}`` etc. should be indented:
|
|
||||||
|
|
||||||
Good:
|
|
||||||
|
|
||||||
.. code:: html
|
|
||||||
|
|
||||||
{% block stuff %}
|
|
||||||
<h1>This is the stuff</h1>
|
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
Bad:
|
|
||||||
|
|
||||||
.. code:: html
|
|
||||||
|
|
||||||
{% block stuff %}
|
|
||||||
<h1>This is the stuff</h1>
|
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
|
|
||||||
The Code of Conduct
|
|
||||||
===================
|
|
||||||
|
|
||||||
Paperless has a `code of conduct`_. It's a lot like the other ones you see out
|
|
||||||
there, with a few small changes, but basically it boils down to:
|
|
||||||
|
|
||||||
> Don't be an ass, or you might get banned.
|
|
||||||
|
|
||||||
I'm proud to say that the CoC has never had to be enforced because everyone has
|
|
||||||
been awesome, friendly, and professional.
|
|
||||||
|
|
||||||
.. _GitHub: https://github.com/the-paperless-project/paperless/issues
|
|
||||||
.. _very well documented: https://www.python.org/dev/peps/pep-0008/
|
|
||||||
.. _code of conduct: https://github.com/the-paperless-project/paperless/blob/master/CODE_OF_CONDUCT.md
|
|
||||||
@@ -1,13 +1,13 @@
|
|||||||
.. _extending:
|
.. _extending:
|
||||||
|
|
||||||
Paperless development
|
Paperless-ngx Development
|
||||||
#####################
|
#########################
|
||||||
|
|
||||||
This section describes the steps you need to take to start development on paperless-ngx.
|
This section describes the steps you need to take to start development on paperless-ngx.
|
||||||
|
|
||||||
Check out the source from github. The repository is organized in the following way:
|
Check out the source from github. The repository is organized in the following way:
|
||||||
|
|
||||||
* ``master`` always represents the latest release and will only see changes
|
* ``main`` always represents the latest release and will only see changes
|
||||||
when a new release is made.
|
when a new release is made.
|
||||||
* ``dev`` contains the code that will be in the next release.
|
* ``dev`` contains the code that will be in the next release.
|
||||||
* ``feature-X`` contain bigger changes that will be in some release, but not
|
* ``feature-X`` contain bigger changes that will be in some release, but not
|
||||||
@@ -23,6 +23,33 @@ Apart from that, the folder structure is as follows:
|
|||||||
* ``scripts/`` - Various scripts that help with different parts of development.
|
* ``scripts/`` - Various scripts that help with different parts of development.
|
||||||
* ``docker/`` - Files required to build the docker image.
|
* ``docker/`` - Files required to build the docker image.
|
||||||
|
|
||||||
|
Contributing to Paperless
|
||||||
|
=========================
|
||||||
|
|
||||||
|
Maybe you've been using Paperless for a while and want to add a feature or two,
|
||||||
|
or maybe you've come across a bug that you have some ideas how to solve. The
|
||||||
|
beauty of open source software is that you can see what's wrong and help to get
|
||||||
|
it fixed for everyone!
|
||||||
|
|
||||||
|
Before contributing please review our `code of conduct`_ and other important
|
||||||
|
information in the `contributing guidelines`_.
|
||||||
|
|
||||||
|
.. _code-formatting-with-pre-commit-hooks:
|
||||||
|
|
||||||
|
Code formatting with pre-commit Hooks
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
To ensure a consistent style and formatting across the project source, the project
|
||||||
|
utilizes a Git `pre-commit` hook to perform some formatting and linting before a
|
||||||
|
commit is allowed. That way, everyone uses the same style and some common issues
|
||||||
|
can be caught early on. See below for installation instructions.
|
||||||
|
|
||||||
|
Once installed, hooks will run when you commit. If the formatting isn't quite right
|
||||||
|
or a linter catches something, the commit will be rejected. You'll need to look at the
|
||||||
|
output and fix the issue. Some hooks, such as the Python formatting tool `black`,
|
||||||
|
will format failing files, so all you need to do is `git add` those files again and
|
||||||
|
retry your commit.
|
||||||
|
|
||||||
Initial setup and first start
|
Initial setup and first start
|
||||||
=============================
|
=============================
|
||||||
|
|
||||||
@@ -37,13 +64,19 @@ To do the setup you need to perform the steps from the following chapters in a c
|
|||||||
|
|
||||||
$ npm install -g @angular/cli
|
$ npm install -g @angular/cli
|
||||||
|
|
||||||
4. Create ``consume`` and ``media`` folders in the cloned root folder.
|
4. Install pre-commit
|
||||||
|
|
||||||
|
.. code:: shell-session
|
||||||
|
|
||||||
|
pre-commit install
|
||||||
|
|
||||||
|
5. Create ``consume`` and ``media`` folders in the cloned root folder.
|
||||||
|
|
||||||
.. code:: shell-session
|
.. code:: shell-session
|
||||||
|
|
||||||
mkdir -p consume media
|
mkdir -p consume media
|
||||||
|
|
||||||
5. You can now either ...
|
6. You can now either ...
|
||||||
|
|
||||||
* install redis or
|
* install redis or
|
||||||
* use the included scripts/start-services.sh to use docker to fire up a redis instance (and some other services such as tika, gotenberg and a postgresql server) or
|
* use the included scripts/start-services.sh to use docker to fire up a redis instance (and some other services such as tika, gotenberg and a postgresql server) or
|
||||||
@@ -53,41 +86,42 @@ To do the setup you need to perform the steps from the following chapters in a c
|
|||||||
|
|
||||||
docker run -d -p 6379:6379 --restart unless-stopped redis:latest
|
docker run -d -p 6379:6379 --restart unless-stopped redis:latest
|
||||||
|
|
||||||
6. Install the python dependencies by performing in the src/ directory.
|
7. Install the python dependencies by performing in the src/ directory.
|
||||||
|
|
||||||
.. code:: shell-session
|
.. code:: shell-session
|
||||||
|
|
||||||
pipenv install --dev
|
pipenv install --dev
|
||||||
|
|
||||||
* Make sure you're using python 3.9.x or lower. Otherwise you might get issues with building dependencies. You can use `pyenv <https://github.com/pyenv/pyenv>`_ to install a specific python version.
|
* Make sure you're using python 3.9.x or lower. Otherwise you might get issues with building dependencies. You can use `pyenv <https://github.com/pyenv/pyenv>`_ to install a specific python version.
|
||||||
|
|
||||||
7. Generate the static UI so you can perform a login to get session that is required for frontend development (this needs to be done one time only). From src-ui directory:
|
8. Generate the static UI so you can perform a login to get session that is required for frontend development (this needs to be done one time only). From src-ui directory:
|
||||||
|
|
||||||
.. code:: shell-session
|
.. code:: shell-session
|
||||||
|
|
||||||
npm install .
|
npm install .
|
||||||
./node_modules/.bin/ng build --configuration production
|
./node_modules/.bin/ng build --configuration production
|
||||||
|
|
||||||
8. Apply migrations and create a superuser for your dev instance:
|
9. Apply migrations and create a superuser for your dev instance:
|
||||||
|
|
||||||
.. code:: shell-session
|
.. code:: shell-session
|
||||||
|
|
||||||
python3 manage.py migrate
|
python3 manage.py migrate
|
||||||
python3 manage.py createsuperuser
|
python3 manage.py createsuperuser
|
||||||
|
|
||||||
9. Now spin up the dev backend. Depending on which part of paperless you're developing for, you need to have some or all of them running.
|
10. Now spin up the dev backend. Depending on which part of paperless you're developing for, you need to have some or all of them running.
|
||||||
|
|
||||||
.. code:: shell-session
|
.. code:: shell-session
|
||||||
|
|
||||||
python3 manage.py runserver & python3 manage.py document_consumer & python3 manage.py qcluster
|
python3 manage.py runserver & python3 manage.py document_consumer & python3 manage.py qcluster
|
||||||
|
|
||||||
10. Login with the superuser credentials provided in step 8 at ``http://localhost:8000`` to create a session that enables you to use the backend.
|
11. Login with the superuser credentials provided in step 8 at ``http://localhost:8000`` to create a session that enables you to use the backend.
|
||||||
|
|
||||||
Backend development environment is now ready, to start Frontend development go to ``/src-ui`` and run ``ng serve``. From there you can use ``http://localhost:4200`` for a preview.
|
Backend development environment is now ready, to start Frontend development go to ``/src-ui`` and run ``ng serve``. From there you can use ``http://localhost:4200`` for a preview.
|
||||||
|
|
||||||
Back end development
|
Back end development
|
||||||
====================
|
====================
|
||||||
|
|
||||||
The backend is a django application. I use PyCharm for development, but you can use whatever
|
The backend is a django application. PyCharm works well for development, but you can use whatever
|
||||||
you want.
|
you want.
|
||||||
|
|
||||||
Configure the IDE to use the src/ folder as the base source folder. Configure the following
|
Configure the IDE to use the src/ folder as the base source folder. Configure the following
|
||||||
@@ -108,8 +142,9 @@ Testing and code style:
|
|||||||
* Run ``pytest`` in the src/ directory to execute all tests. This also generates a HTML coverage
|
* Run ``pytest`` in the src/ directory to execute all tests. This also generates a HTML coverage
|
||||||
report. When runnings test, paperless.conf is loaded as well. However: the tests rely on the default
|
report. When runnings test, paperless.conf is loaded as well. However: the tests rely on the default
|
||||||
configuration. This is not ideal. But for now, make sure no settings except for DEBUG are overridden when testing.
|
configuration. This is not ideal. But for now, make sure no settings except for DEBUG are overridden when testing.
|
||||||
* Run ``black`` to format your code.
|
* Coding style is enforced by the Git pre-commit hooks. These will ensure your code is formatted and do some
|
||||||
* Run ``pycodestyle`` to test your code for issues with the configured code style settings.
|
linting when you do a `git commit`.
|
||||||
|
* You can also run ``black`` manually to format your code
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
@@ -121,9 +156,8 @@ Testing and code style:
|
|||||||
Front end development
|
Front end development
|
||||||
=====================
|
=====================
|
||||||
|
|
||||||
The front end is build using angular. I use the ``Code - OSS`` IDE for development.
|
The front end is built using Angular. In order to get started, you need ``npm``.
|
||||||
|
Install the Angular CLI interface with
|
||||||
In order to get started, you need ``npm``. Install the Angular CLI interface with
|
|
||||||
|
|
||||||
.. code:: shell-session
|
.. code:: shell-session
|
||||||
|
|
||||||
@@ -152,6 +186,31 @@ X-Frame-Options are in place so that the front end behaves exactly as in product
|
|||||||
relies on you being logged into the back end. Without a valid session, The front end will simply
|
relies on you being logged into the back end. Without a valid session, The front end will simply
|
||||||
not work.
|
not work.
|
||||||
|
|
||||||
|
Testing and code style:
|
||||||
|
|
||||||
|
* The frontend code (.ts, .html, .scss) use ``prettier`` for code formatting via the Git
|
||||||
|
``pre-commit`` hooks which run automatically on commit. See
|
||||||
|
:ref:`above <code-formatting-with-pre-commit-hooks>` for installation. You can also run this
|
||||||
|
via cli with a command such as
|
||||||
|
|
||||||
|
.. code:: shell-session
|
||||||
|
|
||||||
|
$ git ls-files -- '*.ts' | xargs pre-commit run prettier --files
|
||||||
|
|
||||||
|
* Frontend testing uses jest and cypress. There is currently a need for significantly more
|
||||||
|
frontend tests. Unit tests and e2e tests, respectively, can be run non-interactively with:
|
||||||
|
|
||||||
|
.. code:: shell-session
|
||||||
|
|
||||||
|
$ ng test
|
||||||
|
$ npm run e2e:ci
|
||||||
|
|
||||||
|
Cypress also includes a UI which can be run from within the ``src-ui`` directory with
|
||||||
|
|
||||||
|
.. code:: shell-session
|
||||||
|
|
||||||
|
$ ./node_modules/.bin/cypress open
|
||||||
|
|
||||||
In order to build the front end and serve it as part of django, execute
|
In order to build the front end and serve it as part of django, execute
|
||||||
|
|
||||||
.. code:: shell-session
|
.. code:: shell-session
|
||||||
@@ -275,11 +334,17 @@ directory.
|
|||||||
Building the Docker image
|
Building the Docker image
|
||||||
=========================
|
=========================
|
||||||
|
|
||||||
|
The docker image is primarily built by the GitHub actions workflow, but it can be
|
||||||
|
faster when developing to build and tag an image locally.
|
||||||
|
|
||||||
|
To provide the build arguments automatically, build the image using the helper
|
||||||
|
script ``build-docker-image.sh``.
|
||||||
|
|
||||||
Building the docker image from source:
|
Building the docker image from source:
|
||||||
|
|
||||||
.. code:: shell-session
|
.. code:: shell-session
|
||||||
|
|
||||||
docker build . -t <your-tag>
|
./build-docker-image.sh Dockerfile -t <your-tag>
|
||||||
|
|
||||||
Extending Paperless
|
Extending Paperless
|
||||||
===================
|
===================
|
||||||
@@ -361,3 +426,6 @@ that returns information about your parser:
|
|||||||
download. We could guess that from the file extensions, but some mime types have many extensions
|
download. We could guess that from the file extensions, but some mime types have many extensions
|
||||||
associated with them and the python methods responsible for guessing the extension do not always
|
associated with them and the python methods responsible for guessing the extension do not always
|
||||||
return the same value.
|
return the same value.
|
||||||
|
|
||||||
|
.. _code of conduct: https://github.com/paperless-ngx/paperless-ngx/blob/main/CODE_OF_CONDUCT.md
|
||||||
|
.. _contributing guidelines: https://github.com/paperless-ngx/paperless-ngx/blob/main/CONTRIBUTING.md
|
||||||
|
|||||||
13
docs/faq.rst
@@ -5,11 +5,11 @@ Frequently asked questions
|
|||||||
|
|
||||||
**Q:** *What's the general plan for Paperless-ngx?*
|
**Q:** *What's the general plan for Paperless-ngx?*
|
||||||
|
|
||||||
**A:** While Paperless-ngx is already considered largely "feature-complete" it is a community-driven
|
**A:** While Paperless-ngx is already considered largely "feature-complete" it is a community-driven
|
||||||
project and development will be guided in this way. New features can be submitted via
|
project and development will be guided in this way. New features can be submitted via
|
||||||
GitHub discussions and "up-voted" by the community but this is not a garauntee the feature
|
GitHub discussions and "up-voted" by the community but this is not a guarantee the feature
|
||||||
will be implemented. This project will always be open to collaboration in the form of PRs,
|
will be implemented. This project will always be open to collaboration in the form of PRs,
|
||||||
ideas etc.
|
ideas etc.
|
||||||
|
|
||||||
**Q:** *I'm using docker. Where are my documents?*
|
**Q:** *I'm using docker. Where are my documents?*
|
||||||
|
|
||||||
@@ -81,11 +81,10 @@ python requirements do not have precompiled packages for ARM / ARM64. Installati
|
|||||||
of these will require additional development libraries and compilation will take
|
of these will require additional development libraries and compilation will take
|
||||||
a long time.
|
a long time.
|
||||||
|
|
||||||
**Q:** *How do I run this on unRaid?*
|
**Q:** *How do I run this on Unraid?*
|
||||||
|
|
||||||
**A:** Head over to `<https://github.com/selfhosters/unRAID-CA-templates>`_,
|
**A:** Paperless-ngx is available as `community app <https://unraid.net/community/apps?q=paperless-ngx>`_
|
||||||
`Uli Fahrer <https://github.com/Tooa>`_ created a container template for that.
|
in Unraid. `Uli Fahrer <https://github.com/Tooa>`_ created a container template for that.
|
||||||
I don't exactly know how to use that though, since I don't use unRaid.
|
|
||||||
|
|
||||||
**Q:** *How do I run this on my toaster?*
|
**Q:** *How do I run this on my toaster?*
|
||||||
|
|
||||||
|
|||||||
@@ -52,7 +52,7 @@ resources in the documentation:
|
|||||||
* Paperless is now integrated with a
|
* Paperless is now integrated with a
|
||||||
:ref:`task processing queue <setup-task_processor>` that tells you
|
:ref:`task processing queue <setup-task_processor>` that tells you
|
||||||
at a glance when and why something is not working.
|
at a glance when and why something is not working.
|
||||||
* The :ref:`changelog <paperless_changelog>` contains a detailed list of all changes
|
* The :doc:`changelog </changelog>` contains a detailed list of all changes
|
||||||
in paperless-ngx.
|
in paperless-ngx.
|
||||||
|
|
||||||
Contents
|
Contents
|
||||||
@@ -70,7 +70,6 @@ Contents
|
|||||||
faq
|
faq
|
||||||
troubleshooting
|
troubleshooting
|
||||||
extending
|
extending
|
||||||
contributing
|
|
||||||
scanners
|
scanners
|
||||||
screenshots
|
screenshots
|
||||||
changelog
|
changelog
|
||||||
|
|||||||
@@ -0,0 +1 @@
|
|||||||
|
myst-parser==0.17.2
|
||||||
|
|||||||
@@ -13,43 +13,45 @@ that works right for you based on recommendations from other Paperless users.
|
|||||||
Physical scanners
|
Physical scanners
|
||||||
=================
|
=================
|
||||||
|
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Brand | Model | Supports | Recommended By |
|
| Brand | Model | Supports | Recommended By |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| | | FTP | NFS | SMB | SMTP | API [1]_ | |
|
| | | FTP | SFTP | NFS | SMB | SMTP | API [1]_ | |
|
||||||
+=========+================+=====+=====+=====+======+==========+================+
|
+=========+===================+=====+======+=====+==========+======+==========+================+
|
||||||
| Brother | `ADS-1700W`_ | yes | | yes | yes | |`holzhannes`_ |
|
| Brother | `ADS-1700W`_ | yes | yes | | yes | yes | |`holzhannes`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Brother | `ADS-1600W`_ | yes | | yes | yes | |`holzhannes`_ |
|
| Brother | `ADS-1600W`_ | yes | | | yes | yes | |`holzhannes`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Brother | `ADS-1500W`_ | yes | | yes | yes | |`danielquinn`_ |
|
| Brother | `ADS-1500W`_ | yes | | | yes | yes | |`danielquinn`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Brother | `ADS-1100W`_ | yes | | | | |`ytzelf`_ |
|
| Brother | `ADS-1100W`_ | yes | | | | | |`ytzelf`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Brother | `ADS-2800W`_ | yes | yes | | yes | yes |`philpagel`_ |
|
| Brother | `ADS-2800W`_ | yes | yes | | yes | yes | |`philpagel`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Brother | `MFC-J6930DW`_ | yes | | | | |`ayounggun`_ |
|
| Brother | `MFC-J6930DW`_ | yes | | | | | |`ayounggun`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Brother | `MFC-L5850DW`_ | yes | | | yes | |`holzhannes`_ |
|
| Brother | `MFC-L5850DW`_ | yes | | | | yes | |`holzhannes`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Brother | `MFC-L2750DW`_ | yes | | yes | yes | |`muued`_ |
|
| Brother | `MFC-L2750DW`_ | yes | | | yes | yes | |`muued`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Brother | `MFC-J5910DW`_ | yes | | | | |`bmsleight`_ |
|
| Brother | `MFC-J5910DW`_ | yes | | | | | |`bmsleight`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Brother | `MFC-8950DW`_ | yes | | | yes | yes |`philpagel`_ |
|
| Brother | `MFC-8950DW`_ | yes | | | yes | yes | |`philpagel`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Brother | `MFC-9142CDN`_ | yes | | yes | | |`REOLDEV`_ |
|
| Brother | `MFC-9142CDN`_ | yes | | | yes | | |`REOLDEV`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Fujitsu | `ix500`_ | yes | | yes | | |`eonist`_ |
|
| Canon | `Maxify MB 5350`_ | | | | yes [2]_ | yes | |`eingemaischt`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Epson | `ES-580W`_ | yes | | yes | yes | |`fignew`_ |
|
| Fujitsu | `ix500`_ | yes | | | yes | | |`eonist`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Epson | `WF-7710DWF`_ | yes | | yes | | |`Skylinar`_ |
|
| Epson | `ES-580W`_ | yes | | | yes | yes | |`fignew`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Fujitsu | `S1300i`_ | yes | | yes | | |`jonaswinkler`_ |
|
| Epson | `WF-7710DWF`_ | yes | | | yes | | |`Skylinar`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
| Doxie | `Q2`_ | | | | | yes |`Unkn0wnCat`_ |
|
| Fujitsu | `S1300i`_ | yes | | | yes | | |`jonaswinkler`_ |
|
||||||
+---------+----------------+-----+-----+-----+------+----------+----------------+
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
|
| Doxie | `Q2`_ | | | | | | yes |`Unkn0wnCat`_ |
|
||||||
|
+---------+-------------------+-----+------+-----+----------+------+----------+----------------+
|
||||||
|
|
||||||
.. _MFC-L5850DW: https://www.brother-usa.com/products/mfcl5850dw
|
.. _MFC-L5850DW: https://www.brother-usa.com/products/mfcl5850dw
|
||||||
.. _MFC-L2750DW: https://www.brother.de/drucker/laserdrucker/mfc-l2750dw
|
.. _MFC-L2750DW: https://www.brother.de/drucker/laserdrucker/mfc-l2750dw
|
||||||
@@ -58,6 +60,7 @@ Physical scanners
|
|||||||
.. _ADS-1500W: https://www.brother.ca/en/p/ads1500w
|
.. _ADS-1500W: https://www.brother.ca/en/p/ads1500w
|
||||||
.. _ADS-1100W: https://support.brother.com/g/b/downloadtop.aspx?c=fr&lang=fr&prod=ads1100w_eu_as_cn
|
.. _ADS-1100W: https://support.brother.com/g/b/downloadtop.aspx?c=fr&lang=fr&prod=ads1100w_eu_as_cn
|
||||||
.. _ADS-2800W: https://www.brother-usa.com/products/ads2800w
|
.. _ADS-2800W: https://www.brother-usa.com/products/ads2800w
|
||||||
|
.. _Maxify MB 5350: https://www.canon.de/printers/inkjet/maxify/maxify_mb5350/specification.html
|
||||||
.. _MFC-J6930DW: https://www.brother.ca/en/p/MFCJ6930DW
|
.. _MFC-J6930DW: https://www.brother.ca/en/p/MFCJ6930DW
|
||||||
.. _MFC-J5910DW: https://www.brother.co.uk/printers/inkjet-printers/mfcj5910dw
|
.. _MFC-J5910DW: https://www.brother.co.uk/printers/inkjet-printers/mfcj5910dw
|
||||||
.. _MFC-8950DW: https://www.brother-usa.com/products/mfc8950dw
|
.. _MFC-8950DW: https://www.brother-usa.com/products/mfc8950dw
|
||||||
@@ -81,8 +84,11 @@ Physical scanners
|
|||||||
.. _Unkn0wnCat: https://github.com/Unkn0wnCat
|
.. _Unkn0wnCat: https://github.com/Unkn0wnCat
|
||||||
.. _muued: https://github.com/muued
|
.. _muued: https://github.com/muued
|
||||||
.. _philpagel: https://github.com/philpagel
|
.. _philpagel: https://github.com/philpagel
|
||||||
|
.. _eingemaischt: https://github.com/eingemaischt
|
||||||
|
|
||||||
.. [1] Scanners with API Integration allow to push scanned documents directly to :ref:`Paperless API <api-file_uploads>`, sometimes referred to as Webhook or Document POST.
|
.. [1] Scanners with API Integration allow to push scanned documents directly to :ref:`Paperless API <api-file_uploads>`, sometimes referred to as Webhook or Document POST.
|
||||||
|
.. [2] Canon Multi Function Printers show strange behavior over SMB. They close and reopen the file after every page. It's recommended to tune the
|
||||||
|
:ref:`polling <configuration-polling>` and :ref:`inotify <configuration-inotify>` configuration values for your scanner. The scanner timeout is 3 minutes, so ``180`` is a good starting point.
|
||||||
|
|
||||||
Mobile phone software
|
Mobile phone software
|
||||||
=====================
|
=====================
|
||||||
@@ -105,6 +111,9 @@ You can use your phone to "scan" documents. The regular camera app will work, bu
|
|||||||
|
|
||||||
On Android, you can use these applications in combination with one of the :ref:`Paperless-ngx compatible apps <usage-mobile_upload>` to "Share" the documents produced by these scanner apps with paperless. On iOS, you can share the scanned documents via iOS-Sharing to other mail, WebDav or FTP apps.
|
On Android, you can use these applications in combination with one of the :ref:`Paperless-ngx compatible apps <usage-mobile_upload>` to "Share" the documents produced by these scanner apps with paperless. On iOS, you can share the scanned documents via iOS-Sharing to other mail, WebDav or FTP apps.
|
||||||
|
|
||||||
|
There is also an iOS Shortcut that allows you to directly upload text, PDF and image documents available here: https://www.icloud.com/shortcuts/d234abc0885040129d9d75fa45fe1154
|
||||||
|
Please note this only works for documents downloaded to iCloud / the device, in other words not directly from a URL.
|
||||||
|
|
||||||
.. _Office Lens: https://play.google.com/store/apps/details?id=com.microsoft.office.officelens
|
.. _Office Lens: https://play.google.com/store/apps/details?id=com.microsoft.office.officelens
|
||||||
.. _Genius Scan: https://play.google.com/store/apps/details?id=com.thegrizzlylabs.geniusscan.free
|
.. _Genius Scan: https://play.google.com/store/apps/details?id=com.thegrizzlylabs.geniusscan.free
|
||||||
.. _OCR Scanner - QuickScan: https://apps.apple.com/us/app/quickscan-scanner-text-ocr/id1513790291
|
.. _OCR Scanner - QuickScan: https://apps.apple.com/us/app/quickscan-scanner-text-ocr/id1513790291
|
||||||
@@ -112,3 +121,22 @@ On Android, you can use these applications in combination with one of the :ref:`
|
|||||||
|
|
||||||
.. _hannahswain: https://github.com/hannahswain
|
.. _hannahswain: https://github.com/hannahswain
|
||||||
.. _benjaminfrank: https://github.com/benjaminfrank
|
.. _benjaminfrank: https://github.com/benjaminfrank
|
||||||
|
|
||||||
|
API Scanning Setup
|
||||||
|
==================
|
||||||
|
|
||||||
|
This sections contains information on how to set up scanners to post directly to :ref:`Paperless API <api-file_uploads>`.
|
||||||
|
|
||||||
|
Doxie Q2
|
||||||
|
--------
|
||||||
|
|
||||||
|
This part assumes your Doxie is connected to WiFi and you know its IP.
|
||||||
|
|
||||||
|
1. Open your Doxie web UI by navigating to its IP address
|
||||||
|
2. Navigate to Options -> Webhook
|
||||||
|
3. Set the *URL* to ``https://[your-paperless-ngx-instance]/api/documents/post_document/``
|
||||||
|
4. Set the *File Parameter Name* to ``document``
|
||||||
|
5. Add the username and password to the respective fields (Consider creating a user just for your Doxie)
|
||||||
|
6. Click *Submit* at the bottom of the page
|
||||||
|
|
||||||
|
Congrats, you can now scan directly from your Doxie to your Paperless-ngx instance!
|
||||||
|
|||||||
@@ -4,42 +4,60 @@
|
|||||||
Screenshots
|
Screenshots
|
||||||
***********
|
***********
|
||||||
|
|
||||||
This is what paperless-ngx looks like. You shouldn't use paperless to index
|
This is what Paperless-ngx looks like.
|
||||||
research papers though, its a horrible tool for that job.
|
|
||||||
|
|
||||||
The dashboard shows customizable views on your document and allows document uploads:
|
The dashboard shows customizable views on your document and allows document uploads:
|
||||||
|
|
||||||
.. image:: _static/screenshots/dashboard.png
|
.. image:: _static/screenshots/dashboard.png
|
||||||
|
:target: _static/screenshots/dashboard.png
|
||||||
|
|
||||||
The document list provides three different styles to scroll through your documents:
|
The document list provides three different styles to scroll through your documents:
|
||||||
|
|
||||||
.. image:: _static/screenshots/documents-table.png
|
.. image:: _static/screenshots/documents-table.png
|
||||||
|
:target: _static/screenshots/documents-table.png
|
||||||
.. image:: _static/screenshots/documents-smallcards.png
|
.. image:: _static/screenshots/documents-smallcards.png
|
||||||
|
:target: _static/screenshots/documents-smallcards.png
|
||||||
.. image:: _static/screenshots/documents-largecards.png
|
.. image:: _static/screenshots/documents-largecards.png
|
||||||
|
:target: _static/screenshots/documents-largecards.png
|
||||||
|
|
||||||
|
Paperless-ngx also supports "dark mode":
|
||||||
|
|
||||||
|
.. image:: _static/screenshots/documents-smallcards-dark.png
|
||||||
|
:target: _static/screenshots/documents-smallcards-dark.png
|
||||||
|
|
||||||
Extensive filtering mechanisms:
|
Extensive filtering mechanisms:
|
||||||
|
|
||||||
.. image:: _static/screenshots/documents-filter.png
|
.. image:: _static/screenshots/documents-filter.png
|
||||||
|
:target: _static/screenshots/documents-filter.png
|
||||||
|
|
||||||
Side-by-side editing of documents. Optimized for 1080p.
|
Bulk editing of document tags, correspondents, etc.:
|
||||||
|
|
||||||
|
.. image:: _static/screenshots/bulk-edit.png
|
||||||
|
:target: _static/screenshots/bulk-edit.png
|
||||||
|
|
||||||
|
Side-by-side editing of documents:
|
||||||
|
|
||||||
.. image:: _static/screenshots/editing.png
|
.. image:: _static/screenshots/editing.png
|
||||||
|
:target: _static/screenshots/editing.png
|
||||||
|
|
||||||
Tag editing. This looks about the same for correspondents and document types.
|
Tag editing. This looks about the same for correspondents and document types.
|
||||||
|
|
||||||
.. image:: _static/screenshots/new-tag.png
|
.. image:: _static/screenshots/new-tag.png
|
||||||
|
:target: _static/screenshots/new-tag.png
|
||||||
|
|
||||||
Searching provides auto complete and highlights the results.
|
Searching provides auto complete and highlights the results.
|
||||||
|
|
||||||
.. image:: _static/screenshots/search-preview.png
|
.. image:: _static/screenshots/search-preview.png
|
||||||
|
:target: _static/screenshots/search-preview.png
|
||||||
.. image:: _static/screenshots/search-results.png
|
.. image:: _static/screenshots/search-results.png
|
||||||
|
:target: _static/screenshots/search-results.png
|
||||||
|
|
||||||
Fancy mail filters!
|
Fancy mail filters!
|
||||||
|
|
||||||
.. image:: _static/screenshots/mail-rules-edited.png
|
.. image:: _static/screenshots/mail-rules-edited.png
|
||||||
|
:target: _static/screenshots/mail-rules-edited.png
|
||||||
|
|
||||||
Mobile support in the future? This kinda works, however some layouts are still
|
Mobile devices are supported.
|
||||||
too wide.
|
|
||||||
|
|
||||||
.. image:: _static/screenshots/mobile.png
|
.. image:: _static/screenshots/mobile.png
|
||||||
|
:target: _static/screenshots/mobile.png
|
||||||
|
|||||||
@@ -110,7 +110,7 @@ performs all the steps described in :ref:`setup-docker_hub` automatically.
|
|||||||
|
|
||||||
.. code:: shell-session
|
.. code:: shell-session
|
||||||
|
|
||||||
$ bash -c "$(curl -L https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/master/install-paperless-ngx.sh)"
|
$ bash -c "$(curl -L https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"
|
||||||
|
|
||||||
.. _setup-docker_hub:
|
.. _setup-docker_hub:
|
||||||
|
|
||||||
@@ -184,6 +184,25 @@ Install Paperless from Docker Hub
|
|||||||
port 8000. Modifying the part before the colon will map requests on another
|
port 8000. Modifying the part before the colon will map requests on another
|
||||||
port to the webserver running on the default port.
|
port to the webserver running on the default port.
|
||||||
|
|
||||||
|
**Rootless**
|
||||||
|
|
||||||
|
If you want to run Paperless as a rootless container, you will need to do the
|
||||||
|
following in your ``docker-compose.yml``:
|
||||||
|
|
||||||
|
- set the ``user`` running the container to map to the ``paperless`` user in the
|
||||||
|
container.
|
||||||
|
This value (``user_id`` below), should be the same id that ``USERMAP_UID`` and
|
||||||
|
``USERMAP_GID`` are set to in the next step.
|
||||||
|
See ``USERMAP_UID`` and ``USERMAP_GID`` :ref:`here <configuration-docker>`.
|
||||||
|
|
||||||
|
Your entry for Paperless should contain something like:
|
||||||
|
|
||||||
|
.. code::
|
||||||
|
|
||||||
|
webserver:
|
||||||
|
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||||
|
user: <user_id>
|
||||||
|
|
||||||
5. Modify ``docker-compose.env``, following the comments in the file. The
|
5. Modify ``docker-compose.env``, following the comments in the file. The
|
||||||
most important change is to set ``USERMAP_UID`` and ``USERMAP_GID``
|
most important change is to set ``USERMAP_UID`` and ``USERMAP_GID``
|
||||||
to the uid and gid of your user on the host system. Use ``id -u`` and
|
to the uid and gid of your user on the host system. Use ``id -u`` and
|
||||||
@@ -200,6 +219,19 @@ Install Paperless from Docker Hub
|
|||||||
You can copy any setting from the file ``paperless.conf.example`` and paste it here.
|
You can copy any setting from the file ``paperless.conf.example`` and paste it here.
|
||||||
Have a look at :ref:`configuration` to see what's available.
|
Have a look at :ref:`configuration` to see what's available.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
You can utilize Docker secrets for some configuration settings by
|
||||||
|
appending `_FILE` to some configuration values. This is supported currently
|
||||||
|
only by:
|
||||||
|
* PAPERLESS_DBUSER
|
||||||
|
* PAPERLESS_DBPASS
|
||||||
|
* PAPERLESS_SECRET_KEY
|
||||||
|
* PAPERLESS_AUTO_LOGIN_USERNAME
|
||||||
|
* PAPERLESS_ADMIN_USER
|
||||||
|
* PAPERLESS_ADMIN_MAIL
|
||||||
|
* PAPERLESS_ADMIN_PASSWORD
|
||||||
|
|
||||||
.. caution::
|
.. caution::
|
||||||
|
|
||||||
Some file systems such as NFS network shares don't support file system
|
Some file systems such as NFS network shares don't support file system
|
||||||
@@ -286,17 +318,18 @@ writing. Windows is not and will never be supported.
|
|||||||
|
|
||||||
* ``fonts-liberation`` for generating thumbnails for plain text files
|
* ``fonts-liberation`` for generating thumbnails for plain text files
|
||||||
* ``imagemagick`` >= 6 for PDF conversion
|
* ``imagemagick`` >= 6 for PDF conversion
|
||||||
* ``optipng`` for optimizing thumbnails
|
|
||||||
* ``gnupg`` for handling encrypted documents
|
* ``gnupg`` for handling encrypted documents
|
||||||
* ``libpq-dev`` for PostgreSQL
|
* ``libpq-dev`` for PostgreSQL
|
||||||
* ``libmagic-dev`` for mime type detection
|
* ``libmagic-dev`` for mime type detection
|
||||||
* ``mime-support`` for mime type detection
|
* ``mime-support`` for mime type detection
|
||||||
|
* ``libzbar0`` for barcode detection
|
||||||
|
* ``poppler-utils`` for barcode detection
|
||||||
|
|
||||||
Use this list for your preferred package management:
|
Use this list for your preferred package management:
|
||||||
|
|
||||||
.. code::
|
.. code::
|
||||||
|
|
||||||
python3 python3-pip python3-dev imagemagick fonts-liberation optipng gnupg libpq-dev libmagic-dev mime-support
|
python3 python3-pip python3-dev imagemagick fonts-liberation gnupg libpq-dev libmagic-dev mime-support libzbar0 poppler-utils
|
||||||
|
|
||||||
These dependencies are required for OCRmyPDF, which is used for text recognition.
|
These dependencies are required for OCRmyPDF, which is used for text recognition.
|
||||||
|
|
||||||
@@ -306,7 +339,7 @@ writing. Windows is not and will never be supported.
|
|||||||
* ``qpdf``
|
* ``qpdf``
|
||||||
* ``liblept5``
|
* ``liblept5``
|
||||||
* ``libxml2``
|
* ``libxml2``
|
||||||
* ``pngquant``
|
* ``pngquant`` (suggested for certain PDF image optimizations)
|
||||||
* ``zlib1g``
|
* ``zlib1g``
|
||||||
* ``tesseract-ocr`` >= 4.0.0 for OCR
|
* ``tesseract-ocr`` >= 4.0.0 for OCR
|
||||||
* ``tesseract-ocr`` language packs (``tesseract-ocr-eng``, ``tesseract-ocr-deu``, etc)
|
* ``tesseract-ocr`` language packs (``tesseract-ocr-eng``, ``tesseract-ocr-deu``, etc)
|
||||||
@@ -330,6 +363,12 @@ writing. Windows is not and will never be supported.
|
|||||||
3. Optional. Install ``postgresql`` and configure a database, user and password for paperless. If you do not wish
|
3. Optional. Install ``postgresql`` and configure a database, user and password for paperless. If you do not wish
|
||||||
to use PostgreSQL, SQLite is available as well.
|
to use PostgreSQL, SQLite is available as well.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
On bare-metal installations using SQLite, ensure the
|
||||||
|
`JSON1 extension <https://code.djangoproject.com/wiki/JSON1Extension>`_ is enabled. This is
|
||||||
|
usually the case, but not always.
|
||||||
|
|
||||||
4. Get the release archive from `<https://github.com/paperless-ngx/paperless-ngx/releases>`_.
|
4. Get the release archive from `<https://github.com/paperless-ngx/paperless-ngx/releases>`_.
|
||||||
If you clone the git repo as it is, you also have to compile the front end by yourself.
|
If you clone the git repo as it is, you also have to compile the front end by yourself.
|
||||||
Extract the archive to a place from where you wish to execute it, such as ``/opt/paperless``.
|
Extract the archive to a place from where you wish to execute it, such as ``/opt/paperless``.
|
||||||
@@ -345,6 +384,8 @@ writing. Windows is not and will never be supported.
|
|||||||
paperless stores its data. If you like, you can point both to the same directory.
|
paperless stores its data. If you like, you can point both to the same directory.
|
||||||
* ``PAPERLESS_SECRET_KEY`` should be a random sequence of characters. It's used for authentication. Failure
|
* ``PAPERLESS_SECRET_KEY`` should be a random sequence of characters. It's used for authentication. Failure
|
||||||
to do so allows third parties to forge authentication credentials.
|
to do so allows third parties to forge authentication credentials.
|
||||||
|
* ``PAPERLESS_URL`` if you are behind a reverse proxy. This should point to your domain. Please see
|
||||||
|
:ref:`configuration` for more information.
|
||||||
|
|
||||||
Many more adjustments can be made to paperless, especially the OCR part. The following options are recommended
|
Many more adjustments can be made to paperless, especially the OCR part. The following options are recommended
|
||||||
for everyone:
|
for everyone:
|
||||||
@@ -477,7 +518,7 @@ Migrating from Paperless-ng
|
|||||||
===========================
|
===========================
|
||||||
|
|
||||||
Paperless-ngx is meant to be a drop-in replacement for Paperless-ng and thus upgrading should be
|
Paperless-ngx is meant to be a drop-in replacement for Paperless-ng and thus upgrading should be
|
||||||
trivial for most users, especially when using docker. However, as with any major change, it is
|
trivial for most users, especially when using docker. However, as with any major change, it is
|
||||||
recommended to take a full backup first. Once you are ready, simply change the docker image to
|
recommended to take a full backup first. Once you are ready, simply change the docker image to
|
||||||
point to the new source. E.g. if using Docker Compose, edit ``docker-compose.yml`` and change:
|
point to the new source. E.g. if using Docker Compose, edit ``docker-compose.yml`` and change:
|
||||||
|
|
||||||
@@ -490,12 +531,12 @@ to
|
|||||||
.. code::
|
.. code::
|
||||||
|
|
||||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||||
|
|
||||||
and then run ``docker-compose up -d`` which will pull the new image recreate the container.
|
and then run ``docker-compose up -d`` which will pull the new image recreate the container.
|
||||||
That's it!
|
That's it!
|
||||||
|
|
||||||
Users who installed with the bare-metal route should also update their Git clone to point to
|
Users who installed with the bare-metal route should also update their Git clone to point to
|
||||||
``https://github.com/paperless-ngx/paperless-ngx``, e.g. using the command
|
``https://github.com/paperless-ngx/paperless-ngx``, e.g. using the command
|
||||||
``git remote set-url origin https://github.com/paperless-ngx/paperless-ngx`` and then pull the
|
``git remote set-url origin https://github.com/paperless-ngx/paperless-ngx`` and then pull the
|
||||||
latest version.
|
latest version.
|
||||||
|
|
||||||
@@ -509,7 +550,7 @@ how you installed paperless.
|
|||||||
This setup describes how to update an existing paperless Docker installation.
|
This setup describes how to update an existing paperless Docker installation.
|
||||||
The important things to keep in mind are as follows:
|
The important things to keep in mind are as follows:
|
||||||
|
|
||||||
* Read the :ref:`changelog <paperless_changelog>` and take note of breaking changes.
|
* Read the :doc:`changelog </changelog>` and take note of breaking changes.
|
||||||
* You should decide if you want to stick with SQLite or want to migrate your database
|
* You should decide if you want to stick with SQLite or want to migrate your database
|
||||||
to PostgreSQL. See :ref:`setup-sqlite_to_psql` for details on how to move your data from
|
to PostgreSQL. See :ref:`setup-sqlite_to_psql` for details on how to move your data from
|
||||||
SQLite to PostgreSQL. Both work fine with paperless. However, if you already have a
|
SQLite to PostgreSQL. Both work fine with paperless. However, if you already have a
|
||||||
@@ -720,8 +761,6 @@ configuring some options in paperless can help improve performance immensely:
|
|||||||
* If you want to perform OCR on the device, consider using ``PAPERLESS_OCR_CLEAN=none``.
|
* If you want to perform OCR on the device, consider using ``PAPERLESS_OCR_CLEAN=none``.
|
||||||
This will speed up OCR times and use less memory at the expense of slightly worse
|
This will speed up OCR times and use less memory at the expense of slightly worse
|
||||||
OCR results.
|
OCR results.
|
||||||
* Set ``PAPERLESS_OPTIMIZE_THUMBNAILS`` to 'false' if you want faster consumption
|
|
||||||
times. Thumbnails will be about 20% larger.
|
|
||||||
* If using docker, consider setting ``PAPERLESS_WEBSERVER_WORKERS`` to
|
* If using docker, consider setting ``PAPERLESS_WEBSERVER_WORKERS`` to
|
||||||
1. This will save some memory.
|
1. This will save some memory.
|
||||||
|
|
||||||
@@ -782,4 +821,6 @@ the following configuration is required for paperless to operate:
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
The ``PAPERLESS_URL`` configuration variable is also required when using a reverse proxy. Please refer to the :ref:`hosting-and-security` docs.
|
||||||
|
|
||||||
Also read `this <https://channels.readthedocs.io/en/stable/deploying.html#nginx-supervisor-ubuntu>`__, towards the end of the section.
|
Also read `this <https://channels.readthedocs.io/en/stable/deploying.html#nginx-supervisor-ubuntu>`__, towards the end of the section.
|
||||||
|
|||||||
@@ -25,6 +25,19 @@ Check for the following issues:
|
|||||||
* Go to the admin interface, and check if there are failed tasks. If so, the
|
* Go to the admin interface, and check if there are failed tasks. If so, the
|
||||||
tasks will contain an error message.
|
tasks will contain an error message.
|
||||||
|
|
||||||
|
Consumer warns ``OCR for XX failed``
|
||||||
|
####################################
|
||||||
|
|
||||||
|
If you find the OCR accuracy to be too low, and/or the document consumer warns
|
||||||
|
that ``OCR for XX failed, but we're going to stick with what we've got since
|
||||||
|
FORGIVING_OCR is enabled``, then you might need to install the
|
||||||
|
`Tesseract language files <http://packages.ubuntu.com/search?keywords=tesseract-ocr>`_
|
||||||
|
matching your document's languages.
|
||||||
|
|
||||||
|
As an example, if you are running Paperless-ngx from any Ubuntu or Debian
|
||||||
|
box, and your documents are written in Spanish you may need to run::
|
||||||
|
|
||||||
|
apt-get install -y tesseract-ocr-spa
|
||||||
|
|
||||||
Consumer fails to pick up any new files
|
Consumer fails to pick up any new files
|
||||||
#######################################
|
#######################################
|
||||||
@@ -106,17 +119,18 @@ You may experience these errors when using the optional TIKA integration:
|
|||||||
Gotenberg is a server that converts Office documents into PDF documents and has a default timeout of 30 seconds.
|
Gotenberg is a server that converts Office documents into PDF documents and has a default timeout of 30 seconds.
|
||||||
When conversion takes longer, Gotenberg raises this error.
|
When conversion takes longer, Gotenberg raises this error.
|
||||||
|
|
||||||
You can increase the timeout by configuring an environment variable for Gotenberg (see also `here <https://gotenberg.dev/docs/modules/api#properties>`__).
|
You can increase the timeout by configuring a command flag for Gotenberg (see also `here <https://gotenberg.dev/docs/modules/api#properties>`__).
|
||||||
If using docker-compose, this is achieved by the following configuration change in the ``docker-compose.yml`` file:
|
If using docker-compose, this is achieved by the following configuration change in the ``docker-compose.yml`` file:
|
||||||
|
|
||||||
.. code:: yaml
|
.. code:: yaml
|
||||||
|
|
||||||
gotenberg:
|
gotenberg:
|
||||||
image: gotenberg/gotenberg:7
|
image: gotenberg/gotenberg:7.4
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
environment:
|
command:
|
||||||
CHROMIUM_DISABLE_ROUTES: 1
|
- "gotenberg"
|
||||||
API_PROCESS_TIMEOUT: 60
|
- "--chromium-disable-routes=true"
|
||||||
|
- "--api-timeout=60"
|
||||||
|
|
||||||
Permission denied errors in the consumption directory
|
Permission denied errors in the consumption directory
|
||||||
#####################################################
|
#####################################################
|
||||||
@@ -221,3 +235,85 @@ You might find messages like these in your log files:
|
|||||||
This indicates that paperless failed to read PDF metadata from one of your documents. This happens when you
|
This indicates that paperless failed to read PDF metadata from one of your documents. This happens when you
|
||||||
open the affected documents in paperless for editing. Paperless will continue to work, and will simply not
|
open the affected documents in paperless for editing. Paperless will continue to work, and will simply not
|
||||||
show the invalid metadata.
|
show the invalid metadata.
|
||||||
|
|
||||||
|
Consumer fails with a FileNotFoundError
|
||||||
|
#######################################
|
||||||
|
|
||||||
|
You might find messages like these in your log files:
|
||||||
|
|
||||||
|
.. code::
|
||||||
|
|
||||||
|
[ERROR] [paperless.consumer] Error while consuming document SCN_0001.pdf: FileNotFoundError: [Errno 2] No such file or directory: '/tmp/ocrmypdf.io.yhk3zbv0/origin.pdf'
|
||||||
|
Traceback (most recent call last):
|
||||||
|
File "/app/paperless/src/paperless_tesseract/parsers.py", line 261, in parse
|
||||||
|
ocrmypdf.ocr(**args)
|
||||||
|
File "/usr/local/lib/python3.8/dist-packages/ocrmypdf/api.py", line 337, in ocr
|
||||||
|
return run_pipeline(options=options, plugin_manager=plugin_manager, api=True)
|
||||||
|
File "/usr/local/lib/python3.8/dist-packages/ocrmypdf/_sync.py", line 385, in run_pipeline
|
||||||
|
exec_concurrent(context, executor)
|
||||||
|
File "/usr/local/lib/python3.8/dist-packages/ocrmypdf/_sync.py", line 302, in exec_concurrent
|
||||||
|
pdf = post_process(pdf, context, executor)
|
||||||
|
File "/usr/local/lib/python3.8/dist-packages/ocrmypdf/_sync.py", line 235, in post_process
|
||||||
|
pdf_out = metadata_fixup(pdf_out, context)
|
||||||
|
File "/usr/local/lib/python3.8/dist-packages/ocrmypdf/_pipeline.py", line 798, in metadata_fixup
|
||||||
|
with pikepdf.open(context.origin) as original, pikepdf.open(working_file) as pdf:
|
||||||
|
File "/usr/local/lib/python3.8/dist-packages/pikepdf/_methods.py", line 923, in open
|
||||||
|
pdf = Pdf._open(
|
||||||
|
FileNotFoundError: [Errno 2] No such file or directory: '/tmp/ocrmypdf.io.yhk3zbv0/origin.pdf'
|
||||||
|
|
||||||
|
This probably indicates paperless tried to consume the same file twice. This can happen for a number of reasons,
|
||||||
|
depending on how documents are placed into the consume folder. If paperless is using inotify (the default) to
|
||||||
|
check for documents, try adjusting the :ref:`inotify configuration <configuration-inotify>`. If polling is enabled,
|
||||||
|
try adjusting the :ref:`polling configuration <configuration-polling>`.
|
||||||
|
|
||||||
|
Consumer fails waiting for file to remain unmodified.
|
||||||
|
#####################################################
|
||||||
|
|
||||||
|
You might find messages like these in your log files:
|
||||||
|
|
||||||
|
.. code::
|
||||||
|
|
||||||
|
[ERROR] [paperless.management.consumer] Timeout while waiting on file /usr/src/paperless/src/../consume/SCN_0001.pdf to remain unmodified.
|
||||||
|
|
||||||
|
This indicates paperless timed out while waiting for the file to be completely written to the consume folder.
|
||||||
|
Adjusting :ref:`polling configuration <configuration-polling>` values should resolve the issue.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
The user will need to manually move the file out of the consume folder and
|
||||||
|
back in, for the initial failing file to be consumed.
|
||||||
|
|
||||||
|
Consumer fails reporting "OS reports file as busy still".
|
||||||
|
#########################################################
|
||||||
|
|
||||||
|
You might find messages like these in your log files:
|
||||||
|
|
||||||
|
.. code::
|
||||||
|
|
||||||
|
[WARNING] [paperless.management.consumer] Not consuming file /usr/src/paperless/src/../consume/SCN_0001.pdf: OS reports file as busy still
|
||||||
|
|
||||||
|
This indicates paperless was unable to open the file, as the OS reported the file as still being in use. To prevent a
|
||||||
|
crash, paperless did not try to consume the file. If paperless is using inotify (the default) to
|
||||||
|
check for documents, try adjusting the :ref:`inotify configuration <configuration-inotify>`. If polling is enabled,
|
||||||
|
try adjusting the :ref:`polling configuration <configuration-polling>`.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
The user will need to manually move the file out of the consume folder and
|
||||||
|
back in, for the initial failing file to be consumed.
|
||||||
|
|
||||||
|
Log reports "Creating PaperlessTask failed".
|
||||||
|
#########################################################
|
||||||
|
|
||||||
|
You might find messages like these in your log files:
|
||||||
|
|
||||||
|
.. code::
|
||||||
|
|
||||||
|
[ERROR] [paperless.management.consumer] Creating PaperlessTask failed: db locked
|
||||||
|
|
||||||
|
You are likely using an sqlite based installation, with an increased number of workers and are running into sqlite's concurrency limitations.
|
||||||
|
Uploading or consuming multiple files at once results in many workers attempting to access the database simultaneously.
|
||||||
|
|
||||||
|
Consider changing to the PostgreSQL database if you will be processing many documents at once often. Otherwise,
|
||||||
|
try tweaking the ``PAPERLESS_DB_TIMEOUT`` setting to allow more time for the database to unlock. This may have
|
||||||
|
minor performance implications.
|
||||||
|
|||||||
@@ -62,7 +62,7 @@ your documents:
|
|||||||
|
|
||||||
1. OCR the document, if it has no text. Digital documents usually have text,
|
1. OCR the document, if it has no text. Digital documents usually have text,
|
||||||
and this step will be skipped for those documents.
|
and this step will be skipped for those documents.
|
||||||
2. Paperless will create an archiveable PDF/A document from your document.
|
2. Paperless will create an archivable PDF/A document from your document.
|
||||||
If this document is coming from your scanner, it will have embedded selectable text.
|
If this document is coming from your scanner, it will have embedded selectable text.
|
||||||
3. Paperless performs automatic matching of tags, correspondents and types on the
|
3. Paperless performs automatic matching of tags, correspondents and types on the
|
||||||
document before storing it in the database.
|
document before storing it in the database.
|
||||||
@@ -102,12 +102,14 @@ files from the scanner. Typically, you're looking at an FTP server like
|
|||||||
|
|
||||||
.. TODO: hyperref to configuration of the location of this magic folder.
|
.. TODO: hyperref to configuration of the location of this magic folder.
|
||||||
|
|
||||||
Dashboard upload
|
Web UI Upload
|
||||||
================
|
=============
|
||||||
|
|
||||||
The dashboard has a file drop field to upload documents to paperless. Simply drag a file
|
The dashboard has a file drop field to upload documents to paperless. Simply drag a file
|
||||||
onto this field or select a file with the file dialog. Multiple files are supported.
|
onto this field or select a file with the file dialog. Multiple files are supported.
|
||||||
|
|
||||||
|
You can also upload documents on any other page of the web UI by dragging-and-dropping
|
||||||
|
files into your browser window.
|
||||||
|
|
||||||
.. _usage-mobile_upload:
|
.. _usage-mobile_upload:
|
||||||
|
|
||||||
@@ -159,6 +161,9 @@ These are as follows:
|
|||||||
will not consume flagged mails.
|
will not consume flagged mails.
|
||||||
* **Move to folder:** Moves consumed mails out of the way so that paperless won't
|
* **Move to folder:** Moves consumed mails out of the way so that paperless won't
|
||||||
consume them again.
|
consume them again.
|
||||||
|
* **Add custom Tag:** Adds a custom tag to mails with consumed documents (the IMAP
|
||||||
|
standard calls these "keywords"). Paperless will not consume mails already tagged.
|
||||||
|
Not all mail servers support this feature!
|
||||||
|
|
||||||
.. caution::
|
.. caution::
|
||||||
|
|
||||||
@@ -178,6 +183,15 @@ These are as follows:
|
|||||||
automatically or manually and tell paperless to move them to yet another folder
|
automatically or manually and tell paperless to move them to yet another folder
|
||||||
after consumption. It's up to you.
|
after consumption. It's up to you.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
When defining a mail rule with a folder, you may need to try different characters to
|
||||||
|
define how the sub-folders are separated. Common values include ".", "/" or "|", but
|
||||||
|
this varies by the mail server. Check the documentation for your mail server. In the
|
||||||
|
event of an error fetching mail from a certain folder, check the Paperless logs. When
|
||||||
|
a folder is not located, Paperless will attempt to list all folders found in the account
|
||||||
|
to the Paperless logs.
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
Paperless will process the rules in the order defined in the admin page.
|
Paperless will process the rules in the order defined in the admin page.
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import os
|
import os
|
||||||
|
|
||||||
bind = f'0.0.0.0:{os.getenv("PAPERLESS_PORT", 8000)}'
|
bind = f'[::]:{os.getenv("PAPERLESS_PORT", 8000)}'
|
||||||
workers = int(os.getenv("PAPERLESS_WEBSERVER_WORKERS", 2))
|
workers = int(os.getenv("PAPERLESS_WEBSERVER_WORKERS", 1))
|
||||||
worker_class = "paperless.workers.ConfigurableWorker"
|
worker_class = "paperless.workers.ConfigurableWorker"
|
||||||
timeout = 120
|
timeout = 120
|
||||||
|
|
||||||
@@ -24,7 +24,7 @@ def worker_int(worker):
|
|||||||
## get traceback info
|
## get traceback info
|
||||||
import threading, sys, traceback
|
import threading, sys, traceback
|
||||||
|
|
||||||
id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
|
id2name = {th.ident: th.name for th in threading.enumerate()}
|
||||||
code = []
|
code = []
|
||||||
for threadId, stack in sys._current_frames().items():
|
for threadId, stack in sys._current_frames().items():
|
||||||
code.append("\n# Thread: %s(%d)" % (id2name.get(threadId, ""), threadId))
|
code.append("\n# Thread: %s(%d)" % (id2name.get(threadId, ""), threadId))
|
||||||
|
|||||||
@@ -1,18 +1,18 @@
|
|||||||
#!/bin/bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
ask() {
|
ask() {
|
||||||
while true ; do
|
while true ; do
|
||||||
if [[ -z $3 ]] ; then
|
if [[ -z $3 ]] ; then
|
||||||
read -p "$1 [$2]: " result
|
read -r -p "$1 [$2]: " result
|
||||||
else
|
else
|
||||||
read -p "$1 ($3) [$2]: " result
|
read -r -p "$1 ($3) [$2]: " result
|
||||||
fi
|
fi
|
||||||
if [[ -z $result ]]; then
|
if [[ -z $result ]]; then
|
||||||
ask_result=$2
|
ask_result=$2
|
||||||
return
|
return
|
||||||
fi
|
fi
|
||||||
array=$3
|
array=$3
|
||||||
if [[ -z $3 || " ${array[@]} " =~ " ${result} " ]]; then
|
if [[ -z $3 || " ${array[*]} " =~ ${result} ]]; then
|
||||||
ask_result=$result
|
ask_result=$result
|
||||||
return
|
return
|
||||||
else
|
else
|
||||||
@@ -24,7 +24,7 @@ ask() {
|
|||||||
ask_docker_folder() {
|
ask_docker_folder() {
|
||||||
while true ; do
|
while true ; do
|
||||||
|
|
||||||
read -p "$1 [$2]: " result
|
read -r -p "$1 [$2]: " result
|
||||||
|
|
||||||
if [[ -z $result ]]; then
|
if [[ -z $result ]]; then
|
||||||
ask_result=$2
|
ask_result=$2
|
||||||
@@ -47,25 +47,29 @@ if [[ $(id -u) == "0" ]] ; then
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ -z $(which wget) ]] ; then
|
if ! command -v wget &> /dev/null ; then
|
||||||
echo "wget executable not found. Is wget installed?"
|
echo "wget executable not found. Is wget installed?"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ -z $(which docker) ]] ; then
|
if ! command -v docker &> /dev/null ; then
|
||||||
echo "docker executable not found. Is docker installed?"
|
echo "docker executable not found. Is docker installed?"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ -z $(which docker-compose) ]] ; then
|
DOCKER_COMPOSE_CMD="docker-compose"
|
||||||
echo "docker-compose executable not found. Is docker-compose installed?"
|
if ! command -v ${DOCKER_COMPOSE_CMD} ; then
|
||||||
exit 1
|
if docker compose version &> /dev/null ; then
|
||||||
|
DOCKER_COMPOSE_CMD="docker compose"
|
||||||
|
else
|
||||||
|
echo "docker-compose executable not found. Is docker-compose installed?"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Check if user has permissions to run Docker by trying to get the status of Docker (docker status).
|
# Check if user has permissions to run Docker by trying to get the status of Docker (docker status).
|
||||||
# If this fails, the user probably does not have permissions for Docker.
|
# If this fails, the user probably does not have permissions for Docker.
|
||||||
docker stats --no-stream 2>/dev/null 1>&2
|
if ! docker stats --no-stream &> /dev/null ; then
|
||||||
if [ $? -ne 0 ] ; then
|
|
||||||
echo ""
|
echo ""
|
||||||
echo "WARN: It look like the current user does not have Docker permissions."
|
echo "WARN: It look like the current user does not have Docker permissions."
|
||||||
echo "WARN: Use 'sudo usermod -aG docker $USER' to assign Docker permissions to the user."
|
echo "WARN: Use 'sudo usermod -aG docker $USER' to assign Docker permissions to the user."
|
||||||
@@ -88,6 +92,14 @@ echo ""
|
|||||||
echo "1. Application configuration"
|
echo "1. Application configuration"
|
||||||
echo "============================"
|
echo "============================"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "The URL paperless will be available at. This is required if the"
|
||||||
|
echo "installation will be accessible via the web, otherwise can be left blank."
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
ask "URL" ""
|
||||||
|
URL=$ask_result
|
||||||
|
|
||||||
echo ""
|
echo ""
|
||||||
echo "The port on which the paperless webserver will listen for incoming"
|
echo "The port on which the paperless webserver will listen for incoming"
|
||||||
echo "connections."
|
echo "connections."
|
||||||
@@ -162,7 +174,7 @@ ask "Target folder" "$(pwd)/paperless-ngx"
|
|||||||
TARGET_FOLDER=$ask_result
|
TARGET_FOLDER=$ask_result
|
||||||
|
|
||||||
echo ""
|
echo ""
|
||||||
echo "The consume folder is where paperles will search for new documents."
|
echo "The consume folder is where paperless will search for new documents."
|
||||||
echo "Point this to a folder where your scanner is able to put your scanned"
|
echo "Point this to a folder where your scanner is able to put your scanned"
|
||||||
echo "documents."
|
echo "documents."
|
||||||
echo ""
|
echo ""
|
||||||
@@ -228,7 +240,7 @@ ask "Paperless username" "$(whoami)"
|
|||||||
USERNAME=$ask_result
|
USERNAME=$ask_result
|
||||||
|
|
||||||
while true; do
|
while true; do
|
||||||
read -sp "Paperless password: " PASSWORD
|
read -r -sp "Paperless password: " PASSWORD
|
||||||
echo ""
|
echo ""
|
||||||
|
|
||||||
if [[ -z $PASSWORD ]] ; then
|
if [[ -z $PASSWORD ]] ; then
|
||||||
@@ -236,7 +248,7 @@ while true; do
|
|||||||
continue
|
continue
|
||||||
fi
|
fi
|
||||||
|
|
||||||
read -sp "Paperless password (again): " PASSWORD_REPEAT
|
read -r -sp "Paperless password (again): " PASSWORD_REPEAT
|
||||||
echo ""
|
echo ""
|
||||||
|
|
||||||
if [[ ! "$PASSWORD" == "$PASSWORD_REPEAT" ]] ; then
|
if [[ ! "$PASSWORD" == "$PASSWORD_REPEAT" ]] ; then
|
||||||
@@ -274,6 +286,7 @@ if [[ "$DATABASE_BACKEND" == "postgres" ]] ; then
|
|||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
echo ""
|
echo ""
|
||||||
|
echo "URL: $URL"
|
||||||
echo "Port: $PORT"
|
echo "Port: $PORT"
|
||||||
echo "Database: $DATABASE_BACKEND"
|
echo "Database: $DATABASE_BACKEND"
|
||||||
echo "Tika enabled: $TIKA_ENABLED"
|
echo "Tika enabled: $TIKA_ENABLED"
|
||||||
@@ -285,7 +298,7 @@ echo "Paperless username: $USERNAME"
|
|||||||
echo "Paperless email: $EMAIL"
|
echo "Paperless email: $EMAIL"
|
||||||
|
|
||||||
echo ""
|
echo ""
|
||||||
read -p "Press any key to install."
|
read -r -p "Press any key to install."
|
||||||
|
|
||||||
echo ""
|
echo ""
|
||||||
echo "Installing paperless..."
|
echo "Installing paperless..."
|
||||||
@@ -301,14 +314,20 @@ if [[ $TIKA_ENABLED == "yes" ]] ; then
|
|||||||
DOCKER_COMPOSE_VERSION="$DOCKER_COMPOSE_VERSION-tika"
|
DOCKER_COMPOSE_VERSION="$DOCKER_COMPOSE_VERSION-tika"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
wget "https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/master/docker/compose/docker-compose.$DOCKER_COMPOSE_VERSION.yml" -O docker-compose.yml
|
wget "https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docker/compose/docker-compose.$DOCKER_COMPOSE_VERSION.yml" -O docker-compose.yml
|
||||||
wget "https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/master/docker/compose/.env" -O .env
|
wget "https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docker/compose/.env" -O .env
|
||||||
|
|
||||||
SECRET_KEY=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 64 | head -n 1)
|
SECRET_KEY=$(tr -dc 'a-zA-Z0-9' < /dev/urandom | fold -w 64 | head -n 1)
|
||||||
|
|
||||||
DEFAULT_LANGUAGES="deu eng fra ita spa"
|
DEFAULT_LANGUAGES=("deu eng fra ita spa")
|
||||||
|
|
||||||
|
_split_langs="${OCR_LANGUAGE//+/ }"
|
||||||
|
read -r -a OCR_LANGUAGES_ARRAY <<< "${_split_langs}"
|
||||||
|
|
||||||
{
|
{
|
||||||
|
if [[ ! $URL == "" ]] ; then
|
||||||
|
echo "PAPERLESS_URL=$URL"
|
||||||
|
fi
|
||||||
if [[ ! $USERMAP_UID == "1000" ]] ; then
|
if [[ ! $USERMAP_UID == "1000" ]] ; then
|
||||||
echo "USERMAP_UID=$USERMAP_UID"
|
echo "USERMAP_UID=$USERMAP_UID"
|
||||||
fi
|
fi
|
||||||
@@ -318,8 +337,8 @@ DEFAULT_LANGUAGES="deu eng fra ita spa"
|
|||||||
echo "PAPERLESS_TIME_ZONE=$TIME_ZONE"
|
echo "PAPERLESS_TIME_ZONE=$TIME_ZONE"
|
||||||
echo "PAPERLESS_OCR_LANGUAGE=$OCR_LANGUAGE"
|
echo "PAPERLESS_OCR_LANGUAGE=$OCR_LANGUAGE"
|
||||||
echo "PAPERLESS_SECRET_KEY=$SECRET_KEY"
|
echo "PAPERLESS_SECRET_KEY=$SECRET_KEY"
|
||||||
if [[ ! " ${DEFAULT_LANGUAGES[@]} " =~ " ${OCR_LANGUAGE} " ]] ; then
|
if [[ ! ${DEFAULT_LANGUAGES[*]} =~ ${OCR_LANGUAGES_ARRAY[*]} ]] ; then
|
||||||
echo "PAPERLESS_OCR_LANGUAGES=$OCR_LANGUAGE"
|
echo "PAPERLESS_OCR_LANGUAGES=${OCR_LANGUAGES_ARRAY[*]}"
|
||||||
fi
|
fi
|
||||||
} > docker-compose.env
|
} > docker-compose.env
|
||||||
|
|
||||||
@@ -329,18 +348,31 @@ sed -i "s#- \./consume:/usr/src/paperless/consume#- $CONSUME_FOLDER:/usr/src/pap
|
|||||||
|
|
||||||
if [[ -n $MEDIA_FOLDER ]] ; then
|
if [[ -n $MEDIA_FOLDER ]] ; then
|
||||||
sed -i "s#- media:/usr/src/paperless/media#- $MEDIA_FOLDER:/usr/src/paperless/media#g" docker-compose.yml
|
sed -i "s#- media:/usr/src/paperless/media#- $MEDIA_FOLDER:/usr/src/paperless/media#g" docker-compose.yml
|
||||||
|
sed -i "/^\s*media:/d" docker-compose.yml
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ -n $DATA_FOLDER ]] ; then
|
if [[ -n $DATA_FOLDER ]] ; then
|
||||||
sed -i "s#- data:/usr/src/paperless/data#- $DATA_FOLDER:/usr/src/paperless/data#g" docker-compose.yml
|
sed -i "s#- data:/usr/src/paperless/data#- $DATA_FOLDER:/usr/src/paperless/data#g" docker-compose.yml
|
||||||
|
sed -i "/^\s*data:/d" docker-compose.yml
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ -n $POSTGRES_FOLDER ]] ; then
|
if [[ -n $POSTGRES_FOLDER ]] ; then
|
||||||
sed -i "s#- pgdata:/var/lib/postgresql/data#- $POSTGRES_FOLDER:/var/lib/postgresql/data#g" docker-compose.yml
|
sed -i "s#- pgdata:/var/lib/postgresql/data#- $POSTGRES_FOLDER:/var/lib/postgresql/data#g" docker-compose.yml
|
||||||
|
sed -i "/^\s*pgdata:/d" docker-compose.yml
|
||||||
fi
|
fi
|
||||||
|
|
||||||
docker-compose pull
|
# remove trailing blank lines from end of file
|
||||||
|
sed -i -e :a -e '/^\n*$/{$d;N;};/\n$/ba' docker-compose.yml
|
||||||
|
# if last line in file contains "volumes:", remove that line since no more named volumes are left
|
||||||
|
l1=$(grep -n '^volumes:' docker-compose.yml | cut -d : -f 1) # get line number containing volume: at begin of line
|
||||||
|
l2=$(wc -l < docker-compose.yml) # get total number of lines
|
||||||
|
if [ "$l1" -eq "$l2" ] ; then
|
||||||
|
sed -i "/^volumes:/d" docker-compose.yml
|
||||||
|
fi
|
||||||
|
|
||||||
docker-compose run --rm -e DJANGO_SUPERUSER_PASSWORD="$PASSWORD" webserver createsuperuser --noinput --username "$USERNAME" --email "$EMAIL"
|
|
||||||
|
|
||||||
docker-compose up -d
|
${DOCKER_COMPOSE_CMD} pull
|
||||||
|
|
||||||
|
${DOCKER_COMPOSE_CMD} run --rm -e DJANGO_SUPERUSER_PASSWORD="$PASSWORD" webserver createsuperuser --noinput --username "$USERNAME" --email "$EMAIL"
|
||||||
|
|
||||||
|
${DOCKER_COMPOSE_CMD} up -d
|
||||||
|
|||||||
@@ -23,12 +23,15 @@
|
|||||||
#PAPERLESS_MEDIA_ROOT=../media
|
#PAPERLESS_MEDIA_ROOT=../media
|
||||||
#PAPERLESS_STATICDIR=../static
|
#PAPERLESS_STATICDIR=../static
|
||||||
#PAPERLESS_FILENAME_FORMAT=
|
#PAPERLESS_FILENAME_FORMAT=
|
||||||
|
#PAPERLESS_FILENAME_FORMAT_REMOVE_NONE=
|
||||||
|
|
||||||
# Security and hosting
|
# Security and hosting
|
||||||
|
|
||||||
#PAPERLESS_SECRET_KEY=change-me
|
#PAPERLESS_SECRET_KEY=change-me
|
||||||
#PAPERLESS_ALLOWED_HOSTS=example.com,www.example.com
|
#PAPERLESS_URL=https://example.com
|
||||||
#PAPERLESS_CORS_ALLOWED_HOSTS=http://example.com,http://localhost:8000
|
#PAPERLESS_CSRF_TRUSTED_ORIGINS=https://example.com # can be set using PAPERLESS_URL
|
||||||
|
#PAPERLESS_ALLOWED_HOSTS=example.com,www.example.com # can be set using PAPERLESS_URL
|
||||||
|
#PAPERLESS_CORS_ALLOWED_HOSTS=https://localhost:8080,https://example.com # can be set using PAPERLESS_URL
|
||||||
#PAPERLESS_FORCE_SCRIPT_NAME=
|
#PAPERLESS_FORCE_SCRIPT_NAME=
|
||||||
#PAPERLESS_STATIC_URL=/static/
|
#PAPERLESS_STATIC_URL=/static/
|
||||||
#PAPERLESS_AUTO_LOGIN_USERNAME=
|
#PAPERLESS_AUTO_LOGIN_USERNAME=
|
||||||
@@ -58,15 +61,17 @@
|
|||||||
#PAPERLESS_CONSUMER_POLLING=10
|
#PAPERLESS_CONSUMER_POLLING=10
|
||||||
#PAPERLESS_CONSUMER_DELETE_DUPLICATES=false
|
#PAPERLESS_CONSUMER_DELETE_DUPLICATES=false
|
||||||
#PAPERLESS_CONSUMER_RECURSIVE=false
|
#PAPERLESS_CONSUMER_RECURSIVE=false
|
||||||
#PAPERLESS_CONSUMER_IGNORE_PATTERNS=[".DS_STORE/*", "._*", ".stfolder/*"]
|
#PAPERLESS_CONSUMER_IGNORE_PATTERNS=[".DS_STORE/*", "._*", ".stfolder/*", ".stversions/*", ".localized/*", "desktop.ini"]
|
||||||
#PAPERLESS_CONSUMER_SUBDIRS_AS_TAGS=false
|
#PAPERLESS_CONSUMER_SUBDIRS_AS_TAGS=false
|
||||||
#PAPERLESS_OPTIMIZE_THUMBNAILS=true
|
#PAPERLESS_CONSUMER_ENABLE_BARCODES=false
|
||||||
|
#PAPERLESS_CONSUMER_ENABLE_BARCODES=PATCHT
|
||||||
#PAPERLESS_PRE_CONSUME_SCRIPT=/path/to/an/arbitrary/script.sh
|
#PAPERLESS_PRE_CONSUME_SCRIPT=/path/to/an/arbitrary/script.sh
|
||||||
#PAPERLESS_POST_CONSUME_SCRIPT=/path/to/an/arbitrary/script.sh
|
#PAPERLESS_POST_CONSUME_SCRIPT=/path/to/an/arbitrary/script.sh
|
||||||
#PAPERLESS_FILENAME_DATE_ORDER=YMD
|
#PAPERLESS_FILENAME_DATE_ORDER=YMD
|
||||||
#PAPERLESS_FILENAME_PARSE_TRANSFORMS=[]
|
#PAPERLESS_FILENAME_PARSE_TRANSFORMS=[]
|
||||||
#PAPERLESS_THUMBNAIL_FONT_NAME=
|
#PAPERLESS_THUMBNAIL_FONT_NAME=
|
||||||
#PAPERLESS_IGNORE_DATES=
|
#PAPERLESS_IGNORE_DATES=
|
||||||
|
#PAPERLESS_ENABLE_UPDATE_CHECK=
|
||||||
|
|
||||||
# Tika settings
|
# Tika settings
|
||||||
|
|
||||||
@@ -78,4 +83,3 @@
|
|||||||
|
|
||||||
#PAPERLESS_CONVERT_BINARY=/usr/bin/convert
|
#PAPERLESS_CONVERT_BINARY=/usr/bin/convert
|
||||||
#PAPERLESS_GS_BINARY=/usr/bin/gs
|
#PAPERLESS_GS_BINARY=/usr/bin/gs
|
||||||
#PAPERLESS_OPTIPNG_BINARY=/usr/bin/optipng
|
|
||||||
|
|||||||
131
requirements.txt
@@ -1,109 +1,108 @@
|
|||||||
#
|
|
||||||
# These requirements were autogenerated by pipenv
|
|
||||||
# To regenerate from the project's Pipfile, run:
|
|
||||||
#
|
|
||||||
# pipenv lock --requirements
|
|
||||||
#
|
|
||||||
|
|
||||||
-i https://pypi.python.org/simple
|
-i https://pypi.python.org/simple
|
||||||
--extra-index-url https://www.piwheels.org/simple
|
--extra-index-url https://www.piwheels.org/simple
|
||||||
aioredis==1.3.1
|
aioredis==1.3.1
|
||||||
|
anyio==3.6.1; python_full_version >= '3.6.2'
|
||||||
arrow==1.2.2; python_version >= '3.6'
|
arrow==1.2.2; python_version >= '3.6'
|
||||||
asgiref==3.5.0; python_version >= '3.7'
|
asgiref==3.5.2; python_version >= '3.7'
|
||||||
async-timeout==4.0.2; python_version >= '3.6'
|
async-timeout==4.0.2; python_version >= '3.6'
|
||||||
attrs==21.4.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
|
attrs==21.4.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
|
||||||
autobahn==22.2.2; python_version >= '3.7'
|
autobahn==22.6.1; python_version >= '3.7'
|
||||||
automat==20.2.0
|
automat==20.2.0
|
||||||
backports.zoneinfo==0.2.1
|
backports.zoneinfo==0.2.1; python_version < '3.9'
|
||||||
blessed==1.19.1; python_version >= '2.7'
|
blessed==1.19.1; python_version >= '2.7'
|
||||||
certifi==2021.10.8
|
certifi==2022.6.15; python_version >= '3.6'
|
||||||
cffi==1.15.0
|
cffi==1.15.1
|
||||||
channels-redis==3.3.1
|
channels==3.0.5
|
||||||
channels==3.0.4
|
channels-redis==3.4.1
|
||||||
chardet==4.0.0; python_version >= '3.1'
|
charset-normalizer==2.1.0; python_version >= '3.6'
|
||||||
charset-normalizer==2.0.12; python_version >= '3'
|
click==8.1.3; python_version >= '3.7'
|
||||||
click==8.0.4; python_version >= '3.6'
|
|
||||||
coloredlogs==15.0.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
|
coloredlogs==15.0.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
|
||||||
concurrent-log-handler==0.9.20
|
concurrent-log-handler==0.9.20
|
||||||
constantly==15.1.0
|
constantly==15.1.0
|
||||||
cryptography==36.0.1
|
cryptography==37.0.4; python_version >= '3.6'
|
||||||
daphne==3.0.2; python_version >= '3.6'
|
daphne==3.0.2; python_version >= '3.6'
|
||||||
dateparser==1.1.0
|
dateparser==1.1.1
|
||||||
django-cors-headers==3.11.0
|
deprecated==1.2.13; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
|
||||||
django-extensions==3.1.5
|
deprecation==2.1.0
|
||||||
django-filter==21.1
|
django==4.0.6
|
||||||
django-picklefield==3.0.1; python_version >= '3'
|
django-cors-headers==3.13.0
|
||||||
django-q==1.3.9
|
django-extensions==3.2.0
|
||||||
django==3.2.12
|
django-filter==22.1
|
||||||
|
django-picklefield==3.1; python_version >= '3'
|
||||||
|
-e git+https://github.com/paperless-ngx/django-q.git@bf20d57f859a7d872d5979cd8879fac9c9df981c#egg=django-q
|
||||||
djangorestframework==3.13.1
|
djangorestframework==3.13.1
|
||||||
filelock==3.6.0
|
filelock==3.7.1
|
||||||
fuzzywuzzy[speedup]==0.18.0
|
fuzzywuzzy[speedup]==0.18.0
|
||||||
gunicorn==20.1.0
|
gunicorn==20.1.0
|
||||||
h11==0.13.0; python_version >= '3.6'
|
h11==0.13.0; python_version >= '3.6'
|
||||||
hiredis==2.0.0; python_version >= '3.6'
|
hiredis==2.0.0; python_version >= '3.6'
|
||||||
httptools==0.3.0
|
httptools==0.4.0
|
||||||
humanfriendly==10.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
|
humanfriendly==10.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
|
||||||
hyperlink==21.0.0
|
hyperlink==21.0.0
|
||||||
idna==3.3; python_version >= '3.5'
|
idna==3.3; python_version >= '3.5'
|
||||||
imap-tools==0.51.1
|
imap-tools==0.56.0
|
||||||
img2pdf==0.4.3
|
img2pdf==0.4.4
|
||||||
importlib-resources==5.4.0; python_version < '3.9'
|
importlib-resources==5.8.0; python_version < '3.9'
|
||||||
incremental==21.3.0
|
incremental==21.3.0
|
||||||
inotify-simple==1.3.5; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
|
inotify-simple==1.3.5; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
|
||||||
inotifyrecursive==0.3.5
|
inotifyrecursive==0.3.5
|
||||||
joblib==1.1.0; python_version >= '3.6'
|
joblib==1.1.0; python_version >= '3.6'
|
||||||
langdetect==1.0.9
|
langdetect==1.0.9
|
||||||
lxml==4.8.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
|
lxml==4.9.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
|
||||||
msgpack==1.0.3
|
msgpack==1.0.4
|
||||||
numpy==1.22.2
|
numpy==1.23.1; python_version >= '3.8'
|
||||||
ocrmypdf==13.4.0
|
ocrmypdf==13.6.1
|
||||||
packaging==21.3; python_version >= '3.6'
|
packaging==21.3; python_version >= '3.6'
|
||||||
pathvalidate==2.5.0
|
pathvalidate==2.5.0
|
||||||
pdfminer.six==20211012
|
pdf2image==1.16.0
|
||||||
pikepdf==5.0.1
|
pdfminer.six==20220524
|
||||||
pillow==9.0.1
|
pikepdf==5.4.0
|
||||||
|
pillow==9.2.0
|
||||||
pluggy==1.0.0; python_version >= '3.6'
|
pluggy==1.0.0; python_version >= '3.6'
|
||||||
portalocker==2.4.0; python_version >= '3'
|
portalocker==2.5.1; python_version >= '3'
|
||||||
psycopg2-binary==2.9.3
|
psycopg2==2.9.3
|
||||||
pyasn1-modules==0.2.8
|
|
||||||
pyasn1==0.4.8
|
pyasn1==0.4.8
|
||||||
pycparser==2.21; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
|
pyasn1-modules==0.2.8
|
||||||
pyopenssl==22.0.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
|
pycparser==2.21
|
||||||
pyparsing==3.0.7; python_version >= '3.6'
|
pyopenssl==22.0.0
|
||||||
|
pyparsing==3.0.9; python_full_version >= '3.6.8'
|
||||||
python-dateutil==2.8.2
|
python-dateutil==2.8.2
|
||||||
python-dotenv==0.19.2
|
python-dotenv==0.20.0
|
||||||
python-gnupg==0.4.8
|
python-gnupg==0.4.9
|
||||||
python-levenshtein==0.12.2
|
python-levenshtein==0.12.2
|
||||||
python-magic==0.4.25
|
python-magic==0.4.27
|
||||||
|
pytz==2022.1
|
||||||
pytz-deprecation-shim==0.1.0.post0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
|
pytz-deprecation-shim==0.1.0.post0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
|
||||||
pytz==2021.3
|
|
||||||
pyyaml==6.0
|
pyyaml==6.0
|
||||||
redis==3.5.3
|
pyzbar==0.1.9
|
||||||
regex==2022.1.18
|
redis==4.3.4
|
||||||
reportlab==3.6.7; python_version >= '3.6' and python_version < '4'
|
regex==2022.3.2; python_version >= '3.6'
|
||||||
requests==2.27.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'
|
reportlab==3.6.11; python_version >= '3.7' and python_version < '4'
|
||||||
scikit-learn==0.24.0
|
requests==2.28.1; python_version >= '3.7' and python_version < '4'
|
||||||
scipy==1.8.0; python_version < '3.11' and python_version >= '3.8'
|
scikit-learn==1.1.1
|
||||||
|
scipy==1.8.1; python_version < '3.11' and python_version >= '3.8'
|
||||||
service-identity==21.1.0
|
service-identity==21.1.0
|
||||||
setuptools==60.9.3; python_version >= '3.7'
|
setuptools==63.2.0; python_version >= '3.7'
|
||||||
six==1.16.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
|
six==1.16.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
|
||||||
|
sniffio==1.2.0; python_version >= '3.5'
|
||||||
sqlparse==0.4.2; python_version >= '3.5'
|
sqlparse==0.4.2; python_version >= '3.5'
|
||||||
threadpoolctl==3.1.0; python_version >= '3.6'
|
threadpoolctl==3.1.0; python_version >= '3.6'
|
||||||
tika==1.24
|
tika==1.24
|
||||||
tqdm==4.62.3
|
tqdm==4.64.0
|
||||||
twisted[tls]==22.1.0; python_full_version >= '3.6.7'
|
twisted[tls]==22.4.0; python_full_version >= '3.6.7'
|
||||||
txaio==22.2.1; python_version >= '3.6'
|
txaio==22.2.1; python_version >= '3.6'
|
||||||
typing-extensions==4.1.1; python_version >= '3.6'
|
typing-extensions==4.3.0; python_version >= '3.7'
|
||||||
tzdata==2021.5; python_version >= '3.6'
|
tzdata==2022.1; python_version >= '3.6'
|
||||||
tzlocal==4.1; python_version >= '3.6'
|
tzlocal==4.2; python_version >= '3.6'
|
||||||
urllib3==1.26.8; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'
|
urllib3==1.26.10; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4'
|
||||||
uvicorn[standard]==0.17.5
|
uvicorn[standard]==0.18.2
|
||||||
uvloop==0.16.0
|
uvloop==0.16.0
|
||||||
watchdog==2.1.6
|
watchdog==2.1.9
|
||||||
watchgod==0.7
|
watchfiles==0.16.0
|
||||||
wcwidth==0.2.5
|
wcwidth==0.2.5
|
||||||
websockets==10.2
|
websockets==10.3
|
||||||
whitenoise==6.0.0
|
whitenoise==6.2.0
|
||||||
whoosh==2.7.4
|
whoosh==2.7.4
|
||||||
zipp==3.7.0; python_version < '3.10'
|
wrapt==1.14.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
|
||||||
|
zipp==3.8.1; python_version < '3.9'
|
||||||
zope.interface==5.4.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
|
zope.interface==5.4.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'
|
||||||
|
|||||||
@@ -1,21 +1,16 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
DOCUMENT_ID=${1}
|
|
||||||
DOCUMENT_FILE_NAME=${2}
|
|
||||||
DOCUMENT_SOURCE_PATH=${3}
|
|
||||||
DOCUMENT_THUMBNAIL_PATH=${4}
|
|
||||||
DOCUMENT_DOWNLOAD_URL=${5}
|
|
||||||
DOCUMENT_THUMBNAIL_URL=${6}
|
|
||||||
DOCUMENT_CORRESPONDENT=${7}
|
|
||||||
DOCUMENT_TAGS=${8}
|
|
||||||
|
|
||||||
echo "
|
echo "
|
||||||
|
|
||||||
A document with an id of ${DOCUMENT_ID} was just consumed. I know the
|
A document with an id of ${DOCUMENT_ID} was just consumed. I know the
|
||||||
following additional information about it:
|
following additional information about it:
|
||||||
|
|
||||||
* Generated File Name: ${DOCUMENT_FILE_NAME}
|
* Generated File Name: ${DOCUMENT_FILE_NAME}
|
||||||
|
* Archive Path: ${DOCUMENT_ARCHIVE_PATH}
|
||||||
* Source Path: ${DOCUMENT_SOURCE_PATH}
|
* Source Path: ${DOCUMENT_SOURCE_PATH}
|
||||||
|
* Created: ${DOCUMENT_CREATED}
|
||||||
|
* Added: ${DOCUMENT_ADDED}
|
||||||
|
* Modified: ${DOCUMENT_MODIFIED}
|
||||||
* Thumbnail Path: ${DOCUMENT_THUMBNAIL_PATH}
|
* Thumbnail Path: ${DOCUMENT_THUMBNAIL_PATH}
|
||||||
* Download URL: ${DOCUMENT_DOWNLOAD_URL}
|
* Download URL: ${DOCUMENT_DOWNLOAD_URL}
|
||||||
* Thumbnail URL: ${DOCUMENT_THUMBNAIL_URL}
|
* Thumbnail URL: ${DOCUMENT_THUMBNAIL_URL}
|
||||||
|
|||||||
@@ -1,4 +1,6 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
docker run -p 5432:5432 -e POSTGRES_PASSWORD=password -v paperless_pgdata:/var/lib/postgresql/data -d postgres:13
|
docker run -p 5432:5432 -e POSTGRES_PASSWORD=password -v paperless_pgdata:/var/lib/postgresql/data -d postgres:13
|
||||||
docker run -d -p 6379:6379 redis:latest
|
docker run -d -p 6379:6379 redis:latest
|
||||||
docker run -p 3000:3000 -d gotenberg/gotenberg:7
|
docker run -p 3000:3000 -d gotenberg/gotenberg:7.4
|
||||||
docker run -p 9998:9998 -d apache/tika
|
docker run -p 9998:9998 -d ghcr.io/paperless-ngx/tika:latest
|
||||||
|
|||||||
4
src-ui/.gitignore
vendored
@@ -45,3 +45,7 @@ testem.log
|
|||||||
# System Files
|
# System Files
|
||||||
.DS_Store
|
.DS_Store
|
||||||
Thumbs.db
|
Thumbs.db
|
||||||
|
|
||||||
|
# Cypress
|
||||||
|
cypress/videos/**/*
|
||||||
|
cypress/screenshots/**/*
|
||||||
|
|||||||
@@ -16,6 +16,7 @@
|
|||||||
"i18n": {
|
"i18n": {
|
||||||
"sourceLocale": "en-US",
|
"sourceLocale": "en-US",
|
||||||
"locales": {
|
"locales": {
|
||||||
|
"be-BY": "src/locale/messages.be_BY.xlf",
|
||||||
"cs-CZ": "src/locale/messages.cs_CZ.xlf",
|
"cs-CZ": "src/locale/messages.cs_CZ.xlf",
|
||||||
"da-DK": "src/locale/messages.da_DK.xlf",
|
"da-DK": "src/locale/messages.da_DK.xlf",
|
||||||
"de-DE": "src/locale/messages.de_DE.xlf",
|
"de-DE": "src/locale/messages.de_DE.xlf",
|
||||||
@@ -30,8 +31,12 @@
|
|||||||
"pt-PT": "src/locale/messages.pt_PT.xlf",
|
"pt-PT": "src/locale/messages.pt_PT.xlf",
|
||||||
"ro-RO": "src/locale/messages.ro_RO.xlf",
|
"ro-RO": "src/locale/messages.ro_RO.xlf",
|
||||||
"ru-RU": "src/locale/messages.ru_RU.xlf",
|
"ru-RU": "src/locale/messages.ru_RU.xlf",
|
||||||
"sv-SE": "src/locale/messages.sv_SE.xlf"
|
"sl-SI": "src/locale/messages.sl_SI.xlf",
|
||||||
}
|
"sr-CS": "src/locale/messages.sr_CS.xlf",
|
||||||
|
"sv-SE": "src/locale/messages.sv_SE.xlf",
|
||||||
|
"tr-TR": "src/locale/messages.tr_TR.xlf",
|
||||||
|
"zh-CN": "src/locale/messages.zh_CN.xlf"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"architect": {
|
"architect": {
|
||||||
"build": {
|
"build": {
|
||||||
@@ -121,12 +126,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"test": {
|
"test": {
|
||||||
"builder": "@angular-devkit/build-angular:karma",
|
"builder": "@angular-builders/jest:run",
|
||||||
"options": {
|
"options": {
|
||||||
"main": "src/test.ts",
|
|
||||||
"polyfills": "src/polyfills.ts",
|
|
||||||
"tsConfig": "tsconfig.spec.json",
|
"tsConfig": "tsconfig.spec.json",
|
||||||
"karmaConfig": "karma.conf.js",
|
|
||||||
"assets": [
|
"assets": [
|
||||||
"src/favicon.ico",
|
"src/favicon.ico",
|
||||||
"src/apple-touch-icon.png",
|
"src/apple-touch-icon.png",
|
||||||
@@ -140,9 +142,21 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"e2e": {
|
"e2e": {
|
||||||
"builder": "@angular-devkit/build-angular:protractor",
|
"builder": "@cypress/schematic:cypress",
|
||||||
|
"options": {
|
||||||
|
"devServerTarget": "paperless-ui:serve",
|
||||||
|
"watch": true,
|
||||||
|
"headless": false
|
||||||
|
},
|
||||||
|
"configurations": {
|
||||||
|
"production": {
|
||||||
|
"devServerTarget": "paperless-ui:serve:production"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"cypress-run": {
|
||||||
|
"builder": "@cypress/schematic:cypress",
|
||||||
"options": {
|
"options": {
|
||||||
"protractorConfig": "e2e/protractor.conf.js",
|
|
||||||
"devServerTarget": "paperless-ui:serve"
|
"devServerTarget": "paperless-ui:serve"
|
||||||
},
|
},
|
||||||
"configurations": {
|
"configurations": {
|
||||||
@@ -150,6 +164,13 @@
|
|||||||
"devServerTarget": "paperless-ui:serve:production"
|
"devServerTarget": "paperless-ui:serve:production"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
"cypress-open": {
|
||||||
|
"builder": "@cypress/schematic:cypress",
|
||||||
|
"options": {
|
||||||
|
"watch": true,
|
||||||
|
"headless": false
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
13
src-ui/cypress.config.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
import { defineConfig } from 'cypress'
|
||||||
|
|
||||||
|
export default defineConfig({
|
||||||
|
videosFolder: 'cypress/videos',
|
||||||
|
screenshotsFolder: 'cypress/screenshots',
|
||||||
|
fixturesFolder: 'cypress/fixtures',
|
||||||
|
e2e: {
|
||||||
|
setupNodeEvents(on, config) {
|
||||||
|
return require('./cypress/plugins/index.ts')(on, config)
|
||||||
|
},
|
||||||
|
baseUrl: 'http://localhost:4200',
|
||||||
|
},
|
||||||
|
})
|
||||||