mirror of
https://github.com/9001/copyparty.git
synced 2025-11-02 04:53:15 +00:00
Compare commits
1068 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e00e80ae39 | ||
|
|
4f4f106c48 | ||
|
|
a286cc9d55 | ||
|
|
53bb1c719b | ||
|
|
98d5aa17e2 | ||
|
|
aaaa80e4b8 | ||
|
|
e70e926a40 | ||
|
|
e80c1f6d59 | ||
|
|
24de360325 | ||
|
|
e0039bc1e6 | ||
|
|
ae5c4a0109 | ||
|
|
1d367a0da0 | ||
|
|
d285f7ee4a | ||
|
|
37c84021a2 | ||
|
|
8ee9de4291 | ||
|
|
249b63453b | ||
|
|
1c0017d763 | ||
|
|
df51e23639 | ||
|
|
32e71a43b8 | ||
|
|
47a1e6ddfa | ||
|
|
c5f41457bb | ||
|
|
f1e0c44bdd | ||
|
|
9d2e390b6a | ||
|
|
75a58b435d | ||
|
|
f5474d34ac | ||
|
|
c962d2544f | ||
|
|
0b87a4a810 | ||
|
|
1882afb8b6 | ||
|
|
2270c8737a | ||
|
|
d6794955a4 | ||
|
|
f5520f45ef | ||
|
|
9401b5ae13 | ||
|
|
df64a62a03 | ||
|
|
09cea66aa8 | ||
|
|
13cc33e0a5 | ||
|
|
ab36c8c9de | ||
|
|
f85d4ce82f | ||
|
|
6bec4c28ba | ||
|
|
fad1449259 | ||
|
|
86b3b57137 | ||
|
|
b235037dd3 | ||
|
|
3108139d51 | ||
|
|
2ae99ecfa0 | ||
|
|
e8ab53c270 | ||
|
|
5e9bc1127d | ||
|
|
415e61c3c9 | ||
|
|
5152f37ec8 | ||
|
|
0dbeb010cf | ||
|
|
17c465bed7 | ||
|
|
add04478e5 | ||
|
|
6db72d7166 | ||
|
|
868103a9c5 | ||
|
|
0f37718671 | ||
|
|
fa1445df86 | ||
|
|
a783e7071e | ||
|
|
a9919df5af | ||
|
|
b0af31ac35 | ||
|
|
c4c964a685 | ||
|
|
348ec71398 | ||
|
|
a257ccc8b3 | ||
|
|
fcc4296040 | ||
|
|
1684d05d49 | ||
|
|
0006f933a2 | ||
|
|
0484f97c9c | ||
|
|
e430b2567a | ||
|
|
fbc8ee15da | ||
|
|
68a9c05947 | ||
|
|
0a81aba899 | ||
|
|
d2ae822e15 | ||
|
|
fac4b08526 | ||
|
|
3a7b43c663 | ||
|
|
8fcb2d1554 | ||
|
|
590c763659 | ||
|
|
11d1267f8c | ||
|
|
8f5bae95ce | ||
|
|
e6b12ef14c | ||
|
|
b65674618b | ||
|
|
20dca2bea5 | ||
|
|
059e93cdcf | ||
|
|
635ab25013 | ||
|
|
995cd10df8 | ||
|
|
50f3820a6d | ||
|
|
617f3ea861 | ||
|
|
788db47b95 | ||
|
|
5fa8aaabb9 | ||
|
|
89d1af7f33 | ||
|
|
799cf27c5d | ||
|
|
c930d8f773 | ||
|
|
a7f921abb9 | ||
|
|
bc6234e032 | ||
|
|
558bfa4e1e | ||
|
|
5d19f23372 | ||
|
|
27f08cdbfa | ||
|
|
993213e2c0 | ||
|
|
49470c05fa | ||
|
|
ee0a060b79 | ||
|
|
500e3157b9 | ||
|
|
eba86b1d23 | ||
|
|
b69a563fc2 | ||
|
|
a900c36395 | ||
|
|
1d9b324d3e | ||
|
|
539e7b8efe | ||
|
|
50a477ee47 | ||
|
|
7000123a8b | ||
|
|
d48a7d2398 | ||
|
|
389a00ce59 | ||
|
|
7a460de3c2 | ||
|
|
8ea1f4a751 | ||
|
|
1c69ccc6cd | ||
|
|
84b5bbd3b6 | ||
|
|
9ccd327298 | ||
|
|
11df36f3cf | ||
|
|
f62dd0e3cc | ||
|
|
ad18b6e15e | ||
|
|
c00b80ca29 | ||
|
|
92ed4ba3f8 | ||
|
|
7de9775dd9 | ||
|
|
5ce9060e5c | ||
|
|
f727d5cb5a | ||
|
|
4735fb1ebb | ||
|
|
c7d05cc13d | ||
|
|
51c152ff4a | ||
|
|
eeed2a840c | ||
|
|
4aaa111925 | ||
|
|
e31248f018 | ||
|
|
8b4cf022f2 | ||
|
|
4e7455268a | ||
|
|
680f8ae814 | ||
|
|
90555a4cea | ||
|
|
56a62db591 | ||
|
|
cf51997680 | ||
|
|
f05cc18d61 | ||
|
|
5384c2e0f5 | ||
|
|
9bfbf80a0e | ||
|
|
f874d7754f | ||
|
|
a669f79480 | ||
|
|
1c3894743a | ||
|
|
75cdf17df4 | ||
|
|
de7dd1e60a | ||
|
|
0ee574a718 | ||
|
|
faac894706 | ||
|
|
dac2fad48e | ||
|
|
77f624b01e | ||
|
|
e24ffebfc8 | ||
|
|
70d07d1609 | ||
|
|
bfb3303d87 | ||
|
|
660705a436 | ||
|
|
74a3f97671 | ||
|
|
b3e35bb494 | ||
|
|
76adac7c72 | ||
|
|
5dc75ebb67 | ||
|
|
d686ce12b6 | ||
|
|
d3c40a423e | ||
|
|
2fb1e6dab8 | ||
|
|
10430b347f | ||
|
|
e0e3f6ac3e | ||
|
|
c694cbffdc | ||
|
|
bdd0e5d771 | ||
|
|
aa98e427f0 | ||
|
|
daa6f4c94c | ||
|
|
4a76663fb2 | ||
|
|
cebda5028a | ||
|
|
3fa377a580 | ||
|
|
a11c1005a8 | ||
|
|
4a6aea9328 | ||
|
|
4ca041e93e | ||
|
|
52a866a405 | ||
|
|
8b6bd0e6ac | ||
|
|
780fc4639a | ||
|
|
3692fc9d83 | ||
|
|
c2a0b1b4c6 | ||
|
|
21bbdb5419 | ||
|
|
aa1c08962c | ||
|
|
8a5d0399dd | ||
|
|
f2cd0b0c4a | ||
|
|
c2b66bbe73 | ||
|
|
48b957f1d5 | ||
|
|
3683984c8d | ||
|
|
a3431512d8 | ||
|
|
d832b787e7 | ||
|
|
6f75b02723 | ||
|
|
b8241710bd | ||
|
|
d638404b6a | ||
|
|
9362ca3ed9 | ||
|
|
d1a03c6d17 | ||
|
|
c6c31702c2 | ||
|
|
bd2d88c96e | ||
|
|
76b1857e4e | ||
|
|
095bd17d10 | ||
|
|
204bfac3fa | ||
|
|
ac49b0ca93 | ||
|
|
c5b04f6fef | ||
|
|
5c58fda46d | ||
|
|
062730c70c | ||
|
|
cade1990ce | ||
|
|
59b6e61816 | ||
|
|
daff7ff158 | ||
|
|
0862860961 | ||
|
|
1cb24045a0 | ||
|
|
622358b172 | ||
|
|
7998884a9d | ||
|
|
51ddecd101 | ||
|
|
7a35ab1d1e | ||
|
|
48564ba52a | ||
|
|
49efffd740 | ||
|
|
d6ac224c8f | ||
|
|
a772b8c3f2 | ||
|
|
b580953dcd | ||
|
|
d86653c763 | ||
|
|
dded4fca76 | ||
|
|
36365ffa6b | ||
|
|
0f9aeeaa27 | ||
|
|
d8ebcd0ef7 | ||
|
|
6e445487b1 | ||
|
|
6605e461c7 | ||
|
|
40ce4e2275 | ||
|
|
8fef9e363e | ||
|
|
4792c2770d | ||
|
|
87bb49da36 | ||
|
|
1c0071d9ce | ||
|
|
efded35c2e | ||
|
|
1d74240b9a | ||
|
|
098184ff7b | ||
|
|
4083533916 | ||
|
|
feb1acd43a | ||
|
|
a9591db734 | ||
|
|
9ebf148cbe | ||
|
|
a473e5e19a | ||
|
|
5d3034c231 | ||
|
|
c3a895af64 | ||
|
|
cea5aecbf2 | ||
|
|
0e61e70670 | ||
|
|
1e333c0939 | ||
|
|
917b6ec03c | ||
|
|
fe67c52ead | ||
|
|
909c7bee3e | ||
|
|
27ca54d138 | ||
|
|
2147c3a646 | ||
|
|
a99120116f | ||
|
|
802efeaff2 | ||
|
|
9ad3af1ef6 | ||
|
|
715727b811 | ||
|
|
c6eaa7b836 | ||
|
|
c2fceea2a5 | ||
|
|
190e11f7ea | ||
|
|
ad7413a5ff | ||
|
|
903b9e627a | ||
|
|
c5c1e96cf8 | ||
|
|
62fbb04c9d | ||
|
|
728dc62d0b | ||
|
|
2dfe1b1c6b | ||
|
|
35d4a1a6af | ||
|
|
eb3fa5aa6b | ||
|
|
438384425a | ||
|
|
0b6f102436 | ||
|
|
c9b7ec72d8 | ||
|
|
256c7f1789 | ||
|
|
4e5a323c62 | ||
|
|
f4a3bbd237 | ||
|
|
fe73f2d579 | ||
|
|
f79fcc7073 | ||
|
|
4c4b3790c7 | ||
|
|
bd60b464bb | ||
|
|
6bce852765 | ||
|
|
3b19a5a59d | ||
|
|
f024583011 | ||
|
|
1111baacb2 | ||
|
|
1b9c913efb | ||
|
|
3524c36e1b | ||
|
|
cf87cea9f8 | ||
|
|
bfa34404b8 | ||
|
|
0aba5f35bf | ||
|
|
663bc0842a | ||
|
|
7d10c96e73 | ||
|
|
6b2720fab0 | ||
|
|
e74ad5132a | ||
|
|
1f6f89c1fd | ||
|
|
4d55e60980 | ||
|
|
ddaaccd5af | ||
|
|
c20b7dac3d | ||
|
|
1f779d5094 | ||
|
|
715401ca8e | ||
|
|
e7cd922d8b | ||
|
|
187feee0c1 | ||
|
|
49e962a7dc | ||
|
|
633ff601e5 | ||
|
|
331cf37054 | ||
|
|
23e4b9002f | ||
|
|
c0de3c8053 | ||
|
|
a82a3b084a | ||
|
|
67c298e66b | ||
|
|
c110ccb9ae | ||
|
|
0143380306 | ||
|
|
af9000d3c8 | ||
|
|
097d798e5e | ||
|
|
1d9f9f221a | ||
|
|
214a367f48 | ||
|
|
2fb46551a2 | ||
|
|
6bcf330ae0 | ||
|
|
2075a8b18c | ||
|
|
1275ac6c42 | ||
|
|
708f20b7af | ||
|
|
a2c0c708e8 | ||
|
|
2f2c65d91e | ||
|
|
cd5fcc7ca7 | ||
|
|
aa29e7be48 | ||
|
|
93febe34b0 | ||
|
|
f086e6d3c1 | ||
|
|
22e51e1c96 | ||
|
|
63a5336f31 | ||
|
|
bfc6c53cc5 | ||
|
|
236017f310 | ||
|
|
0a1d9b4dfd | ||
|
|
b50d090946 | ||
|
|
00b5db52cf | ||
|
|
24cb30e2c5 | ||
|
|
4549145ab5 | ||
|
|
67b0217754 | ||
|
|
ccae9efdf0 | ||
|
|
59d596b222 | ||
|
|
4878eb2c45 | ||
|
|
7755392f57 | ||
|
|
dc2ea20959 | ||
|
|
8eaea2bd17 | ||
|
|
58e559918f | ||
|
|
f38a3fca5b | ||
|
|
1ea145b384 | ||
|
|
0d9567575a | ||
|
|
e82f176289 | ||
|
|
d4b51c040e | ||
|
|
125d0efbd8 | ||
|
|
3215afc504 | ||
|
|
c73ff3ce1b | ||
|
|
f9c159a051 | ||
|
|
2ab1325c90 | ||
|
|
5b0f7ff506 | ||
|
|
9269bc84f2 | ||
|
|
4e8b651e18 | ||
|
|
65b4f79534 | ||
|
|
5dd43dbc45 | ||
|
|
5f73074c7e | ||
|
|
f5d6ba27b2 | ||
|
|
73fa70b41f | ||
|
|
2a1cda42e7 | ||
|
|
1bd7e31466 | ||
|
|
eb49e1fb4a | ||
|
|
9838c2f0ce | ||
|
|
6041df8370 | ||
|
|
2933dce3ef | ||
|
|
dab377d37b | ||
|
|
f35e41baf1 | ||
|
|
c4083a2942 | ||
|
|
36c20bbe53 | ||
|
|
e34634f5af | ||
|
|
cba9e5b669 | ||
|
|
1f3c46a6b0 | ||
|
|
799a5ffa47 | ||
|
|
b000707c10 | ||
|
|
feba4de1d6 | ||
|
|
951fdb27ca | ||
|
|
9697fb3d84 | ||
|
|
2dbed4500a | ||
|
|
fd9d0e433d | ||
|
|
f096f3ef81 | ||
|
|
cc4a063695 | ||
|
|
b64cabc3c9 | ||
|
|
3dd460717c | ||
|
|
bf658a522b | ||
|
|
e9be7e712d | ||
|
|
e40cd2a809 | ||
|
|
dbabeb9692 | ||
|
|
8dd37d76b0 | ||
|
|
fd475aa358 | ||
|
|
f0988c0e32 | ||
|
|
0632f09bff | ||
|
|
ba599aaca0 | ||
|
|
ff05919e89 | ||
|
|
52e63fa101 | ||
|
|
96ceccd12a | ||
|
|
87994fe006 | ||
|
|
fa12c81a03 | ||
|
|
344ce63455 | ||
|
|
ec4daacf9e | ||
|
|
f3e8308718 | ||
|
|
515ac5d941 | ||
|
|
954c7e7e50 | ||
|
|
67ff57f3a3 | ||
|
|
c10c70c1e5 | ||
|
|
04592a98d2 | ||
|
|
c9c4aac6cf | ||
|
|
8b2c7586ce | ||
|
|
32e22dfe84 | ||
|
|
d70b885722 | ||
|
|
ac6c4b13f5 | ||
|
|
ececdad22d | ||
|
|
bf659781b0 | ||
|
|
2c6bb195a4 | ||
|
|
c032cd08b3 | ||
|
|
39e7a7a231 | ||
|
|
6e14cd2c39 | ||
|
|
aab3baaea7 | ||
|
|
b8453c3b4f | ||
|
|
6ce0e2cd5b | ||
|
|
76beaae7f2 | ||
|
|
c1a7f9edbe | ||
|
|
b5f2fe2f0a | ||
|
|
98a90d49cb | ||
|
|
f55e982cb5 | ||
|
|
686c7defeb | ||
|
|
0b1e483c53 | ||
|
|
457d7df129 | ||
|
|
ce776a547c | ||
|
|
ded0567cbf | ||
|
|
c9cac83d09 | ||
|
|
4fbe6b01a8 | ||
|
|
ee9585264e | ||
|
|
c9ffead7bf | ||
|
|
ed69d42005 | ||
|
|
0b47ee306b | ||
|
|
e4e63619d4 | ||
|
|
f32cca292a | ||
|
|
e87ea19ff1 | ||
|
|
0214793740 | ||
|
|
fc9dd5d743 | ||
|
|
9e6d5dd2b9 | ||
|
|
bdad197e2c | ||
|
|
7e139288a6 | ||
|
|
6e7935abaf | ||
|
|
3ba0cc20f1 | ||
|
|
dd28de1796 | ||
|
|
9eecc9e19a | ||
|
|
6530cb6b05 | ||
|
|
41ce613379 | ||
|
|
5e2785caba | ||
|
|
d7cc000976 | ||
|
|
50d8ff95ae | ||
|
|
b2de1459b6 | ||
|
|
f0ffbea0b2 | ||
|
|
199ccca0fe | ||
|
|
1d9b355743 | ||
|
|
f0437fbb07 | ||
|
|
abc404a5b7 | ||
|
|
04b9e21330 | ||
|
|
1044aa071b | ||
|
|
4c3192c8cc | ||
|
|
689e77a025 | ||
|
|
3bd89403d2 | ||
|
|
b4800d9bcb | ||
|
|
05485e8539 | ||
|
|
0e03dc0868 | ||
|
|
352b1ed10a | ||
|
|
0db1244d04 | ||
|
|
ece08b8179 | ||
|
|
b8945ae233 | ||
|
|
dcaf7b0a20 | ||
|
|
f982cdc178 | ||
|
|
b265e59834 | ||
|
|
4a843a6624 | ||
|
|
241ef5b99d | ||
|
|
f39f575a9c | ||
|
|
1521307f1e | ||
|
|
dd122111e6 | ||
|
|
00c177fa74 | ||
|
|
f6c7e49eb8 | ||
|
|
1a8dc3d18a | ||
|
|
38a163a09a | ||
|
|
8f031246d2 | ||
|
|
8f3d97dde7 | ||
|
|
4acaf24d65 | ||
|
|
9a8dbbbcf8 | ||
|
|
a3efc4c726 | ||
|
|
0278bf328f | ||
|
|
17ddd96cc6 | ||
|
|
0e82e79aea | ||
|
|
30f124c061 | ||
|
|
e19d90fcfc | ||
|
|
184bbdd23d | ||
|
|
30b50aec95 | ||
|
|
c3c3d81db1 | ||
|
|
49b7231283 | ||
|
|
edbedcdad3 | ||
|
|
e4ae5f74e6 | ||
|
|
2c7ffe08d7 | ||
|
|
3ca46bae46 | ||
|
|
7e82aaf843 | ||
|
|
315bd71adf | ||
|
|
2c612c9aeb | ||
|
|
36aee085f7 | ||
|
|
d01bb69a9c | ||
|
|
c9b1c48c72 | ||
|
|
aea3843cf2 | ||
|
|
131b6f4b9a | ||
|
|
6efb8b735a | ||
|
|
223b7af2ce | ||
|
|
e72c2a6982 | ||
|
|
dd9b93970e | ||
|
|
e4c7cd81a9 | ||
|
|
12b3a62586 | ||
|
|
2da3bdcd47 | ||
|
|
c1dccbe0ba | ||
|
|
9629fcde68 | ||
|
|
cae436b566 | ||
|
|
01714700ae | ||
|
|
51e6c4852b | ||
|
|
b206c5d64e | ||
|
|
62c3272351 | ||
|
|
c5d822c70a | ||
|
|
9c09b4061a | ||
|
|
c26fb43ced | ||
|
|
deb8f20db6 | ||
|
|
50e18ed8ff | ||
|
|
31f3895f40 | ||
|
|
615929268a | ||
|
|
b8b15814cf | ||
|
|
7766fffe83 | ||
|
|
2a16c150d1 | ||
|
|
418c2166cc | ||
|
|
a4dd44f648 | ||
|
|
5352f7cda7 | ||
|
|
5533b47099 | ||
|
|
e9b14464ee | ||
|
|
4e986e5cd1 | ||
|
|
8a59b40c53 | ||
|
|
391caca043 | ||
|
|
171ce348d6 | ||
|
|
c2cc729135 | ||
|
|
e7e71b76f0 | ||
|
|
a2af61cf6f | ||
|
|
e111edd5e4 | ||
|
|
3375377371 | ||
|
|
0ced020c67 | ||
|
|
c0d7aa9e4a | ||
|
|
e5b3d2a312 | ||
|
|
7b4a794981 | ||
|
|
86a859de17 | ||
|
|
b3aaa7bd0f | ||
|
|
a90586e6a8 | ||
|
|
807f272895 | ||
|
|
f050647b43 | ||
|
|
73baebbd16 | ||
|
|
f327f698b9 | ||
|
|
8164910fe8 | ||
|
|
3498644055 | ||
|
|
d31116b54c | ||
|
|
aced110cdf | ||
|
|
e9ab6aec77 | ||
|
|
15b261c861 | ||
|
|
970badce66 | ||
|
|
64304a9d65 | ||
|
|
d1983553d2 | ||
|
|
6b15df3bcd | ||
|
|
730b1fff71 | ||
|
|
c3add751e5 | ||
|
|
9da2dbdc1c | ||
|
|
977f09c470 | ||
|
|
4d0c6a8802 | ||
|
|
5345565037 | ||
|
|
be38c27c64 | ||
|
|
82a0401099 | ||
|
|
33bea1b663 | ||
|
|
f083acd46d | ||
|
|
5aacd15272 | ||
|
|
cb7674b091 | ||
|
|
3899c7ad56 | ||
|
|
d2debced09 | ||
|
|
b86c0ddc48 | ||
|
|
ba36f33bd8 | ||
|
|
49368a10ba | ||
|
|
ac1568cacf | ||
|
|
862ca3439d | ||
|
|
fdd4f9f2aa | ||
|
|
aa2dc49ebe | ||
|
|
cc23b7ee74 | ||
|
|
f6f9fc5a45 | ||
|
|
26c8589399 | ||
|
|
c2469935cb | ||
|
|
5e7c20955e | ||
|
|
967fa38108 | ||
|
|
280fe8e36b | ||
|
|
03ca96ccc3 | ||
|
|
b5b8a2c9d5 | ||
|
|
0008832730 | ||
|
|
c9b385db4b | ||
|
|
c951b66ae0 | ||
|
|
de735f3a45 | ||
|
|
19161425f3 | ||
|
|
c69e8d5bf4 | ||
|
|
3d3bce2788 | ||
|
|
1cb0dc7f8e | ||
|
|
cd5c56e601 | ||
|
|
8c979905e4 | ||
|
|
4d69f15f48 | ||
|
|
083f6572f7 | ||
|
|
4e7dd75266 | ||
|
|
3eb83f449b | ||
|
|
d31f69117b | ||
|
|
f5f9e3ac97 | ||
|
|
598d6c598c | ||
|
|
744727087a | ||
|
|
f93212a665 | ||
|
|
6dade82d2c | ||
|
|
6b737bf1d7 | ||
|
|
94dbd70677 | ||
|
|
527ae0348e | ||
|
|
79629c430a | ||
|
|
908dd61be5 | ||
|
|
88f77b8cca | ||
|
|
1e846657d1 | ||
|
|
ce70f62a88 | ||
|
|
bca0cdbb62 | ||
|
|
1ee11e04e6 | ||
|
|
6eef44f212 | ||
|
|
8bd94f4a1c | ||
|
|
4bc4701372 | ||
|
|
dfd89b503a | ||
|
|
060dc54832 | ||
|
|
f7a4ea5793 | ||
|
|
71b478e6e2 | ||
|
|
ed8fff8c52 | ||
|
|
95dc78db10 | ||
|
|
addeac64c7 | ||
|
|
d77ec22007 | ||
|
|
20030c91b7 | ||
|
|
8b366e255c | ||
|
|
6da366fcb0 | ||
|
|
2fa35f851e | ||
|
|
e4ca4260bb | ||
|
|
b69aace8d8 | ||
|
|
79097bb43c | ||
|
|
806fac1742 | ||
|
|
4f97d7cf8d | ||
|
|
42acc457af | ||
|
|
c02920607f | ||
|
|
452885c271 | ||
|
|
5c242a07b6 | ||
|
|
088899d59f | ||
|
|
1faff2a37e | ||
|
|
23c8d3d045 | ||
|
|
a033388d2b | ||
|
|
82fe45ac56 | ||
|
|
bcb7fcda6b | ||
|
|
726a98100b | ||
|
|
2f021a0c2b | ||
|
|
eb05cb6c6e | ||
|
|
7530af95da | ||
|
|
8399e95bda | ||
|
|
3b4dfe326f | ||
|
|
2e787a254e | ||
|
|
f888bed1a6 | ||
|
|
d865e9f35a | ||
|
|
fc7fe70f66 | ||
|
|
5aff39d2b2 | ||
|
|
d1be37a04a | ||
|
|
b0fd8bf7d4 | ||
|
|
b9cf8f3973 | ||
|
|
4588f11613 | ||
|
|
1a618c3c97 | ||
|
|
d500a51d97 | ||
|
|
734e9d3874 | ||
|
|
bd5cfc2f1b | ||
|
|
89f88ee78c | ||
|
|
b2ae14695a | ||
|
|
19d86b44d9 | ||
|
|
85be62e38b | ||
|
|
80f3d90200 | ||
|
|
0249fa6e75 | ||
|
|
2d0696e048 | ||
|
|
ff32ec515e | ||
|
|
a6935b0293 | ||
|
|
63eb08ba9f | ||
|
|
e5b67d2b3a | ||
|
|
9e10af6885 | ||
|
|
42bc9115d2 | ||
|
|
0a569ce413 | ||
|
|
9a16639a61 | ||
|
|
57953c68c6 | ||
|
|
088d08963f | ||
|
|
7bc8196821 | ||
|
|
7715299dd3 | ||
|
|
b8ac9b7994 | ||
|
|
98e7d8f728 | ||
|
|
e7fd871ffe | ||
|
|
14aab62f32 | ||
|
|
cb81fe962c | ||
|
|
fc970d2dea | ||
|
|
b0e203d1f9 | ||
|
|
37cef05b19 | ||
|
|
5886a42901 | ||
|
|
2fd99f807d | ||
|
|
3d4cbd7d10 | ||
|
|
f10d03c238 | ||
|
|
f9a66ffb0e | ||
|
|
777a50063d | ||
|
|
0bb9154747 | ||
|
|
30c3f45072 | ||
|
|
0d5ca67f32 | ||
|
|
4a8bf6aebd | ||
|
|
b11db090d8 | ||
|
|
189391fccd | ||
|
|
86d4c43909 | ||
|
|
5994f40982 | ||
|
|
076d32dee5 | ||
|
|
16c8e38ecd | ||
|
|
eacbcda8e5 | ||
|
|
59be76cd44 | ||
|
|
5bb0e7e8b3 | ||
|
|
b78d207121 | ||
|
|
0fcbcdd08c | ||
|
|
ed6c683922 | ||
|
|
9fe1edb02b | ||
|
|
fb3811a708 | ||
|
|
18f8658eec | ||
|
|
3ead4676b0 | ||
|
|
d30001d23d | ||
|
|
06bbf0d656 | ||
|
|
6ddd952e04 | ||
|
|
027ad0c3ee | ||
|
|
3abad2b87b | ||
|
|
32a1c7c5d5 | ||
|
|
f06e165bd4 | ||
|
|
1c843b24f7 | ||
|
|
2ace9ed380 | ||
|
|
5f30c0ae03 | ||
|
|
ef60adf7e2 | ||
|
|
7354b462e8 | ||
|
|
da904d6be8 | ||
|
|
c5fbbbbb5c | ||
|
|
5010387d8a | ||
|
|
f00c54a7fb | ||
|
|
9f52c169d0 | ||
|
|
bf18339404 | ||
|
|
2ad12b074b | ||
|
|
a6788ffe8d | ||
|
|
0e884df486 | ||
|
|
ef1c55286f | ||
|
|
abc0424c26 | ||
|
|
44e5c82e6d | ||
|
|
5849c446ed | ||
|
|
12b7317831 | ||
|
|
fe323f59af | ||
|
|
a00e56f219 | ||
|
|
1a7852794f | ||
|
|
22b1373a57 | ||
|
|
17d78b1469 | ||
|
|
4d8b32b249 | ||
|
|
b65bea2550 | ||
|
|
0b52ccd200 | ||
|
|
3006a07059 | ||
|
|
801dbc7a9a | ||
|
|
4f4e895fb7 | ||
|
|
cc57c3b655 | ||
|
|
ca6ec9c5c7 | ||
|
|
633b1f0a78 | ||
|
|
6136b9bf9c | ||
|
|
524a3ba566 | ||
|
|
58580320f9 | ||
|
|
759b0a994d | ||
|
|
d2800473e4 | ||
|
|
f5b1a2065e | ||
|
|
5e62532295 | ||
|
|
c1bee96c40 | ||
|
|
f273253a2b | ||
|
|
012bbcf770 | ||
|
|
b54cb47b2e | ||
|
|
1b15f43745 | ||
|
|
96771bf1bd | ||
|
|
580078bddb | ||
|
|
c5c7080ec6 | ||
|
|
408339b51d | ||
|
|
02e3d44998 | ||
|
|
156f13ded1 | ||
|
|
d288467cb7 | ||
|
|
21662c9f3f | ||
|
|
9149fe6cdd | ||
|
|
9a146192b7 | ||
|
|
3a9d3b7b61 | ||
|
|
f03f0973ab | ||
|
|
7ec0881e8c | ||
|
|
59e1ab42ff | ||
|
|
722216b901 | ||
|
|
bd8f3dc368 | ||
|
|
33cd94a141 | ||
|
|
053ac74734 | ||
|
|
cced99fafa | ||
|
|
a009ff53f7 | ||
|
|
ca16c4108d | ||
|
|
d1b6c67dc3 | ||
|
|
a61f8133d5 | ||
|
|
38d797a544 | ||
|
|
16c1877f50 | ||
|
|
da5f15a778 | ||
|
|
396c64ecf7 | ||
|
|
252c3a7985 | ||
|
|
a3ecbf0ae7 | ||
|
|
314327d8f2 | ||
|
|
bfacd06929 | ||
|
|
4f5e8f8cf5 | ||
|
|
1fbb4c09cc | ||
|
|
b332e1992b | ||
|
|
5955940b82 | ||
|
|
231a03bcfd | ||
|
|
bc85723657 | ||
|
|
be32b743c6 | ||
|
|
83c9843059 | ||
|
|
11cf43626d | ||
|
|
a6dc5e2ce3 | ||
|
|
38593a0394 | ||
|
|
95309afeea | ||
|
|
c2bf6fe2a3 | ||
|
|
99ac324fbd | ||
|
|
5562de330f | ||
|
|
95014236ac | ||
|
|
6aa7386138 | ||
|
|
3226a1f588 | ||
|
|
b4cf890cd8 | ||
|
|
ce09e323af | ||
|
|
941aedb177 | ||
|
|
87a0d502a3 | ||
|
|
cab7c1b0b8 | ||
|
|
d5892341b6 | ||
|
|
646557a43e | ||
|
|
ed8d34ab43 | ||
|
|
5e34463c77 | ||
|
|
1b14eb7959 | ||
|
|
ed48c2d0ed | ||
|
|
26fe84b660 | ||
|
|
5938230270 | ||
|
|
1a33a047fa | ||
|
|
43a8bcefb9 | ||
|
|
2e740e513f | ||
|
|
8a21a86b61 | ||
|
|
f600116205 | ||
|
|
1c03705de8 | ||
|
|
f7e461fac6 | ||
|
|
03ce6c97ff | ||
|
|
ffd9e76e07 | ||
|
|
fc49cb1e67 | ||
|
|
f5712d9f25 | ||
|
|
161d57bdda | ||
|
|
bae0d440bf | ||
|
|
fff052dde1 | ||
|
|
73b06eaa02 | ||
|
|
08a8ebed17 | ||
|
|
74d07426b3 | ||
|
|
69a2bba99a | ||
|
|
4d685d78ee | ||
|
|
5845ec3f49 | ||
|
|
13373426fe | ||
|
|
8e55551a06 | ||
|
|
12a3f0ac31 | ||
|
|
18e33edc88 | ||
|
|
c72c5ad4ee | ||
|
|
0fbc81ab2f | ||
|
|
af0a34cf82 | ||
|
|
b4590c5398 | ||
|
|
f787a66230 | ||
|
|
b21a99fd62 | ||
|
|
eb16306cde | ||
|
|
7bc23687e3 | ||
|
|
e1eaa057f2 | ||
|
|
97c264ca3e | ||
|
|
cf848ab1f7 | ||
|
|
cf83f9b0fd | ||
|
|
d98e361083 | ||
|
|
ce7f5309c7 | ||
|
|
75c485ced7 | ||
|
|
9c6e2ec012 | ||
|
|
1a02948a61 | ||
|
|
8b05ba4ba1 | ||
|
|
21e2874cb7 | ||
|
|
360ed5c46c | ||
|
|
5099bc365d | ||
|
|
12986da147 | ||
|
|
23e72797bc | ||
|
|
ac7b6f8f55 | ||
|
|
981b9ff11e | ||
|
|
4186906f4c | ||
|
|
0850d24e0c | ||
|
|
7ab8334c96 | ||
|
|
a4d7329ab7 | ||
|
|
3f4eae6bce | ||
|
|
518cf4be57 | ||
|
|
71096182be | ||
|
|
6452e927ea | ||
|
|
bc70cfa6f0 | ||
|
|
2b6e5ebd2d | ||
|
|
c761bd799a | ||
|
|
2f7c2fdee4 | ||
|
|
70a76ec343 | ||
|
|
7c3f64abf2 | ||
|
|
f5f38f195c | ||
|
|
7e84f4f015 | ||
|
|
4802f8cf07 | ||
|
|
cc05e67d8f | ||
|
|
2b6b174517 | ||
|
|
a1d05e6e12 | ||
|
|
f95ceb6a9b | ||
|
|
8f91b0726d | ||
|
|
97807f4383 | ||
|
|
5f42237f2c | ||
|
|
68289cfa54 | ||
|
|
42ea30270f | ||
|
|
ebbbbf3d82 | ||
|
|
27516e2d16 | ||
|
|
84bb6f915e | ||
|
|
46752f758a | ||
|
|
34c4c22e61 | ||
|
|
af2d0b8421 | ||
|
|
638b05a49a | ||
|
|
7a13e8a7fc | ||
|
|
d9fa74711d | ||
|
|
41867f578f | ||
|
|
0bf41ed4ef | ||
|
|
d080b4a731 | ||
|
|
ca4232ada9 | ||
|
|
ad348f91c9 | ||
|
|
990f915f42 | ||
|
|
53d720217b | ||
|
|
7a06ff480d | ||
|
|
3ef551f788 | ||
|
|
f0125cdc36 | ||
|
|
ed5f6736df | ||
|
|
15d8be0fae | ||
|
|
46f3e61360 | ||
|
|
87ad8c98d4 | ||
|
|
9bbdc4100f | ||
|
|
c80307e8ff | ||
|
|
c1d77e1041 | ||
|
|
d9e83650dc | ||
|
|
f6d635acd9 | ||
|
|
0dbd8a01ff | ||
|
|
8d755d41e0 | ||
|
|
190473bd32 | ||
|
|
030d1ec254 | ||
|
|
5a2b91a084 | ||
|
|
a50a05e4e7 | ||
|
|
6cb5a87c79 | ||
|
|
b9f89ca552 | ||
|
|
26c9fd5dea | ||
|
|
e81a9b6fe0 | ||
|
|
452450e451 | ||
|
|
419dd2d1c7 | ||
|
|
ee86b06676 | ||
|
|
953183f16d | ||
|
|
228f71708b | ||
|
|
621471a7cb | ||
|
|
8b58e951e3 | ||
|
|
1db489a0aa | ||
|
|
be65c3c6cf | ||
|
|
46e7fa31fe | ||
|
|
66e21bd499 | ||
|
|
8cab4c01fd | ||
|
|
d52038366b | ||
|
|
4fcfd87f5b | ||
|
|
f893c6baa4 | ||
|
|
9a45549b66 | ||
|
|
ae3a01038b | ||
|
|
e47a2a4ca2 | ||
|
|
95ea6d5f78 | ||
|
|
7d290f6b8f | ||
|
|
9db617ed5a | ||
|
|
514456940a | ||
|
|
33feefd9cd | ||
|
|
65e14cf348 | ||
|
|
1d61bcc4f3 | ||
|
|
c38bbaca3c | ||
|
|
246d245ebc | ||
|
|
f269a710e2 | ||
|
|
051998429c | ||
|
|
432cdd640f | ||
|
|
9ed9b0964e | ||
|
|
6a97b3526d | ||
|
|
451d757996 | ||
|
|
f9e9eba3b1 | ||
|
|
2a9a6aebd9 | ||
|
|
adbb6c449e | ||
|
|
3993605324 | ||
|
|
0ae574ec2c | ||
|
|
c56ded828c | ||
|
|
02c7061945 | ||
|
|
9209e44cd3 | ||
|
|
ebed37394e | ||
|
|
4c7a2a7ec3 | ||
|
|
0a25a88a34 | ||
|
|
6aa9025347 | ||
|
|
a918cc67eb | ||
|
|
08f4695283 | ||
|
|
44e76d5eeb | ||
|
|
cfa36fd279 | ||
|
|
3d4166e006 | ||
|
|
07bac1c592 | ||
|
|
755f2ce1ba | ||
|
|
cca2844deb | ||
|
|
24a2f760b7 | ||
|
|
79bbd8fe38 | ||
|
|
35dce1e3e4 | ||
|
|
f886fdf913 | ||
|
|
4476f2f0da | ||
|
|
160f161700 | ||
|
|
c164fc58a2 | ||
|
|
0c625a4e62 | ||
|
|
bf3941cf7a | ||
|
|
3649e8288a | ||
|
|
9a45e26026 | ||
|
|
e65f127571 | ||
|
|
3bfc699787 | ||
|
|
955318428a | ||
|
|
f6279b356a | ||
|
|
4cc3cdc989 | ||
|
|
f9aa20a3ad | ||
|
|
129d33f1a0 | ||
|
|
1ad7a3f378 | ||
|
|
b533be8818 | ||
|
|
fb729e5166 | ||
|
|
d337ecdb20 | ||
|
|
5f1f0a48b0 | ||
|
|
e0f1cb94a5 | ||
|
|
a362ee2246 | ||
|
|
19f23c686e | ||
|
|
23b20ff4a6 | ||
|
|
72574da834 | ||
|
|
d5a79455d1 | ||
|
|
070d4b9da9 | ||
|
|
0ace22fffe | ||
|
|
9e483d7694 | ||
|
|
26458b7a06 | ||
|
|
b6a4604952 | ||
|
|
af752fbbc2 | ||
|
|
279c9d706a | ||
|
|
806e7b5530 | ||
|
|
f3dc6a217b | ||
|
|
7671d791fa | ||
|
|
8cd84608a5 | ||
|
|
980c6fc810 | ||
|
|
fb40a484c5 | ||
|
|
daa9dedcaa | ||
|
|
0d634345ac | ||
|
|
e648252479 | ||
|
|
179d7a9ad8 | ||
|
|
19bc962ad5 | ||
|
|
27cce086c6 | ||
|
|
fec0c620d4 | ||
|
|
05a1a31cab | ||
|
|
d020527c6f | ||
|
|
4451485664 | ||
|
|
a4e1a3738a | ||
|
|
4339dbeb8d | ||
|
|
5b0605774c | ||
|
|
e3684e25f8 | ||
|
|
1359213196 | ||
|
|
03efc6a169 | ||
|
|
15b5982211 | ||
|
|
0eb3a5d387 | ||
|
|
7f8777389c | ||
|
|
4eb20f10ad | ||
|
|
daa11df558 | ||
|
|
1bb0db30a0 | ||
|
|
02910b0020 | ||
|
|
23b8901c9c | ||
|
|
99f6ed0cd7 | ||
|
|
890c310880 | ||
|
|
0194eeb31f | ||
|
|
f9be4c62b1 | ||
|
|
027e8c18f1 | ||
|
|
4a3bb35a95 | ||
|
|
4bfb0d4494 | ||
|
|
7e0ef03a1e | ||
|
|
f7dbd95a54 |
40
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
40
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: ''
|
||||
labels: bug
|
||||
assignees: '9001'
|
||||
|
||||
---
|
||||
|
||||
NOTE:
|
||||
all of the below are optional, consider them as inspiration, delete and rewrite at will, thx md
|
||||
|
||||
|
||||
**Describe the bug**
|
||||
a description of what the bug is
|
||||
|
||||
**To Reproduce**
|
||||
List of steps to reproduce the issue, or, if it's hard to reproduce, then at least a detailed explanation of what you did to run into it
|
||||
|
||||
**Expected behavior**
|
||||
a description of what you expected to happen
|
||||
|
||||
**Screenshots**
|
||||
if applicable, add screenshots to help explain your problem, such as the kickass crashpage :^)
|
||||
|
||||
**Server details**
|
||||
if the issue is possibly on the server-side, then mention some of the following:
|
||||
* server OS / version:
|
||||
* python version:
|
||||
* copyparty arguments:
|
||||
* filesystem (`lsblk -f` on linux):
|
||||
|
||||
**Client details**
|
||||
if the issue is possibly on the client-side, then mention some of the following:
|
||||
* the device type and model:
|
||||
* OS version:
|
||||
* browser version:
|
||||
|
||||
**Additional context**
|
||||
any other context about the problem here
|
||||
22
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
22
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: ''
|
||||
labels: enhancement
|
||||
assignees: '9001'
|
||||
|
||||
---
|
||||
|
||||
all of the below are optional, consider them as inspiration, delete and rewrite at will
|
||||
|
||||
**is your feature request related to a problem? Please describe.**
|
||||
a description of what the problem is, for example, `I'm always frustrated when [...]` or `Why is it not possible to [...]`
|
||||
|
||||
**Describe the idea / solution you'd like**
|
||||
a description of what you want to happen
|
||||
|
||||
**Describe any alternatives you've considered**
|
||||
a description of any alternative solutions or features you've considered
|
||||
|
||||
**Additional context**
|
||||
add any other context or screenshots about the feature request here
|
||||
10
.github/ISSUE_TEMPLATE/something-else.md
vendored
Normal file
10
.github/ISSUE_TEMPLATE/something-else.md
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
---
|
||||
name: Something else
|
||||
about: "┐(゚∀゚)┌"
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
|
||||
7
.github/branch-rename.md
vendored
Normal file
7
.github/branch-rename.md
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
modernize your local checkout of the repo like so,
|
||||
```sh
|
||||
git branch -m master hovudstraum
|
||||
git fetch origin
|
||||
git branch -u origin/hovudstraum hovudstraum
|
||||
git remote set-head origin -a
|
||||
```
|
||||
16
.gitignore
vendored
16
.gitignore
vendored
@@ -5,12 +5,16 @@ __pycache__/
|
||||
MANIFEST.in
|
||||
MANIFEST
|
||||
copyparty.egg-info/
|
||||
buildenv/
|
||||
build/
|
||||
dist/
|
||||
sfx/
|
||||
.venv/
|
||||
|
||||
/buildenv/
|
||||
/build/
|
||||
/dist/
|
||||
/py2/
|
||||
/sfx*
|
||||
/unt/
|
||||
/log/
|
||||
|
||||
# ide
|
||||
*.sublime-workspace
|
||||
|
||||
@@ -20,3 +24,7 @@ sfx/
|
||||
# derived
|
||||
copyparty/web/deps/
|
||||
srv/
|
||||
|
||||
# state/logs
|
||||
up.*.txt
|
||||
.hist/
|
||||
2
.vscode/launch.json
vendored
2
.vscode/launch.json
vendored
@@ -17,7 +17,7 @@
|
||||
"-mtp",
|
||||
".bpm=f,bin/mtag/audio-bpm.py",
|
||||
"-aed:wark",
|
||||
"-vsrv::r:aed:cnodupe",
|
||||
"-vsrv::r:rw,ed:c,dupe",
|
||||
"-vdist:dist:r"
|
||||
]
|
||||
},
|
||||
|
||||
26
.vscode/settings.json
vendored
26
.vscode/settings.json
vendored
@@ -23,7 +23,6 @@
|
||||
"terminal.ansiBrightWhite": "#ffffff",
|
||||
},
|
||||
"python.testing.pytestEnabled": false,
|
||||
"python.testing.nosetestsEnabled": false,
|
||||
"python.testing.unittestEnabled": true,
|
||||
"python.testing.unittestArgs": [
|
||||
"-v",
|
||||
@@ -35,18 +34,40 @@
|
||||
"python.linting.pylintEnabled": true,
|
||||
"python.linting.flake8Enabled": true,
|
||||
"python.linting.banditEnabled": true,
|
||||
"python.linting.mypyEnabled": true,
|
||||
"python.linting.mypyArgs": [
|
||||
"--ignore-missing-imports",
|
||||
"--follow-imports=silent",
|
||||
"--show-column-numbers",
|
||||
"--strict"
|
||||
],
|
||||
"python.linting.flake8Args": [
|
||||
"--max-line-length=120",
|
||||
"--ignore=E722,F405,E203,W503,W293,E402",
|
||||
"--ignore=E722,F405,E203,W503,W293,E402,E501,E128",
|
||||
],
|
||||
"python.linting.banditArgs": [
|
||||
"--ignore=B104"
|
||||
],
|
||||
"python.linting.pylintArgs": [
|
||||
"--disable=missing-module-docstring",
|
||||
"--disable=missing-class-docstring",
|
||||
"--disable=missing-function-docstring",
|
||||
"--disable=wrong-import-position",
|
||||
"--disable=raise-missing-from",
|
||||
"--disable=bare-except",
|
||||
"--disable=invalid-name",
|
||||
"--disable=line-too-long",
|
||||
"--disable=consider-using-f-string"
|
||||
],
|
||||
// python3 -m isort --py=27 --profile=black copyparty/
|
||||
"python.formatting.provider": "black",
|
||||
"editor.formatOnSave": true,
|
||||
"[html]": {
|
||||
"editor.formatOnSave": false,
|
||||
},
|
||||
"[css]": {
|
||||
"editor.formatOnSave": false,
|
||||
},
|
||||
"files.associations": {
|
||||
"*.makefile": "makefile"
|
||||
},
|
||||
@@ -55,4 +76,5 @@
|
||||
"py27"
|
||||
],
|
||||
"python.linting.enabled": true,
|
||||
"python.pythonPath": "/usr/bin/python3"
|
||||
}
|
||||
24
CODE_OF_CONDUCT.md
Normal file
24
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1,24 @@
|
||||
in the words of Abraham Lincoln:
|
||||
|
||||
> Be excellent to each other... and... PARTY ON, DUDES!
|
||||
|
||||
more specifically I'll paraphrase some examples from a german automotive corporation as they cover all the bases without being too wordy
|
||||
|
||||
## Examples of unacceptable behavior
|
||||
* intimidation, harassment, trolling
|
||||
* insulting, derogatory, harmful or prejudicial comments
|
||||
* posting private information without permission
|
||||
* political or personal attacks
|
||||
|
||||
## Examples of expected behavior
|
||||
* being nice, friendly, welcoming, inclusive, mindful and empathetic
|
||||
* acting considerate, modest, respectful
|
||||
* using polite and inclusive language
|
||||
* criticize constructively and accept constructive criticism
|
||||
* respect different points of view
|
||||
|
||||
## finally and even more specifically,
|
||||
* parse opinions and feedback objectively without prejudice
|
||||
* it's the message that matters, not who said it
|
||||
|
||||
aaand that's how you say `be nice` in a way that fills half a floppy w
|
||||
3
CONTRIBUTING.md
Normal file
3
CONTRIBUTING.md
Normal file
@@ -0,0 +1,3 @@
|
||||
* do something cool
|
||||
|
||||
really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight
|
||||
@@ -1,3 +1,16 @@
|
||||
# [`up2k.py`](up2k.py)
|
||||
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
|
||||
* file uploads, file-search, autoresume of aborted/broken uploads
|
||||
* faster than browsers
|
||||
* if something breaks just restart it
|
||||
|
||||
|
||||
# [`partyjournal.py`](partyjournal.py)
|
||||
produces a chronological list of all uploads by collecting info from up2k databases and the filesystem
|
||||
* outputs a standalone html file
|
||||
* optional mapping from IP-addresses to nicknames
|
||||
|
||||
|
||||
# [`copyparty-fuse.py`](copyparty-fuse.py)
|
||||
* mount a copyparty server as a local filesystem (read-only)
|
||||
* **supports Windows!** -- expect `194 MiB/s` sequential read
|
||||
@@ -47,6 +60,7 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
|
||||
* copyparty can Popen programs like these during file indexing to collect additional metadata
|
||||
|
||||
|
||||
|
||||
# [`dbtool.py`](dbtool.py)
|
||||
upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty is incompatible with the old DB and automatically rebuilds the DB from scratch, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
|
||||
|
||||
@@ -61,3 +75,9 @@ cd /mnt/nas/music/.hist
|
||||
~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy key
|
||||
~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy .bpm -vac
|
||||
```
|
||||
|
||||
|
||||
|
||||
# [`prisonparty.sh`](prisonparty.sh)
|
||||
* run copyparty in a chroot, preventing any accidental file access
|
||||
* creates bindmounts for /bin, /lib, and so on, see `sysdirs=`
|
||||
|
||||
@@ -42,6 +42,7 @@ import threading
|
||||
import traceback
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
import calendar
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
@@ -495,7 +496,7 @@ class Gateway(object):
|
||||
ts = 60 * 60 * 24 * 2
|
||||
try:
|
||||
sz = int(fsize)
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
ts = calendar.timegm(time.strptime(fdate, "%Y-%m-%d %H:%M:%S"))
|
||||
except:
|
||||
info("bad HTML or OS [{}] [{}]".format(fdate, fsize))
|
||||
# python cannot strptime(1959-01-01) on windows
|
||||
|
||||
@@ -22,7 +22,7 @@ dependencies:
|
||||
|
||||
note:
|
||||
you probably want to run this on windows clients:
|
||||
https://github.com/9001/copyparty/blob/master/contrib/explorer-nothumbs-nofoldertypes.reg
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/contrib/explorer-nothumbs-nofoldertypes.reg
|
||||
|
||||
get server cert:
|
||||
awk '/-BEGIN CERTIFICATE-/ {a=1} a; /-END CERTIFICATE-/{exit}' <(openssl s_client -connect 127.0.0.1:3923 </dev/null 2>/dev/null) >cert.pem
|
||||
@@ -45,6 +45,7 @@ import threading
|
||||
import traceback
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
import calendar
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
@@ -71,7 +72,7 @@ except:
|
||||
elif MACOS:
|
||||
libfuse = "install https://osxfuse.github.io/"
|
||||
else:
|
||||
libfuse = "apt install libfuse\n modprobe fuse"
|
||||
libfuse = "apt install libfuse3-3\n modprobe fuse"
|
||||
|
||||
print(
|
||||
"\n could not import fuse; these may help:"
|
||||
@@ -393,15 +394,16 @@ class Gateway(object):
|
||||
|
||||
rsp = json.loads(rsp.decode("utf-8"))
|
||||
ret = []
|
||||
for is_dir, nodes in [[True, rsp["dirs"]], [False, rsp["files"]]]:
|
||||
for statfun, nodes in [
|
||||
[self.stat_dir, rsp["dirs"]],
|
||||
[self.stat_file, rsp["files"]],
|
||||
]:
|
||||
for n in nodes:
|
||||
fname = unquote(n["href"]).rstrip(b"/")
|
||||
fname = fname.decode("wtf-8")
|
||||
fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8")
|
||||
if bad_good:
|
||||
fname = enwin(fname)
|
||||
|
||||
fun = self.stat_dir if is_dir else self.stat_file
|
||||
ret.append([fname, fun(n["ts"], n["sz"]), 0])
|
||||
ret.append([fname, statfun(n["ts"], n["sz"]), 0])
|
||||
|
||||
return ret
|
||||
|
||||
@@ -442,7 +444,7 @@ class Gateway(object):
|
||||
ts = 60 * 60 * 24 * 2
|
||||
try:
|
||||
sz = int(fsize)
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
ts = calendar.timegm(time.strptime(fdate, "%Y-%m-%d %H:%M:%S"))
|
||||
except:
|
||||
info("bad HTML or OS [{}] [{}]".format(fdate, fsize))
|
||||
# python cannot strptime(1959-01-01) on windows
|
||||
|
||||
@@ -11,14 +11,18 @@ import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import json
|
||||
import stat
|
||||
import errno
|
||||
import struct
|
||||
import codecs
|
||||
import platform
|
||||
import threading
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
try:
|
||||
import fuse
|
||||
@@ -38,7 +42,7 @@ except:
|
||||
mount a copyparty server (local or remote) as a filesystem
|
||||
|
||||
usage:
|
||||
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas
|
||||
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas
|
||||
|
||||
dependencies:
|
||||
sudo apk add fuse-dev python3-dev
|
||||
@@ -50,6 +54,10 @@ fork of copyparty-fuse.py based on fuse-python which
|
||||
"""
|
||||
|
||||
|
||||
WINDOWS = sys.platform == "win32"
|
||||
MACOS = platform.system() == "Darwin"
|
||||
|
||||
|
||||
def threadless_log(msg):
|
||||
print(msg + "\n", end="")
|
||||
|
||||
@@ -93,6 +101,41 @@ def html_dec(txt):
|
||||
)
|
||||
|
||||
|
||||
def register_wtf8():
|
||||
def wtf8_enc(text):
|
||||
return str(text).encode("utf-8", "surrogateescape"), len(text)
|
||||
|
||||
def wtf8_dec(binary):
|
||||
return bytes(binary).decode("utf-8", "surrogateescape"), len(binary)
|
||||
|
||||
def wtf8_search(encoding_name):
|
||||
return codecs.CodecInfo(wtf8_enc, wtf8_dec, name="wtf-8")
|
||||
|
||||
codecs.register(wtf8_search)
|
||||
|
||||
|
||||
bad_good = {}
|
||||
good_bad = {}
|
||||
|
||||
|
||||
def enwin(txt):
|
||||
return "".join([bad_good.get(x, x) for x in txt])
|
||||
|
||||
for bad, good in bad_good.items():
|
||||
txt = txt.replace(bad, good)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
def dewin(txt):
|
||||
return "".join([good_bad.get(x, x) for x in txt])
|
||||
|
||||
for bad, good in bad_good.items():
|
||||
txt = txt.replace(good, bad)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
class CacheNode(object):
|
||||
def __init__(self, tag, data):
|
||||
self.tag = tag
|
||||
@@ -115,8 +158,9 @@ class Stat(fuse.Stat):
|
||||
|
||||
|
||||
class Gateway(object):
|
||||
def __init__(self, base_url):
|
||||
def __init__(self, base_url, pw):
|
||||
self.base_url = base_url
|
||||
self.pw = pw
|
||||
|
||||
ui = urllib.parse.urlparse(base_url)
|
||||
self.web_root = ui.path.strip("/")
|
||||
@@ -135,8 +179,7 @@ class Gateway(object):
|
||||
self.conns = {}
|
||||
|
||||
def quotep(self, path):
|
||||
# TODO: mojibake support
|
||||
path = path.encode("utf-8", "ignore")
|
||||
path = path.encode("wtf-8")
|
||||
return quote(path, safe="/")
|
||||
|
||||
def getconn(self, tid=None):
|
||||
@@ -159,20 +202,29 @@ class Gateway(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
def sendreq(self, *args, **kwargs):
|
||||
def sendreq(self, *args, **ka):
|
||||
tid = get_tid()
|
||||
if self.pw:
|
||||
ck = "cppwd=" + self.pw
|
||||
try:
|
||||
ka["headers"]["Cookie"] = ck
|
||||
except:
|
||||
ka["headers"] = {"Cookie": ck}
|
||||
try:
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
c.request(*list(args), **ka)
|
||||
return c.getresponse()
|
||||
except:
|
||||
self.closeconn(tid)
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
c.request(*list(args), **ka)
|
||||
return c.getresponse()
|
||||
|
||||
def listdir(self, path):
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
|
||||
if bad_good:
|
||||
path = dewin(path)
|
||||
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots&ls"
|
||||
r = self.sendreq("GET", web_path)
|
||||
if r.status != 200:
|
||||
self.closeconn()
|
||||
@@ -182,9 +234,12 @@ class Gateway(object):
|
||||
)
|
||||
)
|
||||
|
||||
return self.parse_html(r)
|
||||
return self.parse_jls(r)
|
||||
|
||||
def download_file_range(self, path, ofs1, ofs2):
|
||||
if bad_good:
|
||||
path = dewin(path)
|
||||
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
|
||||
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
|
||||
log("downloading {}".format(hdr_range))
|
||||
@@ -200,40 +255,27 @@ class Gateway(object):
|
||||
|
||||
return r.read()
|
||||
|
||||
def parse_html(self, datasrc):
|
||||
ret = []
|
||||
remainder = b""
|
||||
ptn = re.compile(
|
||||
r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
|
||||
)
|
||||
|
||||
def parse_jls(self, datasrc):
|
||||
rsp = b""
|
||||
while True:
|
||||
buf = remainder + datasrc.read(4096)
|
||||
# print('[{}]'.format(buf.decode('utf-8')))
|
||||
buf = datasrc.read(1024 * 32)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
remainder = b""
|
||||
endpos = buf.rfind(b"\n")
|
||||
if endpos >= 0:
|
||||
remainder = buf[endpos + 1 :]
|
||||
buf = buf[:endpos]
|
||||
rsp += buf
|
||||
|
||||
lines = buf.decode("utf-8").split("\n")
|
||||
for line in lines:
|
||||
m = ptn.match(line)
|
||||
if not m:
|
||||
# print(line)
|
||||
continue
|
||||
rsp = json.loads(rsp.decode("utf-8"))
|
||||
ret = []
|
||||
for statfun, nodes in [
|
||||
[self.stat_dir, rsp["dirs"]],
|
||||
[self.stat_file, rsp["files"]],
|
||||
]:
|
||||
for n in nodes:
|
||||
fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8")
|
||||
if bad_good:
|
||||
fname = enwin(fname)
|
||||
|
||||
ftype, fname, fsize, fdate = m.groups()
|
||||
fname = html_dec(fname)
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
sz = int(fsize)
|
||||
if ftype == "-":
|
||||
ret.append([fname, self.stat_file(ts, sz), 0])
|
||||
else:
|
||||
ret.append([fname, self.stat_dir(ts, sz), 0])
|
||||
ret.append([fname, statfun(n["ts"], n["sz"]), 0])
|
||||
|
||||
return ret
|
||||
|
||||
@@ -262,6 +304,7 @@ class CPPF(Fuse):
|
||||
Fuse.__init__(self, *args, **kwargs)
|
||||
|
||||
self.url = None
|
||||
self.pw = None
|
||||
|
||||
self.dircache = []
|
||||
self.dircache_mtx = threading.Lock()
|
||||
@@ -271,7 +314,7 @@ class CPPF(Fuse):
|
||||
|
||||
def init2(self):
|
||||
# TODO figure out how python-fuse wanted this to go
|
||||
self.gw = Gateway(self.url) # .decode('utf-8'))
|
||||
self.gw = Gateway(self.url, self.pw) # .decode('utf-8'))
|
||||
info("up")
|
||||
|
||||
def clean_dircache(self):
|
||||
@@ -536,6 +579,8 @@ class CPPF(Fuse):
|
||||
|
||||
def getattr(self, path):
|
||||
log("getattr [{}]".format(path))
|
||||
if WINDOWS:
|
||||
path = enwin(path) # windows occasionally decodes f0xx to xx
|
||||
|
||||
path = path.strip("/")
|
||||
try:
|
||||
@@ -568,9 +613,25 @@ class CPPF(Fuse):
|
||||
|
||||
def main():
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
register_wtf8()
|
||||
if WINDOWS:
|
||||
os.system("rem")
|
||||
|
||||
for ch in '<>:"\\|?*':
|
||||
# microsoft maps illegal characters to f0xx
|
||||
# (e000 to f8ff is basic-plane private-use)
|
||||
bad_good[ch] = chr(ord(ch) + 0xF000)
|
||||
|
||||
for n in range(0, 0x100):
|
||||
# map surrogateescape to another private-use area
|
||||
bad_good[chr(n + 0xDC00)] = chr(n + 0xF100)
|
||||
|
||||
for k, v in bad_good.items():
|
||||
good_bad[v] = k
|
||||
|
||||
server = CPPF()
|
||||
server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
|
||||
server.parser.add_option(mountopt="pw", metavar="PASSWORD", default=None)
|
||||
server.parse(values=server, errex=1)
|
||||
if not server.url or not str(server.url).startswith("http"):
|
||||
print("\nerror:")
|
||||
@@ -578,7 +639,7 @@ def main():
|
||||
print(" need argument: mount-path")
|
||||
print("example:")
|
||||
print(
|
||||
" ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas"
|
||||
" ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
111
bin/dbtool.py
111
bin/dbtool.py
@@ -8,7 +8,10 @@ import sqlite3
|
||||
import argparse
|
||||
|
||||
DB_VER1 = 3
|
||||
DB_VER2 = 4
|
||||
DB_VER2 = 5
|
||||
|
||||
BY_PATH = None
|
||||
NC = None
|
||||
|
||||
|
||||
def die(msg):
|
||||
@@ -57,8 +60,13 @@ def compare(n1, d1, n2, d2, verbose):
|
||||
if rd.split("/", 1)[0] == ".hist":
|
||||
continue
|
||||
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
hit = d2.execute(q, (rd, fn)).fetchone()
|
||||
if BY_PATH:
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
hit = d2.execute(q, (rd, fn)).fetchone()
|
||||
else:
|
||||
q = "select w from up where substr(w,1,16) = ? and +w = ?"
|
||||
hit = d2.execute(q, (w1[:16], w1)).fetchone()
|
||||
|
||||
if not hit:
|
||||
miss += 1
|
||||
if verbose:
|
||||
@@ -70,27 +78,32 @@ def compare(n1, d1, n2, d2, verbose):
|
||||
n = 0
|
||||
miss = {}
|
||||
nmiss = 0
|
||||
for w1, k, v in d1.execute("select * from mt"):
|
||||
for w1s, k, v in d1.execute("select * from mt"):
|
||||
|
||||
n += 1
|
||||
if n % 100_000 == 0:
|
||||
m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m"
|
||||
print(m)
|
||||
|
||||
q = "select rd, fn from up where substr(w,1,16) = ?"
|
||||
rd, fn = d1.execute(q, (w1,)).fetchone()
|
||||
q = "select w, rd, fn from up where substr(w,1,16) = ?"
|
||||
w1, rd, fn = d1.execute(q, (w1s,)).fetchone()
|
||||
if rd.split("/", 1)[0] == ".hist":
|
||||
continue
|
||||
|
||||
q = "select substr(w,1,16) from up where rd = ? and fn = ?"
|
||||
w2 = d2.execute(q, (rd, fn)).fetchone()
|
||||
if BY_PATH:
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
w2 = d2.execute(q, (rd, fn)).fetchone()
|
||||
else:
|
||||
q = "select w from up where substr(w,1,16) = ? and +w = ?"
|
||||
w2 = d2.execute(q, (w1s, w1)).fetchone()
|
||||
|
||||
if w2:
|
||||
w2 = w2[0]
|
||||
|
||||
v2 = None
|
||||
if w2:
|
||||
v2 = d2.execute(
|
||||
"select v from mt where w = ? and +k = ?", (w2, k)
|
||||
"select v from mt where w = ? and +k = ?", (w2[:16], k)
|
||||
).fetchone()
|
||||
if v2:
|
||||
v2 = v2[0]
|
||||
@@ -124,7 +137,7 @@ def compare(n1, d1, n2, d2, verbose):
|
||||
|
||||
for k, v in sorted(miss.items()):
|
||||
if v:
|
||||
print(f"{n1} has {v:6} more {k:<6} tags than {n2}")
|
||||
print(f"{n1} has {v:7} more {k:<7} tags than {n2}")
|
||||
|
||||
print(f"in total, {nmiss} missing tags in {n2}\n")
|
||||
|
||||
@@ -132,47 +145,75 @@ def compare(n1, d1, n2, d2, verbose):
|
||||
def copy_mtp(d1, d2, tag, rm):
|
||||
nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0]
|
||||
n = 0
|
||||
ndone = 0
|
||||
for w1, k, v in d1.execute("select * from mt where k = ?", (tag,)):
|
||||
ncopy = 0
|
||||
nskip = 0
|
||||
for w1s, k, v in d1.execute("select * from mt where k = ?", (tag,)):
|
||||
n += 1
|
||||
if n % 25_000 == 0:
|
||||
m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ndone} copied\033[0m"
|
||||
m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ncopy} copied, {nskip} skipped\033[0m"
|
||||
print(m)
|
||||
|
||||
q = "select rd, fn from up where substr(w,1,16) = ?"
|
||||
rd, fn = d1.execute(q, (w1,)).fetchone()
|
||||
q = "select w, rd, fn from up where substr(w,1,16) = ?"
|
||||
w1, rd, fn = d1.execute(q, (w1s,)).fetchone()
|
||||
if rd.split("/", 1)[0] == ".hist":
|
||||
continue
|
||||
|
||||
q = "select substr(w,1,16) from up where rd = ? and fn = ?"
|
||||
w2 = d2.execute(q, (rd, fn)).fetchone()
|
||||
if BY_PATH:
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
w2 = d2.execute(q, (rd, fn)).fetchone()
|
||||
else:
|
||||
q = "select w from up where substr(w,1,16) = ? and +w = ?"
|
||||
w2 = d2.execute(q, (w1s, w1)).fetchone()
|
||||
|
||||
if not w2:
|
||||
continue
|
||||
|
||||
w2 = w2[0]
|
||||
hit = d2.execute("select v from mt where w = ? and +k = ?", (w2, k)).fetchone()
|
||||
w2s = w2[0][:16]
|
||||
hit = d2.execute("select v from mt where w = ? and +k = ?", (w2s, k)).fetchone()
|
||||
if hit:
|
||||
hit = hit[0]
|
||||
|
||||
if hit != v:
|
||||
ndone += 1
|
||||
if hit is not None:
|
||||
d2.execute("delete from mt where w = ? and +k = ?", (w2, k))
|
||||
if NC and hit is not None:
|
||||
nskip += 1
|
||||
continue
|
||||
|
||||
d2.execute("insert into mt values (?,?,?)", (w2, k, v))
|
||||
ncopy += 1
|
||||
if hit is not None:
|
||||
d2.execute("delete from mt where w = ? and +k = ?", (w2s, k))
|
||||
|
||||
d2.execute("insert into mt values (?,?,?)", (w2s, k, v))
|
||||
if rm:
|
||||
d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w2,))
|
||||
d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w2s,))
|
||||
|
||||
d2.commit()
|
||||
print(f"copied {ndone} {tag} tags over")
|
||||
print(f"copied {ncopy} {tag} tags over, skipped {nskip}")
|
||||
|
||||
|
||||
def examples():
|
||||
print(
|
||||
"""
|
||||
# clearing the journal
|
||||
./dbtool.py up2k.db
|
||||
|
||||
# copy tags ".bpm" and "key" from old.db to up2k.db, and remove the mtp flag from matching files (so copyparty won't run any mtps on it)
|
||||
./dbtool.py -ls up2k.db
|
||||
./dbtool.py -src old.db up2k.db -cmp
|
||||
./dbtool.py -src old.v3 up2k.db -rm-mtp-flag -copy key
|
||||
./dbtool.py -src old.v3 up2k.db -rm-mtp-flag -copy .bpm -vac
|
||||
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
global NC, BY_PATH
|
||||
os.system("")
|
||||
print()
|
||||
|
||||
ap = argparse.ArgumentParser()
|
||||
ap.add_argument("db", help="database to work on")
|
||||
ap.add_argument("-h2", action="store_true", help="show examples")
|
||||
ap.add_argument("-src", metavar="DB", type=str, help="database to copy from")
|
||||
|
||||
ap2 = ap.add_argument_group("informational / read-only stuff")
|
||||
@@ -185,11 +226,29 @@ def main():
|
||||
ap2.add_argument(
|
||||
"-rm-mtp-flag",
|
||||
action="store_true",
|
||||
help="when an mtp tag is copied over, also mark that as done, so copyparty won't run mtp on it",
|
||||
help="when an mtp tag is copied over, also mark that file as done, so copyparty won't run any mtps on those files",
|
||||
)
|
||||
ap2.add_argument("-vac", action="store_true", help="optimize DB")
|
||||
|
||||
ap2 = ap.add_argument_group("behavior modifiers")
|
||||
ap2.add_argument(
|
||||
"-nc",
|
||||
action="store_true",
|
||||
help="no-clobber; don't replace/overwrite existing tags",
|
||||
)
|
||||
ap2.add_argument(
|
||||
"-by-path",
|
||||
action="store_true",
|
||||
help="match files based on location rather than warks (content-hash), use this if the databases have different wark salts",
|
||||
)
|
||||
|
||||
ar = ap.parse_args()
|
||||
if ar.h2:
|
||||
examples()
|
||||
return
|
||||
|
||||
NC = ar.nc
|
||||
BY_PATH = ar.by_path
|
||||
|
||||
for v in [ar.db, ar.src]:
|
||||
if v and not os.path.exists(v):
|
||||
|
||||
@@ -1,10 +1,23 @@
|
||||
standalone programs which take an audio file as argument
|
||||
|
||||
**NOTE:** these all require `-e2ts` to be functional, meaning you need to do at least one of these: `apt install ffmpeg` or `pip3 install mutagen`
|
||||
|
||||
some of these rely on libraries which are not MIT-compatible
|
||||
|
||||
* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
|
||||
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3
|
||||
|
||||
these invoke standalone programs which are GPL or similar, so is legally fine for most purposes:
|
||||
|
||||
* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
|
||||
* [image-noexif.py](./image-noexif.py) removes exif tags from images; uses exiftool (GPLv1 or artistic-license)
|
||||
|
||||
these do not have any problematic dependencies at all:
|
||||
|
||||
* [cksum.py](./cksum.py) computes various checksums
|
||||
* [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
|
||||
* [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty
|
||||
|
||||
|
||||
# dependencies
|
||||
|
||||
@@ -18,7 +31,10 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ
|
||||
|
||||
# usage from copyparty
|
||||
|
||||
`copyparty -e2dsa -e2ts -mtp key=f,audio-key.py -mtp .bpm=f,audio-bpm.py`
|
||||
`copyparty -e2dsa -e2ts` followed by any combination of these:
|
||||
* `-mtp key=f,audio-key.py`
|
||||
* `-mtp .bpm=f,audio-bpm.py`
|
||||
* `-mtp ahash,vhash=f,media-hash.py`
|
||||
|
||||
* `f,` makes the detected value replace any existing values
|
||||
* the `.` in `.bpm` indicates numeric value
|
||||
@@ -26,9 +42,12 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ
|
||||
* `mtp` modules will not run if a file has existing tags in the db, so clear out the tags with `-e2tsr` the first time you launch with new `mtp` options
|
||||
|
||||
|
||||
## usage with volume-flags
|
||||
## usage with volflags
|
||||
|
||||
instead of affecting all volumes, you can set the options for just one volume like so:
|
||||
```
|
||||
copyparty -v /mnt/nas/music:/music:r:cmtp=key=f,audio-key.py:cmtp=.bpm=f,audio-bpm.py:ce2dsa:ce2ts
|
||||
```
|
||||
|
||||
`copyparty -v /mnt/nas/music:/music:r:c,e2dsa:c,e2ts` immediately followed by any combination of these:
|
||||
|
||||
* `:c,mtp=key=f,audio-key.py`
|
||||
* `:c,mtp=.bpm=f,audio-bpm.py`
|
||||
* `:c,mtp=ahash,vhash=f,media-hash.py`
|
||||
|
||||
@@ -19,17 +19,18 @@ dep: ffmpeg
|
||||
def det(tf):
|
||||
# fmt: off
|
||||
sp.check_call([
|
||||
"ffmpeg",
|
||||
"-nostdin",
|
||||
"-hide_banner",
|
||||
"-v", "fatal",
|
||||
"-ss", "13",
|
||||
"-y", "-i", fsenc(sys.argv[1]),
|
||||
"-ac", "1",
|
||||
"-ar", "22050",
|
||||
"-t", "300",
|
||||
"-f", "f32le",
|
||||
tf
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-hide_banner",
|
||||
b"-v", b"fatal",
|
||||
b"-ss", b"13",
|
||||
b"-y", b"-i", fsenc(sys.argv[1]),
|
||||
b"-map", b"0:a:0",
|
||||
b"-ac", b"1",
|
||||
b"-ar", b"22050",
|
||||
b"-t", b"300",
|
||||
b"-f", b"f32le",
|
||||
fsenc(tf)
|
||||
])
|
||||
# fmt: on
|
||||
|
||||
|
||||
@@ -23,14 +23,15 @@ dep: ffmpeg
|
||||
def det(tf):
|
||||
# fmt: off
|
||||
sp.check_call([
|
||||
"ffmpeg",
|
||||
"-nostdin",
|
||||
"-hide_banner",
|
||||
"-v", "fatal",
|
||||
"-y", "-i", fsenc(sys.argv[1]),
|
||||
"-t", "300",
|
||||
"-sample_fmt", "s16",
|
||||
tf
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-hide_banner",
|
||||
b"-v", b"fatal",
|
||||
b"-y", b"-i", fsenc(sys.argv[1]),
|
||||
b"-map", b"0:a:0",
|
||||
b"-t", b"300",
|
||||
b"-sample_fmt", b"s16",
|
||||
fsenc(tf)
|
||||
])
|
||||
# fmt: on
|
||||
|
||||
|
||||
89
bin/mtag/cksum.py
Executable file
89
bin/mtag/cksum.py
Executable file
@@ -0,0 +1,89 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import json
|
||||
import zlib
|
||||
import struct
|
||||
import base64
|
||||
import hashlib
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p
|
||||
|
||||
|
||||
"""
|
||||
calculates various checksums for uploads,
|
||||
usage: -mtp crc32,md5,sha1,sha256b=ad,bin/mtag/cksum.py
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
config = "crc32 md5 md5b sha1 sha1b sha256 sha256b sha512/240 sha512b/240"
|
||||
# b suffix = base64 encoded
|
||||
# slash = truncate to n bits
|
||||
|
||||
known = {
|
||||
"md5": hashlib.md5,
|
||||
"sha1": hashlib.sha1,
|
||||
"sha256": hashlib.sha256,
|
||||
"sha512": hashlib.sha512,
|
||||
}
|
||||
config = config.split()
|
||||
hashers = {
|
||||
k: v()
|
||||
for k, v in known.items()
|
||||
if k in [x.split("/")[0].rstrip("b") for x in known]
|
||||
}
|
||||
crc32 = 0 if "crc32" in config else None
|
||||
|
||||
with open(fsenc(sys.argv[1]), "rb", 512 * 1024) as f:
|
||||
while True:
|
||||
buf = f.read(64 * 1024)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
for x in hashers.values():
|
||||
x.update(buf)
|
||||
|
||||
if crc32 is not None:
|
||||
crc32 = zlib.crc32(buf, crc32)
|
||||
|
||||
ret = {}
|
||||
for s in config:
|
||||
alg = s.split("/")[0]
|
||||
b64 = alg.endswith("b")
|
||||
alg = alg.rstrip("b")
|
||||
if alg in hashers:
|
||||
v = hashers[alg].digest()
|
||||
elif alg == "crc32":
|
||||
v = crc32
|
||||
if v < 0:
|
||||
v &= 2 ** 32 - 1
|
||||
v = struct.pack(">L", v)
|
||||
else:
|
||||
raise Exception("what is {}".format(s))
|
||||
|
||||
if "/" in s:
|
||||
v = v[: int(int(s.split("/")[1]) / 8)]
|
||||
|
||||
if b64:
|
||||
v = base64.b64encode(v).decode("ascii").rstrip("=")
|
||||
else:
|
||||
try:
|
||||
v = v.hex()
|
||||
except:
|
||||
import binascii
|
||||
|
||||
v = binascii.hexlify(v)
|
||||
|
||||
ret[s] = v
|
||||
|
||||
print(json.dumps(ret, indent=4))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
61
bin/mtag/guestbook-read.py
Executable file
61
bin/mtag/guestbook-read.py
Executable file
@@ -0,0 +1,61 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
fetch latest msg from guestbook and return as tag
|
||||
|
||||
example copyparty config to use this:
|
||||
--urlform save,get -vsrv/hello:hello:w:c,e2ts,mtp=guestbook=t10,ad,p,bin/mtag/guestbook-read.py:mte=+guestbook
|
||||
|
||||
explained:
|
||||
for realpath srv/hello (served at /hello), write-only for eveyrone,
|
||||
enable file analysis on upload (e2ts),
|
||||
use mtp plugin "bin/mtag/guestbook-read.py" to provide metadata tag "guestbook",
|
||||
do this on all uploads regardless of extension,
|
||||
t10 = 10 seconds timeout for each dwonload,
|
||||
ad = parse file regardless if FFmpeg thinks it is audio or not
|
||||
p = request upload info as json on stdin (need ip)
|
||||
mte=+guestbook enabled indexing of that tag for this volume
|
||||
|
||||
PS: this requires e2ts to be functional,
|
||||
meaning you need to do at least one of these:
|
||||
* apt install ffmpeg
|
||||
* pip3 install mutagen
|
||||
"""
|
||||
|
||||
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
import sys
|
||||
|
||||
|
||||
# set 0 to allow infinite msgs from one IP,
|
||||
# other values delete older messages to make space,
|
||||
# so 1 only keeps latest msg
|
||||
NUM_MSGS_TO_KEEP = 1
|
||||
|
||||
|
||||
def main():
|
||||
fp = os.path.abspath(sys.argv[1])
|
||||
fdir = os.path.dirname(fp)
|
||||
|
||||
zb = sys.stdin.buffer.read()
|
||||
zs = zb.decode("utf-8", "replace")
|
||||
md = json.loads(zs)
|
||||
|
||||
ip = md["up_ip"]
|
||||
|
||||
# can put the database inside `fdir` if you'd like,
|
||||
# by default it saves to PWD:
|
||||
# os.chdir(fdir)
|
||||
|
||||
db = sqlite3.connect("guestbook.db3")
|
||||
with db:
|
||||
t = "select msg from gb where ip = ? order by ts desc"
|
||||
r = db.execute(t, (ip,)).fetchone()
|
||||
if r:
|
||||
print(r[0])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
111
bin/mtag/guestbook.py
Normal file
111
bin/mtag/guestbook.py
Normal file
@@ -0,0 +1,111 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
store messages from users in an sqlite database
|
||||
which can be read from another mtp for example
|
||||
|
||||
takes input from application/x-www-form-urlencoded POSTs,
|
||||
for example using the message/pager function on the website
|
||||
|
||||
example copyparty config to use this:
|
||||
--urlform save,get -vsrv/hello:hello:w:c,e2ts,mtp=xgb=ebin,t10,ad,p,bin/mtag/guestbook.py:mte=+xgb
|
||||
|
||||
explained:
|
||||
for realpath srv/hello (served at /hello),write-only for eveyrone,
|
||||
enable file analysis on upload (e2ts),
|
||||
use mtp plugin "bin/mtag/guestbook.py" to provide metadata tag "xgb",
|
||||
do this on all uploads with the file extension "bin",
|
||||
t300 = 300 seconds timeout for each dwonload,
|
||||
ad = parse file regardless if FFmpeg thinks it is audio or not
|
||||
p = request upload info as json on stdin
|
||||
mte=+xgb enabled indexing of that tag for this volume
|
||||
|
||||
PS: this requires e2ts to be functional,
|
||||
meaning you need to do at least one of these:
|
||||
* apt install ffmpeg
|
||||
* pip3 install mutagen
|
||||
"""
|
||||
|
||||
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
import sys
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
# set 0 to allow infinite msgs from one IP,
|
||||
# other values delete older messages to make space,
|
||||
# so 1 only keeps latest msg
|
||||
NUM_MSGS_TO_KEEP = 1
|
||||
|
||||
|
||||
def main():
|
||||
fp = os.path.abspath(sys.argv[1])
|
||||
fdir = os.path.dirname(fp)
|
||||
fname = os.path.basename(fp)
|
||||
if not fname.startswith("put-") or not fname.endswith(".bin"):
|
||||
raise Exception("not a post file")
|
||||
|
||||
zb = sys.stdin.buffer.read()
|
||||
zs = zb.decode("utf-8", "replace")
|
||||
md = json.loads(zs)
|
||||
|
||||
buf = b""
|
||||
with open(fp, "rb") as f:
|
||||
while True:
|
||||
b = f.read(4096)
|
||||
buf += b
|
||||
if len(buf) > 4096:
|
||||
raise Exception("too big")
|
||||
|
||||
if not b:
|
||||
break
|
||||
|
||||
if not buf:
|
||||
raise Exception("file is empty")
|
||||
|
||||
buf = unquote(buf.replace(b"+", b" "))
|
||||
txt = buf.decode("utf-8")
|
||||
|
||||
if not txt.startswith("msg="):
|
||||
raise Exception("does not start with msg=")
|
||||
|
||||
ip = md["up_ip"]
|
||||
ts = md["up_at"]
|
||||
txt = txt[4:]
|
||||
|
||||
# can put the database inside `fdir` if you'd like,
|
||||
# by default it saves to PWD:
|
||||
# os.chdir(fdir)
|
||||
|
||||
db = sqlite3.connect("guestbook.db3")
|
||||
try:
|
||||
db.execute("select 1 from gb").fetchone()
|
||||
except:
|
||||
with db:
|
||||
db.execute("create table gb (ip text, ts real, msg text)")
|
||||
db.execute("create index gb_ip on gb(ip)")
|
||||
|
||||
with db:
|
||||
if NUM_MSGS_TO_KEEP == 1:
|
||||
t = "delete from gb where ip = ?"
|
||||
db.execute(t, (ip,))
|
||||
|
||||
t = "insert into gb values (?,?,?)"
|
||||
db.execute(t, (ip, ts, txt))
|
||||
|
||||
if NUM_MSGS_TO_KEEP > 1:
|
||||
t = "select ts from gb where ip = ? order by ts desc"
|
||||
hits = db.execute(t, (ip,)).fetchall()
|
||||
|
||||
if len(hits) > NUM_MSGS_TO_KEEP:
|
||||
lim = hits[NUM_MSGS_TO_KEEP][0]
|
||||
t = "delete from gb where ip = ? and ts <= ?"
|
||||
db.execute(t, (ip, lim))
|
||||
|
||||
print(txt)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
95
bin/mtag/image-noexif.py
Normal file
95
bin/mtag/image-noexif.py
Normal file
@@ -0,0 +1,95 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
remove exif tags from uploaded images
|
||||
|
||||
dependencies:
|
||||
exiftool
|
||||
|
||||
about:
|
||||
creates a "noexif" subfolder and puts exif-stripped copies of each image there,
|
||||
the reason for the subfolder is to avoid issues with the up2k.db / deduplication:
|
||||
|
||||
if the original image is modified in-place, then copyparty will keep the original
|
||||
hash in up2k.db for a while (until the next volume rescan), so if the image is
|
||||
reuploaded after a rescan then the upload will be renamed and kept as a dupe
|
||||
|
||||
alternatively you could switch the logic around, making a copy of the original
|
||||
image into a subfolder named "exif" and modify the original in-place, but then
|
||||
up2k.db will be out of sync until the next rescan, so any additional uploads
|
||||
of the same image will get symlinked (deduplicated) to the modified copy
|
||||
instead of the original in "exif"
|
||||
|
||||
or maybe delete the original image after processing, that would kinda work too
|
||||
|
||||
example copyparty config to use this:
|
||||
-v/mnt/nas/pics:pics:rwmd,ed:c,e2ts,mte=+noexif:c,mtp=noexif=ejpg,ejpeg,ad,bin/mtag/image-noexif.py
|
||||
|
||||
explained:
|
||||
for realpath /mnt/nas/pics (served at /pics) with read-write-modify-delete for ed,
|
||||
enable file analysis on upload (e2ts),
|
||||
append "noexif" to the list of known tags (mtp),
|
||||
and use mtp plugin "bin/mtag/image-noexif.py" to provide that tag,
|
||||
do this on all uploads with the file extension "jpg" or "jpeg",
|
||||
ad = parse file regardless if FFmpeg thinks it is audio or not
|
||||
|
||||
PS: this requires e2ts to be functional,
|
||||
meaning you need to do at least one of these:
|
||||
* apt install ffmpeg
|
||||
* pip3 install mutagen
|
||||
and your python must have sqlite3 support compiled in
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import filecmp
|
||||
import subprocess as sp
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
def main():
|
||||
cwd, fn = os.path.split(sys.argv[1])
|
||||
if os.path.basename(cwd) == "noexif":
|
||||
return
|
||||
|
||||
os.chdir(cwd)
|
||||
f1 = fsenc(fn)
|
||||
f2 = os.path.join(b"noexif", f1)
|
||||
cmd = [
|
||||
b"exiftool",
|
||||
b"-exif:all=",
|
||||
b"-iptc:all=",
|
||||
b"-xmp:all=",
|
||||
b"-P",
|
||||
b"-o",
|
||||
b"noexif/",
|
||||
b"--",
|
||||
f1,
|
||||
]
|
||||
sp.check_output(cmd)
|
||||
if not os.path.exists(f2):
|
||||
print("failed")
|
||||
return
|
||||
|
||||
if filecmp.cmp(f1, f2, shallow=False):
|
||||
print("clean")
|
||||
else:
|
||||
print("exif")
|
||||
|
||||
# lastmod = os.path.getmtime(f1)
|
||||
# times = (int(time.time()), int(lastmod))
|
||||
# os.utime(f2, times)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except:
|
||||
pass
|
||||
@@ -4,7 +4,8 @@ set -e
|
||||
|
||||
# install dependencies for audio-*.py
|
||||
#
|
||||
# linux: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf
|
||||
# linux/alpine: requires gcc g++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-dev py3-{wheel,pip} py3-numpy{,-dev}
|
||||
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3,libsndfile1}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
|
||||
# win64: requires msys2-mingw64 environment
|
||||
# macos: requires macports
|
||||
#
|
||||
@@ -100,8 +101,11 @@ export -f dl_files
|
||||
|
||||
|
||||
github_tarball() {
|
||||
rm -rf g
|
||||
mkdir g
|
||||
cd g
|
||||
dl_text "$1" |
|
||||
tee json |
|
||||
tee ../json |
|
||||
(
|
||||
# prefer jq if available
|
||||
jq -r '.tarball_url' ||
|
||||
@@ -110,8 +114,11 @@ github_tarball() {
|
||||
awk -F\" '/"tarball_url": "/ {print$4}'
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
head -n 1 |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
mv * ../tgz
|
||||
cd ..
|
||||
}
|
||||
|
||||
|
||||
@@ -126,6 +133,7 @@ gitlab_tarball() {
|
||||
tr \" '\n' | grep -E '\.tar\.gz$' | head -n 1
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
head -n 1 |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
tee links |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
@@ -137,10 +145,17 @@ install_keyfinder() {
|
||||
# use msys2 in mingw-w64 mode
|
||||
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python}
|
||||
|
||||
github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest
|
||||
[ -e $HOME/pe/keyfinder ] && {
|
||||
echo found a keyfinder build in ~/pe, skipping
|
||||
return
|
||||
}
|
||||
|
||||
tar -xf mixxxdj-libkeyfinder-*
|
||||
rm -- *.tar.gz
|
||||
cd "$td"
|
||||
github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest
|
||||
ls -al
|
||||
|
||||
tar -xf tgz
|
||||
rm tgz
|
||||
cd mixxxdj-libkeyfinder*
|
||||
|
||||
h="$HOME"
|
||||
@@ -207,6 +222,22 @@ install_vamp() {
|
||||
|
||||
$pybin -m pip install --user vamp
|
||||
|
||||
cd "$td"
|
||||
echo '#include <vamp-sdk/Plugin.h>' | gcc -x c -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
|
||||
printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2588/vamp-plugin-sdk-2.9.0.tar.gz)
|
||||
sha512sum -c <(
|
||||
echo "7ef7f837d19a08048b059e0da408373a7964ced452b290fae40b85d6d70ca9000bcfb3302cd0b4dc76cf2a848528456f78c1ce1ee0c402228d812bd347b6983b -"
|
||||
) <vamp-plugin-sdk-2.9.0.tar.gz
|
||||
tar -xf vamp-plugin-sdk-2.9.0.tar.gz
|
||||
rm -- *.tar.gz
|
||||
ls -al
|
||||
cd vamp-plugin-sdk-*
|
||||
./configure --prefix=$HOME/pe/vamp-sdk
|
||||
make -j1 install
|
||||
}
|
||||
|
||||
cd "$td"
|
||||
have_beatroot || {
|
||||
printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n'
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/885/beatroot-vamp-v1.0.tar.gz)
|
||||
@@ -214,8 +245,11 @@ install_vamp() {
|
||||
echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -"
|
||||
) <beatroot-vamp-v1.0.tar.gz
|
||||
tar -xf beatroot-vamp-v1.0.tar.gz
|
||||
rm -- *.tar.gz
|
||||
cd beatroot-vamp-v1.0
|
||||
make -f Makefile.linux -j4
|
||||
[ -e ~/pe/vamp-sdk ] &&
|
||||
sed -ri 's`^(CFLAGS :=.*)`\1 -I'$HOME'/pe/vamp-sdk/include`' Makefile.linux
|
||||
make -f Makefile.linux -j4 LDFLAGS=-L$HOME/pe/vamp-sdk/lib
|
||||
# /home/ed/vamp /home/ed/.vamp /usr/local/lib/vamp
|
||||
mkdir ~/vamp
|
||||
cp -pv beatroot-vamp.* ~/vamp/
|
||||
@@ -229,6 +263,7 @@ install_vamp() {
|
||||
|
||||
# not in use because it kinda segfaults, also no windows support
|
||||
install_soundtouch() {
|
||||
cd "$td"
|
||||
gitlab_tarball https://gitlab.com/api/v4/projects/soundtouch%2Fsoundtouch/releases
|
||||
|
||||
tar -xvf soundtouch-*
|
||||
|
||||
73
bin/mtag/media-hash.py
Normal file
73
bin/mtag/media-hash.py
Normal file
@@ -0,0 +1,73 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import re
|
||||
import sys
|
||||
import json
|
||||
import time
|
||||
import base64
|
||||
import hashlib
|
||||
import subprocess as sp
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
"""
|
||||
dep: ffmpeg
|
||||
"""
|
||||
|
||||
|
||||
def det():
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-hide_banner",
|
||||
b"-v", b"fatal",
|
||||
b"-i", fsenc(sys.argv[1]),
|
||||
b"-f", b"framemd5",
|
||||
b"-"
|
||||
]
|
||||
# fmt: on
|
||||
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE)
|
||||
# ps = io.TextIOWrapper(p.stdout, encoding="utf-8")
|
||||
ps = p.stdout
|
||||
|
||||
chans = {}
|
||||
for ln in ps:
|
||||
if ln.startswith(b"#stream#"):
|
||||
break
|
||||
|
||||
m = re.match(r"^#media_type ([0-9]): ([a-zA-Z])", ln.decode("utf-8"))
|
||||
if m:
|
||||
chans[m.group(1)] = m.group(2)
|
||||
|
||||
hashers = [hashlib.sha512(), hashlib.sha512()]
|
||||
for ln in ps:
|
||||
n = int(ln[:1])
|
||||
v = ln.rsplit(b",", 1)[-1].strip()
|
||||
hashers[n].update(v)
|
||||
|
||||
r = {}
|
||||
for k, v in chans.items():
|
||||
dg = hashers[int(k)].digest()[:12]
|
||||
dg = base64.urlsafe_b64encode(dg).decode("ascii")
|
||||
r[v[0].lower() + "hash"] = dg
|
||||
|
||||
print(json.dumps(r, indent=4))
|
||||
|
||||
|
||||
def main():
|
||||
try:
|
||||
det()
|
||||
except:
|
||||
pass # mute
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
38
bin/mtag/mousepad.py
Normal file
38
bin/mtag/mousepad.py
Normal file
@@ -0,0 +1,38 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
|
||||
|
||||
"""
|
||||
mtp test -- opens a texteditor
|
||||
|
||||
usage:
|
||||
-vsrv/v1:v1:r:c,mte=+x1:c,mtp=x1=ad,p,bin/mtag/mousepad.py
|
||||
|
||||
explained:
|
||||
c,mte: list of tags to index in this volume
|
||||
c,mtp: add new tag provider
|
||||
x1: dummy tag to provide
|
||||
ad: dontcare if audio or not
|
||||
p: priority 1 (run after initial tag-scan with ffprobe or mutagen)
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
env = os.environ.copy()
|
||||
env["DISPLAY"] = ":0.0"
|
||||
|
||||
if False:
|
||||
# open the uploaded file
|
||||
fp = sys.argv[-1]
|
||||
else:
|
||||
# display stdin contents (`oth_tags`)
|
||||
fp = "/dev/stdin"
|
||||
|
||||
p = sp.Popen(["/usr/bin/mousepad", fp])
|
||||
p.communicate()
|
||||
|
||||
|
||||
main()
|
||||
76
bin/mtag/rclone-upload.py
Normal file
76
bin/mtag/rclone-upload.py
Normal file
@@ -0,0 +1,76 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import json
|
||||
import os
|
||||
import subprocess as sp
|
||||
import sys
|
||||
import time
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
_ = r"""
|
||||
first checks the tag "vidchk" which must be "ok" to continue,
|
||||
then uploads all files to some cloud storage (RCLONE_REMOTE)
|
||||
and DELETES THE ORIGINAL FILES if rclone returns 0 ("success")
|
||||
|
||||
deps:
|
||||
rclone
|
||||
|
||||
usage:
|
||||
-mtp x2=t43200,ay,p2,bin/mtag/rclone-upload.py
|
||||
|
||||
explained:
|
||||
t43200: timeout 12h
|
||||
ay: only process files which contain audio (including video with audio)
|
||||
p2: set priority 2 (after vidchk's suggested priority of 1),
|
||||
so the output of vidchk will be passed in here
|
||||
|
||||
complete usage example as vflags along with vidchk:
|
||||
-vsrv/vidchk:vidchk:r:rw,ed:c,e2dsa,e2ts,mtp=vidchk=t600,p,bin/mtag/vidchk.py:c,mtp=rupload=t43200,ay,p2,bin/mtag/rclone-upload.py:c,mte=+vidchk,rupload
|
||||
|
||||
setup: see https://rclone.org/drive/
|
||||
|
||||
if you wanna use this script standalone / separately from copyparty,
|
||||
either set CONDITIONAL_UPLOAD False or provide the following stdin:
|
||||
{"vidchk":"ok"}
|
||||
"""
|
||||
|
||||
|
||||
RCLONE_REMOTE = "notmybox"
|
||||
CONDITIONAL_UPLOAD = True
|
||||
|
||||
|
||||
def main():
|
||||
fp = sys.argv[1]
|
||||
if CONDITIONAL_UPLOAD:
|
||||
zb = sys.stdin.buffer.read()
|
||||
zs = zb.decode("utf-8", "replace")
|
||||
md = json.loads(zs)
|
||||
|
||||
chk = md.get("vidchk", None)
|
||||
if chk != "ok":
|
||||
print(f"vidchk={chk}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
dst = f"{RCLONE_REMOTE}:".encode("utf-8")
|
||||
cmd = [b"rclone", b"copy", b"--", fsenc(fp), dst]
|
||||
|
||||
t0 = time.time()
|
||||
try:
|
||||
sp.check_call(cmd)
|
||||
except:
|
||||
print("rclone failed", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
print(f"{time.time() - t0:.1f} sec")
|
||||
os.unlink(fsenc(fp))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
21
bin/mtag/res/twitter-unmute.user.js
Normal file
21
bin/mtag/res/twitter-unmute.user.js
Normal file
@@ -0,0 +1,21 @@
|
||||
// ==UserScript==
|
||||
// @name twitter-unmute
|
||||
// @namespace http://ocv.me/
|
||||
// @version 0.1
|
||||
// @description memes
|
||||
// @author ed <irc.rizon.net>
|
||||
// @match https://twitter.com/*
|
||||
// @icon https://www.google.com/s2/favicons?domain=twitter.com
|
||||
// @grant GM_addStyle
|
||||
// ==/UserScript==
|
||||
|
||||
function grunnur() {
|
||||
setInterval(function () {
|
||||
//document.querySelector('div[aria-label="Unmute"]').click();
|
||||
document.querySelector('video').muted = false;
|
||||
}, 200);
|
||||
}
|
||||
|
||||
var scr = document.createElement('script');
|
||||
scr.textContent = '(' + grunnur.toString() + ')();';
|
||||
(document.head || document.getElementsByTagName('head')[0]).appendChild(scr);
|
||||
39
bin/mtag/res/yt-ipr.conf
Normal file
39
bin/mtag/res/yt-ipr.conf
Normal file
@@ -0,0 +1,39 @@
|
||||
# example config file to use copyparty as a youtube manifest collector,
|
||||
# use with copyparty like: python copyparty.py -c yt-ipr.conf
|
||||
#
|
||||
# see docs/example.conf for a better explanation of the syntax, but
|
||||
# newlines are block separators, so adding blank lines inside a volume definition is bad
|
||||
# (use comments as separators instead)
|
||||
|
||||
|
||||
# create user ed, password wark
|
||||
u ed:wark
|
||||
|
||||
|
||||
# create a volume at /ytm which stores files at ./srv/ytm
|
||||
./srv/ytm
|
||||
/ytm
|
||||
# write-only, but read-write for user ed
|
||||
w
|
||||
rw ed
|
||||
# rescan the volume on startup
|
||||
c e2dsa
|
||||
# collect tags from all new files since last scan
|
||||
c e2ts
|
||||
# optionally enable compression to make the files 50% smaller
|
||||
c pk
|
||||
# only allow uploads which are between 16k and 1m large
|
||||
c sz=16k-1m
|
||||
# allow up to 10 uploads over 5 minutes from each ip
|
||||
c maxn=10,300
|
||||
# move uploads into subfolders: YEAR-MONTH / DAY-HOUR / <upload>
|
||||
c rotf=%Y-%m/%d-%H
|
||||
# delete uploads when they are 24 hours old
|
||||
c lifetime=86400
|
||||
# add the parser and tell copyparty what tags it can expect from it
|
||||
c mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
|
||||
# decide which tags we want to index and in what order
|
||||
c mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires
|
||||
|
||||
|
||||
# create any other volumes you'd like down here, or merge this with an existing config file
|
||||
47
bin/mtag/res/yt-ipr.user.js
Normal file
47
bin/mtag/res/yt-ipr.user.js
Normal file
@@ -0,0 +1,47 @@
|
||||
// ==UserScript==
|
||||
// @name youtube-playerdata-hub
|
||||
// @match https://youtube.com/*
|
||||
// @match https://*.youtube.com/*
|
||||
// @version 1.0
|
||||
// @grant GM_addStyle
|
||||
// ==/UserScript==
|
||||
|
||||
function main() {
|
||||
var server = 'https://127.0.0.1:3923/ytm?pw=wark',
|
||||
interval = 60; // sec
|
||||
|
||||
var sent = {};
|
||||
function send(txt, mf_url, desc) {
|
||||
if (sent[mf_url])
|
||||
return;
|
||||
|
||||
fetch(server + '&_=' + Date.now(), { method: "PUT", body: txt });
|
||||
console.log('[yt-pdh] yeet %d bytes, %s', txt.length, desc);
|
||||
sent[mf_url] = 1;
|
||||
}
|
||||
|
||||
function collect() {
|
||||
try {
|
||||
var pd = document.querySelector('ytd-watch-flexy');
|
||||
if (!pd)
|
||||
return console.log('[yt-pdh] no video found');
|
||||
|
||||
pd = pd.playerData;
|
||||
var mu = pd.streamingData.dashManifestUrl || pd.streamingData.hlsManifestUrl;
|
||||
if (!mu || !mu.length)
|
||||
return console.log('[yt-pdh] no manifest found');
|
||||
|
||||
var desc = pd.videoDetails.videoId + ', ' + pd.videoDetails.title;
|
||||
send(JSON.stringify(pd), mu, desc);
|
||||
}
|
||||
catch (ex) {
|
||||
console.log("[yt-pdh]", ex);
|
||||
}
|
||||
}
|
||||
setInterval(collect, interval * 1000);
|
||||
}
|
||||
|
||||
var scr = document.createElement('script');
|
||||
scr.textContent = '(' + main.toString() + ')();';
|
||||
(document.head || document.getElementsByTagName('head')[0]).appendChild(scr);
|
||||
console.log('[yt-pdh] a');
|
||||
139
bin/mtag/very-bad-idea.py
Executable file
139
bin/mtag/very-bad-idea.py
Executable file
@@ -0,0 +1,139 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
use copyparty as a chromecast replacement:
|
||||
* post a URL and it will open in the default browser
|
||||
* upload a file and it will open in the default application
|
||||
* the `key` command simulates keyboard input
|
||||
* the `x` command executes other xdotool commands
|
||||
* the `c` command executes arbitrary unix commands
|
||||
|
||||
the android app makes it a breeze to post pics and links:
|
||||
https://github.com/9001/party-up/releases
|
||||
(iOS devices have to rely on the web-UI)
|
||||
|
||||
goes without saying, but this is HELLA DANGEROUS,
|
||||
GIVES RCE TO ANYONE WHO HAVE UPLOAD PERMISSIONS
|
||||
|
||||
example copyparty config to use this:
|
||||
--urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,c0,bin/mtag/very-bad-idea.py
|
||||
|
||||
recommended deps:
|
||||
apt install xdotool libnotify-bin
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js
|
||||
|
||||
and you probably want `twitter-unmute.user.js` from the res folder
|
||||
|
||||
|
||||
-----------------------------------------------------------------------
|
||||
-- startup script:
|
||||
-----------------------------------------------------------------------
|
||||
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# create qr code
|
||||
ip=$(ip r | awk '/^default/{print$(NF-2)}'); echo http://$ip:3923/ | qrencode -o - -s 4 >/dev/shm/cpp-qr.png
|
||||
/usr/bin/feh -x /dev/shm/cpp-qr.png &
|
||||
|
||||
# reposition and make topmost (with janky raspbian support)
|
||||
( sleep 0.5
|
||||
xdotool search --name cpp-qr.png windowactivate --sync windowmove 1780 0
|
||||
wmctrl -r :ACTIVE: -b toggle,above || true
|
||||
|
||||
ps aux | grep -E 'sleep[ ]7\.27' ||
|
||||
while true; do
|
||||
w=$(xdotool getactivewindow)
|
||||
xdotool search --name cpp-qr.png windowactivate windowraise windowfocus
|
||||
xdotool windowactivate $w
|
||||
xdotool windowfocus $w
|
||||
sleep 7.27 || break
|
||||
done &
|
||||
xeyes # distraction window to prevent ^w from closing the qr-code
|
||||
) &
|
||||
|
||||
# bail if copyparty is already running
|
||||
ps aux | grep -E '[3] copy[p]arty' && exit 0
|
||||
|
||||
# dumb chrome wrapper to allow autoplay
|
||||
cat >/usr/local/bin/chromium-browser <<'EOF'
|
||||
#!/bin/bash
|
||||
set -e
|
||||
/usr/bin/chromium-browser --autoplay-policy=no-user-gesture-required "$@"
|
||||
EOF
|
||||
chmod 755 /usr/local/bin/chromium-browser
|
||||
|
||||
# start the server (note: replace `-v.::rw:` with `-v.::w:` to disallow retrieving uploaded stuff)
|
||||
cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,very-bad-idea.py
|
||||
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import subprocess as sp
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
def main():
|
||||
fp = os.path.abspath(sys.argv[1])
|
||||
with open(fp, "rb") as f:
|
||||
txt = f.read(4096)
|
||||
|
||||
if txt.startswith(b"msg="):
|
||||
open_post(txt)
|
||||
else:
|
||||
open_url(fp)
|
||||
|
||||
|
||||
def open_post(txt):
|
||||
txt = unquote(txt.replace(b"+", b" ")).decode("utf-8")[4:]
|
||||
try:
|
||||
k, v = txt.split(" ", 1)
|
||||
except:
|
||||
open_url(txt)
|
||||
|
||||
if k == "key":
|
||||
sp.call(["xdotool", "key"] + v.split(" "))
|
||||
elif k == "x":
|
||||
sp.call(["xdotool"] + v.split(" "))
|
||||
elif k == "c":
|
||||
env = os.environ.copy()
|
||||
while " " in v:
|
||||
v1, v2 = v.split(" ", 1)
|
||||
if "=" not in v1:
|
||||
break
|
||||
|
||||
ek, ev = v1.split("=", 1)
|
||||
env[ek] = ev
|
||||
v = v2
|
||||
|
||||
sp.call(v.split(" "), env=env)
|
||||
else:
|
||||
open_url(txt)
|
||||
|
||||
|
||||
def open_url(txt):
|
||||
ext = txt.rsplit(".")[-1].lower()
|
||||
sp.call(["notify-send", "--", txt])
|
||||
if ext not in ["jpg", "jpeg", "png", "gif", "webp"]:
|
||||
# sp.call(["wmctrl", "-c", ":ACTIVE:"]) # closes the active window correctly
|
||||
sp.call(["killall", "vlc"])
|
||||
sp.call(["killall", "mpv"])
|
||||
sp.call(["killall", "feh"])
|
||||
time.sleep(0.5)
|
||||
for _ in range(20):
|
||||
sp.call(["xdotool", "key", "ctrl+w"]) # closes the open tab correctly
|
||||
# else:
|
||||
# sp.call(["xdotool", "getactivewindow", "windowminimize"]) # minimizes the focused windo
|
||||
|
||||
# close any error messages:
|
||||
sp.call(["xdotool", "search", "--name", "Error", "windowclose"])
|
||||
# sp.call(["xdotool", "key", "ctrl+alt+d"]) # doesnt work at all
|
||||
# sp.call(["xdotool", "keydown", "--delay", "100", "ctrl+alt+d"])
|
||||
# sp.call(["xdotool", "keyup", "ctrl+alt+d"])
|
||||
sp.call(["xdg-open", txt])
|
||||
|
||||
|
||||
main()
|
||||
131
bin/mtag/vidchk.py
Executable file
131
bin/mtag/vidchk.py
Executable file
@@ -0,0 +1,131 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
_ = r"""
|
||||
inspects video files for errors and such
|
||||
plus stores a bunch of metadata to filename.ff.json
|
||||
|
||||
usage:
|
||||
-mtp vidchk=t600,ay,p,bin/mtag/vidchk.py
|
||||
|
||||
explained:
|
||||
t600: timeout 10min
|
||||
ay: only process files which contain audio (including video with audio)
|
||||
p: set priority 1 (lowest priority after initial ffprobe/mutagen for base tags),
|
||||
makes copyparty feed base tags into this script as json
|
||||
|
||||
if you wanna use this script standalone / separately from copyparty,
|
||||
provide the video resolution on stdin as json: {"res":"1920x1080"}
|
||||
"""
|
||||
|
||||
|
||||
FAST = True # parse entire file at container level
|
||||
# FAST = False # fully decode audio and video streams
|
||||
|
||||
|
||||
# warnings to ignore
|
||||
harmless = re.compile(
|
||||
r"Unsupported codec with id |Could not find codec parameters.*Attachment:|analyzeduration"
|
||||
+ r"|timescale not set"
|
||||
)
|
||||
|
||||
|
||||
def wfilter(lines):
|
||||
return [x for x in lines if x.strip() and not harmless.search(x)]
|
||||
|
||||
|
||||
def errchk(so, se, rc, dbg):
|
||||
if dbg:
|
||||
with open(dbg, "wb") as f:
|
||||
f.write(b"so:\n" + so + b"\nse:\n" + se + b"\n")
|
||||
|
||||
if rc:
|
||||
err = (so + se).decode("utf-8", "replace").split("\n", 1)
|
||||
err = wfilter(err) or err
|
||||
return f"ERROR {rc}: {err[0]}"
|
||||
|
||||
if se:
|
||||
err = se.decode("utf-8", "replace").split("\n", 1)
|
||||
err = wfilter(err)
|
||||
if err:
|
||||
return f"Warning: {err[0]}"
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def main():
|
||||
fp = sys.argv[1]
|
||||
zb = sys.stdin.buffer.read()
|
||||
zs = zb.decode("utf-8", "replace")
|
||||
md = json.loads(zs)
|
||||
|
||||
fdir = os.path.dirname(os.path.realpath(fp))
|
||||
flag = os.path.join(fdir, ".processed")
|
||||
if os.path.exists(flag):
|
||||
return "already processed"
|
||||
|
||||
try:
|
||||
w, h = [int(x) for x in md["res"].split("x")]
|
||||
if not w + h:
|
||||
raise Exception()
|
||||
except:
|
||||
return "could not determine resolution"
|
||||
|
||||
# grab streams/format metadata + 2 seconds of frames at the start and end
|
||||
zs = "ffprobe -hide_banner -v warning -of json -show_streams -show_format -show_packets -show_data_hash crc32 -read_intervals %+2,999999%+2"
|
||||
cmd = zs.encode("ascii").split(b" ") + [fsenc(fp)]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
so, se = p.communicate()
|
||||
|
||||
# spaces to tabs, drops filesize from 69k to 48k
|
||||
so = b"\n".join(
|
||||
[
|
||||
b"\t" * int((len(x) - len(x.lstrip())) / 4) + x.lstrip()
|
||||
for x in (so or b"").split(b"\n")
|
||||
]
|
||||
)
|
||||
with open(fsenc(f"{fp}.ff.json"), "wb") as f:
|
||||
f.write(so)
|
||||
|
||||
err = errchk(so, se, p.returncode, f"{fp}.vidchk")
|
||||
if err:
|
||||
return err
|
||||
|
||||
if max(w, h) < 1280 and min(w, h) < 720:
|
||||
return "resolution too small"
|
||||
|
||||
zs = (
|
||||
"ffmpeg -y -hide_banner -nostdin -v warning"
|
||||
+ " -err_detect +crccheck+bitstream+buffer+careful+compliant+aggressive+explode"
|
||||
+ " -xerror -i"
|
||||
)
|
||||
|
||||
cmd = zs.encode("ascii").split(b" ") + [fsenc(fp)]
|
||||
|
||||
if FAST:
|
||||
zs = "-c copy -f null -"
|
||||
else:
|
||||
zs = "-vcodec rawvideo -acodec pcm_s16le -f null -"
|
||||
|
||||
cmd += zs.encode("ascii").split(b" ")
|
||||
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
so, se = p.communicate()
|
||||
return errchk(so, se, p.returncode, f"{fp}.vidchk")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print(main() or "ok")
|
||||
85
bin/mtag/wget.py
Normal file
85
bin/mtag/wget.py
Normal file
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
use copyparty as a file downloader by POSTing URLs as
|
||||
application/x-www-form-urlencoded (for example using the
|
||||
message/pager function on the website)
|
||||
|
||||
example copyparty config to use this:
|
||||
--urlform save,get -vsrv/wget:wget:rwmd,ed:c,e2ts,mtp=title=ebin,t300,ad,bin/mtag/wget.py
|
||||
|
||||
explained:
|
||||
for realpath srv/wget (served at /wget) with read-write-modify-delete for ed,
|
||||
enable file analysis on upload (e2ts),
|
||||
use mtp plugin "bin/mtag/wget.py" to provide metadata tag "title",
|
||||
do this on all uploads with the file extension "bin",
|
||||
t300 = 300 seconds timeout for each dwonload,
|
||||
ad = parse file regardless if FFmpeg thinks it is audio or not
|
||||
|
||||
PS: this requires e2ts to be functional,
|
||||
meaning you need to do at least one of these:
|
||||
* apt install ffmpeg
|
||||
* pip3 install mutagen
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
def main():
|
||||
fp = os.path.abspath(sys.argv[1])
|
||||
fdir = os.path.dirname(fp)
|
||||
fname = os.path.basename(fp)
|
||||
if not fname.startswith("put-") or not fname.endswith(".bin"):
|
||||
raise Exception("not a post file")
|
||||
|
||||
buf = b""
|
||||
with open(fp, "rb") as f:
|
||||
while True:
|
||||
b = f.read(4096)
|
||||
buf += b
|
||||
if len(buf) > 4096:
|
||||
raise Exception("too big")
|
||||
|
||||
if not b:
|
||||
break
|
||||
|
||||
if not buf:
|
||||
raise Exception("file is empty")
|
||||
|
||||
buf = unquote(buf.replace(b"+", b" "))
|
||||
url = buf.decode("utf-8")
|
||||
|
||||
if not url.startswith("msg="):
|
||||
raise Exception("does not start with msg=")
|
||||
|
||||
url = url[4:]
|
||||
if "://" not in url:
|
||||
url = "https://" + url
|
||||
|
||||
os.chdir(fdir)
|
||||
|
||||
name = url.split("?")[0].split("/")[-1]
|
||||
tfn = "-- DOWNLOADING " + name
|
||||
open(tfn, "wb").close()
|
||||
|
||||
cmd = ["wget", "--trust-server-names", "--", url]
|
||||
|
||||
try:
|
||||
sp.check_call(cmd)
|
||||
|
||||
# OPTIONAL:
|
||||
# on success, delete the .bin file which contains the URL
|
||||
os.unlink(fp)
|
||||
except:
|
||||
open("-- FAILED TO DONWLOAD " + name, "wb").close()
|
||||
|
||||
os.unlink(tfn)
|
||||
print(url)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
198
bin/mtag/yt-ipr.py
Normal file
198
bin/mtag/yt-ipr.py
Normal file
@@ -0,0 +1,198 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import gzip
|
||||
import json
|
||||
import base64
|
||||
import string
|
||||
import urllib.request
|
||||
from datetime import datetime
|
||||
|
||||
"""
|
||||
youtube initial player response
|
||||
|
||||
it's probably best to use this through a config file; see res/yt-ipr.conf
|
||||
|
||||
but if you want to use plain arguments instead then:
|
||||
-v srv/ytm:ytm:w:rw,ed
|
||||
:c,e2ts,e2dsa
|
||||
:c,sz=16k-1m:c,maxn=10,300:c,rotf=%Y-%m/%d-%H
|
||||
:c,mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
|
||||
:c,mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires
|
||||
|
||||
see res/yt-ipr.user.js for the example userscript to go with this
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
try:
|
||||
with gzip.open(sys.argv[1], "rt", encoding="utf-8", errors="replace") as f:
|
||||
txt = f.read()
|
||||
except:
|
||||
with open(sys.argv[1], "r", encoding="utf-8", errors="replace") as f:
|
||||
txt = f.read()
|
||||
|
||||
txt = "{" + txt.split("{", 1)[1]
|
||||
|
||||
try:
|
||||
pd = json.loads(txt)
|
||||
except json.decoder.JSONDecodeError as ex:
|
||||
pd = json.loads(txt[: ex.pos])
|
||||
|
||||
# print(json.dumps(pd, indent=2))
|
||||
|
||||
if "videoDetails" in pd:
|
||||
parse_youtube(pd)
|
||||
else:
|
||||
parse_freg(pd)
|
||||
|
||||
|
||||
def get_expiration(url):
|
||||
et = re.search(r"[?&]expire=([0-9]+)", url).group(1)
|
||||
et = datetime.utcfromtimestamp(int(et))
|
||||
return et.strftime("%Y-%m-%d, %H:%M")
|
||||
|
||||
|
||||
def parse_youtube(pd):
|
||||
vd = pd["videoDetails"]
|
||||
sd = pd["streamingData"]
|
||||
|
||||
et = sd["adaptiveFormats"][0]["url"]
|
||||
et = get_expiration(et)
|
||||
|
||||
mf = []
|
||||
if "dashManifestUrl" in sd:
|
||||
mf.append("dash")
|
||||
if "hlsManifestUrl" in sd:
|
||||
mf.append("hls")
|
||||
|
||||
r = {
|
||||
"yt-id": vd["videoId"],
|
||||
"yt-title": vd["title"],
|
||||
"yt-author": vd["author"],
|
||||
"yt-channel": vd["channelId"],
|
||||
"yt-views": vd["viewCount"],
|
||||
"yt-private": vd["isPrivate"],
|
||||
# "yt-expires": sd["expiresInSeconds"],
|
||||
"yt-manifest": ",".join(mf),
|
||||
"yt-expires": et,
|
||||
}
|
||||
print(json.dumps(r))
|
||||
|
||||
freg_conv(pd)
|
||||
|
||||
|
||||
def parse_freg(pd):
|
||||
md = pd["metadata"]
|
||||
r = {
|
||||
"yt-id": md["id"],
|
||||
"yt-title": md["title"],
|
||||
"yt-author": md["channelName"],
|
||||
"yt-channel": md["channelURL"].strip("/").split("/")[-1],
|
||||
"yt-expires": get_expiration(list(pd["video"].values())[0]),
|
||||
}
|
||||
print(json.dumps(r))
|
||||
|
||||
|
||||
def freg_conv(pd):
|
||||
# based on getURLs.js v1.5 (2021-08-07)
|
||||
# fmt: off
|
||||
priority = {
|
||||
"video": [
|
||||
337, 315, 266, 138, # 2160p60
|
||||
313, 336, # 2160p
|
||||
308, # 1440p60
|
||||
271, 264, # 1440p
|
||||
335, 303, 299, # 1080p60
|
||||
248, 169, 137, # 1080p
|
||||
334, 302, 298, # 720p60
|
||||
247, 136 # 720p
|
||||
],
|
||||
"audio": [
|
||||
251, 141, 171, 140, 250, 249, 139
|
||||
]
|
||||
}
|
||||
|
||||
vid_id = pd["videoDetails"]["videoId"]
|
||||
chan_id = pd["videoDetails"]["channelId"]
|
||||
|
||||
try:
|
||||
thumb_url = pd["microformat"]["playerMicroformatRenderer"]["thumbnail"]["thumbnails"][0]["url"]
|
||||
start_ts = pd["microformat"]["playerMicroformatRenderer"]["liveBroadcastDetails"]["startTimestamp"]
|
||||
except:
|
||||
thumb_url = f"https://img.youtube.com/vi/{vid_id}/maxresdefault.jpg"
|
||||
start_ts = ""
|
||||
|
||||
# fmt: on
|
||||
|
||||
metadata = {
|
||||
"title": pd["videoDetails"]["title"],
|
||||
"id": vid_id,
|
||||
"channelName": pd["videoDetails"]["author"],
|
||||
"channelURL": "https://www.youtube.com/channel/" + chan_id,
|
||||
"description": pd["videoDetails"]["shortDescription"],
|
||||
"thumbnailUrl": thumb_url,
|
||||
"startTimestamp": start_ts,
|
||||
}
|
||||
|
||||
if [x for x in vid_id if x not in string.ascii_letters + string.digits + "_-"]:
|
||||
print(f"malicious json", file=sys.stderr)
|
||||
return
|
||||
|
||||
basepath = os.path.dirname(sys.argv[1])
|
||||
|
||||
thumb_fn = f"{basepath}/{vid_id}.jpg"
|
||||
tmp_fn = f"{thumb_fn}.{os.getpid()}"
|
||||
if not os.path.exists(thumb_fn) and (
|
||||
thumb_url.startswith("https://img.youtube.com/vi/")
|
||||
or thumb_url.startswith("https://i.ytimg.com/vi/")
|
||||
):
|
||||
try:
|
||||
with urllib.request.urlopen(thumb_url) as fi:
|
||||
with open(tmp_fn, "wb") as fo:
|
||||
fo.write(fi.read())
|
||||
|
||||
os.rename(tmp_fn, thumb_fn)
|
||||
except:
|
||||
if os.path.exists(tmp_fn):
|
||||
os.unlink(tmp_fn)
|
||||
|
||||
try:
|
||||
with open(thumb_fn, "rb") as f:
|
||||
thumb = base64.b64encode(f.read()).decode("ascii")
|
||||
except:
|
||||
thumb = "/9j/4AAQSkZJRgABAQEASABIAAD/2wBDAAMCAgICAgMCAgIDAwMDBAYEBAQEBAgGBgUGCQgKCgkICQkKDA8MCgsOCwkJDRENDg8QEBEQCgwSExIQEw8QEBD/yQALCAABAAEBAREA/8wABgAQEAX/2gAIAQEAAD8A0s8g/9k="
|
||||
|
||||
metadata["thumbnail"] = "data:image/jpeg;base64," + thumb
|
||||
|
||||
ret = {
|
||||
"metadata": metadata,
|
||||
"version": "1.5",
|
||||
"createTime": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
}
|
||||
|
||||
for stream, itags in priority.items():
|
||||
for itag in itags:
|
||||
url = None
|
||||
for afmt in pd["streamingData"]["adaptiveFormats"]:
|
||||
if itag == afmt["itag"]:
|
||||
url = afmt["url"]
|
||||
break
|
||||
|
||||
if url:
|
||||
ret[stream] = {itag: url}
|
||||
break
|
||||
|
||||
fn = f"{basepath}/{vid_id}.urls.json"
|
||||
with open(fn, "w", encoding="utf-8", errors="replace") as f:
|
||||
f.write(json.dumps(ret, indent=4))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except:
|
||||
# raise
|
||||
pass
|
||||
177
bin/partyjournal.py
Executable file
177
bin/partyjournal.py
Executable file
@@ -0,0 +1,177 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
partyjournal.py: chronological history of uploads
|
||||
2021-12-31, v0.1, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/partyjournal.py
|
||||
|
||||
produces a chronological list of all uploads,
|
||||
by collecting info from up2k databases and the filesystem
|
||||
|
||||
specify subnet `192.168.1.*` with argument `.=192.168.1.`,
|
||||
affecting all successive mappings
|
||||
|
||||
usage:
|
||||
./partyjournal.py > partyjournal.html .=192.168.1. cart=125 steen=114 steen=131 sleepy=121 fscarlet=144 ed=101 ed=123
|
||||
|
||||
"""
|
||||
|
||||
import sys
|
||||
import base64
|
||||
import sqlite3
|
||||
import argparse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
FS_ENCODING = sys.getfilesystemencoding()
|
||||
|
||||
|
||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||
pass
|
||||
|
||||
|
||||
##
|
||||
## snibbed from copyparty
|
||||
|
||||
|
||||
def s3dec(v):
|
||||
if not v.startswith("//"):
|
||||
return v
|
||||
|
||||
v = base64.urlsafe_b64decode(v.encode("ascii")[2:])
|
||||
return v.decode(FS_ENCODING, "replace")
|
||||
|
||||
|
||||
def quotep(txt):
|
||||
btxt = txt.encode("utf-8", "replace")
|
||||
quot1 = quote(btxt, safe=b"/")
|
||||
quot1 = quot1.encode("ascii")
|
||||
quot2 = quot1.replace(b" ", b"+")
|
||||
return quot2.decode("utf-8", "replace")
|
||||
|
||||
|
||||
def html_escape(s, quote=False, crlf=False):
|
||||
"""html.escape but also newlines"""
|
||||
s = s.replace("&", "&").replace("<", "<").replace(">", ">")
|
||||
if quote:
|
||||
s = s.replace('"', """).replace("'", "'")
|
||||
if crlf:
|
||||
s = s.replace("\r", " ").replace("\n", " ")
|
||||
|
||||
return s
|
||||
|
||||
|
||||
## end snibs
|
||||
##
|
||||
|
||||
|
||||
def main():
|
||||
ap = argparse.ArgumentParser(formatter_class=APF)
|
||||
ap.add_argument("who", nargs="*")
|
||||
ar = ap.parse_args()
|
||||
|
||||
imap = {}
|
||||
subnet = ""
|
||||
for v in ar.who:
|
||||
if "=" not in v:
|
||||
raise Exception("bad who: " + v)
|
||||
|
||||
k, v = v.split("=")
|
||||
if k == ".":
|
||||
subnet = v
|
||||
continue
|
||||
|
||||
imap["{}{}".format(subnet, v)] = k
|
||||
|
||||
print(repr(imap), file=sys.stderr)
|
||||
|
||||
print(
|
||||
"""\
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head><meta charset="utf-8"><style>
|
||||
|
||||
html, body {
|
||||
color: #ccc;
|
||||
background: #222;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
a {
|
||||
color: #fc5;
|
||||
}
|
||||
td, th {
|
||||
padding: .2em .5em;
|
||||
border: 1px solid #999;
|
||||
border-width: 0 1px 1px 0;
|
||||
white-space: nowrap;
|
||||
}
|
||||
td:nth-child(1),
|
||||
td:nth-child(2),
|
||||
td:nth-child(3) {
|
||||
font-family: monospace, monospace;
|
||||
text-align: right;
|
||||
}
|
||||
tr:first-child {
|
||||
position: sticky;
|
||||
top: -1px;
|
||||
}
|
||||
th {
|
||||
background: #222;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
</style></head><body><table><tr>
|
||||
<th>wark</th>
|
||||
<th>time</th>
|
||||
<th>size</th>
|
||||
<th>who</th>
|
||||
<th>link</th>
|
||||
</tr>"""
|
||||
)
|
||||
|
||||
db_path = ".hist/up2k.db"
|
||||
conn = sqlite3.connect(db_path)
|
||||
q = r"pragma table_info(up)"
|
||||
inf = conn.execute(q).fetchall()
|
||||
cols = [x[1] for x in inf]
|
||||
print("<!-- " + str(cols) + " -->")
|
||||
# ['w', 'mt', 'sz', 'rd', 'fn', 'ip', 'at']
|
||||
|
||||
q = r"select * from up order by case when at > 0 then at else mt end"
|
||||
for w, mt, sz, rd, fn, ip, at in conn.execute(q):
|
||||
link = "/".join([s3dec(x) for x in [rd, fn] if x])
|
||||
if fn.startswith("put-") and sz < 4096:
|
||||
try:
|
||||
with open(link, "rb") as f:
|
||||
txt = f.read().decode("utf-8", "replace")
|
||||
except:
|
||||
continue
|
||||
|
||||
if txt.startswith("msg="):
|
||||
txt = txt.encode("utf-8", "replace")
|
||||
txt = unquote(txt.replace(b"+", b" "))
|
||||
link = txt.decode("utf-8")[4:]
|
||||
|
||||
sz = "{:,}".format(sz)
|
||||
v = [
|
||||
w[:16],
|
||||
datetime.utcfromtimestamp(at if at > 0 else mt).strftime(
|
||||
"%Y-%m-%d %H:%M:%S"
|
||||
),
|
||||
sz,
|
||||
imap.get(ip, ip),
|
||||
]
|
||||
|
||||
row = "<tr>\n "
|
||||
row += "\n ".join(["<td>{}</th>".format(x) for x in v])
|
||||
row += '\n <td><a href="{}">{}</a></td>'.format(link, html_escape(link))
|
||||
row += "\n</tr>"
|
||||
print(row)
|
||||
|
||||
print("</table></body></html>")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
128
bin/prisonparty.sh
Executable file
128
bin/prisonparty.sh
Executable file
@@ -0,0 +1,128 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# runs copyparty (or any other program really) in a chroot
|
||||
#
|
||||
# assumption: these directories, and everything within, are owned by root
|
||||
sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr )
|
||||
|
||||
|
||||
# error-handler
|
||||
help() { cat <<'EOF'
|
||||
|
||||
usage:
|
||||
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]
|
||||
|
||||
example:
|
||||
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd
|
||||
|
||||
example for running straight from source (instead of using an sfx):
|
||||
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd
|
||||
|
||||
note that if you have python modules installed as --user (such as bpm/key detectors),
|
||||
you should add /home/foo/.local as a VOLDIR
|
||||
|
||||
EOF
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
# read arguments
|
||||
trap help EXIT
|
||||
jail="$(realpath "$1")"; shift
|
||||
uid="$1"; shift
|
||||
gid="$1"; shift
|
||||
|
||||
vols=()
|
||||
while true; do
|
||||
v="$1"; shift
|
||||
[ "$v" = -- ] && break # end of volumes
|
||||
[ "$#" -eq 0 ] && break # invalid usage
|
||||
vols+=( "$(realpath "$v")" )
|
||||
done
|
||||
pybin="$1"; shift
|
||||
pybin="$(command -v "$pybin")"
|
||||
pyarg=
|
||||
while true; do
|
||||
v="$1"
|
||||
[ "${v:0:1}" = - ] || break
|
||||
pyarg="$pyarg $v"
|
||||
shift
|
||||
done
|
||||
cpp="$1"; shift
|
||||
[ -d "$cpp" ] && cppdir="$PWD" || {
|
||||
# sfx, not module
|
||||
cpp="$(realpath "$cpp")"
|
||||
cppdir="$(dirname "$cpp")"
|
||||
}
|
||||
trap - EXIT
|
||||
|
||||
|
||||
# debug/vis
|
||||
echo
|
||||
echo "chroot-dir = $jail"
|
||||
echo "user:group = $uid:$gid"
|
||||
echo " copyparty = $cpp"
|
||||
echo
|
||||
printf '\033[33m%s\033[0m\n' "copyparty can access these folders and all their subdirectories:"
|
||||
for v in "${vols[@]}"; do
|
||||
printf '\033[36m ├─\033[0m %s \033[36m ── added by (You)\033[0m\n' "$v"
|
||||
done
|
||||
printf '\033[36m ├─\033[0m %s \033[36m ── where the copyparty binary is\033[0m\n' "$cppdir"
|
||||
printf '\033[36m ╰─\033[0m %s \033[36m ── the folder you are currently in\033[0m\n' "$PWD"
|
||||
vols+=("$cppdir" "$PWD")
|
||||
echo
|
||||
|
||||
|
||||
# remove any trailing slashes
|
||||
jail="${jail%/}"
|
||||
|
||||
|
||||
# bind-mount system directories and volumes
|
||||
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
|
||||
while IFS= read -r v; do
|
||||
[ -e "$v" ] || {
|
||||
# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v"
|
||||
continue
|
||||
}
|
||||
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
|
||||
i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b)
|
||||
# echo "v [$v] i1 [$i1] i2 [$i2]"
|
||||
[ $i1 = $i2 ] && continue
|
||||
|
||||
mkdir -p "$jail$v"
|
||||
mount --bind "$v" "$jail$v"
|
||||
done
|
||||
|
||||
|
||||
cln() {
|
||||
rv=$?
|
||||
# cleanup if not in use
|
||||
lsof "$jail" | grep -qF "$jail" &&
|
||||
echo "chroot is in use, will not cleanup" ||
|
||||
{
|
||||
mount | grep -F " on $jail" |
|
||||
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
||||
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
|
||||
}
|
||||
exit $rv
|
||||
}
|
||||
trap cln EXIT
|
||||
|
||||
|
||||
# create a tmp
|
||||
mkdir -p "$jail/tmp"
|
||||
chmod 777 "$jail/tmp"
|
||||
|
||||
|
||||
# run copyparty
|
||||
export HOME=$(getent passwd $uid | cut -d: -f6)
|
||||
export USER=$(getent passwd $uid | cut -d: -f1)
|
||||
export LOGNAME="$USER"
|
||||
#echo "pybin [$pybin]"
|
||||
#echo "pyarg [$pyarg]"
|
||||
#echo "cpp [$cpp]"
|
||||
chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
|
||||
p=$!
|
||||
trap 'kill $p' INT TERM
|
||||
wait
|
||||
99
bin/unforget.py
Executable file
99
bin/unforget.py
Executable file
@@ -0,0 +1,99 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
unforget.py: rebuild db from logfiles
|
||||
2022-09-07, v0.1, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/unforget.py
|
||||
|
||||
only makes sense if running copyparty with --no-forget
|
||||
(e.g. immediately shifting uploads to other storage)
|
||||
|
||||
usage:
|
||||
xz -d < log | ./unforget.py .hist/up2k.db
|
||||
|
||||
"""
|
||||
|
||||
import re
|
||||
import sys
|
||||
import json
|
||||
import base64
|
||||
import sqlite3
|
||||
import argparse
|
||||
|
||||
|
||||
FS_ENCODING = sys.getfilesystemencoding()
|
||||
|
||||
|
||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||
pass
|
||||
|
||||
|
||||
mem_cur = sqlite3.connect(":memory:").cursor()
|
||||
mem_cur.execute(r"create table a (b text)")
|
||||
|
||||
|
||||
def s3enc(rd: str, fn: str) -> tuple[str, str]:
|
||||
ret: list[str] = []
|
||||
for v in [rd, fn]:
|
||||
try:
|
||||
mem_cur.execute("select * from a where b = ?", (v,))
|
||||
ret.append(v)
|
||||
except:
|
||||
wtf8 = v.encode(FS_ENCODING, "surrogateescape")
|
||||
ret.append("//" + base64.urlsafe_b64encode(wtf8).decode("ascii"))
|
||||
|
||||
return ret[0], ret[1]
|
||||
|
||||
|
||||
def main():
|
||||
ap = argparse.ArgumentParser()
|
||||
ap.add_argument("db")
|
||||
ar = ap.parse_args()
|
||||
|
||||
db = sqlite3.connect(ar.db).cursor()
|
||||
ptn_times = re.compile(r"no more chunks, setting times \(([0-9]+)")
|
||||
at = 0
|
||||
ctr = 0
|
||||
|
||||
for ln in [x.decode("utf-8", "replace").rstrip() for x in sys.stdin.buffer]:
|
||||
if "no more chunks, setting times (" in ln:
|
||||
m = ptn_times.search(ln)
|
||||
if m:
|
||||
at = int(m.group(1))
|
||||
|
||||
if '"hash": []' in ln:
|
||||
try:
|
||||
ofs = ln.find("{")
|
||||
j = json.loads(ln[ofs:])
|
||||
except:
|
||||
pass
|
||||
|
||||
w = j["wark"]
|
||||
if db.execute("select w from up where w = ?", (w,)).fetchone():
|
||||
continue
|
||||
|
||||
# PYTHONPATH=/home/ed/dev/copyparty/ python3 -m copyparty -e2dsa -v foo:foo:rwmd,ed -aed:wark --no-forget
|
||||
# 05:34:43.845 127.0.0.1 42496 no more chunks, setting times (1662528883, 1658001882)
|
||||
# 05:34:43.863 127.0.0.1 42496 {"name": "f\"2", "purl": "/foo/bar/baz/", "size": 1674, "lmod": 1658001882, "sprs": true, "hash": [], "wark": "LKIWpp2jEAh9dH3fu-DobuURFGEKlODXDGTpZ1otMhUg"}
|
||||
# | w | mt | sz | rd | fn | ip | at |
|
||||
# | LKIWpp2jEAh9dH3fu-DobuURFGEKlODXDGTpZ1otMhUg | 1658001882 | 1674 | bar/baz | f"2 | 127.0.0.1 | 1662528883 |
|
||||
|
||||
rd, fn = s3enc(j["purl"].strip("/"), j["name"])
|
||||
ip = ln.split(" ")[1].split("m")[-1]
|
||||
|
||||
q = "insert into up values (?,?,?,?,?,?,?)"
|
||||
v = (w, int(j["lmod"]), int(j["size"]), rd, fn, ip, at)
|
||||
db.execute(q, v)
|
||||
ctr += 1
|
||||
if ctr % 1024 == 1023:
|
||||
print(f"{ctr} commit...")
|
||||
db.connection.commit()
|
||||
|
||||
if ctr:
|
||||
db.connection.commit()
|
||||
|
||||
print(f"unforgot {ctr} files")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
964
bin/up2k.py
Executable file
964
bin/up2k.py
Executable file
@@ -0,0 +1,964 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
"""
|
||||
up2k.py: upload to copyparty
|
||||
2022-09-05, v0.19, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
|
||||
|
||||
- dependencies: requests
|
||||
- supports python 2.6, 2.7, and 3.3 through 3.11
|
||||
|
||||
- almost zero error-handling
|
||||
- but if something breaks just try again and it'll autoresume
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
import math
|
||||
import time
|
||||
import atexit
|
||||
import signal
|
||||
import base64
|
||||
import hashlib
|
||||
import platform
|
||||
import threading
|
||||
import datetime
|
||||
|
||||
try:
|
||||
import argparse
|
||||
except:
|
||||
m = "\n ERROR: need 'argparse'; download it here:\n https://github.com/ThomasWaldmann/argparse/raw/master/argparse.py\n"
|
||||
print(m)
|
||||
raise
|
||||
|
||||
try:
|
||||
import requests
|
||||
except ImportError:
|
||||
if sys.version_info > (2, 7):
|
||||
m = "\nERROR: need 'requests'; please run this command:\n {0} -m pip install --user requests\n"
|
||||
else:
|
||||
m = "requests/2.18.4 urllib3/1.23 chardet/3.0.4 certifi/2020.4.5.1 idna/2.7"
|
||||
m = [" https://pypi.org/project/" + x + "/#files" for x in m.split()]
|
||||
m = "\n ERROR: need these:\n" + "\n".join(m) + "\n"
|
||||
|
||||
print(m.format(sys.executable))
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
# from copyparty/__init__.py
|
||||
PY2 = sys.version_info[0] == 2
|
||||
if PY2:
|
||||
from Queue import Queue
|
||||
from urllib import unquote
|
||||
from urllib import quote
|
||||
|
||||
sys.dont_write_bytecode = True
|
||||
bytes = str
|
||||
else:
|
||||
from queue import Queue
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
|
||||
unicode = str
|
||||
|
||||
VT100 = platform.system() != "Windows"
|
||||
|
||||
|
||||
req_ses = requests.Session()
|
||||
|
||||
|
||||
class File(object):
|
||||
"""an up2k upload task; represents a single file"""
|
||||
|
||||
def __init__(self, top, rel, size, lmod):
|
||||
self.top = top # type: bytes
|
||||
self.rel = rel.replace(b"\\", b"/") # type: bytes
|
||||
self.size = size # type: int
|
||||
self.lmod = lmod # type: float
|
||||
|
||||
self.abs = os.path.join(top, rel) # type: bytes
|
||||
self.name = self.rel.split(b"/")[-1].decode("utf-8", "replace") # type: str
|
||||
|
||||
# set by get_hashlist
|
||||
self.cids = [] # type: list[tuple[str, int, int]] # [ hash, ofs, sz ]
|
||||
self.kchunks = {} # type: dict[str, tuple[int, int]] # hash: [ ofs, sz ]
|
||||
|
||||
# set by handshake
|
||||
self.ucids = [] # type: list[str] # chunks which need to be uploaded
|
||||
self.wark = None # type: str
|
||||
self.url = None # type: str
|
||||
|
||||
# set by upload
|
||||
self.up_b = 0 # type: int
|
||||
self.up_c = 0 # type: int
|
||||
|
||||
# t = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
||||
# eprint(t.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
||||
|
||||
|
||||
class FileSlice(object):
|
||||
"""file-like object providing a fixed window into a file"""
|
||||
|
||||
def __init__(self, file, cid):
|
||||
# type: (File, str) -> None
|
||||
|
||||
self.car, self.len = file.kchunks[cid]
|
||||
self.cdr = self.car + self.len
|
||||
self.ofs = 0 # type: int
|
||||
self.f = open(file.abs, "rb", 512 * 1024)
|
||||
self.f.seek(self.car)
|
||||
|
||||
# https://stackoverflow.com/questions/4359495/what-is-exactly-a-file-like-object-in-python
|
||||
# IOBase, RawIOBase, BufferedIOBase
|
||||
funs = "close closed __enter__ __exit__ __iter__ isatty __next__ readable seekable writable"
|
||||
try:
|
||||
for fun in funs.split():
|
||||
setattr(self, fun, getattr(self.f, fun))
|
||||
except:
|
||||
pass # py27 probably
|
||||
|
||||
def tell(self):
|
||||
return self.ofs
|
||||
|
||||
def seek(self, ofs, wh=0):
|
||||
if wh == 1:
|
||||
ofs = self.ofs + ofs
|
||||
elif wh == 2:
|
||||
ofs = self.len + ofs # provided ofs is negative
|
||||
|
||||
if ofs < 0:
|
||||
ofs = 0
|
||||
elif ofs >= self.len:
|
||||
ofs = self.len - 1
|
||||
|
||||
self.ofs = ofs
|
||||
self.f.seek(self.car + ofs)
|
||||
|
||||
def read(self, sz):
|
||||
sz = min(sz, self.len - self.ofs)
|
||||
ret = self.f.read(sz)
|
||||
self.ofs += len(ret)
|
||||
return ret
|
||||
|
||||
|
||||
class MTHash(object):
|
||||
def __init__(self, cores):
|
||||
self.f = None
|
||||
self.sz = 0
|
||||
self.csz = 0
|
||||
self.omutex = threading.Lock()
|
||||
self.imutex = threading.Lock()
|
||||
self.work_q = Queue()
|
||||
self.done_q = Queue()
|
||||
self.thrs = []
|
||||
for _ in range(cores):
|
||||
t = threading.Thread(target=self.worker)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
self.thrs.append(t)
|
||||
|
||||
def hash(self, f, fsz, chunksz, pcb=None, pcb_opaque=None):
|
||||
with self.omutex:
|
||||
self.f = f
|
||||
self.sz = fsz
|
||||
self.csz = chunksz
|
||||
|
||||
chunks = {}
|
||||
nchunks = int(math.ceil(fsz / chunksz))
|
||||
for nch in range(nchunks):
|
||||
self.work_q.put(nch)
|
||||
|
||||
ex = ""
|
||||
for nch in range(nchunks):
|
||||
qe = self.done_q.get()
|
||||
try:
|
||||
nch, dig, ofs, csz = qe
|
||||
chunks[nch] = [dig, ofs, csz]
|
||||
except:
|
||||
ex = ex or qe
|
||||
|
||||
if pcb:
|
||||
pcb(pcb_opaque, chunksz * nch)
|
||||
|
||||
if ex:
|
||||
raise Exception(ex)
|
||||
|
||||
ret = []
|
||||
for n in range(nchunks):
|
||||
ret.append(chunks[n])
|
||||
|
||||
self.f = None
|
||||
self.csz = 0
|
||||
self.sz = 0
|
||||
return ret
|
||||
|
||||
def worker(self):
|
||||
while True:
|
||||
ofs = self.work_q.get()
|
||||
try:
|
||||
v = self.hash_at(ofs)
|
||||
except Exception as ex:
|
||||
v = str(ex)
|
||||
|
||||
self.done_q.put(v)
|
||||
|
||||
def hash_at(self, nch):
|
||||
f = self.f
|
||||
ofs = ofs0 = nch * self.csz
|
||||
hashobj = hashlib.sha512()
|
||||
chunk_sz = chunk_rem = min(self.csz, self.sz - ofs)
|
||||
while chunk_rem > 0:
|
||||
with self.imutex:
|
||||
f.seek(ofs)
|
||||
buf = f.read(min(chunk_rem, 1024 * 1024 * 12))
|
||||
|
||||
if not buf:
|
||||
raise Exception("EOF at " + str(ofs))
|
||||
|
||||
hashobj.update(buf)
|
||||
chunk_rem -= len(buf)
|
||||
ofs += len(buf)
|
||||
|
||||
digest = hashobj.digest()[:33]
|
||||
digest = base64.urlsafe_b64encode(digest).decode("utf-8")
|
||||
return nch, digest, ofs0, chunk_sz
|
||||
|
||||
|
||||
_print = print
|
||||
|
||||
|
||||
def eprint(*a, **ka):
|
||||
ka["file"] = sys.stderr
|
||||
ka["end"] = ""
|
||||
if not PY2:
|
||||
ka["flush"] = True
|
||||
|
||||
_print(*a, **ka)
|
||||
if PY2 or not VT100:
|
||||
sys.stderr.flush()
|
||||
|
||||
|
||||
def flushing_print(*a, **ka):
|
||||
_print(*a, **ka)
|
||||
if "flush" not in ka:
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
if not VT100:
|
||||
print = flushing_print
|
||||
|
||||
|
||||
def termsize():
|
||||
env = os.environ
|
||||
|
||||
def ioctl_GWINSZ(fd):
|
||||
try:
|
||||
import fcntl, termios, struct
|
||||
|
||||
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
|
||||
except:
|
||||
return
|
||||
return cr
|
||||
|
||||
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
|
||||
if not cr:
|
||||
try:
|
||||
fd = os.open(os.ctermid(), os.O_RDONLY)
|
||||
cr = ioctl_GWINSZ(fd)
|
||||
os.close(fd)
|
||||
except:
|
||||
pass
|
||||
if not cr:
|
||||
try:
|
||||
cr = (env["LINES"], env["COLUMNS"])
|
||||
except:
|
||||
cr = (25, 80)
|
||||
return int(cr[1]), int(cr[0])
|
||||
|
||||
|
||||
class CTermsize(object):
|
||||
def __init__(self):
|
||||
self.ev = False
|
||||
self.margin = None
|
||||
self.g = None
|
||||
self.w, self.h = termsize()
|
||||
|
||||
try:
|
||||
signal.signal(signal.SIGWINCH, self.ev_sig)
|
||||
except:
|
||||
return
|
||||
|
||||
thr = threading.Thread(target=self.worker)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def worker(self):
|
||||
while True:
|
||||
time.sleep(0.5)
|
||||
if not self.ev:
|
||||
continue
|
||||
|
||||
self.ev = False
|
||||
self.w, self.h = termsize()
|
||||
|
||||
if self.margin is not None:
|
||||
self.scroll_region(self.margin)
|
||||
|
||||
def ev_sig(self, *a, **ka):
|
||||
self.ev = True
|
||||
|
||||
def scroll_region(self, margin):
|
||||
self.margin = margin
|
||||
if margin is None:
|
||||
self.g = None
|
||||
eprint("\033[s\033[r\033[u")
|
||||
else:
|
||||
self.g = 1 + self.h - margin
|
||||
t = "{0}\033[{1}A".format("\n" * margin, margin)
|
||||
eprint("{0}\033[s\033[1;{1}r\033[u".format(t, self.g - 1))
|
||||
|
||||
|
||||
ss = CTermsize()
|
||||
|
||||
|
||||
def _scd(err, top):
|
||||
"""non-recursive listing of directory contents, along with stat() info"""
|
||||
with os.scandir(top) as dh:
|
||||
for fh in dh:
|
||||
abspath = os.path.join(top, fh.name)
|
||||
try:
|
||||
yield [abspath, fh.stat()]
|
||||
except Exception as ex:
|
||||
err.append((abspath, str(ex)))
|
||||
|
||||
|
||||
def _lsd(err, top):
|
||||
"""non-recursive listing of directory contents, along with stat() info"""
|
||||
for name in os.listdir(top):
|
||||
abspath = os.path.join(top, name)
|
||||
try:
|
||||
yield [abspath, os.stat(abspath)]
|
||||
except Exception as ex:
|
||||
err.append((abspath, str(ex)))
|
||||
|
||||
|
||||
if hasattr(os, "scandir") and sys.version_info > (3, 6):
|
||||
statdir = _scd
|
||||
else:
|
||||
statdir = _lsd
|
||||
|
||||
|
||||
def walkdir(err, top, seen):
|
||||
"""recursive statdir"""
|
||||
atop = os.path.abspath(os.path.realpath(top))
|
||||
if atop in seen:
|
||||
err.append((top, "recursive-symlink"))
|
||||
return
|
||||
|
||||
seen = seen[:] + [atop]
|
||||
for ap, inf in sorted(statdir(err, top)):
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
try:
|
||||
for x in walkdir(err, ap, seen):
|
||||
yield x
|
||||
except Exception as ex:
|
||||
err.append((ap, str(ex)))
|
||||
else:
|
||||
yield ap, inf
|
||||
|
||||
|
||||
def walkdirs(err, tops):
|
||||
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
|
||||
sep = "{0}".format(os.sep).encode("ascii")
|
||||
for top in tops:
|
||||
if top[-1:] == sep:
|
||||
stop = top.rstrip(sep)
|
||||
else:
|
||||
stop = os.path.dirname(top)
|
||||
|
||||
if os.path.isdir(top):
|
||||
for ap, inf in walkdir(err, top, []):
|
||||
yield stop, ap[len(stop) :].lstrip(sep), inf
|
||||
else:
|
||||
d, n = top.rsplit(sep, 1)
|
||||
yield d, n, os.stat(top)
|
||||
|
||||
|
||||
# mostly from copyparty/util.py
|
||||
def quotep(btxt):
|
||||
quot1 = quote(btxt, safe=b"/")
|
||||
if not PY2:
|
||||
quot1 = quot1.encode("ascii")
|
||||
|
||||
return quot1.replace(b" ", b"+")
|
||||
|
||||
|
||||
# from copyparty/util.py
|
||||
def humansize(sz, terse=False):
|
||||
"""picks a sensible unit for the given extent"""
|
||||
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
|
||||
if sz < 1024:
|
||||
break
|
||||
|
||||
sz /= 1024.0
|
||||
|
||||
ret = " ".join([str(sz)[:4].rstrip("."), unit])
|
||||
|
||||
if not terse:
|
||||
return ret
|
||||
|
||||
return ret.replace("iB", "").replace(" ", "")
|
||||
|
||||
|
||||
# from copyparty/up2k.py
|
||||
def up2k_chunksize(filesize):
|
||||
"""gives The correct chunksize for up2k hashing"""
|
||||
chunksize = 1024 * 1024
|
||||
stepsize = 512 * 1024
|
||||
while True:
|
||||
for mul in [1, 2]:
|
||||
nchunks = math.ceil(filesize * 1.0 / chunksize)
|
||||
if nchunks <= 256 or chunksize >= 32 * 1024 * 1024:
|
||||
return chunksize
|
||||
|
||||
chunksize += stepsize
|
||||
stepsize *= mul
|
||||
|
||||
|
||||
# mostly from copyparty/up2k.py
|
||||
def get_hashlist(file, pcb, mth):
|
||||
# type: (File, any, any) -> None
|
||||
"""generates the up2k hashlist from file contents, inserts it into `file`"""
|
||||
|
||||
chunk_sz = up2k_chunksize(file.size)
|
||||
file_rem = file.size
|
||||
file_ofs = 0
|
||||
ret = []
|
||||
with open(file.abs, "rb", 512 * 1024) as f:
|
||||
if mth and file.size >= 1024 * 512:
|
||||
ret = mth.hash(f, file.size, chunk_sz, pcb, file)
|
||||
file_rem = 0
|
||||
|
||||
while file_rem > 0:
|
||||
# same as `hash_at` except for `imutex` / bufsz
|
||||
hashobj = hashlib.sha512()
|
||||
chunk_sz = chunk_rem = min(chunk_sz, file_rem)
|
||||
while chunk_rem > 0:
|
||||
buf = f.read(min(chunk_rem, 64 * 1024))
|
||||
if not buf:
|
||||
raise Exception("EOF at " + str(f.tell()))
|
||||
|
||||
hashobj.update(buf)
|
||||
chunk_rem -= len(buf)
|
||||
|
||||
digest = hashobj.digest()[:33]
|
||||
digest = base64.urlsafe_b64encode(digest).decode("utf-8")
|
||||
|
||||
ret.append([digest, file_ofs, chunk_sz])
|
||||
file_ofs += chunk_sz
|
||||
file_rem -= chunk_sz
|
||||
|
||||
if pcb:
|
||||
pcb(file, file_ofs)
|
||||
|
||||
file.cids = ret
|
||||
file.kchunks = {}
|
||||
for k, v1, v2 in ret:
|
||||
file.kchunks[k] = [v1, v2]
|
||||
|
||||
|
||||
def handshake(req_ses, url, file, pw, search):
|
||||
# type: (requests.Session, str, File, any, bool) -> list[str]
|
||||
"""
|
||||
performs a handshake with the server; reply is:
|
||||
if search, a list of search results
|
||||
otherwise, a list of chunks to upload
|
||||
"""
|
||||
|
||||
req = {
|
||||
"hash": [x[0] for x in file.cids],
|
||||
"name": file.name,
|
||||
"lmod": file.lmod,
|
||||
"size": file.size,
|
||||
}
|
||||
if search:
|
||||
req["srch"] = 1
|
||||
|
||||
headers = {"Content-Type": "text/plain"} # wtf ed
|
||||
if pw:
|
||||
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||
|
||||
if file.url:
|
||||
url = file.url
|
||||
elif b"/" in file.rel:
|
||||
url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")
|
||||
|
||||
while True:
|
||||
try:
|
||||
r = req_ses.post(url, headers=headers, json=req)
|
||||
break
|
||||
except Exception as ex:
|
||||
em = str(ex).split("SSLError(")[-1]
|
||||
eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
|
||||
time.sleep(1)
|
||||
|
||||
try:
|
||||
r = r.json()
|
||||
except:
|
||||
raise Exception(r.text)
|
||||
|
||||
if search:
|
||||
return r["hits"], False
|
||||
|
||||
try:
|
||||
pre, url = url.split("://")
|
||||
pre += "://"
|
||||
except:
|
||||
pre = ""
|
||||
|
||||
file.url = pre + url.split("/")[0] + r["purl"]
|
||||
file.name = r["name"]
|
||||
file.wark = r["wark"]
|
||||
|
||||
return r["hash"], r["sprs"]
|
||||
|
||||
|
||||
def upload(req_ses, file, cid, pw):
|
||||
# type: (requests.Session, File, str, any) -> None
|
||||
"""upload one specific chunk, `cid` (a chunk-hash)"""
|
||||
|
||||
headers = {
|
||||
"X-Up2k-Hash": cid,
|
||||
"X-Up2k-Wark": file.wark,
|
||||
"Content-Type": "application/octet-stream",
|
||||
}
|
||||
if pw:
|
||||
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||
|
||||
f = FileSlice(file, cid)
|
||||
try:
|
||||
r = req_ses.post(file.url, headers=headers, data=f)
|
||||
if not r:
|
||||
raise Exception(repr(r))
|
||||
|
||||
_ = r.content
|
||||
finally:
|
||||
f.f.close()
|
||||
|
||||
|
||||
class Daemon(threading.Thread):
|
||||
def __init__(self, *a, **ka):
|
||||
threading.Thread.__init__(self, *a, **ka)
|
||||
self.daemon = True
|
||||
|
||||
|
||||
class Ctl(object):
|
||||
"""
|
||||
this will be the coordinator which runs everything in parallel
|
||||
(hashing, handshakes, uploads) but right now it's p dumb
|
||||
"""
|
||||
|
||||
def __init__(self, ar):
|
||||
self.ar = ar
|
||||
ar.files = [
|
||||
os.path.abspath(os.path.realpath(x.encode("utf-8")))
|
||||
+ (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
|
||||
for x in ar.files
|
||||
]
|
||||
ar.url = ar.url.rstrip("/") + "/"
|
||||
if "://" not in ar.url:
|
||||
ar.url = "http://" + ar.url
|
||||
|
||||
eprint("\nscanning {0} locations\n".format(len(ar.files)))
|
||||
|
||||
nfiles = 0
|
||||
nbytes = 0
|
||||
err = []
|
||||
for _, _, inf in walkdirs(err, ar.files):
|
||||
nfiles += 1
|
||||
nbytes += inf.st_size
|
||||
|
||||
if err:
|
||||
eprint("\n# failed to access {0} paths:\n".format(len(err)))
|
||||
for ap, msg in err:
|
||||
if ar.v:
|
||||
eprint("{0}\n `-{1}\n\n".format(ap.decode("utf-8", "replace"), msg))
|
||||
else:
|
||||
eprint(ap.decode("utf-8", "replace") + "\n")
|
||||
|
||||
eprint("^ failed to access those {0} paths ^\n\n".format(len(err)))
|
||||
|
||||
if not ar.v:
|
||||
eprint("hint: set -v for detailed error messages\n")
|
||||
|
||||
if not ar.ok:
|
||||
eprint("hint: aborting because --ok is not set\n")
|
||||
return
|
||||
|
||||
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
|
||||
self.nfiles = nfiles
|
||||
self.nbytes = nbytes
|
||||
|
||||
if ar.td:
|
||||
requests.packages.urllib3.disable_warnings()
|
||||
req_ses.verify = False
|
||||
if ar.te:
|
||||
req_ses.verify = ar.te
|
||||
|
||||
self.filegen = walkdirs([], ar.files)
|
||||
if ar.safe:
|
||||
self._safe()
|
||||
else:
|
||||
self.hash_f = 0
|
||||
self.hash_c = 0
|
||||
self.hash_b = 0
|
||||
self.up_f = 0
|
||||
self.up_c = 0
|
||||
self.up_b = 0
|
||||
self.up_br = 0
|
||||
self.hasher_busy = 1
|
||||
self.handshaker_busy = 0
|
||||
self.uploader_busy = 0
|
||||
self.serialized = False
|
||||
|
||||
self.t0 = time.time()
|
||||
self.t0_up = None
|
||||
self.spd = None
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.q_handshake = Queue() # type: Queue[File]
|
||||
self.q_recheck = Queue() # type: Queue[File] # partial upload exists [...]
|
||||
self.q_upload = Queue() # type: Queue[tuple[File, str]]
|
||||
|
||||
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
|
||||
self.mth = MTHash(ar.J) if ar.J > 1 else None
|
||||
|
||||
self._fancy()
|
||||
|
||||
def _safe(self):
|
||||
"""minimal basic slow boring fallback codepath"""
|
||||
search = self.ar.s
|
||||
for nf, (top, rel, inf) in enumerate(self.filegen):
|
||||
file = File(top, rel, inf.st_size, inf.st_mtime)
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
|
||||
print("{0} {1}\n hash...".format(self.nfiles - nf, upath))
|
||||
get_hashlist(file, None, None)
|
||||
|
||||
burl = self.ar.url[:12] + self.ar.url[8:].split("/")[0] + "/"
|
||||
while True:
|
||||
print(" hs...")
|
||||
hs, _ = handshake(req_ses, self.ar.url, file, self.ar.a, search)
|
||||
if search:
|
||||
if hs:
|
||||
for hit in hs:
|
||||
print(" found: {0}{1}".format(burl, hit["rp"]))
|
||||
else:
|
||||
print(" NOT found")
|
||||
break
|
||||
|
||||
file.ucids = hs
|
||||
if not hs:
|
||||
break
|
||||
|
||||
print("{0} {1}".format(self.nfiles - nf, upath))
|
||||
ncs = len(hs)
|
||||
for nc, cid in enumerate(hs):
|
||||
print(" {0} up {1}".format(ncs - nc, cid))
|
||||
upload(req_ses, file, cid, self.ar.a)
|
||||
|
||||
print(" ok!")
|
||||
|
||||
def _fancy(self):
|
||||
if VT100:
|
||||
atexit.register(self.cleanup_vt100)
|
||||
ss.scroll_region(3)
|
||||
|
||||
Daemon(target=self.hasher).start()
|
||||
for _ in range(self.ar.j):
|
||||
Daemon(target=self.handshaker).start()
|
||||
Daemon(target=self.uploader).start()
|
||||
|
||||
idles = 0
|
||||
while idles < 3:
|
||||
time.sleep(0.07)
|
||||
with self.mutex:
|
||||
if (
|
||||
self.q_handshake.empty()
|
||||
and self.q_upload.empty()
|
||||
and not self.hasher_busy
|
||||
and not self.handshaker_busy
|
||||
and not self.uploader_busy
|
||||
):
|
||||
idles += 1
|
||||
else:
|
||||
idles = 0
|
||||
|
||||
if VT100:
|
||||
maxlen = ss.w - len(str(self.nfiles)) - 14
|
||||
txt = "\033[s\033[{0}H".format(ss.g)
|
||||
for y, k, st, f in [
|
||||
[0, "hash", self.st_hash, self.hash_f],
|
||||
[1, "send", self.st_up, self.up_f],
|
||||
]:
|
||||
txt += "\033[{0}H{1}:".format(ss.g + y, k)
|
||||
file, arg = st
|
||||
if not file:
|
||||
txt += " {0}\033[K".format(arg)
|
||||
else:
|
||||
if y:
|
||||
p = 100 * file.up_b / file.size
|
||||
else:
|
||||
p = 100 * arg / file.size
|
||||
|
||||
name = file.abs.decode("utf-8", "replace")[-maxlen:]
|
||||
if "/" in name:
|
||||
name = "\033[36m{0}\033[0m/{1}".format(*name.rsplit("/", 1))
|
||||
|
||||
t = "{0:6.1f}% {1} {2}\033[K"
|
||||
txt += t.format(p, self.nfiles - f, name)
|
||||
|
||||
txt += "\033[{0}H ".format(ss.g + 2)
|
||||
else:
|
||||
txt = " "
|
||||
|
||||
if not self.up_br:
|
||||
spd = self.hash_b / (time.time() - self.t0)
|
||||
eta = (self.nbytes - self.hash_b) / (spd + 1)
|
||||
else:
|
||||
spd = self.up_br / (time.time() - self.t0_up)
|
||||
spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
|
||||
eta = (self.nbytes - self.up_b) / (spd + 1)
|
||||
|
||||
spd = humansize(spd)
|
||||
eta = str(datetime.timedelta(seconds=int(eta)))
|
||||
sleft = humansize(self.nbytes - self.up_b)
|
||||
nleft = self.nfiles - self.up_f
|
||||
tail = "\033[K\033[u" if VT100 else "\r"
|
||||
|
||||
t = "{0} eta @ {1}/s, {2}, {3}# left".format(eta, spd, sleft, nleft)
|
||||
eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
|
||||
|
||||
def cleanup_vt100(self):
|
||||
ss.scroll_region(None)
|
||||
eprint("\033[J\033]0;\033\\")
|
||||
|
||||
def cb_hasher(self, file, ofs):
|
||||
self.st_hash = [file, ofs]
|
||||
|
||||
def hasher(self):
|
||||
prd = None
|
||||
ls = {}
|
||||
for top, rel, inf in self.filegen:
|
||||
if self.ar.z:
|
||||
rd = os.path.dirname(rel)
|
||||
if prd != rd:
|
||||
prd = rd
|
||||
headers = {}
|
||||
if self.ar.a:
|
||||
headers["Cookie"] = "=".join(["cppwd", self.ar.a])
|
||||
|
||||
ls = {}
|
||||
try:
|
||||
print(" ls ~{0}".format(rd.decode("utf-8", "replace")))
|
||||
r = req_ses.get(
|
||||
self.ar.url.encode("utf-8") + quotep(rd) + b"?ls",
|
||||
headers=headers,
|
||||
)
|
||||
for f in r.json()["files"]:
|
||||
rfn = f["href"].split("?")[0].encode("utf-8", "replace")
|
||||
ls[unquote(rfn)] = f
|
||||
except:
|
||||
print(" mkdir ~{0}".format(rd.decode("utf-8", "replace")))
|
||||
|
||||
rf = ls.get(os.path.basename(rel), None)
|
||||
if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 1:
|
||||
self.nfiles -= 1
|
||||
self.nbytes -= inf.st_size
|
||||
continue
|
||||
|
||||
file = File(top, rel, inf.st_size, inf.st_mtime)
|
||||
while True:
|
||||
with self.mutex:
|
||||
if (
|
||||
self.hash_b - self.up_b < 1024 * 1024 * 128
|
||||
and self.hash_c - self.up_c < 64
|
||||
and (
|
||||
not self.ar.nh
|
||||
or (
|
||||
self.q_upload.empty()
|
||||
and self.q_handshake.empty()
|
||||
and not self.uploader_busy
|
||||
)
|
||||
)
|
||||
):
|
||||
break
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
get_hashlist(file, self.cb_hasher, self.mth)
|
||||
with self.mutex:
|
||||
self.hash_f += 1
|
||||
self.hash_c += len(file.cids)
|
||||
self.hash_b += file.size
|
||||
|
||||
self.q_handshake.put(file)
|
||||
|
||||
self.hasher_busy = 0
|
||||
self.st_hash = [None, "(finished)"]
|
||||
|
||||
def handshaker(self):
    # handshake worker: announces each hashed file to the server and learns
    # which chunks (if any) still need uploading; also implements -s
    # (file-search) and the retry path for files with partial uploads
    search = self.ar.s
    q = self.q_handshake
    burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"  # scheme + host only
    while True:
        file = q.get()
        if not file:
            # sentinel: drain the primary queue first, then the recheck
            # queue, and finally tell the uploader to shut down
            if q == self.q_handshake:
                q = self.q_recheck
                q.put(None)
                continue

            self.q_upload.put(None)
            break

        with self.mutex:
            self.handshaker_busy += 1

        upath = file.abs.decode("utf-8", "replace")

        try:
            hs, sprs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
        except Exception as ex:
            if q == self.q_handshake and "<pre>partial upload exists" in str(ex):
                # someone else (or a previous run) owns a partial upload of
                # this file; park it on the recheck queue and retry later
                self.q_recheck.put(file)
                hs = []
            else:
                raise

        if search:
            # -s: hs is a list of search hits instead of missing chunks
            if hs:
                for hit in hs:
                    t = "found: {0}\n {1}{2}\n"
                    print(t.format(upath, burl, hit["rp"]), end="")
            else:
                print("NOT found: {0}\n".format(upath), end="")

            with self.mutex:
                self.up_f += 1
                self.up_c += len(file.cids)
                self.up_b += file.size
                self.handshaker_busy -= 1

            continue

        with self.mutex:
            if not sprs and not self.serialized:
                # server cannot write sparse files; drop to a single upload
                # thread by retiring all but one uploader with sentinels
                t = "server filesystem does not support sparse files; serializing uploads\n"
                eprint(t)
                self.serialized = True
                for _ in range(self.ar.j - 1):
                    self.q_upload.put(None)
            if not hs:
                # all chunks done
                self.up_f += 1
                self.up_c += len(file.cids) - file.up_c
                self.up_b += file.size - file.up_b

            if hs and file.up_c:
                # some chunks failed; roll their counts/bytes back out of
                # the totals so progress stays accurate while they re-send
                self.up_c -= len(hs)
                file.up_c -= len(hs)
                for cid in hs:
                    sz = file.kchunks[cid][1]
                    self.up_b -= sz
                    file.up_b -= sz

            file.ucids = hs  # chunks still outstanding for this file
            self.handshaker_busy -= 1

        if not hs:
            kw = "uploaded" if file.up_b else " found"
            print("{0} {1}".format(kw, upath))
        for cid in hs:
            self.q_upload.put([file, cid])
def uploader(self):
    # upload worker: pulls (file, chunk-id) tasks off the queue and POSTs
    # them to the server; a falsy task is the shutdown sentinel
    while True:
        task = self.q_upload.get()
        if not task:
            self.st_up = [None, "(finished)"]
            break

        with self.mutex:
            self.uploader_busy += 1
            # timestamp of the first upload, for the speed/eta display
            self.t0_up = self.t0_up or time.time()

        file, cid = task
        try:
            upload(req_ses, file, cid, self.ar.a)
        except:
            # deliberately swallowed: the next handshake will report this
            # chunk as still missing and it will be queued again
            eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
            pass  # handshake will fix it

        with self.mutex:
            sz = file.kchunks[cid][1]
            file.ucids = [x for x in file.ucids if x != cid]
            if not file.ucids:
                # last outstanding chunk: re-handshake to verify/finalize
                self.q_handshake.put(file)

            # publish upload progress (note: counted even on failure; the
            # handshake rollback above corrects the totals afterwards)
            self.st_up = [file, cid]
            file.up_b += sz
            self.up_b += sz
            self.up_br += sz
            file.up_c += 1
            self.up_c += 1
            self.uploader_busy -= 1
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
    # argparse help formatter that both shows default values and keeps the
    # epilog's manual line breaks intact (RawDescription)
    pass
def main():
    """parse the command line and start the upload / file-search client"""
    time.strptime("19970815", "%Y%m%d")  # python#7980
    if not VT100:
        os.system("rem")  # enables colors

    # default hashing-thread count: number of cpu cores, capped at 3
    cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
    hcores = min(cores, 3)  # 4% faster than 4+ on py3.9 @ r5-4500U

    # fmt: off
    # NOTE(review): the epilog's original indentation may have been lost in
    # transit -- verify the intended layout against upstream
    ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
NOTE:
source file/folder selection uses rsync syntax, meaning that:
"foo" uploads the entire folder to URL/foo/
"foo/" uploads the CONTENTS of the folder into URL/
""")

    ap.add_argument("url", type=unicode, help="server url, including destination folder")
    ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
    ap.add_argument("-v", action="store_true", help="verbose")
    ap.add_argument("-a", metavar="PASSWORD", help="password")
    ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
    ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
    ap = app.add_argument_group("performance tweaks")
    ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
    ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
    ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
    ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
    ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
    ap = app.add_argument_group("tls")
    ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
    ap.add_argument("-td", action="store_true", help="disable certificate check")
    # fmt: on

    Ctl(app.parse_args())
# run the client when executed as a script (not when imported)
if __name__ == "__main__":
    main()
24
bin/up2k.sh
Executable file → Normal file
24
bin/up2k.sh
Executable file → Normal file
@@ -8,7 +8,7 @@ set -e
|
||||
##
|
||||
## config
|
||||
|
||||
datalen=$((2*1024*1024*1024))
|
||||
datalen=$((128*1024*1024))
|
||||
target=127.0.0.1
|
||||
posturl=/inc
|
||||
passwd=wark
|
||||
@@ -37,10 +37,10 @@ gendata() {
|
||||
# pipe a chunk, get the base64 checksum
|
||||
gethash() {
|
||||
printf $(
|
||||
sha512sum | cut -c-64 |
|
||||
sha512sum | cut -c-66 |
|
||||
sed -r 's/ .*//;s/(..)/\\x\1/g'
|
||||
) |
|
||||
base64 -w0 | cut -c-43 |
|
||||
base64 -w0 | cut -c-44 |
|
||||
tr '+/' '-_'
|
||||
}
|
||||
|
||||
@@ -123,7 +123,7 @@ printf '\033[36m'
|
||||
{
|
||||
{
|
||||
cat <<EOF
|
||||
POST $posturl/handshake.php HTTP/1.1
|
||||
POST $posturl/ HTTP/1.1
|
||||
Connection: Close
|
||||
Cookie: cppwd=$passwd
|
||||
Content-Type: text/plain;charset=UTF-8
|
||||
@@ -145,14 +145,16 @@ printf '\033[0m\nwark: %s\n' $wark
|
||||
##
|
||||
## wait for signal to continue
|
||||
|
||||
w8=/dev/shm/$salt.w8
|
||||
touch $w8
|
||||
true || {
|
||||
w8=/dev/shm/$salt.w8
|
||||
touch $w8
|
||||
|
||||
echo "ready; rm -f $w8"
|
||||
echo "ready; rm -f $w8"
|
||||
|
||||
while [ -e $w8 ]; do
|
||||
sleep 0.2
|
||||
done
|
||||
while [ -e $w8 ]; do
|
||||
sleep 0.2
|
||||
done
|
||||
}
|
||||
|
||||
|
||||
##
|
||||
@@ -175,7 +177,7 @@ while [ $remains -gt 0 ]; do
|
||||
|
||||
{
|
||||
cat <<EOF
|
||||
POST $posturl/chunkpit.php HTTP/1.1
|
||||
POST $posturl/ HTTP/1.1
|
||||
Connection: Keep-Alive
|
||||
Cookie: cppwd=$passwd
|
||||
Content-Type: application/octet-stream
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
### [`plugins/`](plugins/)
|
||||
* example extensions
|
||||
|
||||
### [`copyparty.bat`](copyparty.bat)
|
||||
* launches copyparty with no arguments (anon read+write within same folder)
|
||||
* intended for windows machines with no python.exe in PATH
|
||||
@@ -19,6 +22,9 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
|
||||
* `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$`
|
||||
* `pw`: password (remove `Parameters` if anon-write)
|
||||
|
||||
### [`media-osd-bgone.ps1`](media-osd-bgone.ps1)
|
||||
* disables the [windows OSD popup](https://user-images.githubusercontent.com/241032/122821375-0e08df80-d2dd-11eb-9fd9-184e8aacf1d0.png) (the thing on the left) which appears every time you hit media hotkeys to adjust volume or change song while playing music with the copyparty web-ui, or most other audio players really
|
||||
|
||||
### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
|
||||
* disables thumbnails and folder-type detection in windows explorer
|
||||
* makes it way faster (especially for slow/networked locations (such as copyparty-fuse))
|
||||
@@ -26,10 +32,13 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
|
||||
### [`cfssl.sh`](cfssl.sh)
|
||||
* creates CA and server certificates using cfssl
|
||||
* give a 3rd argument to install it to your copyparty config
|
||||
* systemd service at [`systemd/cfssl.service`](systemd/cfssl.service)
|
||||
|
||||
# OS integration
|
||||
init-scripts to start copyparty as a service
|
||||
* [`systemd/copyparty.service`](systemd/copyparty.service)
|
||||
* [`systemd/copyparty.service`](systemd/copyparty.service) runs the sfx normally
|
||||
* [`rc/copyparty`](rc/copyparty) runs sfx normally on freebsd, create a `copyparty` user
|
||||
* [`systemd/prisonparty.service`](systemd/prisonparty.service) runs the sfx in a chroot
|
||||
* [`openrc/copyparty`](openrc/copyparty)
|
||||
|
||||
# Reverse-proxy
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# ca-name and server-name
|
||||
# ca-name and server-fqdn
|
||||
ca_name="$1"
|
||||
srv_name="$2"
|
||||
srv_fqdn="$2"
|
||||
|
||||
[ -z "$srv_name" ] && {
|
||||
[ -z "$srv_fqdn" ] && {
|
||||
echo "need arg 1: ca name"
|
||||
echo "need arg 2: server name"
|
||||
echo "need arg 2: server fqdn and/or IPs, comma-separated"
|
||||
echo "optional arg 3: if set, write cert into copyparty cfg"
|
||||
exit 1
|
||||
}
|
||||
|
||||
@@ -31,15 +32,15 @@ EOF
|
||||
gen_srv() {
|
||||
(tee /dev/stderr <<EOF
|
||||
{"key": {"algo":"rsa", "size":4096},
|
||||
"names": [{"O":"$ca_name - $srv_name"}]}
|
||||
"names": [{"O":"$ca_name - $srv_fqdn"}]}
|
||||
EOF
|
||||
)|
|
||||
cfssl gencert -ca ca.pem -ca-key ca.key \
|
||||
-profile=www -hostname="$srv_name.$ca_name" - |
|
||||
cfssljson -bare "$srv_name"
|
||||
-profile=www -hostname="$srv_fqdn" - |
|
||||
cfssljson -bare "$srv_fqdn"
|
||||
|
||||
mv "$srv_name-key.pem" "$srv_name.key"
|
||||
rm "$srv_name.csr"
|
||||
mv "$srv_fqdn-key.pem" "$srv_fqdn.key"
|
||||
rm "$srv_fqdn.csr"
|
||||
}
|
||||
|
||||
|
||||
@@ -57,13 +58,13 @@ show() {
|
||||
awk '!o; {o=0} /[0-9a-f:]{16}/{o=1}'
|
||||
}
|
||||
show ca.pem
|
||||
show "$srv_name.pem"
|
||||
show "$srv_fqdn.pem"
|
||||
|
||||
|
||||
# write cert into copyparty config
|
||||
[ -z "$3" ] || {
|
||||
mkdir -p ~/.config/copyparty
|
||||
cat "$srv_name".{key,pem} ca.pem >~/.config/copyparty/cert.pem
|
||||
cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem
|
||||
}
|
||||
|
||||
|
||||
|
||||
104
contrib/media-osd-bgone.ps1
Normal file
104
contrib/media-osd-bgone.ps1
Normal file
@@ -0,0 +1,104 @@
|
||||
# media-osd-bgone.ps1: disable media-control OSD on win10do
# v1.1, 2021-06-25, ed <irc.rizon.net>, MIT-licensed
# https://github.com/9001/copyparty/blob/hovudstraum/contrib/media-osd-bgone.ps1
#
# locates the first window that looks like the media OSD and minimizes it;
# doing this once after each reboot should do the trick
# (adjust the width/height filter if it doesn't work)
#
# ---------------------------------------------------------------------
#
# tip: save the following as "media-osd-bgone.bat" next to this script:
#   start cmd /c "powershell -command ""set-executionpolicy -scope process bypass; .\media-osd-bgone.ps1"" & ping -n 2 127.1 >nul"
#
# then create a shortcut to that bat-file and move the shortcut here:
#   %appdata%\Microsoft\Windows\Start Menu\Programs\Startup
#
# and now this will autorun on bootup


# compile a small C# helper in-process; it pokes win32 (user32.dll) to find
# and hide the OSD window
Add-Type -TypeDefinition @"
using System;
using System.IO;
using System.Threading;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Windows.Forms;

namespace A {
    public class B : Control {

        [DllImport("user32.dll")]
        static extern void keybd_event(byte bVk, byte bScan, uint dwFlags, int dwExtraInfo);

        [DllImport("user32.dll", SetLastError = true)]
        static extern IntPtr FindWindowEx(IntPtr hwndParent, IntPtr hwndChildAfter, string lpszClass, string lpszWindow);

        [DllImport("user32.dll", SetLastError=true)]
        static extern bool GetWindowRect(IntPtr hwnd, out RECT lpRect);

        [DllImport("user32.dll")]
        static extern bool ShowWindow(IntPtr hWnd, int nCmdShow);

        [StructLayout(LayoutKind.Sequential)]
        public struct RECT {
            public int x;
            public int y;
            public int x2;
            public int y2;
        }

        // scan toplevel NativeHWNDHost windows for one that matches the
        // OSD's known height (141 px) and minimize it; returns true on hit
        bool fa() {
            RECT r;
            IntPtr it = IntPtr.Zero;
            while ((it = FindWindowEx(IntPtr.Zero, it, "NativeHWNDHost", "")) != IntPtr.Zero) {
                if (FindWindowEx(it, IntPtr.Zero, "DirectUIHWND", "") == IntPtr.Zero)
                    continue;

                if (!GetWindowRect(it, out r))
                    continue;

                int w = r.x2 - r.x + 1;
                int h = r.y2 - r.y + 1;

                Console.WriteLine("[*] hwnd {0:x} @ {1}x{2} sz {3}x{4}", it, r.x, r.y, w, h);
                if (h != 141)
                    continue;

                ShowWindow(it, 6);  // 6 = SW_MINIMIZE
                Console.WriteLine("[+] poof");
                return true;
            }
            return false;
        }

        // worker thread: double-tap the mute key so the OSD actually
        // appears (and ends up unmuted again), then hunt it down with fa()
        void fb() {
            keybd_event((byte)Keys.VolumeMute, 0, 0, 0);
            keybd_event((byte)Keys.VolumeMute, 0, 2, 0);
            Thread.Sleep(500);
            keybd_event((byte)Keys.VolumeMute, 0, 0, 0);
            keybd_event((byte)Keys.VolumeMute, 0, 2, 0);

            while (true) {
                if (fa()) {
                    break;
                }
                Console.WriteLine("[!] not found");
                Thread.Sleep(1000);
            }
            // marshal back onto the UI thread to stop the message loop
            this.Invoke((MethodInvoker)delegate {
                Application.Exit();
            });
        }

        public void Run() {
            Console.WriteLine("[+] hi");
            new Thread(new ThreadStart(fb)).Start();
            Application.Run();  // message loop; fb() exits it when done
            Console.WriteLine("[+] bye");
        }
    }
}
"@ -ReferencedAssemblies System.Windows.Forms

(New-Object -TypeName A.B).Run()
|
||||
@@ -13,7 +13,7 @@
|
||||
|
||||
upstream cpp {
|
||||
server 127.0.0.1:3923;
|
||||
keepalive 120;
|
||||
keepalive 1;
|
||||
}
|
||||
server {
|
||||
listen 443 ssl;
|
||||
|
||||
@@ -8,11 +8,11 @@
|
||||
#
|
||||
# you may want to:
|
||||
# change '/usr/bin/python' to another interpreter
|
||||
# change '/mnt::a' to another location or permission-set
|
||||
# change '/mnt::rw' to another location or permission-set
|
||||
|
||||
name="$SVCNAME"
|
||||
command_background=true
|
||||
pidfile="/var/run/$SVCNAME.pid"
|
||||
|
||||
command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
|
||||
command_args="-q -v /mnt::a"
|
||||
command_args="-q -v /mnt::rw"
|
||||
|
||||
33
contrib/plugins/README.md
Normal file
33
contrib/plugins/README.md
Normal file
@@ -0,0 +1,33 @@
|
||||
# example resource files
|
||||
|
||||
can be provided to copyparty to tweak things
|
||||
|
||||
|
||||
|
||||
## example `.epilogue.html`
|
||||
save one of these as `.epilogue.html` inside a folder to customize it:
|
||||
|
||||
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
|
||||
|
||||
|
||||
|
||||
## example browser-js
|
||||
point `--js-browser` to one of these by URL:
|
||||
|
||||
* [`minimal-up2k.js`](minimal-up2k.js) is similar to the above `minimal-up2k.html` except it applies globally to all write-only folders
|
||||
* [`up2k-hooks.js`](up2k-hooks.js) lets you specify a ruleset for files to skip uploading
|
||||
* [`up2k-hook-ytid.js`](up2k-hook-ytid.js) is a more specific example checking youtube-IDs against some API
|
||||
|
||||
|
||||
|
||||
## example browser-css
|
||||
point `--css-browser` to one of these by URL:
|
||||
|
||||
* [`browser-icons.css`](browser-icons.css) adds filetype icons
|
||||
|
||||
|
||||
|
||||
## meadup.js
|
||||
|
||||
* turns copyparty into chromecast just more flexible (and probably way more buggy)
|
||||
* usage: put the js somewhere in the webroot and `--js-browser /memes/meadup.js`
|
||||
71
contrib/plugins/browser-icons.css
Normal file
71
contrib/plugins/browser-icons.css
Normal file
@@ -0,0 +1,71 @@
|
||||
/* video, alternative 1:
|
||||
top-left icon, just like the other formats
|
||||
=======================================================================
|
||||
|
||||
#ggrid>a:is(
|
||||
[href$=".mkv"i],
|
||||
[href$=".mp4"i],
|
||||
[href$=".webm"i],
|
||||
):before {
|
||||
content: '📺';
|
||||
}
|
||||
*/
|
||||
|
||||
|
||||
|
||||
/* video, alternative 2:
|
||||
play-icon in the middle of the thumbnail
|
||||
=======================================================================
|
||||
*/
|
||||
#ggrid>a:is(
|
||||
[href$=".mkv"i],
|
||||
[href$=".mp4"i],
|
||||
[href$=".webm"i],
|
||||
) {
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
}
|
||||
#ggrid>a:is(
|
||||
[href$=".mkv"i],
|
||||
[href$=".mp4"i],
|
||||
[href$=".webm"i],
|
||||
):before {
|
||||
content: '▶';
|
||||
opacity: .8;
|
||||
margin: 0;
|
||||
padding: 1em .5em 1em .7em;
|
||||
border-radius: 9em;
|
||||
line-height: 0;
|
||||
color: #fff;
|
||||
text-shadow: none;
|
||||
background: rgba(0, 0, 0, 0.7);
|
||||
left: calc(50% - 1em);
|
||||
top: calc(50% - 1.4em);
|
||||
}
|
||||
|
||||
|
||||
|
||||
/* audio */
|
||||
#ggrid>a:is(
|
||||
[href$=".mp3"i],
|
||||
[href$=".ogg"i],
|
||||
[href$=".opus"i],
|
||||
[href$=".flac"i],
|
||||
[href$=".m4a"i],
|
||||
[href$=".aac"i],
|
||||
):before {
|
||||
content: '🎵';
|
||||
}
|
||||
|
||||
|
||||
|
||||
/* image */
|
||||
#ggrid>a:is(
|
||||
[href$=".jpg"i],
|
||||
[href$=".jpeg"i],
|
||||
[href$=".png"i],
|
||||
[href$=".gif"i],
|
||||
[href$=".webp"i],
|
||||
):before {
|
||||
content: '🎨';
|
||||
}
|
||||
506
contrib/plugins/meadup.js
Normal file
506
contrib/plugins/meadup.js
Normal file
@@ -0,0 +1,506 @@
|
||||
// USAGE:
|
||||
// place this file somewhere in the webroot and then
|
||||
// python3 -m copyparty --js-browser /memes/meadup.js
|
||||
//
|
||||
// FEATURES:
|
||||
// * adds an onscreen keyboard for operating a media center remotely,
|
||||
// relies on https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/very-bad-idea.py
|
||||
// * adds an interactive anime girl (if you can find the dependencies)
|
||||
|
||||
var hambagas = [
|
||||
"https://www.youtube.com/watch?v=pFA3KGp4GuU"
|
||||
];
|
||||
|
||||
// keybaord,
|
||||
// onscreen keyboard by @steinuil
|
||||
function initKeybaord(BASE_URL, HAMBAGA, consoleLog, consoleError) {
|
||||
document.querySelector('.keybaord-container').innerHTML = `
|
||||
<div class="keybaord-body">
|
||||
<div class="keybaord-row keybaord-row-1">
|
||||
<div class="keybaord-key" data-keybaord-key="Escape">
|
||||
esc
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F1">
|
||||
F1
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F2">
|
||||
F2
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F3">
|
||||
F3
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F4">
|
||||
F4
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F5">
|
||||
F5
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F6">
|
||||
F6
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F7">
|
||||
F7
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F8">
|
||||
F8
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F9">
|
||||
F9
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F10">
|
||||
F10
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F11">
|
||||
F11
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F12">
|
||||
F12
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Insert">
|
||||
ins
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Delete">
|
||||
del
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-2">
|
||||
<div class="keybaord-key" data-keybaord-key="\`">
|
||||
\`
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="1">
|
||||
1
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="2">
|
||||
2
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="3">
|
||||
3
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="4">
|
||||
4
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="5">
|
||||
5
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="6">
|
||||
6
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="7">
|
||||
7
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="8">
|
||||
8
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="9">
|
||||
9
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="0">
|
||||
0
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="-">
|
||||
-
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="=">
|
||||
=
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-backspace" data-keybaord-key="BackSpace">
|
||||
backspace
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-3">
|
||||
<div class="keybaord-key keybaord-tab" data-keybaord-key="Tab">
|
||||
tab
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="q">
|
||||
q
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="w">
|
||||
w
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="e">
|
||||
e
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="r">
|
||||
r
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="t">
|
||||
t
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="y">
|
||||
y
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="u">
|
||||
u
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="i">
|
||||
i
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="o">
|
||||
o
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="p">
|
||||
p
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="[">
|
||||
[
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="]">
|
||||
]
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-enter" data-keybaord-key="Return">
|
||||
enter
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-4">
|
||||
<div class="keybaord-key keybaord-capslock" data-keybaord-key="HAMBAGA">
|
||||
🍔
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="a">
|
||||
a
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="s">
|
||||
s
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="d">
|
||||
d
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="f">
|
||||
f
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="g">
|
||||
g
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="h">
|
||||
h
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="j">
|
||||
j
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="k">
|
||||
k
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="l">
|
||||
l
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key=";">
|
||||
;
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="'">
|
||||
'
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-backslash" data-keybaord-key="\\">
|
||||
\\
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-5">
|
||||
<div class="keybaord-key keybaord-lshift" data-keybaord-key="Shift_L">
|
||||
shift
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="\\">
|
||||
\\
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="z">
|
||||
z
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="x">
|
||||
x
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="c">
|
||||
c
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="v">
|
||||
v
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="b">
|
||||
b
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="n">
|
||||
n
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="m">
|
||||
m
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key=",">
|
||||
,
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key=".">
|
||||
.
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="/">
|
||||
/
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-rshift" data-keybaord-key="Shift_R">
|
||||
shift
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-6">
|
||||
<div class="keybaord-key keybaord-lctrl" data-keybaord-key="Control_L">
|
||||
ctrl
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-super" data-keybaord-key="Meta_L">
|
||||
win
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-alt" data-keybaord-key="Alt_L">
|
||||
alt
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-spacebar" data-keybaord-key="space">
|
||||
space
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-altgr" data-keybaord-key="Alt_R">
|
||||
altgr
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-what" data-keybaord-key="Menu">
|
||||
menu
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-rctrl" data-keybaord-key="Control_R">
|
||||
ctrl
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row">
|
||||
<div class="keybaord-key" data-keybaord-key="XF86AudioLowerVolume">
|
||||
🔉
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="XF86AudioRaiseVolume">
|
||||
🔊
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Left">
|
||||
⬅️
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Down">
|
||||
⬇️
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Up">
|
||||
⬆️
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Right">
|
||||
➡️
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Page_Up">
|
||||
PgUp
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Page_Down">
|
||||
PgDn
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Home">
|
||||
🏠
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="End">
|
||||
End
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
`;
|
||||
|
||||
function arraySample(array) {
|
||||
return array[Math.floor(Math.random() * array.length)];
|
||||
}
|
||||
|
||||
function sendMessage(msg) {
|
||||
return fetch(BASE_URL, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded;charset=UTF-8",
|
||||
},
|
||||
body: "msg=" + encodeURIComponent(msg),
|
||||
}).then(
|
||||
(r) => r.text(), // so the response body shows up in network tab
|
||||
(err) => consoleError(err)
|
||||
);
|
||||
}
|
||||
const MODIFIER_ON_CLASS = "keybaord-modifier-on";
|
||||
const KEY_DATASET = "data-keybaord-key";
|
||||
const KEY_CLASS = "keybaord-key";
|
||||
|
||||
const modifiers = new Set()
|
||||
|
||||
function toggleModifier(button, key) {
|
||||
button.classList.toggle(MODIFIER_ON_CLASS);
|
||||
if (modifiers.has(key)) {
|
||||
modifiers.delete(key);
|
||||
} else {
|
||||
modifiers.add(key);
|
||||
}
|
||||
}
|
||||
|
||||
function popModifiers() {
|
||||
let modifierString = "";
|
||||
|
||||
modifiers.forEach((mod) => {
|
||||
document.querySelector("[" + KEY_DATASET + "='" + mod + "']")
|
||||
.classList.remove(MODIFIER_ON_CLASS);
|
||||
|
||||
modifierString += mod + "+";
|
||||
});
|
||||
|
||||
modifiers.clear();
|
||||
|
||||
return modifierString;
|
||||
}
|
||||
|
||||
Array.from(document.querySelectorAll("." + KEY_CLASS)).forEach((button) => {
|
||||
const key = button.dataset.keybaordKey;
|
||||
|
||||
button.addEventListener("click", (ev) => {
|
||||
switch (key) {
|
||||
case "HAMBAGA":
|
||||
sendMessage(arraySample(HAMBAGA));
|
||||
break;
|
||||
|
||||
case "Shift_L":
|
||||
case "Shift_R":
|
||||
|
||||
case "Control_L":
|
||||
case "Control_R":
|
||||
|
||||
case "Meta_L":
|
||||
|
||||
case "Alt_L":
|
||||
case "Alt_R":
|
||||
toggleModifier(button, key);
|
||||
break;
|
||||
|
||||
default: {
|
||||
const keyWithModifiers = popModifiers() + key;
|
||||
|
||||
consoleLog(keyWithModifiers);
|
||||
|
||||
sendMessage("key " + keyWithModifiers)
|
||||
.then(() => consoleLog(keyWithModifiers + " OK"));
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// keybaord integration
|
||||
(function () {
|
||||
var o = mknod('div');
|
||||
clmod(o, 'keybaord-container', 1);
|
||||
ebi('op_msg').appendChild(o);
|
||||
|
||||
o = mknod('style');
|
||||
o.innerHTML = `
|
||||
.keybaord-body {
|
||||
display: flex;
|
||||
flex-flow: column nowrap;
|
||||
margin: .6em 0;
|
||||
}
|
||||
|
||||
.keybaord-row {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.keybaord-key {
|
||||
border: 1px solid rgba(128,128,128,0.2);
|
||||
width: 41px;
|
||||
height: 40px;
|
||||
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.keybaord-key:active {
|
||||
background-color: lightgrey;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-modifier-on {
|
||||
background-color: lightblue;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-backspace {
|
||||
width: 82px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-tab {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-enter {
|
||||
width: 69px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-capslock {
|
||||
width: 80px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-backslash {
|
||||
width: 88px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-lshift {
|
||||
width: 65px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-rshift {
|
||||
width: 103px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-lctrl {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-super {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-alt {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-altgr {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-what {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-rctrl {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-spacebar {
|
||||
width: 302px;
|
||||
}
|
||||
`;
|
||||
document.head.appendChild(o);
|
||||
|
||||
initKeybaord('/', hambagas,
|
||||
(msg) => { toast.inf(2, msg.toString()) },
|
||||
(msg) => { toast.err(30, msg.toString()) });
|
||||
})();
|
||||
|
||||
|
||||
// live2d (dumb pointless meme)
|
||||
// dependencies for this part are not tracked in git
|
||||
// so delete this section if you wanna use this file
|
||||
// (or supply your own l2d model and js)
|
||||
(function () {
|
||||
var o = mknod('link');
|
||||
o.setAttribute('rel', 'stylesheet');
|
||||
o.setAttribute('href', "/bad-memes/pio.css");
|
||||
document.head.appendChild(o);
|
||||
|
||||
o = mknod('style');
|
||||
o.innerHTML = '.pio-container{text-shadow:none;z-index:1}';
|
||||
document.head.appendChild(o);
|
||||
|
||||
o = mknod('div');
|
||||
clmod(o, 'pio-container', 1);
|
||||
o.innerHTML = '<div class="pio-action"></div><canvas id="pio" width="280" height="500"></canvas>';
|
||||
document.body.appendChild(o);
|
||||
|
||||
var remaining = 3;
|
||||
for (var a of ['pio', 'l2d', 'fireworks']) {
|
||||
import_js(`/bad-memes/${a}.js`, function () {
|
||||
if (remaining --> 1)
|
||||
return;
|
||||
|
||||
o = mknod('script');
|
||||
o.innerHTML = 'var pio = new Paul_Pio({"selector":[],"mode":"fixed","hidden":false,"content":{"close":"ok bye"},"model":["/bad-memes/sagiri/model.json"]});';
|
||||
document.body.appendChild(o);
|
||||
});
|
||||
}
|
||||
})();
|
||||
@@ -7,11 +7,13 @@
|
||||
|
||||
/* make the up2k ui REALLY minimal by hiding a bunch of stuff: */
|
||||
|
||||
#ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */
|
||||
#ops, #tree, #path, #epi+h2, /* main tabs and navigators (tree/breadcrumbs) */
|
||||
|
||||
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
|
||||
#u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
|
||||
|
||||
#u2cards /* and the upload progress tabs */
|
||||
#srch_dz, #srch_zd, /* the filesearch dropzone */
|
||||
|
||||
#u2cards, #u2etaw /* and the upload progress tabs */
|
||||
|
||||
{display: none !important} /* do it! */
|
||||
|
||||
@@ -19,13 +21,16 @@
|
||||
|
||||
/* add some margins because now it's weird */
|
||||
.opview {margin-top: 2.5em}
|
||||
#op_up2k {margin-top: 3em}
|
||||
#op_up2k {margin-top: 6em}
|
||||
|
||||
/* and embiggen the upload button */
|
||||
#u2conf #u2btn, #u2btn {padding:1.5em 0}
|
||||
|
||||
/* adjust the button area a bit */
|
||||
#u2conf.has_btn {width: 35em !important; margin: 5em auto}
|
||||
#u2conf.w, #u2conf.ww {width: 35em !important; margin: 5em auto}
|
||||
|
||||
/* a */
|
||||
#op_up2k {min-height: 0}
|
||||
|
||||
</style>
|
||||
|
||||
59
contrib/plugins/minimal-up2k.js
Normal file
59
contrib/plugins/minimal-up2k.js
Normal file
@@ -0,0 +1,59 @@
|
||||
/*
|
||||
|
||||
makes the up2k ui REALLY minimal by hiding a bunch of stuff
|
||||
|
||||
almost the same as minimal-up2k.html except this one...:
|
||||
|
||||
-- applies to every write-only folder when used with --js-browser
|
||||
|
||||
-- only applies if javascript is enabled
|
||||
|
||||
-- doesn't hide the total upload ETA display
|
||||
|
||||
-- looks slightly better
|
||||
|
||||
*/
|
||||
|
||||
var u2min = `
|
||||
<style>
|
||||
|
||||
#ops, #path, #tree, #files, #epi+div+h2,
|
||||
#u2conf td.c+.c, #u2cards, #srch_dz, #srch_zd {
|
||||
display: none !important;
|
||||
}
|
||||
#u2conf {margin:5em auto 0 auto !important}
|
||||
#u2conf.ww {width:70em}
|
||||
#u2conf.w {width:50em}
|
||||
#u2conf.w .c,
|
||||
#u2conf.w #u2btn_cw {text-align:left}
|
||||
#u2conf.w #u2btn_cw {width:70%}
|
||||
#u2etaw {margin:3em auto}
|
||||
#u2etaw.w {
|
||||
text-align: center;
|
||||
margin: -3.5em auto 5em auto;
|
||||
}
|
||||
#u2etaw.w #u2etas {margin-right:-37em}
|
||||
#u2etaw.w #u2etas.o {margin-top:-2.2em}
|
||||
#u2etaw.ww {margin:-1em auto}
|
||||
#u2etaw.ww #u2etas {padding-left:4em}
|
||||
#u2etas {
|
||||
background: none !important;
|
||||
border: none !important;
|
||||
}
|
||||
#wrap {margin-left:2em !important}
|
||||
.logue {
|
||||
border: none !important;
|
||||
margin: 2em auto !important;
|
||||
}
|
||||
.logue:before {content:'' !important}
|
||||
|
||||
</style>
|
||||
|
||||
<a href="#" onclick="this.parentNode.innerHTML='';">show advanced options</a>
|
||||
`;
|
||||
|
||||
if (!has(perms, 'read')) {
|
||||
var e2 = mknod('div');
|
||||
e2.innerHTML = u2min;
|
||||
ebi('wrap').insertBefore(e2, QS('#epi+h2'));
|
||||
}
|
||||
297
contrib/plugins/up2k-hook-ytid.js
Normal file
297
contrib/plugins/up2k-hook-ytid.js
Normal file
@@ -0,0 +1,297 @@
|
||||
// way more specific example --
|
||||
// assumes all files dropped into the uploader have a youtube-id somewhere in the filename,
|
||||
// locates the youtube-ids and passes them to an API which returns a list of IDs which should be uploaded
|
||||
//
|
||||
// also tries to find the youtube-id in the embedded metadata
|
||||
//
|
||||
// assumes copyparty is behind nginx as /ytq is a standalone service which must be rproxied in place
|
||||
|
||||
function up2k_namefilter(good_files, nil_files, bad_files, hooks) {
|
||||
var passthru = up2k.uc.fsearch;
|
||||
if (passthru)
|
||||
return hooks[0](good_files, nil_files, bad_files, hooks.slice(1));
|
||||
|
||||
a_up2k_namefilter(good_files, nil_files, bad_files, hooks).then(() => { });
|
||||
}
|
||||
|
||||
// ebi('op_up2k').appendChild(mknod('input','unick'));
|
||||
|
||||
function bstrpos(buf, ptn) {
|
||||
var ofs = 0,
|
||||
ch0 = ptn[0],
|
||||
sz = buf.byteLength;
|
||||
|
||||
while (true) {
|
||||
ofs = buf.indexOf(ch0, ofs);
|
||||
if (ofs < 0 || ofs >= sz)
|
||||
return -1;
|
||||
|
||||
for (var a = 1; a < ptn.length; a++)
|
||||
if (buf[ofs + a] !== ptn[a])
|
||||
break;
|
||||
|
||||
if (a === ptn.length)
|
||||
return ofs;
|
||||
|
||||
++ofs;
|
||||
}
|
||||
}
|
||||
|
||||
async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
|
||||
var t0 = Date.now(),
|
||||
yt_ids = new Set(),
|
||||
textdec = new TextDecoder('latin1'),
|
||||
md_ptn = new TextEncoder().encode('youtube.com/watch?v='),
|
||||
file_ids = [], // all IDs found for each good_files
|
||||
md_only = [], // `${id} ${fn}` where ID was only found in metadata
|
||||
mofs = 0,
|
||||
mnchk = 0,
|
||||
mfile = '',
|
||||
myid = localStorage.getItem('ytid_t0');
|
||||
|
||||
if (!myid)
|
||||
localStorage.setItem('ytid_t0', myid = Date.now());
|
||||
|
||||
for (var a = 0; a < good_files.length; a++) {
|
||||
var [fobj, name] = good_files[a],
|
||||
cname = name, // will clobber
|
||||
sz = fobj.size,
|
||||
ids = [],
|
||||
fn_ids = [],
|
||||
md_ids = [],
|
||||
id_ok = false,
|
||||
m;
|
||||
|
||||
// all IDs found in this file
|
||||
file_ids.push(ids);
|
||||
|
||||
// look for ID in filename; reduce the
|
||||
// metadata-scan intensity if the id looks safe
|
||||
m = /[\[(-]([\w-]{11})[\])]?\.(?:mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name);
|
||||
id_ok = !!m;
|
||||
|
||||
while (true) {
|
||||
// fuzzy catch-all;
|
||||
// some ytdl fork did %(title)-%(id).%(ext) ...
|
||||
m = /(?:^|[^\w])([\w-]{11})(?:$|[^\w-])/.exec(cname);
|
||||
if (!m)
|
||||
break;
|
||||
|
||||
cname = cname.replace(m[1], '');
|
||||
yt_ids.add(m[1]);
|
||||
fn_ids.unshift(m[1]);
|
||||
}
|
||||
|
||||
// look for IDs in video metadata,
|
||||
if (/\.(mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name)) {
|
||||
toast.show('inf r', 0, `analyzing file ${a + 1} / ${good_files.length} :\n${name}\n\nhave analysed ${++mnchk} files in ${(Date.now() - t0) / 1000} seconds, ${humantime((good_files.length - (a + 1)) * (((Date.now() - t0) / 1000) / mnchk))} remaining,\n\nbiggest offset so far is ${mofs}, in this file:\n\n${mfile}`);
|
||||
|
||||
// check first and last 128 MiB;
|
||||
// pWxOroN5WCo.mkv @ 6edb98 (6.92M)
|
||||
// Nf-nN1wF5Xo.mp4 @ 4a98034 (74.6M)
|
||||
var chunksz = 1024 * 1024 * 2, // byte
|
||||
aspan = id_ok ? 128 : 512; // MiB
|
||||
|
||||
aspan = parseInt(Math.min(sz / 2, aspan * 1024 * 1024) / chunksz) * chunksz;
|
||||
if (!aspan)
|
||||
aspan = Math.min(sz, chunksz);
|
||||
|
||||
for (var side = 0; side < 2; side++) {
|
||||
var ofs = side ? Math.max(0, sz - aspan) : 0,
|
||||
nchunks = aspan / chunksz;
|
||||
|
||||
for (var chunk = 0; chunk < nchunks; chunk++) {
|
||||
var bchunk = await fobj.slice(ofs, ofs + chunksz + 16).arrayBuffer(),
|
||||
uchunk = new Uint8Array(bchunk, 0, bchunk.byteLength),
|
||||
bofs = bstrpos(uchunk, md_ptn),
|
||||
absofs = Math.min(ofs + bofs, (sz - ofs) + bofs),
|
||||
txt = bofs < 0 ? '' : textdec.decode(uchunk.subarray(bofs)),
|
||||
m;
|
||||
|
||||
//console.log(`side ${ side }, chunk ${ chunk }, ofs ${ ofs }, bchunk ${ bchunk.byteLength }, txt ${ txt.length }`);
|
||||
while (true) {
|
||||
// mkv/webm have [a-z] immediately after url
|
||||
m = /(youtube\.com\/watch\?v=[\w-]{11})/.exec(txt);
|
||||
if (!m)
|
||||
break;
|
||||
|
||||
txt = txt.replace(m[1], '');
|
||||
m = m[1].slice(-11);
|
||||
|
||||
console.log(`found ${m} @${bofs}, ${name} `);
|
||||
yt_ids.add(m);
|
||||
if (!has(fn_ids, m) && !has(md_ids, m)) {
|
||||
md_ids.push(m);
|
||||
md_only.push(`${m} ${name}`);
|
||||
}
|
||||
else
|
||||
// id appears several times; make it preferred
|
||||
md_ids.unshift(m);
|
||||
|
||||
// bail after next iteration
|
||||
chunk = nchunks - 1;
|
||||
side = 9;
|
||||
|
||||
if (mofs < absofs) {
|
||||
mofs = absofs;
|
||||
mfile = name;
|
||||
}
|
||||
}
|
||||
ofs += chunksz;
|
||||
if (ofs >= sz)
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (var yi of md_ids)
|
||||
ids.push(yi);
|
||||
|
||||
for (var yi of fn_ids)
|
||||
if (!has(ids, yi))
|
||||
ids.push(yi);
|
||||
}
|
||||
|
||||
if (md_only.length)
|
||||
console.log('recovered the following youtube-IDs by inspecting metadata:\n\n' + md_only.join('\n'));
|
||||
else if (yt_ids.size)
|
||||
console.log('did not discover any additional youtube-IDs by inspecting metadata; all the IDs also existed in the filenames');
|
||||
else
|
||||
console.log('failed to find any youtube-IDs at all, sorry');
|
||||
|
||||
if (false) {
|
||||
var msg = `finished analysing ${mnchk} files in ${(Date.now() - t0) / 1000} seconds,\n\nbiggest offset was ${mofs} in this file:\n\n${mfile}`,
|
||||
mfun = function () { toast.ok(0, msg); };
|
||||
|
||||
mfun();
|
||||
setTimeout(mfun, 200);
|
||||
|
||||
return hooks[0]([], [], [], hooks.slice(1));
|
||||
}
|
||||
|
||||
var el = ebi('unick'), unick = el ? el.value : '';
|
||||
if (unick) {
|
||||
console.log(`sending uploader nickname [${unick}]`);
|
||||
fetch(document.location, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8' },
|
||||
body: 'msg=' + encodeURIComponent(unick)
|
||||
});
|
||||
}
|
||||
|
||||
toast.inf(5, `running query for ${yt_ids.size} youtube-IDs...`);
|
||||
|
||||
var xhr = new XHR();
|
||||
xhr.open('POST', '/ytq', true);
|
||||
xhr.setRequestHeader('Content-Type', 'text/plain');
|
||||
xhr.onload = xhr.onerror = function () {
|
||||
if (this.status != 200)
|
||||
return toast.err(0, `sorry, database query failed ;_;\n\nplease let us know so we can look at it, thx!!\n\nerror ${this.status}: ${(this.response && this.response.err) || this.responseText}`);
|
||||
|
||||
process_id_list(this.responseText);
|
||||
};
|
||||
xhr.send(Array.from(yt_ids).join('\n'));
|
||||
|
||||
function process_id_list(txt) {
|
||||
var wanted_ids = new Set(txt.trim().split('\n')),
|
||||
name_id = {},
|
||||
wanted_names = new Set(), // basenames with a wanted ID -- not including relpath
|
||||
wanted_names_scoped = {}, // basenames with a wanted ID -> list of dirs to search under
|
||||
wanted_files = new Set(); // filedrops
|
||||
|
||||
for (var a = 0; a < good_files.length; a++) {
|
||||
var name = good_files[a][1];
|
||||
for (var b = 0; b < file_ids[a].length; b++)
|
||||
if (wanted_ids.has(file_ids[a][b])) {
|
||||
// let the next stage handle this to prevent dupes
|
||||
//wanted_files.add(good_files[a]);
|
||||
|
||||
var m = /(.*)\.(mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name);
|
||||
if (!m)
|
||||
continue;
|
||||
|
||||
var [rd, fn] = vsplit(m[1]);
|
||||
|
||||
if (fn in wanted_names_scoped)
|
||||
wanted_names_scoped[fn].push(rd);
|
||||
else
|
||||
wanted_names_scoped[fn] = [rd];
|
||||
|
||||
wanted_names.add(fn);
|
||||
name_id[m[1]] = file_ids[a][b];
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// add all files with the same basename as each explicitly wanted file
|
||||
// (infojson/chatlog/etc when ID was discovered from metadata)
|
||||
for (var a = 0; a < good_files.length; a++) {
|
||||
var [rd, name] = vsplit(good_files[a][1]);
|
||||
for (var b = 0; b < 3; b++) {
|
||||
name = name.replace(/\.[^\.]+$/, '');
|
||||
if (!wanted_names.has(name))
|
||||
continue;
|
||||
|
||||
var vid_fp = false;
|
||||
for (var c of wanted_names_scoped[name])
|
||||
if (rd.startsWith(c))
|
||||
vid_fp = c + name;
|
||||
|
||||
if (!vid_fp)
|
||||
continue;
|
||||
|
||||
var subdir = name_id[vid_fp];
|
||||
subdir = `v${subdir.slice(0, 1)}/${subdir}-${myid}`;
|
||||
var newpath = subdir + '/' + good_files[a][1].split(/\//g).pop();
|
||||
|
||||
// check if this file is a dupe
|
||||
for (var c of good_files)
|
||||
if (c[1] == newpath)
|
||||
newpath = null;
|
||||
|
||||
if (!newpath)
|
||||
break;
|
||||
|
||||
good_files[a][1] = newpath;
|
||||
wanted_files.add(good_files[a]);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
function upload_filtered() {
|
||||
if (!wanted_files.size)
|
||||
return modal.alert('Good news -- turns out we already have all those.\n\nBut thank you for checking in!');
|
||||
|
||||
hooks[0](Array.from(wanted_files), nil_files, bad_files, hooks.slice(1));
|
||||
}
|
||||
|
||||
function upload_all() {
|
||||
hooks[0](good_files, nil_files, bad_files, hooks.slice(1));
|
||||
}
|
||||
|
||||
var n_skip = good_files.length - wanted_files.size,
|
||||
msg = `you added ${good_files.length} files; ${good_files.length == n_skip ? 'all' : n_skip} of them were skipped --\neither because we already have them,\nor because there is no youtube-ID in your filenames.\n\n<code>OK</code> / <code>Enter</code> = continue uploading just the ${wanted_files.size} files we definitely need\n\n<code>Cancel</code> / <code>ESC</code> = override the filter; upload ALL the files you added`;
|
||||
|
||||
if (!n_skip)
|
||||
upload_filtered();
|
||||
else
|
||||
modal.confirm(msg, upload_filtered, upload_all);
|
||||
};
|
||||
}
|
||||
|
||||
up2k_hooks.push(function () {
|
||||
up2k.gotallfiles.unshift(up2k_namefilter);
|
||||
});
|
||||
|
||||
// persist/restore nickname field if present
|
||||
setInterval(function () {
|
||||
var o = ebi('unick');
|
||||
if (!o || document.activeElement == o)
|
||||
return;
|
||||
|
||||
o.oninput = function () {
|
||||
localStorage.setItem('unick', o.value);
|
||||
};
|
||||
o.value = localStorage.getItem('unick') || '';
|
||||
}, 1000);
|
||||
45
contrib/plugins/up2k-hooks.js
Normal file
45
contrib/plugins/up2k-hooks.js
Normal file
@@ -0,0 +1,45 @@
|
||||
// hooks into up2k
|
||||
|
||||
function up2k_namefilter(good_files, nil_files, bad_files, hooks) {
|
||||
// is called when stuff is dropped into the browser,
|
||||
// after iterating through the directory tree and discovering all files,
|
||||
// before the upload confirmation dialogue is shown
|
||||
|
||||
// good_files will successfully upload
|
||||
// nil_files are empty files and will show an alert in the final hook
|
||||
// bad_files are unreadable and cannot be uploaded
|
||||
var file_lists = [good_files, nil_files, bad_files];
|
||||
|
||||
// build a list of filenames
|
||||
var filenames = [];
|
||||
for (var lst of file_lists)
|
||||
for (var ent of lst)
|
||||
filenames.push(ent[1]);
|
||||
|
||||
toast.inf(5, "running database query...");
|
||||
|
||||
// simulate delay while passing the list to some api for checking
|
||||
setTimeout(function () {
|
||||
|
||||
// only keep webm files as an example
|
||||
var new_lists = [];
|
||||
for (var lst of file_lists) {
|
||||
var keep = [];
|
||||
new_lists.push(keep);
|
||||
|
||||
for (var ent of lst)
|
||||
if (/\.webm$/.test(ent[1]))
|
||||
keep.push(ent);
|
||||
}
|
||||
|
||||
// finally, call the next hook in the chain
|
||||
[good_files, nil_files, bad_files] = new_lists;
|
||||
hooks[0](good_files, nil_files, bad_files, hooks.slice(1));
|
||||
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
// register
|
||||
up2k_hooks.push(function () {
|
||||
up2k.gotallfiles.unshift(up2k_namefilter);
|
||||
});
|
||||
31
contrib/rc/copyparty
Normal file
31
contrib/rc/copyparty
Normal file
@@ -0,0 +1,31 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# PROVIDE: copyparty
|
||||
# REQUIRE: networking
|
||||
# KEYWORD:
|
||||
|
||||
. /etc/rc.subr
|
||||
|
||||
name="copyparty"
|
||||
rcvar="copyparty_enable"
|
||||
copyparty_user="copyparty"
|
||||
copyparty_args="-e2dsa -v /storage:/storage:r" # change as you see fit
|
||||
copyparty_command="/usr/local/bin/python3.8 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
|
||||
pidfile="/var/run/copyparty/${name}.pid"
|
||||
command="/usr/sbin/daemon"
|
||||
command_args="-P ${pidfile} -r -f ${copyparty_command}"
|
||||
|
||||
stop_postcmd="copyparty_shutdown"
|
||||
|
||||
copyparty_shutdown()
|
||||
{
|
||||
if [ -e "${pidfile}" ]; then
|
||||
echo "Stopping supervising daemon."
|
||||
kill -s TERM `cat ${pidfile}`
|
||||
fi
|
||||
}
|
||||
|
||||
load_rc_config $name
|
||||
: ${copyparty_enable:=no}
|
||||
|
||||
run_rc_command "$1"
|
||||
23
contrib/systemd/cfssl.service
Normal file
23
contrib/systemd/cfssl.service
Normal file
@@ -0,0 +1,23 @@
|
||||
# systemd service which generates a new TLS certificate on each boot,
|
||||
# that way the one-year expiry time won't cause any issues --
|
||||
# just have everyone trust the ca.pem once every 10 years
|
||||
#
|
||||
# assumptions/placeholder values:
|
||||
# * this script and copyparty runs as user "cpp"
|
||||
# * copyparty repo is at ~cpp/dev/copyparty
|
||||
# * CA is named partylan
|
||||
# * server IPs = 10.1.2.3 and 192.168.123.1
|
||||
# * server hostname = party.lan
|
||||
|
||||
[Unit]
|
||||
Description=copyparty certificate generator
|
||||
Before=copyparty.service
|
||||
|
||||
[Service]
|
||||
User=cpp
|
||||
Type=oneshot
|
||||
SyslogIdentifier=cpp-cert
|
||||
ExecStart=/bin/bash -c 'cd ~/dev/copyparty/contrib && ./cfssl.sh partylan 10.1.2.3,192.168.123.1,party.lan y'
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
@@ -2,17 +2,34 @@
|
||||
# and share '/mnt' with anonymous read+write
|
||||
#
|
||||
# installation:
|
||||
# cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
|
||||
# cp -pv copyparty.service /etc/systemd/system
|
||||
# restorecon -vr /etc/systemd/system/copyparty.service
|
||||
# firewall-cmd --permanent --add-port={80,443,3923}/tcp # --zone=libvirt
|
||||
# firewall-cmd --reload
|
||||
# systemctl daemon-reload && systemctl enable --now copyparty
|
||||
#
|
||||
# you may want to:
|
||||
# change '/usr/bin/python' to another interpreter
|
||||
# change '/mnt::a' to another location or permission-set
|
||||
# change "User=cpp" and "/home/cpp/" to another user
|
||||
# remove the nft lines to only listen on port 3923
|
||||
# and in the ExecStart= line:
|
||||
# change '/usr/bin/python3' to another interpreter
|
||||
# change '/mnt::rw' to another location or permission-set
|
||||
# add '-q' to disable logging on busy servers
|
||||
# add '-i 127.0.0.1' to only allow local connections
|
||||
# add '-e2dsa' to enable filesystem scanning + indexing
|
||||
# add '-e2ts' to enable metadata indexing
|
||||
#
|
||||
# with `Type=notify`, copyparty will signal systemd when it is ready to
|
||||
# accept connections; correctly delaying units depending on copyparty.
|
||||
# But note that journalctl will get the timestamps wrong due to
|
||||
# python disabling line-buffering, so messages are out-of-order:
|
||||
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
|
||||
#
|
||||
# unless you add -q to disable logging, you may want to remove the
|
||||
# following line to allow buffering (slightly better performance):
|
||||
# Environment=PYTHONUNBUFFERED=x
|
||||
#
|
||||
# keep ExecStartPre before ExecStart, at least on rhel8
|
||||
|
||||
[Unit]
|
||||
Description=copyparty file server
|
||||
@@ -20,8 +37,25 @@ Description=copyparty file server
|
||||
[Service]
|
||||
Type=notify
|
||||
SyslogIdentifier=copyparty
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
|
||||
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
Environment=PYTHONUNBUFFERED=x
|
||||
ExecReload=/bin/kill -s USR1 $MAINPID
|
||||
|
||||
# user to run as + where the TLS certificate is (if any)
|
||||
User=cpp
|
||||
Environment=XDG_CONFIG_HOME=/home/cpp/.config
|
||||
|
||||
# setup forwarding from ports 80 and 443 to port 3923
|
||||
ExecStartPre=+/bin/bash -c 'nft -n -a list table nat | awk "/ to :3923 /{print\$NF}" | xargs -rL1 nft delete rule nat prerouting handle; true'
|
||||
ExecStartPre=+nft add table ip nat
|
||||
ExecStartPre=+nft -- add chain ip nat prerouting { type nat hook prerouting priority -100 \; }
|
||||
ExecStartPre=+nft add rule ip nat prerouting tcp dport 80 redirect to :3923
|
||||
ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923
|
||||
|
||||
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
|
||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
# copyparty settings
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -e2d -v /mnt::rw
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
||||
27
contrib/systemd/prisonparty.service
Normal file
27
contrib/systemd/prisonparty.service
Normal file
@@ -0,0 +1,27 @@
|
||||
# this will start `/usr/local/bin/copyparty-sfx.py`
|
||||
# in a chroot, preventing accidental access elsewhere
|
||||
# and share '/mnt' with anonymous read+write
|
||||
#
|
||||
# installation:
|
||||
# 1) put copyparty-sfx.py and prisonparty.sh in /usr/local/bin
|
||||
# 2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty
|
||||
#
|
||||
# you may want to:
|
||||
# change '/mnt::rw' to another location or permission-set
|
||||
# (remember to change the '/mnt' chroot arg too)
|
||||
#
|
||||
# enable line-buffering for realtime logging (slight performance cost):
|
||||
# inside the [Service] block, add the following line:
|
||||
# Environment=PYTHONUNBUFFERED=x
|
||||
|
||||
[Unit]
|
||||
Description=copyparty file server
|
||||
|
||||
[Service]
|
||||
SyslogIdentifier=prisonparty
|
||||
WorkingDirectory=/usr/local/bin
|
||||
ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \
|
||||
/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
@@ -1,53 +1,48 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import platform
|
||||
import time
|
||||
import sys
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
import time
|
||||
|
||||
try:
|
||||
from typing import TYPE_CHECKING, Any
|
||||
except:
|
||||
TYPE_CHECKING = False
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
if PY2:
|
||||
sys.dont_write_bytecode = True
|
||||
unicode = unicode
|
||||
unicode = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
|
||||
else:
|
||||
unicode = str
|
||||
|
||||
WINDOWS = False
|
||||
if platform.system() == "Windows":
|
||||
WINDOWS = [int(x) for x in platform.version().split(".")]
|
||||
WINDOWS: Any = (
|
||||
[int(x) for x in platform.version().split(".")]
|
||||
if platform.system() == "Windows"
|
||||
else False
|
||||
)
|
||||
|
||||
VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393]
|
||||
# introduced in anniversary update
|
||||
|
||||
ANYWIN = WINDOWS or sys.platform in ["msys"]
|
||||
ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"]
|
||||
|
||||
MACOS = platform.system() == "Darwin"
|
||||
|
||||
try:
|
||||
CORES = len(os.sched_getaffinity(0))
|
||||
except:
|
||||
CORES = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
|
||||
|
||||
|
||||
class EnvParams(object):
|
||||
def __init__(self):
|
||||
def __init__(self) -> None:
|
||||
self.t0 = time.time()
|
||||
self.mod = os.path.dirname(os.path.realpath(__file__))
|
||||
if self.mod.endswith("__init__"):
|
||||
self.mod = os.path.dirname(self.mod)
|
||||
|
||||
if sys.platform == "win32":
|
||||
self.cfg = os.path.normpath(os.environ["APPDATA"] + "/copyparty")
|
||||
elif sys.platform == "darwin":
|
||||
self.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
|
||||
else:
|
||||
self.cfg = os.path.normpath(
|
||||
os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
|
||||
+ "/copyparty"
|
||||
)
|
||||
|
||||
self.cfg = self.cfg.replace("\\", "/")
|
||||
try:
|
||||
os.makedirs(self.cfg)
|
||||
except:
|
||||
if not os.path.isdir(self.cfg):
|
||||
raise
|
||||
self.mod = None
|
||||
self.cfg = None
|
||||
self.ox = getattr(sys, "oxidized", None)
|
||||
|
||||
|
||||
E = EnvParams()
|
||||
|
||||
753
copyparty/__main__.py
Normal file → Executable file
753
copyparty/__main__.py
Normal file → Executable file
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
@@ -8,34 +8,60 @@ __copyright__ = 2019
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import shutil
|
||||
import argparse
|
||||
import filecmp
|
||||
import locale
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
from textwrap import dedent
|
||||
|
||||
from .__init__ import E, WINDOWS, VT100, PY2, unicode
|
||||
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
|
||||
from .__init__ import ANYWIN, CORES, PY2, VT100, WINDOWS, E, EnvParams, unicode
|
||||
from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
|
||||
from .authsrv import re_vol
|
||||
from .svchub import SvcHub
|
||||
from .util import py_desc, align_tab, IMPLICATIONS
|
||||
from .util import (
|
||||
IMPLICATIONS,
|
||||
JINJA_VER,
|
||||
PYFTPD_VER,
|
||||
SQLITE_VER,
|
||||
align_tab,
|
||||
ansi_re,
|
||||
min_ex,
|
||||
py_desc,
|
||||
termsize,
|
||||
wrap,
|
||||
)
|
||||
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
from collections.abc import Callable
|
||||
from types import FrameType
|
||||
|
||||
from typing import Any, Optional
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
HAVE_SSL = True
|
||||
import ssl
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
printed = ""
|
||||
printed: list[str] = []
|
||||
|
||||
|
||||
class RiceFormatter(argparse.HelpFormatter):
|
||||
def _get_help_string(self, action):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
if PY2:
|
||||
kwargs["width"] = termsize()[0]
|
||||
|
||||
super(RiceFormatter, self).__init__(*args, **kwargs)
|
||||
|
||||
def _get_help_string(self, action: argparse.Action) -> str:
|
||||
"""
|
||||
same as ArgumentDefaultsHelpFormatter(HelpFormatter)
|
||||
except the help += [...] line now has colors
|
||||
@@ -44,37 +70,142 @@ class RiceFormatter(argparse.HelpFormatter):
|
||||
if not VT100:
|
||||
fmt = " (default: %(default)s)"
|
||||
|
||||
help = action.help
|
||||
if "%(default)" not in action.help:
|
||||
ret = unicode(action.help)
|
||||
if "%(default)" not in ret:
|
||||
if action.default is not argparse.SUPPRESS:
|
||||
defaulting_nargs = [argparse.OPTIONAL, argparse.ZERO_OR_MORE]
|
||||
if action.option_strings or action.nargs in defaulting_nargs:
|
||||
help += fmt
|
||||
return help
|
||||
ret += fmt
|
||||
return ret
|
||||
|
||||
def _fill_text(self, text, width, indent):
|
||||
def _fill_text(self, text: str, width: int, indent: str) -> str:
|
||||
"""same as RawDescriptionHelpFormatter(HelpFormatter)"""
|
||||
return "".join(indent + line + "\n" for line in text.splitlines())
|
||||
|
||||
def __add_whitespace(self, idx: int, iWSpace: int, text: str) -> str:
|
||||
return (" " * iWSpace) + text if idx else text
|
||||
|
||||
def _split_lines(self, text: str, width: int) -> list[str]:
|
||||
# https://stackoverflow.com/a/35925919
|
||||
textRows = text.splitlines()
|
||||
ptn = re.compile(r"\s*[0-9\-]{0,}\.?\s*")
|
||||
for idx, line in enumerate(textRows):
|
||||
search = ptn.search(line)
|
||||
if not line.strip():
|
||||
textRows[idx] = " "
|
||||
elif search:
|
||||
lWSpace = search.end()
|
||||
lines = [
|
||||
self.__add_whitespace(i, lWSpace, x)
|
||||
for i, x in enumerate(wrap(line, width, width - 1))
|
||||
]
|
||||
textRows[idx] = lines
|
||||
|
||||
return [item for sublist in textRows for item in sublist]
|
||||
|
||||
|
||||
class Dodge11874(RiceFormatter):
|
||||
def __init__(self, *args, **kwargs):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
kwargs["width"] = 9003
|
||||
super(Dodge11874, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
def lprint(*a, **ka):
|
||||
global printed
|
||||
|
||||
printed += " ".join(unicode(x) for x in a) + ka.get("end", "\n")
|
||||
print(*a, **ka)
|
||||
class BasicDodge11874(
|
||||
argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter
|
||||
):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
kwargs["width"] = 9003
|
||||
super(BasicDodge11874, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
def warn(msg):
|
||||
def lprint(*a: Any, **ka: Any) -> None:
|
||||
eol = ka.pop("end", "\n")
|
||||
txt: str = " ".join(unicode(x) for x in a) + eol
|
||||
printed.append(txt)
|
||||
if not VT100:
|
||||
txt = ansi_re.sub("", txt)
|
||||
|
||||
print(txt, end="", **ka)
|
||||
|
||||
|
||||
def warn(msg: str) -> None:
|
||||
lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
|
||||
|
||||
|
||||
def ensure_locale():
|
||||
def init_E(E: EnvParams) -> None:
|
||||
# __init__ runs 18 times when oxidized; do expensive stuff here
|
||||
|
||||
def get_unixdir() -> str:
|
||||
paths: list[tuple[Callable[..., str], str]] = [
|
||||
(os.environ.get, "XDG_CONFIG_HOME"),
|
||||
(os.path.expanduser, "~/.config"),
|
||||
(os.environ.get, "TMPDIR"),
|
||||
(os.environ.get, "TEMP"),
|
||||
(os.environ.get, "TMP"),
|
||||
(unicode, "/tmp"),
|
||||
]
|
||||
for chk in [os.listdir, os.mkdir]:
|
||||
for pf, pa in paths:
|
||||
try:
|
||||
p = pf(pa)
|
||||
# print(chk.__name__, p, pa)
|
||||
if not p or p.startswith("~"):
|
||||
continue
|
||||
|
||||
p = os.path.normpath(p)
|
||||
chk(p) # type: ignore
|
||||
p = os.path.join(p, "copyparty")
|
||||
if not os.path.isdir(p):
|
||||
os.mkdir(p)
|
||||
|
||||
return p
|
||||
except:
|
||||
pass
|
||||
|
||||
raise Exception("could not find a writable path for config")
|
||||
|
||||
def _unpack() -> str:
|
||||
import atexit
|
||||
import tarfile
|
||||
import tempfile
|
||||
from importlib.resources import open_binary
|
||||
|
||||
td = tempfile.TemporaryDirectory(prefix="")
|
||||
atexit.register(td.cleanup)
|
||||
tdn = td.name
|
||||
|
||||
with open_binary("copyparty", "z.tar") as tgz:
|
||||
with tarfile.open(fileobj=tgz) as tf:
|
||||
tf.extractall(tdn)
|
||||
|
||||
return tdn
|
||||
|
||||
try:
|
||||
E.mod = os.path.dirname(os.path.realpath(__file__))
|
||||
if E.mod.endswith("__init__"):
|
||||
E.mod = os.path.dirname(E.mod)
|
||||
except:
|
||||
if not E.ox:
|
||||
raise
|
||||
|
||||
E.mod = _unpack()
|
||||
|
||||
if sys.platform == "win32":
|
||||
E.cfg = os.path.normpath(os.environ["APPDATA"] + "/copyparty")
|
||||
elif sys.platform == "darwin":
|
||||
E.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
|
||||
else:
|
||||
E.cfg = get_unixdir()
|
||||
|
||||
E.cfg = E.cfg.replace("\\", "/")
|
||||
try:
|
||||
os.makedirs(E.cfg)
|
||||
except:
|
||||
if not os.path.isdir(E.cfg):
|
||||
raise
|
||||
|
||||
|
||||
def ensure_locale() -> None:
|
||||
for x in [
|
||||
"en_US.UTF-8",
|
||||
"English_United States.UTF8",
|
||||
@@ -82,13 +213,13 @@ def ensure_locale():
|
||||
]:
|
||||
try:
|
||||
locale.setlocale(locale.LC_ALL, x)
|
||||
lprint("Locale:", x)
|
||||
lprint("Locale: {}\n".format(x))
|
||||
break
|
||||
except:
|
||||
continue
|
||||
|
||||
|
||||
def ensure_cert():
|
||||
def ensure_cert() -> None:
|
||||
"""
|
||||
the default cert (and the entire TLS support) is only here to enable the
|
||||
crypto.subtle javascript API, which is necessary due to the webkit guys
|
||||
@@ -99,7 +230,7 @@ def ensure_cert():
|
||||
cert_insec = os.path.join(E.mod, "res/insecure.pem")
|
||||
cert_cfg = os.path.join(E.cfg, "cert.pem")
|
||||
if not os.path.exists(cert_cfg):
|
||||
shutil.copy2(cert_insec, cert_cfg)
|
||||
shutil.copy(cert_insec, cert_cfg)
|
||||
|
||||
try:
|
||||
if filecmp.cmp(cert_cfg, cert_insec):
|
||||
@@ -114,8 +245,8 @@ def ensure_cert():
|
||||
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
|
||||
|
||||
|
||||
def configure_ssl_ver(al):
|
||||
def terse_sslver(txt):
|
||||
def configure_ssl_ver(al: argparse.Namespace) -> None:
|
||||
def terse_sslver(txt: str) -> str:
|
||||
txt = txt.lower()
|
||||
for c in ["_", "v", "."]:
|
||||
txt = txt.replace(c, "")
|
||||
@@ -130,8 +261,8 @@ def configure_ssl_ver(al):
|
||||
flags = [k for k in ssl.__dict__ if ptn.match(k)]
|
||||
# SSLv2 SSLv3 TLSv1 TLSv1_1 TLSv1_2 TLSv1_3
|
||||
if "help" in sslver:
|
||||
avail = [terse_sslver(x[6:]) for x in flags]
|
||||
avail = " ".join(sorted(avail) + ["all"])
|
||||
avail1 = [terse_sslver(x[6:]) for x in flags]
|
||||
avail = " ".join(sorted(avail1) + ["all"])
|
||||
lprint("\navailable ssl/tls versions:\n " + avail)
|
||||
sys.exit(0)
|
||||
|
||||
@@ -152,12 +283,12 @@ def configure_ssl_ver(al):
|
||||
|
||||
for k in ["ssl_flags_en", "ssl_flags_de"]:
|
||||
num = getattr(al, k)
|
||||
lprint("{}: {:8x} ({})".format(k, num, num))
|
||||
lprint("{0}: {1:8x} ({1})".format(k, num))
|
||||
|
||||
# think i need that beer now
|
||||
|
||||
|
||||
def configure_ssl_ciphers(al):
|
||||
def configure_ssl_ciphers(al: argparse.Namespace) -> None:
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
if al.ssl_ver:
|
||||
ctx.options &= ~al.ssl_flags_en
|
||||
@@ -181,186 +312,499 @@ def configure_ssl_ciphers(al):
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def sighandler(sig=None, frame=None):
|
||||
def args_from_cfg(cfg_path: str) -> list[str]:
|
||||
ret: list[str] = []
|
||||
skip = False
|
||||
with open(cfg_path, "rb") as f:
|
||||
for ln in [x.decode("utf-8").strip() for x in f]:
|
||||
if not ln:
|
||||
skip = False
|
||||
continue
|
||||
|
||||
if ln.startswith("#"):
|
||||
continue
|
||||
|
||||
if not ln.startswith("-"):
|
||||
continue
|
||||
|
||||
if skip:
|
||||
continue
|
||||
|
||||
try:
|
||||
ret.extend(ln.split(" ", 1))
|
||||
except:
|
||||
ret.append(ln)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def sighandler(sig: Optional[int] = None, frame: Optional[FrameType] = None) -> None:
|
||||
msg = [""] * 5
|
||||
for th in threading.enumerate():
|
||||
stk = sys._current_frames()[th.ident] # type: ignore
|
||||
msg.append(str(th))
|
||||
msg.extend(traceback.format_stack(sys._current_frames()[th.ident]))
|
||||
msg.extend(traceback.format_stack(stk))
|
||||
|
||||
msg.append("\n")
|
||||
print("\n".join(msg))
|
||||
|
||||
|
||||
def run_argparse(argv, formatter):
|
||||
def disable_quickedit() -> None:
|
||||
import atexit
|
||||
import ctypes
|
||||
from ctypes import wintypes
|
||||
|
||||
def ecb(ok: bool, fun: Any, args: list[Any]) -> list[Any]:
|
||||
if not ok:
|
||||
err: int = ctypes.get_last_error() # type: ignore
|
||||
if err:
|
||||
raise ctypes.WinError(err) # type: ignore
|
||||
return args
|
||||
|
||||
k32 = ctypes.WinDLL(str("kernel32"), use_last_error=True) # type: ignore
|
||||
if PY2:
|
||||
wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)
|
||||
|
||||
k32.GetStdHandle.errcheck = ecb
|
||||
k32.GetConsoleMode.errcheck = ecb
|
||||
k32.SetConsoleMode.errcheck = ecb
|
||||
k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD)
|
||||
k32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)
|
||||
|
||||
def cmode(out: bool, mode: Optional[int] = None) -> int:
|
||||
h = k32.GetStdHandle(-11 if out else -10)
|
||||
if mode:
|
||||
return k32.SetConsoleMode(h, mode) # type: ignore
|
||||
|
||||
cmode = wintypes.DWORD()
|
||||
k32.GetConsoleMode(h, ctypes.byref(cmode))
|
||||
return cmode.value
|
||||
|
||||
# disable quickedit
|
||||
mode = orig_in = cmode(False)
|
||||
quickedit = 0x40
|
||||
extended = 0x80
|
||||
mask = quickedit + extended
|
||||
if mode & mask != extended:
|
||||
atexit.register(cmode, False, orig_in)
|
||||
cmode(False, mode & ~mask | extended)
|
||||
|
||||
# enable colors in case the os.system("rem") trick ever stops working
|
||||
if VT100:
|
||||
mode = orig_out = cmode(True)
|
||||
if mode & 4 != 4:
|
||||
atexit.register(cmode, True, orig_out)
|
||||
cmode(True, mode | 4)
|
||||
|
||||
|
||||
def showlic() -> None:
|
||||
p = os.path.join(E.mod, "res", "COPYING.txt")
|
||||
if not os.path.exists(p):
|
||||
print("no relevant license info to display")
|
||||
return
|
||||
|
||||
with open(p, "rb") as f:
|
||||
print(f.read().decode("utf-8", "replace"))
|
||||
|
||||
|
||||
def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Namespace:
|
||||
ap = argparse.ArgumentParser(
|
||||
formatter_class=formatter,
|
||||
prog="copyparty",
|
||||
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
|
||||
epilog=dedent(
|
||||
"""
|
||||
)
|
||||
|
||||
try:
|
||||
fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
|
||||
except:
|
||||
fk_salt = "hunter2"
|
||||
|
||||
hcores = min(CORES, 3) # 4% faster than 4+ on py3.9 @ r5-4500U
|
||||
|
||||
sects = [
|
||||
[
|
||||
"accounts",
|
||||
"accounts and volumes",
|
||||
dedent(
|
||||
"""
|
||||
-a takes username:password,
|
||||
-v takes src:dst:permset:permset:cflag:cflag:...
|
||||
where "permset" is accesslevel followed by username (no separator)
|
||||
and "cflag" is config flags to set on this volume
|
||||
|
||||
list of cflags:
|
||||
"cnodupe" rejects existing files (instead of symlinking them)
|
||||
"ce2d" sets -e2d (all -e2* args can be set using ce2* cflags)
|
||||
"cd2t" disables metadata collection, overrides -e2t*
|
||||
"cd2d" disables all database stuff, overrides -e2*
|
||||
-v takes src:dst:\033[33mperm\033[0m1:\033[33mperm\033[0m2:\033[33mperm\033[0mN:\033[32mvolflag\033[0m1:\033[32mvolflag\033[0m2:\033[32mvolflag\033[0mN:...
|
||||
* "\033[33mperm\033[0m" is "permissions,username1,username2,..."
|
||||
* "\033[32mvolflag\033[0m" is config flags to set on this volume
|
||||
|
||||
list of permissions:
|
||||
"r" (read): list folder contents, download files
|
||||
"w" (write): upload files; need "r" to see the uploads
|
||||
"m" (move): move files and folders; need "w" at destination
|
||||
"d" (delete): permanently delete files and folders
|
||||
"g" (get): download files, but cannot see folder contents
|
||||
|
||||
too many volflags to list here, see the other sections
|
||||
|
||||
example:\033[35m
|
||||
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
|
||||
-a ed:hunter2 -v .::r:rw,ed -v ../inc:dump:w:rw,ed:c,nodupe \033[36m
|
||||
mount current directory at "/" with
|
||||
* r (read-only) for everyone
|
||||
* a (read+write) for ed
|
||||
* rw (read+write) for ed
|
||||
mount ../inc at "/dump" with
|
||||
* w (write-only) for everyone
|
||||
* a (read+write) for ed
|
||||
* rw (read+write) for ed
|
||||
* reject duplicate files \033[0m
|
||||
|
||||
|
||||
if no accounts or volumes are configured,
|
||||
current folder will be read/write for everyone
|
||||
|
||||
consider the config file for more flexible account/volume management,
|
||||
including dynamic reload at runtime (and being more readable w)
|
||||
"""
|
||||
),
|
||||
],
|
||||
[
|
||||
"flags",
|
||||
"list of volflags",
|
||||
dedent(
|
||||
"""
|
||||
volflags are appended to volume definitions, for example,
|
||||
to create a write-only volume with the \033[33mnodupe\033[0m and \033[32mnosub\033[0m flags:
|
||||
\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub
|
||||
|
||||
\033[0muploads, general:
|
||||
\033[36mnodupe\033[35m rejects existing files (instead of symlinking them)
|
||||
\033[36mnosub\033[35m forces all uploads into the top folder of the vfs
|
||||
\033[36mmagic$\033[35m enables filetype detection for nameless uploads
|
||||
\033[36mgz\033[35m allows server-side gzip of uploads with ?gz (also c,xz)
|
||||
\033[36mpk\033[35m forces server-side compression, optional arg: xz,9
|
||||
|
||||
\033[0mupload rules:
|
||||
\033[36mmaxn=250,600\033[35m max 250 uploads over 15min
|
||||
\033[36mmaxb=1g,300\033[35m max 1 GiB over 5min (suffixes: b, k, m, g)
|
||||
\033[36msz=1k-3m\033[35m allow filesizes between 1 KiB and 3MiB
|
||||
\033[36mdf=1g\033[35m ensure 1 GiB free disk space
|
||||
|
||||
\033[0mupload rotation:
|
||||
(moves all uploads into the specified folder structure)
|
||||
\033[36mrotn=100,3\033[35m 3 levels of subfolders with 100 entries in each
|
||||
\033[36mrotf=%Y-%m/%d-%H\033[35m date-formatted organizing
|
||||
\033[36mlifetime=3600\033[35m uploads are deleted after 1 hour
|
||||
|
||||
\033[0mdatabase, general:
|
||||
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
|
||||
\033[36md2ts\033[35m disables metadata collection for existing files
|
||||
\033[36md2ds\033[35m disables onboot indexing, overrides -e2ds*
|
||||
\033[36md2t\033[35m disables metadata collection, overrides -e2t*
|
||||
\033[36md2v\033[35m disables file verification, overrides -e2v*
|
||||
\033[36md2d\033[35m disables all database stuff, overrides -e2*
|
||||
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
|
||||
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
|
||||
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
|
||||
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
|
||||
\033[36mnoforget$\033[35m don't forget files when deleted from disk
|
||||
\033[36mxdev\033[35m do not descend into other filesystems
|
||||
\033[36mxvol\033[35m skip symlinks leaving the volume root
|
||||
|
||||
\033[0mdatabase, audio tags:
|
||||
"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...
|
||||
\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
|
||||
generate ".bpm" tags from uploads (f = overwrite tags)
|
||||
\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
|
||||
|
||||
\033[0mthumbnails:
|
||||
\033[36mdthumb\033[35m disables all thumbnails
|
||||
\033[36mdvthumb\033[35m disables video thumbnails
|
||||
\033[36mdathumb\033[35m disables audio thumbnails (spectrograms)
|
||||
\033[36mdithumb\033[35m disables image thumbnails
|
||||
|
||||
\033[0mclient and ux:
|
||||
\033[36mhtml_head=TXT\033[35m includes TXT in the <head>
|
||||
\033[36mrobots\033[35m allows indexing by search engines (default)
|
||||
\033[36mnorobots\033[35m kindly asks search engines to leave
|
||||
|
||||
\033[0mothers:
|
||||
\033[36mfk=8\033[35m generates per-file accesskeys,
|
||||
which will then be required at the "g" permission
|
||||
\033[0m"""
|
||||
),
|
||||
],
|
||||
[
|
||||
"urlform",
|
||||
"how to handle url-form POSTs",
|
||||
dedent(
|
||||
"""
|
||||
values for --urlform:
|
||||
"stash" dumps the data to file and returns length + checksum
|
||||
"save,get" dumps to file and returns the page like a GET
|
||||
"print,get" prints the data in the log and returns GET
|
||||
\033[36mstash\033[35m dumps the data to file and returns length + checksum
|
||||
\033[36msave,get\033[35m dumps to file and returns the page like a GET
|
||||
\033[36mprint,get\033[35m prints the data in the log and returns GET
|
||||
(leave out the ",get" to return an error instead)
|
||||
|
||||
values for --ls:
|
||||
"USR" is a user to browse as; * is anonymous, ** is all users
|
||||
"VOL" is a single volume to scan, default is * (all vols)
|
||||
"FLAG" is flags;
|
||||
"v" in addition to realpaths, print usernames and vpaths
|
||||
"ln" only prints symlinks leaving the volume mountpoint
|
||||
"p" exits 1 if any such symlinks are found
|
||||
"r" resumes startup after the listing
|
||||
"""
|
||||
),
|
||||
],
|
||||
[
|
||||
"ls",
|
||||
"volume inspection",
|
||||
dedent(
|
||||
"""
|
||||
\033[35m--ls USR,VOL,FLAGS
|
||||
\033[36mUSR\033[0m is a user to browse as; * is anonymous, ** is all users
|
||||
\033[36mVOL\033[0m is a single volume to scan, default is * (all vols)
|
||||
\033[36mFLAG\033[0m is flags;
|
||||
\033[36mv\033[0m in addition to realpaths, print usernames and vpaths
|
||||
\033[36mln\033[0m only prints symlinks leaving the volume mountpoint
|
||||
\033[36mp\033[0m exits 1 if any such symlinks are found
|
||||
\033[36mr\033[0m resumes startup after the listing
|
||||
examples:
|
||||
--ls '**' # list all files which are possible to read
|
||||
--ls '**,*,ln' # check for dangerous symlinks
|
||||
--ls '**,*,ln,p,r' # check, then start normally if safe
|
||||
\033[0m
|
||||
"""
|
||||
),
|
||||
)
|
||||
),
|
||||
],
|
||||
]
|
||||
|
||||
# fmt: off
|
||||
u = unicode
|
||||
ap2 = ap.add_argument_group('general options')
|
||||
ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file")
|
||||
ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
|
||||
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
|
||||
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark")
|
||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
|
||||
ap2.add_argument("-ed", action="store_true", help="enable ?dots")
|
||||
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins")
|
||||
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all")
|
||||
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark]")
|
||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; examples [.::r], [/mnt/nas/music:/music:r:aed]")
|
||||
ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files")
|
||||
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
|
||||
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
|
||||
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
|
||||
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
|
||||
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see --help-urlform")
|
||||
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="window title, for example '$ip-10.1.2.' or '$ip-'")
|
||||
ap2.add_argument("--license", action="store_true", help="show licenses and exit")
|
||||
ap2.add_argument("--version", action="store_true", help="show versions and exit")
|
||||
|
||||
ap2 = ap.add_argument_group('upload options')
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed")
|
||||
ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
|
||||
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
|
||||
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600")
|
||||
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload")
|
||||
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without -- probably useful on nfs and cow filesystems (zfs, btrfs)")
|
||||
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem)")
|
||||
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made")
|
||||
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead")
|
||||
ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads")
|
||||
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure GiB free disk space by rejecting upload requests")
|
||||
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
|
||||
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; 0 = off and warn if enabled, 1 = off, 2 = on, 3 = on and disable datecheck")
|
||||
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; s=smallest-first, n=alphabetical, fs=force-s, fn=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
|
||||
ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
|
||||
|
||||
ap2 = ap.add_argument_group('network options')
|
||||
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
|
||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
|
||||
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")
|
||||
|
||||
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
||||
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
|
||||
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
|
||||
|
||||
ap2 = ap.add_argument_group('SSL/TLS options')
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls -- force plaintext")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext -- force tls")
|
||||
ap2.add_argument("--ssl-ver", metavar="LIST", type=u, help="set allowed ssl/tls versions; [help] shows available versions; default is what your python version considers safe")
|
||||
ap2.add_argument("--ciphers", metavar="LIST", type=u, help="set allowed ssl/tls ciphers; [help] shows available ciphers")
|
||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets for later decryption in wireshark")
|
||||
|
||||
ap2 = ap.add_argument_group('FTP options')
|
||||
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example 3921")
|
||||
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example 3990")
|
||||
ap2.add_argument("--ftp-dbg", action="store_true", help="enable debug logging")
|
||||
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
|
||||
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example 12000-13000")
|
||||
|
||||
ap2 = ap.add_argument_group('opt-outs')
|
||||
ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
|
||||
ap2.add_argument("-nih", action="store_true", help="no info hostname")
|
||||
ap2.add_argument("-nid", action="store_true", help="no info disk-usage")
|
||||
ap2.add_argument("-nw", action="store_true", help="never write anything to disk (debug/benchmark)")
|
||||
ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows (it is disabled to avoid accidental text selection which will deadlock copyparty)")
|
||||
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
|
||||
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
|
||||
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
|
||||
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
|
||||
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
|
||||
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (lifetime volflag)")
|
||||
|
||||
ap2 = ap.add_argument_group('safety options')
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
|
||||
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt")
|
||||
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
|
||||
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --no-dot-mv --no-dot-ren --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
|
||||
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
|
||||
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; used to generate unpredictable internal identifiers for uploads -- doesn't really matter")
|
||||
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files -- this one DOES matter")
|
||||
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
||||
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
|
||||
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
|
||||
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
|
||||
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
|
||||
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore --no-robots")
|
||||
ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything")
|
||||
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after H hours of inactivity (0.0028=10sec, 0.1=6min, 24=day, 168=week, 720=month, 8760=year)")
|
||||
ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than N wrong passwords in W minutes = ban for B minutes (disable with \"no\")")
|
||||
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="no", help="hitting more than N 404's in W minutes = ban for B minutes (disabled by default since turbo-up2k counts as 404s)")
|
||||
|
||||
ap2 = ap.add_argument_group('shutdown options')
|
||||
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
|
||||
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
|
||||
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; for example 'idx' will do volume indexing + metadata analysis")
|
||||
|
||||
ap2 = ap.add_argument_group('logging options')
|
||||
ap2.add_argument("-q", action="store_true", help="quiet")
|
||||
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
|
||||
ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
|
||||
ap2.add_argument("--log-htp", action="store_true", help="print http-server threadpool scaling")
|
||||
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
|
||||
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
|
||||
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
|
||||
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
|
||||
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
|
||||
|
||||
ap2 = ap.add_argument_group('admin panel options')
|
||||
ap2.add_argument("--no-reload", action="store_true", help="disable ?reload=cfg (reload users/volumes/volflags from config file)")
|
||||
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
|
||||
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
|
||||
|
||||
ap2 = ap.add_argument_group('thumbnail options')
|
||||
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
|
||||
ap2.add_argument("--no-athumb", action="store_true", help="disable audio thumbnails (spectrograms)")
|
||||
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
|
||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
|
||||
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for generating thumbnails")
|
||||
ap2.add_argument("--th-convt", metavar="SEC", type=int, default=60, help="conversion timeout in seconds")
|
||||
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
|
||||
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
|
||||
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
||||
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
||||
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs")
|
||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
|
||||
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg output for video thumbs")
|
||||
ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs")
|
||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds")
|
||||
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
|
||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for")
|
||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for")
|
||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||
# https://github.com/libvips/libvips
|
||||
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
|
||||
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="bmp,dib,gif,icns,ico,jpg,jpeg,jp2,jpx,pcx,png,pbm,pgm,ppm,pnm,sgi,tga,tif,tiff,webp,xbm,dds,xpm,heif,heifs,heic,heics,avif,avifs", help="image formats to decode using pillow")
|
||||
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="jpg,jpeg,jp2,jpx,jxl,tif,tiff,png,webp,heic,avif,fit,fits,fts,exr,svg,hdr,ppm,pgm,pfm,gif,nii", help="image formats to decode using pyvips")
|
||||
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="av1,asf,avi,flv,m4v,mkv,mjpeg,mjpg,mpg,mpeg,mpg2,mpeg2,h264,avc,mts,h265,hevc,mov,3gp,mp4,ts,mpegts,nut,ogv,ogm,rm,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,m4a,ogg,opus,flac,alac,mp3,mp2,ac3,dts,wma,ra,wav,aif,aiff,au,alaw,ulaw,mulaw,amr,gsm,ape,tak,tta,wv,mpc", help="audio formats to decode using ffmpeg")
|
||||
|
||||
ap2 = ap.add_argument_group('database options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
|
||||
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
|
||||
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
|
||||
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
|
||||
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume state")
|
||||
ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
|
||||
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead")
|
||||
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
|
||||
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader")
|
||||
ap2 = ap.add_argument_group('transcoding options')
|
||||
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
|
||||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after SEC seconds")
|
||||
|
||||
ap2 = ap.add_argument_group('general db options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplocation")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets -e2d")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets -e2ds")
|
||||
ap2.add_argument("-e2v", action="store_true", help="verify file integrity; rehash all files and compare with db")
|
||||
ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
|
||||
ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
|
||||
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
|
||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans")
|
||||
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
|
||||
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice")
|
||||
ap2.add_argument("--xdev", action="store_true", help="do not descend into other filesystems (symlink or bind-mount to another HDD, ...)")
|
||||
ap2.add_argument("--xvol", action="store_true", help="skip symlinks leaving the volume root")
|
||||
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
|
||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
|
||||
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than SEC seconds")
|
||||
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
|
||||
|
||||
ap2 = ap.add_argument_group('metadata db options')
|
||||
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing; makes it possible to search for artist/title/codec/resolution/...")
|
||||
ap2.add_argument("-e2ts", action="store_true", help="scan existing files on startup; sets -e2t")
|
||||
ap2.add_argument("-e2tsr", action="store_true", help="delete all metadata from DB and do a full rescan; sets -e2ts")
|
||||
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead; will catch more tags")
|
||||
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader; is probably safer")
|
||||
ap2.add_argument("--mtag-to", metavar="SEC", type=int, default=60, help="timeout for ffprobe tag-scan")
|
||||
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for tag scanning")
|
||||
ap2.add_argument("--mtag-v", action="store_true", help="verbose tag scanning; print errors from mtp subprocesses and such")
|
||||
ap2.add_argument("--mtag-vv", action="store_true", help="debug mtp settings")
|
||||
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,ac,vc,res,.fps")
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash")
|
||||
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.)",
|
||||
default=".vq,.aq,vc,ac,fmt,res,.fps")
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag M using program BIN to parse the file")
|
||||
|
||||
ap2 = ap.add_argument_group('appearance options')
|
||||
ap2 = ap.add_argument_group('ui options')
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language")
|
||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
|
||||
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
||||
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="favicon text [ foreground [ background ] ], set blank to disable")
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
||||
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
|
||||
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
|
||||
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
||||
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
|
||||
|
||||
ap2 = ap.add_argument_group('debug options')
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
|
||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing")
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile; instead using a traditional file read loop")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file")
|
||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing before starting the httpd")
|
||||
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
|
||||
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second")
|
||||
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second, for example --stackmon=./st/%%Y-%%m/%%d/%%H%%M.xz,60")
|
||||
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")
|
||||
|
||||
return ap.parse_args(args=argv[1:])
|
||||
ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches REGEX; '.' (a single dot) = all files")
|
||||
# fmt: on
|
||||
|
||||
ap2 = ap.add_argument_group("help sections")
|
||||
for k, h, _ in sects:
|
||||
ap2.add_argument("--help-" + k, action="store_true", help=h)
|
||||
|
||||
def main(argv=None):
|
||||
ret = ap.parse_args(args=argv[1:])
|
||||
for k, h, t in sects:
|
||||
k2 = "help_" + k.replace("-", "_")
|
||||
if vars(ret)[k2]:
|
||||
lprint("# {} help page".format(k))
|
||||
lprint(t + "\033[0m")
|
||||
sys.exit(0)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def main(argv: Optional[list[str]] = None) -> None:
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
if WINDOWS:
|
||||
os.system("rem") # enables colors
|
||||
|
||||
init_E(E)
|
||||
if argv is None:
|
||||
argv = sys.argv
|
||||
|
||||
desc = py_desc().replace("[", "\033[1;30m[")
|
||||
f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0;36m\n sqlite v{} | jinja2 v{} | pyftpd v{}\n\033[0m'
|
||||
f = f.format(
|
||||
S_VERSION,
|
||||
CODENAME,
|
||||
S_BUILD_DT,
|
||||
py_desc().replace("[", "\033[1;30m["),
|
||||
SQLITE_VER,
|
||||
JINJA_VER,
|
||||
PYFTPD_VER,
|
||||
)
|
||||
lprint(f)
|
||||
|
||||
f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0m\n'
|
||||
lprint(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc))
|
||||
if "--version" in argv:
|
||||
sys.exit(0)
|
||||
|
||||
if "--license" in argv:
|
||||
showlic()
|
||||
sys.exit(0)
|
||||
|
||||
ensure_locale()
|
||||
if HAVE_SSL:
|
||||
ensure_cert()
|
||||
|
||||
deprecated = [["-e2s", "-e2ds"]]
|
||||
for k, v in zip(argv[1:], argv[2:]):
|
||||
if k == "-c":
|
||||
supp = args_from_cfg(v)
|
||||
argv.extend(supp)
|
||||
|
||||
deprecated: list[tuple[str, str]] = []
|
||||
for dk, nk in deprecated:
|
||||
try:
|
||||
idx = argv.index(dk)
|
||||
@@ -373,9 +817,68 @@ def main(argv=None):
|
||||
time.sleep(2)
|
||||
|
||||
try:
|
||||
al = run_argparse(argv, RiceFormatter)
|
||||
except AssertionError:
|
||||
al = run_argparse(argv, Dodge11874)
|
||||
if len(argv) == 1 and (ANYWIN or not os.geteuid()):
|
||||
argv.extend(["-p80,443,3923", "--ign-ebind"])
|
||||
except:
|
||||
pass
|
||||
|
||||
retry = False
|
||||
for fmtr in [RiceFormatter, RiceFormatter, Dodge11874, BasicDodge11874]:
|
||||
try:
|
||||
al = run_argparse(argv, fmtr, retry)
|
||||
except SystemExit:
|
||||
raise
|
||||
except:
|
||||
retry = True
|
||||
lprint("\n[ {} ]:\n{}\n".format(fmtr, min_ex()))
|
||||
|
||||
assert al
|
||||
al.E = E # __init__ is not shared when oxidized
|
||||
|
||||
if WINDOWS and not al.keep_qem:
|
||||
try:
|
||||
disable_quickedit()
|
||||
except:
|
||||
lprint("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
|
||||
|
||||
if not VT100:
|
||||
al.wintitle = ""
|
||||
|
||||
nstrs: list[str] = []
|
||||
anymod = False
|
||||
for ostr in al.v or []:
|
||||
m = re_vol.match(ostr)
|
||||
if not m:
|
||||
# not our problem
|
||||
nstrs.append(ostr)
|
||||
continue
|
||||
|
||||
src, dst, perms = m.groups()
|
||||
na = [src, dst]
|
||||
mod = False
|
||||
for opt in perms.split(":"):
|
||||
if re.match("c[^,]", opt):
|
||||
mod = True
|
||||
na.append("c," + opt[1:])
|
||||
elif re.sub("^[rwmdg]*", "", opt) and "," not in opt:
|
||||
mod = True
|
||||
perm = opt[0]
|
||||
if perm == "a":
|
||||
perm = "rw"
|
||||
na.append(perm + "," + opt[1:])
|
||||
else:
|
||||
na.append(opt)
|
||||
|
||||
nstr = ":".join(na)
|
||||
nstrs.append(nstr if mod else ostr)
|
||||
if mod:
|
||||
msg = "\033[1;31mWARNING:\033[0;1m\n -v {} \033[0;33mwas replaced with\033[0;1m\n -v {} \n\033[0m"
|
||||
lprint(msg.format(ostr, nstr))
|
||||
anymod = True
|
||||
|
||||
if anymod:
|
||||
al.v = nstrs
|
||||
time.sleep(2)
|
||||
|
||||
# propagate implications
|
||||
for k1, k2 in IMPLICATIONS:
|
||||
@@ -392,6 +895,12 @@ def main(argv=None):
|
||||
except:
|
||||
raise Exception("invalid value for -p")
|
||||
|
||||
for arg, kname, okays in [["--u2sort", "u2sort", "s n fs fn"]]:
|
||||
val = unicode(getattr(al, kname))
|
||||
if val not in okays.split():
|
||||
zs = "argument {} cannot be '{}'; try one of these: {}"
|
||||
raise Exception(zs.format(arg, val, okays))
|
||||
|
||||
if HAVE_SSL:
|
||||
if al.ssl_ver:
|
||||
configure_ssl_ver(al)
|
||||
@@ -412,7 +921,7 @@ def main(argv=None):
|
||||
|
||||
# signal.signal(signal.SIGINT, sighandler)
|
||||
|
||||
SvcHub(al, argv, printed).run()
|
||||
SvcHub(al, argv, "".join(printed)).run()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# coding: utf-8
|
||||
|
||||
VERSION = (0, 11, 42)
|
||||
CODENAME = "the grid"
|
||||
BUILD_DT = (2021, 7, 18)
|
||||
VERSION = (1, 4, 2)
|
||||
CODENAME = "mostly reliable"
|
||||
BUILD_DT = (2022, 9, 25)
|
||||
|
||||
S_VERSION = ".".join(map(str, VERSION))
|
||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
|
||||
1148
copyparty/authsrv.py
1148
copyparty/authsrv.py
File diff suppressed because it is too large
Load Diff
0
copyparty/bos/__init__.py
Normal file
0
copyparty/bos/__init__.py
Normal file
76
copyparty/bos/bos.py
Normal file
76
copyparty/bos/bos.py
Normal file
@@ -0,0 +1,76 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
|
||||
from ..util import SYMTIME, fsdec, fsenc
|
||||
from . import path
|
||||
|
||||
try:
|
||||
from typing import Optional
|
||||
except:
|
||||
pass
|
||||
|
||||
_ = (path,)
|
||||
|
||||
# grep -hRiE '(^|[^a-zA-Z_\.-])os\.' . | gsed -r 's/ /\n/g;s/\(/(\n/g' | grep -hRiE '(^|[^a-zA-Z_\.-])os\.' | sort | uniq -c
|
||||
# printf 'os\.(%s)' "$(grep ^def bos/__init__.py | gsed -r 's/^def //;s/\(.*//' | tr '\n' '|' | gsed -r 's/.$//')"
|
||||
|
||||
|
||||
def chmod(p: str, mode: int) -> None:
|
||||
return os.chmod(fsenc(p), mode)
|
||||
|
||||
|
||||
def listdir(p: str = ".") -> list[str]:
|
||||
return [fsdec(x) for x in os.listdir(fsenc(p))]
|
||||
|
||||
|
||||
def makedirs(name: str, mode: int = 0o755, exist_ok: bool = True) -> None:
|
||||
bname = fsenc(name)
|
||||
try:
|
||||
os.makedirs(bname, mode)
|
||||
except:
|
||||
if not exist_ok or not os.path.isdir(bname):
|
||||
raise
|
||||
|
||||
|
||||
def mkdir(p: str, mode: int = 0o755) -> None:
|
||||
return os.mkdir(fsenc(p), mode)
|
||||
|
||||
|
||||
def rename(src: str, dst: str) -> None:
|
||||
return os.rename(fsenc(src), fsenc(dst))
|
||||
|
||||
|
||||
def replace(src: str, dst: str) -> None:
|
||||
return os.replace(fsenc(src), fsenc(dst))
|
||||
|
||||
|
||||
def rmdir(p: str) -> None:
|
||||
return os.rmdir(fsenc(p))
|
||||
|
||||
|
||||
def stat(p: str) -> os.stat_result:
|
||||
return os.stat(fsenc(p))
|
||||
|
||||
|
||||
def unlink(p: str) -> None:
|
||||
return os.unlink(fsenc(p))
|
||||
|
||||
|
||||
def utime(
|
||||
p: str, times: Optional[tuple[float, float]] = None, follow_symlinks: bool = True
|
||||
) -> None:
|
||||
if SYMTIME:
|
||||
return os.utime(fsenc(p), times, follow_symlinks=follow_symlinks)
|
||||
else:
|
||||
return os.utime(fsenc(p), times)
|
||||
|
||||
|
||||
if hasattr(os, "lstat"):
|
||||
|
||||
def lstat(p: str) -> os.stat_result:
|
||||
return os.lstat(fsenc(p))
|
||||
|
||||
else:
|
||||
lstat = stat
|
||||
45
copyparty/bos/path.py
Normal file
45
copyparty/bos/path.py
Normal file
@@ -0,0 +1,45 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
|
||||
from ..util import SYMTIME, fsdec, fsenc
|
||||
|
||||
|
||||
def abspath(p: str) -> str:
|
||||
return fsdec(os.path.abspath(fsenc(p)))
|
||||
|
||||
|
||||
def exists(p: str) -> bool:
|
||||
return os.path.exists(fsenc(p))
|
||||
|
||||
|
||||
def getmtime(p: str, follow_symlinks: bool = True) -> float:
|
||||
if not follow_symlinks and SYMTIME:
|
||||
return os.lstat(fsenc(p)).st_mtime
|
||||
else:
|
||||
return os.path.getmtime(fsenc(p))
|
||||
|
||||
|
||||
def getsize(p: str) -> int:
|
||||
return os.path.getsize(fsenc(p))
|
||||
|
||||
|
||||
def isfile(p: str) -> bool:
|
||||
return os.path.isfile(fsenc(p))
|
||||
|
||||
|
||||
def isdir(p: str) -> bool:
|
||||
return os.path.isdir(fsenc(p))
|
||||
|
||||
|
||||
def islink(p: str) -> bool:
|
||||
return os.path.islink(fsenc(p))
|
||||
|
||||
|
||||
def lexists(p: str) -> bool:
|
||||
return os.path.lexists(fsenc(p))
|
||||
|
||||
|
||||
def realpath(p: str) -> str:
|
||||
return fsdec(os.path.realpath(fsenc(p)))
|
||||
@@ -1,40 +1,56 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import time
|
||||
import threading
|
||||
import time
|
||||
|
||||
from .broker_util import try_exec
|
||||
import queue
|
||||
|
||||
from .__init__ import CORES, TYPE_CHECKING
|
||||
from .broker_mpw import MpWorker
|
||||
from .broker_util import try_exec
|
||||
from .util import mp
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
try:
|
||||
from typing import Any
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class MProcess(mp.Process):
|
||||
def __init__(
|
||||
self,
|
||||
q_pend: queue.Queue[tuple[int, str, list[Any]]],
|
||||
q_yield: queue.Queue[tuple[int, str, list[Any]]],
|
||||
target: Any,
|
||||
args: Any,
|
||||
) -> None:
|
||||
super(MProcess, self).__init__(target=target, args=args)
|
||||
self.q_pend = q_pend
|
||||
self.q_yield = q_yield
|
||||
|
||||
|
||||
class BrokerMp(object):
|
||||
"""external api; manages MpWorkers"""
|
||||
|
||||
def __init__(self, hub):
|
||||
def __init__(self, hub: "SvcHub") -> None:
|
||||
self.hub = hub
|
||||
self.log = hub.log
|
||||
self.args = hub.args
|
||||
|
||||
self.procs = []
|
||||
self.retpend = {}
|
||||
self.retpend_mutex = threading.Lock()
|
||||
self.mutex = threading.Lock()
|
||||
|
||||
cores = self.args.j
|
||||
if not cores:
|
||||
cores = mp.cpu_count()
|
||||
self.num_workers = self.args.j or CORES
|
||||
self.log("broker", "booting {} subprocesses".format(self.num_workers))
|
||||
for n in range(1, self.num_workers + 1):
|
||||
q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1)
|
||||
q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64)
|
||||
|
||||
self.log("broker", "booting {} subprocesses".format(cores))
|
||||
for n in range(1, cores + 1):
|
||||
q_pend = mp.Queue(1)
|
||||
q_yield = mp.Queue(64)
|
||||
|
||||
proc = mp.Process(target=MpWorker, args=(q_pend, q_yield, self.args, n))
|
||||
proc.q_pend = q_pend
|
||||
proc.q_yield = q_yield
|
||||
proc.clients = {}
|
||||
proc = MProcess(q_pend, q_yield, MpWorker, (q_pend, q_yield, self.args, n))
|
||||
|
||||
thr = threading.Thread(
|
||||
target=self.collector, args=(proc,), name="mp-sink-{}".format(n)
|
||||
@@ -45,11 +61,11 @@ class BrokerMp(object):
|
||||
self.procs.append(proc)
|
||||
proc.start()
|
||||
|
||||
def shutdown(self):
|
||||
def shutdown(self) -> None:
|
||||
self.log("broker", "shutting down")
|
||||
for n, proc in enumerate(self.procs):
|
||||
thr = threading.Thread(
|
||||
target=proc.q_pend.put([0, "shutdown", []]),
|
||||
target=proc.q_pend.put((0, "shutdown", [])),
|
||||
name="mp-shutdown-{}-{}".format(n, len(self.procs)),
|
||||
)
|
||||
thr.start()
|
||||
@@ -65,7 +81,12 @@ class BrokerMp(object):
|
||||
|
||||
procs.pop()
|
||||
|
||||
def collector(self, proc):
|
||||
def reload(self) -> None:
|
||||
self.log("broker", "reloading")
|
||||
for _, proc in enumerate(self.procs):
|
||||
proc.q_pend.put((0, "reload", []))
|
||||
|
||||
def collector(self, proc: MProcess) -> None:
|
||||
"""receive message from hub in other process"""
|
||||
while True:
|
||||
msg = proc.q_yield.get()
|
||||
@@ -76,10 +97,7 @@ class BrokerMp(object):
|
||||
|
||||
elif dest == "retq":
|
||||
# response from previous ipc call
|
||||
with self.retpend_mutex:
|
||||
retq = self.retpend.pop(retq_id)
|
||||
|
||||
retq.put(args)
|
||||
raise Exception("invalid broker_mp usage")
|
||||
|
||||
else:
|
||||
# new ipc invoking managed service in hub
|
||||
@@ -91,9 +109,9 @@ class BrokerMp(object):
|
||||
rv = try_exec(retq_id, obj, *args)
|
||||
|
||||
if retq_id:
|
||||
proc.q_pend.put([retq_id, "retq", rv])
|
||||
proc.q_pend.put((retq_id, "retq", rv))
|
||||
|
||||
def put(self, want_retval, dest, *args):
|
||||
def say(self, dest: str, *args: Any) -> None:
|
||||
"""
|
||||
send message to non-hub component in other process,
|
||||
returns a Queue object which eventually contains the response if want_retval
|
||||
@@ -101,7 +119,10 @@ class BrokerMp(object):
|
||||
"""
|
||||
if dest == "listen":
|
||||
for p in self.procs:
|
||||
p.q_pend.put([0, dest, [args[0], len(self.procs)]])
|
||||
p.q_pend.put((0, dest, [args[0], len(self.procs)]))
|
||||
|
||||
elif dest == "cb_httpsrv_up":
|
||||
self.hub.cb_httpsrv_up()
|
||||
|
||||
else:
|
||||
raise Exception("what is " + str(dest))
|
||||
|
||||
@@ -1,20 +1,39 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
from copyparty.authsrv import AuthSrv
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
import threading
|
||||
|
||||
from .broker_util import ExceptionalQueue
|
||||
import queue
|
||||
|
||||
from .authsrv import AuthSrv
|
||||
from .broker_util import BrokerCli, ExceptionalQueue
|
||||
from .httpsrv import HttpSrv
|
||||
from .util import FAKE_MP
|
||||
from .util import FAKE_MP, HMaccas
|
||||
|
||||
try:
|
||||
from types import FrameType
|
||||
|
||||
from typing import Any, Optional, Union
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class MpWorker(object):
|
||||
class MpWorker(BrokerCli):
|
||||
"""one single mp instance"""
|
||||
|
||||
def __init__(self, q_pend, q_yield, args, n):
|
||||
def __init__(
|
||||
self,
|
||||
q_pend: queue.Queue[tuple[int, str, list[Any]]],
|
||||
q_yield: queue.Queue[tuple[int, str, list[Any]]],
|
||||
args: argparse.Namespace,
|
||||
n: int,
|
||||
) -> None:
|
||||
super(MpWorker, self).__init__()
|
||||
|
||||
self.q_pend = q_pend
|
||||
self.q_yield = q_yield
|
||||
self.args = args
|
||||
@@ -22,20 +41,21 @@ class MpWorker(object):
|
||||
|
||||
self.log = self._log_disabled if args.q and not args.lo else self._log_enabled
|
||||
|
||||
self.retpend = {}
|
||||
self.retpend: dict[int, Any] = {}
|
||||
self.retpend_mutex = threading.Lock()
|
||||
self.mutex = threading.Lock()
|
||||
|
||||
# we inherited signal_handler from parent,
|
||||
# replace it with something harmless
|
||||
if not FAKE_MP:
|
||||
for sig in [signal.SIGINT, signal.SIGTERM]:
|
||||
for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGUSR1]:
|
||||
signal.signal(sig, self.signal_handler)
|
||||
|
||||
# starting to look like a good idea
|
||||
self.asrv = AuthSrv(args, None, False)
|
||||
|
||||
# instantiate all services here (TODO: inheritance?)
|
||||
self.iphash = HMaccas(os.path.join(self.args.E.cfg, "iphash"), 8)
|
||||
self.httpsrv = HttpSrv(self, n)
|
||||
|
||||
# on winxp and some other platforms,
|
||||
@@ -45,20 +65,20 @@ class MpWorker(object):
|
||||
thr.start()
|
||||
thr.join()
|
||||
|
||||
def signal_handler(self, sig, frame):
|
||||
def signal_handler(self, sig: Optional[int], frame: Optional[FrameType]) -> None:
|
||||
# print('k')
|
||||
pass
|
||||
|
||||
def _log_enabled(self, src, msg, c=0):
|
||||
self.q_yield.put([0, "log", [src, msg, c]])
|
||||
def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.q_yield.put((0, "log", [src, msg, c]))
|
||||
|
||||
def _log_disabled(self, src, msg, c=0):
|
||||
def _log_disabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||
pass
|
||||
|
||||
def logw(self, msg, c=0):
|
||||
def logw(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log("mp{}".format(self.n), msg, c)
|
||||
|
||||
def main(self):
|
||||
def main(self) -> None:
|
||||
while True:
|
||||
retq_id, dest, args = self.q_pend.get()
|
||||
|
||||
@@ -69,6 +89,11 @@ class MpWorker(object):
|
||||
sys.exit(0)
|
||||
return
|
||||
|
||||
elif dest == "reload":
|
||||
self.logw("mpw.asrv reloading")
|
||||
self.asrv.reload()
|
||||
self.logw("mpw.asrv reloaded")
|
||||
|
||||
elif dest == "listen":
|
||||
self.httpsrv.listen(args[0], args[1])
|
||||
|
||||
@@ -82,15 +107,14 @@ class MpWorker(object):
|
||||
else:
|
||||
raise Exception("what is " + str(dest))
|
||||
|
||||
def put(self, want_retval, dest, *args):
|
||||
if want_retval:
|
||||
retq = ExceptionalQueue(1)
|
||||
retq_id = id(retq)
|
||||
with self.retpend_mutex:
|
||||
self.retpend[retq_id] = retq
|
||||
else:
|
||||
retq = None
|
||||
retq_id = 0
|
||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
||||
retq = ExceptionalQueue(1)
|
||||
retq_id = id(retq)
|
||||
with self.retpend_mutex:
|
||||
self.retpend[retq_id] = retq
|
||||
|
||||
self.q_yield.put([retq_id, dest, args])
|
||||
self.q_yield.put((retq_id, dest, list(args)))
|
||||
return retq
|
||||
|
||||
def say(self, dest: str, *args: Any) -> None:
|
||||
self.q_yield.put((0, dest, list(args)))
|
||||
|
||||
@@ -1,47 +1,71 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import threading
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .broker_util import BrokerCli, ExceptionalQueue, try_exec
|
||||
from .httpsrv import HttpSrv
|
||||
from .broker_util import ExceptionalQueue, try_exec
|
||||
from .util import HMaccas
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
try:
|
||||
from typing import Any
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class BrokerThr(object):
|
||||
class BrokerThr(BrokerCli):
|
||||
"""external api; behaves like BrokerMP but using plain threads"""
|
||||
|
||||
def __init__(self, hub):
|
||||
def __init__(self, hub: "SvcHub") -> None:
|
||||
super(BrokerThr, self).__init__()
|
||||
|
||||
self.hub = hub
|
||||
self.log = hub.log
|
||||
self.args = hub.args
|
||||
self.asrv = hub.asrv
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.num_workers = 1
|
||||
|
||||
# instantiate all services here (TODO: inheritance?)
|
||||
self.iphash = HMaccas(os.path.join(self.args.E.cfg, "iphash"), 8)
|
||||
self.httpsrv = HttpSrv(self, None)
|
||||
self.reload = self.noop
|
||||
|
||||
def shutdown(self):
|
||||
def shutdown(self) -> None:
|
||||
# self.log("broker", "shutting down")
|
||||
self.httpsrv.shutdown()
|
||||
|
||||
def noop(self) -> None:
|
||||
pass
|
||||
|
||||
def put(self, want_retval, dest, *args):
|
||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
||||
|
||||
# new ipc invoking managed service in hub
|
||||
obj = self.hub
|
||||
for node in dest.split("."):
|
||||
obj = getattr(obj, node)
|
||||
|
||||
rv = try_exec(True, obj, *args)
|
||||
|
||||
# pretend we're broker_mp
|
||||
retq = ExceptionalQueue(1)
|
||||
retq.put(rv)
|
||||
return retq
|
||||
|
||||
def say(self, dest: str, *args: Any) -> None:
|
||||
if dest == "listen":
|
||||
self.httpsrv.listen(args[0], 1)
|
||||
return
|
||||
|
||||
else:
|
||||
# new ipc invoking managed service in hub
|
||||
obj = self.hub
|
||||
for node in dest.split("."):
|
||||
obj = getattr(obj, node)
|
||||
# new ipc invoking managed service in hub
|
||||
obj = self.hub
|
||||
for node in dest.split("."):
|
||||
obj = getattr(obj, node)
|
||||
|
||||
# TODO will deadlock if dest performs another ipc
|
||||
rv = try_exec(want_retval, obj, *args)
|
||||
if not want_retval:
|
||||
return
|
||||
|
||||
# pretend we're broker_mp
|
||||
retq = ExceptionalQueue(1)
|
||||
retq.put(rv)
|
||||
return retq
|
||||
try_exec(False, obj, *args)
|
||||
|
||||
@@ -1,17 +1,30 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
|
||||
import argparse
|
||||
import traceback
|
||||
|
||||
from .util import Pebkac, Queue
|
||||
from queue import Queue
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .authsrv import AuthSrv
|
||||
from .util import HMaccas, Pebkac
|
||||
|
||||
try:
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from .util import RootLogger
|
||||
except:
|
||||
pass
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .httpsrv import HttpSrv
|
||||
|
||||
|
||||
class ExceptionalQueue(Queue, object):
|
||||
def get(self, block=True, timeout=None):
|
||||
def get(self, block: bool = True, timeout: Optional[float] = None) -> Any:
|
||||
rv = super(ExceptionalQueue, self).get(block, timeout)
|
||||
|
||||
# TODO: how expensive is this?
|
||||
if isinstance(rv, list):
|
||||
if rv[0] == "exception":
|
||||
if rv[1] == "pebkac":
|
||||
@@ -22,7 +35,27 @@ class ExceptionalQueue(Queue, object):
|
||||
return rv
|
||||
|
||||
|
||||
def try_exec(want_retval, func, *args):
|
||||
class BrokerCli(object):
|
||||
"""
|
||||
helps mypy understand httpsrv.broker but still fails a few levels deeper,
|
||||
for example resolving httpconn.* in httpcli -- see lines tagged #mypy404
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.log: "RootLogger" = None
|
||||
self.args: argparse.Namespace = None
|
||||
self.asrv: AuthSrv = None
|
||||
self.httpsrv: "HttpSrv" = None
|
||||
self.iphash: HMaccas = None
|
||||
|
||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
||||
return ExceptionalQueue(1)
|
||||
|
||||
def say(self, dest: str, *args: Any) -> None:
|
||||
pass
|
||||
|
||||
|
||||
def try_exec(want_retval: Union[bool, int], func: Any, *args: list[Any]) -> Any:
|
||||
try:
|
||||
return func(*args)
|
||||
|
||||
|
||||
154
copyparty/fsutil.py
Normal file
154
copyparty/fsutil.py
Normal file
@@ -0,0 +1,154 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
|
||||
from .__init__ import ANYWIN, MACOS
|
||||
from .authsrv import AXS, VFS
|
||||
from .bos import bos
|
||||
from .util import chkcmd, min_ex
|
||||
|
||||
try:
|
||||
from typing import Optional, Union
|
||||
|
||||
from .util import RootLogger
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class Fstab(object):
|
||||
def __init__(self, log: "RootLogger"):
|
||||
self.log_func = log
|
||||
|
||||
self.trusted = False
|
||||
self.tab: Optional[VFS] = None
|
||||
self.cache: dict[str, str] = {}
|
||||
self.age = 0.0
|
||||
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func("fstab", msg + "\033[K", c)
|
||||
|
||||
def get(self, path: str) -> str:
|
||||
if len(self.cache) > 9000:
|
||||
self.age = time.time()
|
||||
self.tab = None
|
||||
self.cache = {}
|
||||
|
||||
fs = "ext4"
|
||||
msg = "failed to determine filesystem at [{}]; assuming {}\n{}"
|
||||
|
||||
if ANYWIN:
|
||||
fs = "vfat"
|
||||
try:
|
||||
path = self._winpath(path)
|
||||
except:
|
||||
self.log(msg.format(path, fs, min_ex()), 3)
|
||||
return fs
|
||||
|
||||
path = path.lstrip("/")
|
||||
try:
|
||||
return self.cache[path]
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
fs = self.get_w32(path) if ANYWIN else self.get_unix(path)
|
||||
except:
|
||||
self.log(msg.format(path, fs, min_ex()), 3)
|
||||
|
||||
fs = fs.lower()
|
||||
self.cache[path] = fs
|
||||
self.log("found {} at {}".format(fs, path))
|
||||
return fs
|
||||
|
||||
def _winpath(self, path: str) -> str:
|
||||
# try to combine volume-label + st_dev (vsn)
|
||||
path = path.replace("/", "\\")
|
||||
vid = path.split(":", 1)[0].strip("\\").split("\\", 1)[0]
|
||||
try:
|
||||
return "{}*{}".format(vid, bos.stat(path).st_dev)
|
||||
except:
|
||||
return vid
|
||||
|
||||
def build_fallback(self) -> None:
|
||||
self.tab = VFS(self.log_func, "idk", "/", AXS(), {})
|
||||
self.trusted = False
|
||||
|
||||
def build_tab(self) -> None:
|
||||
self.log("building tab")
|
||||
|
||||
sptn = r"^.*? on (.*) type ([^ ]+) \(.*"
|
||||
if MACOS:
|
||||
sptn = r"^.*? on (.*) \(([^ ]+), .*"
|
||||
|
||||
ptn = re.compile(sptn)
|
||||
so, _ = chkcmd(["mount"])
|
||||
tab1: list[tuple[str, str]] = []
|
||||
for ln in so.split("\n"):
|
||||
m = ptn.match(ln)
|
||||
if not m:
|
||||
continue
|
||||
|
||||
zs1, zs2 = m.groups()
|
||||
tab1.append((str(zs1), str(zs2)))
|
||||
|
||||
tab1.sort(key=lambda x: (len(x[0]), x[0]))
|
||||
path1, fs1 = tab1[0]
|
||||
tab = VFS(self.log_func, fs1, path1, AXS(), {})
|
||||
for path, fs in tab1[1:]:
|
||||
tab.add(fs, path.lstrip("/"))
|
||||
|
||||
self.tab = tab
|
||||
|
||||
def relabel(self, path: str, nval: str) -> None:
|
||||
assert self.tab
|
||||
self.cache = {}
|
||||
if ANYWIN:
|
||||
path = self._winpath(path)
|
||||
|
||||
path = path.lstrip("/")
|
||||
ptn = re.compile(r"^[^\\/]*")
|
||||
vn, rem = self.tab._find(path)
|
||||
if not self.trusted:
|
||||
# no mtab access; have to build as we go
|
||||
if "/" in rem:
|
||||
self.tab.add("idk", os.path.join(vn.vpath, rem.split("/")[0]))
|
||||
if rem:
|
||||
self.tab.add(nval, path)
|
||||
else:
|
||||
vn.realpath = nval
|
||||
|
||||
return
|
||||
|
||||
visit = [vn]
|
||||
while visit:
|
||||
vn = visit.pop()
|
||||
vn.realpath = ptn.sub(nval, vn.realpath)
|
||||
visit.extend(list(vn.nodes.values()))
|
||||
|
||||
def get_unix(self, path: str) -> str:
|
||||
if not self.tab:
|
||||
try:
|
||||
self.build_tab()
|
||||
self.trusted = True
|
||||
except:
|
||||
# prisonparty or other restrictive environment
|
||||
self.log("failed to build tab:\n{}".format(min_ex()), 3)
|
||||
self.build_fallback()
|
||||
|
||||
assert self.tab
|
||||
ret = self.tab._find(path)[0]
|
||||
if self.trusted or path == ret.vpath:
|
||||
return ret.realpath.split("/")[0]
|
||||
else:
|
||||
return "idk"
|
||||
|
||||
def get_w32(self, path: str) -> str:
|
||||
if not self.tab:
|
||||
self.build_fallback()
|
||||
|
||||
assert self.tab
|
||||
ret = self.tab._find(path)[0]
|
||||
return ret.realpath
|
||||
403
copyparty/ftpd.py
Normal file
403
copyparty/ftpd.py
Normal file
@@ -0,0 +1,403 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import stat
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
|
||||
from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
|
||||
from pyftpdlib.filesystems import AbstractedFS, FilesystemError
|
||||
from pyftpdlib.handlers import FTPHandler
|
||||
from pyftpdlib.log import config_logging
|
||||
from pyftpdlib.servers import FTPServer
|
||||
|
||||
from .__init__ import PY2, TYPE_CHECKING, E
|
||||
from .bos import bos
|
||||
from .util import Pebkac, exclude_dotfiles, fsenc
|
||||
|
||||
try:
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
except ImportError:
|
||||
p = os.path.join(E.mod, "vend")
|
||||
print("loading asynchat from " + p)
|
||||
sys.path.append(p)
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
try:
|
||||
import typing
|
||||
from typing import Any, Optional
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class FtpAuth(DummyAuthorizer):
    """pyftpdlib authorizer which maps ftp logins onto copyparty accounts"""

    def __init__(self, hub: "SvcHub") -> None:
        super(FtpAuth, self).__init__()
        self.hub = hub

    def validate_authentication(
        self, username: str, password: str, handler: Any
    ) -> None:
        """reject the login unless the password maps to an account with r or w perms"""
        asrv = self.hub.asrv
        if username == "anonymous":
            password = ""

        # copyparty logins are password-only; empty password means guest ("*")
        uname = asrv.iacct.get(password, "") if password else "*"
        handler.username = uname

        bad_pw = bool(password) and not uname
        no_access = not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname))
        if bad_pw or no_access:
            raise AuthenticationFailed("Authentication failed.")

    def get_home_dir(self, username: str) -> str:
        # vpaths are rooted at "/"
        return "/"

    def has_user(self, username: str) -> bool:
        return username in self.hub.asrv.acct

    def has_perm(self, username: str, perm: int, path: Optional[str] = None) -> bool:
        return True  # handled at filesystem layer

    def get_perms(self, username: str) -> str:
        # full pyftpdlib permission set; real checks happen in FtpFs
        return "elradfmwMT"

    def get_msg_login(self, username: str) -> str:
        return "sup {}".format(username)

    def get_msg_quit(self, username: str) -> str:
        return "cya"
class FtpFs(AbstractedFS):
    """
    pyftpdlib filesystem adapter; translates ftp paths into copyparty
    vpaths and enforces the volume permissions of the logged-in user
    """

    def __init__(
        self, root: str, cmd_channel: Any
    ) -> None:  # pylint: disable=super-init-not-called
        self.h = self.cmd_channel = cmd_channel  # type: FTPHandler
        self.hub: "SvcHub" = cmd_channel.hub
        self.args = cmd_channel.args

        # resolve account from password (copyparty logins are password-only)
        self.uname = self.hub.asrv.iacct.get(cmd_channel.password, "*")

        self.cwd = "/"  # pyftpdlib convention of leading slash
        # decoy; never used since every path goes through v2a
        self.root = "/var/lib/empty"

        self.listdirinfo = self.listdir
        self.chdir(".")

    def v2a(
        self,
        vpath: str,
        r: bool = False,
        w: bool = False,
        m: bool = False,
        d: bool = False,
    ) -> str:
        """vpath -> abspath; raises FilesystemError unless the user has the requested r/w/move/delete perms"""
        try:
            vpath = vpath.replace("\\", "/").lstrip("/")
            vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
            if not vfs.realpath:
                raise FilesystemError("no filesystem mounted at this path")

            return os.path.join(vfs.realpath, rem)
        except Pebkac as ex:
            # translate copyparty errors into pyftpdlib's exception type
            raise FilesystemError(str(ex))

    def rv2a(
        self,
        vpath: str,
        r: bool = False,
        w: bool = False,
        m: bool = False,
        d: bool = False,
    ) -> str:
        """vpath relative to cwd -> abspath, with permission checks"""
        return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d)

    def ftp2fs(self, ftppath: str) -> str:
        # return self.v2a(ftppath)
        return ftppath  # self.cwd must be vpath

    def fs2ftp(self, fspath: str) -> str:
        # raise NotImplementedError()
        return fspath

    def validpath(self, path: str) -> bool:
        """block access to copyparty's own database/metadata files"""
        if "/.hist/" in path:
            if "/up2k." in path or path.endswith("/dir.txt"):
                raise FilesystemError("access to this file is forbidden")

        return True

    def open(self, filename: str, mode: str) -> typing.IO[Any]:
        """open a file after mapping the ftp mode onto copyparty r/w perms"""
        r = "r" in mode
        w = "w" in mode or "a" in mode or "+" in mode

        ap = self.rv2a(filename, r, w)
        if w and bos.path.exists(ap):
            # never overwrite existing files through ftp
            raise FilesystemError("cannot open existing file for writing")

        self.validpath(ap)
        return open(fsenc(ap), mode)

    def chdir(self, path: str) -> None:
        """change cwd and cache the permission flags of the new location"""
        self.cwd = join(self.cwd, path)
        x = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username)
        self.can_read, self.can_write, self.can_move, self.can_delete, self.can_get = x

    def mkdir(self, path: str) -> None:
        ap = self.rv2a(path, w=True)
        bos.mkdir(ap)

    def listdir(self, path: str) -> list[str]:
        """list folder contents; falls back to volume roots / empty on denial"""
        vpath = join(self.cwd, path).lstrip("/")
        try:
            vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, True, False)

            fsroot, vfs_ls1, vfs_virt = vfs.ls(
                rem, self.uname, not self.args.no_scandir, [[True], [False, True]]
            )
            vfs_ls = [x[0] for x in vfs_ls1]
            # include mounted subvolumes which are not on-disk entries
            vfs_ls.extend(vfs_virt.keys())

            if not self.args.ed:
                vfs_ls = exclude_dotfiles(vfs_ls)

            vfs_ls.sort()
            return vfs_ls
        except:
            if vpath:
                # display write-only folders as empty
                return []

            # return list of volumes
            r = {x.split("/")[0]: 1 for x in self.hub.asrv.vfs.all_vols.keys()}
            return list(sorted(list(r.keys())))

    def rmdir(self, path: str) -> None:
        ap = self.rv2a(path, d=True)
        bos.rmdir(ap)

    def remove(self, path: str) -> None:
        """delete a file through up2k so the db index stays consistent"""
        if self.args.no_del:
            raise FilesystemError("the delete feature is disabled in server config")

        vp = join(self.cwd, path).lstrip("/")
        try:
            self.hub.up2k.handle_rm(self.uname, self.h.remote_ip, [vp])
        except Exception as ex:
            raise FilesystemError(str(ex))

    def rename(self, src: str, dst: str) -> None:
        """rename/move through up2k so the db index stays consistent"""
        if not self.can_move:
            raise FilesystemError("not allowed for user " + self.h.username)

        if self.args.no_mv:
            t = "the rename/move feature is disabled in server config"
            raise FilesystemError(t)

        svp = join(self.cwd, src).lstrip("/")
        dvp = join(self.cwd, dst).lstrip("/")
        try:
            self.hub.up2k.handle_mv(self.uname, svp, dvp)
        except Exception as ex:
            raise FilesystemError(str(ex))

    def chmod(self, path: str, mode: str) -> None:
        # intentionally unsupported
        pass

    def stat(self, path: str) -> os.stat_result:
        """stat with read-permission check; folders are also visible with just get/write"""
        try:
            ap = self.rv2a(path, r=True)
            return bos.stat(ap)
        except:
            ap = self.rv2a(path)
            st = bos.stat(ap)
            if not stat.S_ISDIR(st.st_mode):
                # not readable and not a folder; keep the original denial
                raise

            return st

    def utime(self, path: str, timeval: float) -> None:
        ap = self.rv2a(path, w=True)
        return bos.utime(ap, (timeval, timeval))

    def lstat(self, path: str) -> os.stat_result:
        ap = self.rv2a(path)
        return bos.lstat(ap)

    def isfile(self, path: str) -> bool:
        st = self.stat(path)
        return stat.S_ISREG(st.st_mode)

    def islink(self, path: str) -> bool:
        ap = self.rv2a(path)
        return bos.path.islink(ap)

    def isdir(self, path: str) -> bool:
        try:
            st = self.stat(path)
            return stat.S_ISDIR(st.st_mode)
        except:
            # unreadable entries are assumed to be folders
            return True

    def getsize(self, path: str) -> int:
        ap = self.rv2a(path)
        return bos.path.getsize(ap)

    def getmtime(self, path: str) -> float:
        ap = self.rv2a(path)
        return bos.path.getmtime(ap)

    def realpath(self, path: str) -> str:
        # vpaths are already canonical from the client's perspective
        return path

    def lexists(self, path: str) -> bool:
        ap = self.rv2a(path)
        return bos.path.lexists(ap)

    def get_user_by_uid(self, uid: int) -> str:
        # fixed owner shown in directory listings
        return "root"

    def get_group_by_uid(self, gid: int) -> str:
        # NOTE(review): pyftpdlib's AbstractedFS names this get_group_by_gid;
        # confirm this override is actually reached
        return "root"
class FtpHandler(FTPHandler):
    abstracted_fs = FtpFs
    # class-level config; injected by Ftpd before the server starts
    hub: "SvcHub" = None
    args: argparse.Namespace = None

    def __init__(self, conn: Any, server: Any, ioloop: Any = None) -> None:
        # pin the class-level config onto the instance
        self.hub: "SvcHub" = FtpHandler.hub
        self.args: argparse.Namespace = FtpHandler.args

        if PY2:
            FTPHandler.__init__(self, conn, server, ioloop)
        else:
            super(FtpHandler, self).__init__(conn, server, ioloop)

        # abspath->vpath mapping to resolve log_transfer paths
        self.vfs_map: dict[str, str] = {}

    def ftp_STOR(self, file: str, mode: str = "w") -> Any:
        """upload; remembers the abspath->vpath mapping for log_transfer"""
        # Optional[str]
        vp = join(self.fs.cwd, file).lstrip("/")
        ap = self.fs.v2a(vp)
        self.vfs_map[ap] = vp
        # print("ftp_STOR: {} {} => {}".format(vp, mode, ap))
        ret = FTPHandler.ftp_STOR(self, file, mode)
        # print("ftp_STOR: {} {} OK".format(vp, mode))
        return ret

    def log_transfer(
        self,
        cmd: str,
        filename: bytes,
        receive: bool,
        completed: bool,
        elapsed: float,
        bytes: int,
    ) -> Any:
        """transfer finished; if it was an upload, hand it to up2k for hashing"""
        # None
        ap = filename.decode("utf-8", "replace")
        vp = self.vfs_map.pop(ap, None)
        # print("xfer_end: {} => {}".format(ap, vp))
        if vp:
            vp, fn = os.path.split(vp)
            vfs, rem = self.hub.asrv.vfs.get(vp, self.username, False, True)
            vfs, rem = vfs.get_dbv(rem)
            self.hub.up2k.hash_file(
                vfs.realpath,
                vfs.flags,
                rem,
                fn,
                self.remote_ip,
                time.time(),
            )

        return FTPHandler.log_transfer(
            self, cmd, filename, receive, completed, elapsed, bytes
        )
try:
    from pyftpdlib.handlers import TLS_FTPHandler

    # ftps (tls) variant; only defined when pyopenssl is available
    class SftpHandler(FtpHandler, TLS_FTPHandler):
        pass

except:
    # no pyopenssl; Ftpd errors out later if --ftps was requested
    pass
class Ftpd(object):
    """configures and starts the ftp/ftps server(s) on a daemon thread"""

    def __init__(self, hub: "SvcHub") -> None:
        self.hub = hub
        self.args = hub.args

        # list of [handler-class, port] pairs to serve
        hs = []
        if self.args.ftp:
            hs.append([FtpHandler, self.args.ftp])
        if self.args.ftps:
            try:
                # NameError here if pyopenssl was missing at import time
                h1 = SftpHandler
            except:
                t = "\nftps requires pyopenssl;\nplease run the following:\n\n {} -m pip install --user pyopenssl\n"
                print(t.format(sys.executable))
                sys.exit(1)

            h1.certfile = os.path.join(self.args.E.cfg, "cert.pem")
            h1.tls_control_required = True
            h1.tls_data_required = True

            hs.append([h1, self.args.ftps])

        for h_lp in hs:
            h2, lp = h_lp
            # inject server-wide config as class attributes (pyftpdlib
            # instantiates the handler class itself, per connection)
            h2.hub = hub
            h2.args = hub.args
            h2.authorizer = FtpAuth(hub)

            if self.args.ftp_pr:
                p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")]
                if self.args.ftp and self.args.ftps:
                    # divide port range in half
                    d = int((p2 - p1) / 2)
                    if lp == self.args.ftp:
                        p2 = p1 + d
                    else:
                        p1 += d + 1

                h2.passive_ports = list(range(p1, p2 + 1))

            if self.args.ftp_nat:
                h2.masquerade_address = self.args.ftp_nat

        if self.args.ftp_dbg:
            config_logging(level=logging.DEBUG)

        # one shared ioloop serves every listening socket
        ioloop = IOLoop()
        for ip in self.args.i:
            for h, lp in hs:
                FTPServer((ip, int(lp)), h, ioloop)

        thr = threading.Thread(target=ioloop.loop, name="ftp")
        thr.daemon = True
        thr.start()
def join(p1: str, p2: str) -> str:
    """os.path.join + normpath with backslashes normalized to forward slashes"""
    combined = os.path.join(p1, p2.replace("\\", "/"))
    normed = os.path.normpath(combined)
    return normed.replace("\\", "/")
|
||||
2074
copyparty/httpcli.py
2074
copyparty/httpcli.py
File diff suppressed because it is too large
Load Diff
@@ -1,24 +1,37 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import argparse # typechk
|
||||
import os
|
||||
import time
|
||||
import re
|
||||
import socket
|
||||
import threading # typechk
|
||||
import time
|
||||
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
HAVE_SSL = True
|
||||
import ssl
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
from .__init__ import E
|
||||
from .util import Unrecv
|
||||
from . import util as Util
|
||||
from .__init__ import TYPE_CHECKING, EnvParams
|
||||
from .authsrv import AuthSrv # typechk
|
||||
from .httpcli import HttpCli
|
||||
from .u2idx import U2idx
|
||||
from .th_cli import ThumbCli
|
||||
from .th_srv import HAVE_PIL
|
||||
from .ico import Ico
|
||||
from .mtag import HAVE_FFMPEG
|
||||
from .th_cli import ThumbCli
|
||||
from .th_srv import HAVE_PIL, HAVE_VIPS
|
||||
from .u2idx import U2idx
|
||||
from .util import HMaccas, shut_socket
|
||||
|
||||
try:
|
||||
from typing import Optional, Pattern, Union
|
||||
except:
|
||||
pass
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .httpsrv import HttpSrv
|
||||
|
||||
|
||||
class HttpConn(object):
|
||||
@@ -27,37 +40,46 @@ class HttpConn(object):
|
||||
creates an HttpCli for each request (Connection: Keep-Alive)
|
||||
"""
|
||||
|
||||
def __init__(self, sck, addr, hsrv):
|
||||
def __init__(
|
||||
self, sck: socket.socket, addr: tuple[str, int], hsrv: "HttpSrv"
|
||||
) -> None:
|
||||
self.s = sck
|
||||
self.sr: Optional[Util._Unrecv] = None
|
||||
self.addr = addr
|
||||
self.hsrv = hsrv
|
||||
|
||||
self.args = hsrv.args
|
||||
self.asrv = hsrv.asrv
|
||||
self.mutex: threading.Lock = hsrv.mutex # mypy404
|
||||
self.args: argparse.Namespace = hsrv.args # mypy404
|
||||
self.E: EnvParams = self.args.E
|
||||
self.asrv: AuthSrv = hsrv.asrv # mypy404
|
||||
self.cert_path = hsrv.cert_path
|
||||
self.u2fh: Util.FHC = hsrv.u2fh # mypy404
|
||||
self.iphash: HMaccas = hsrv.broker.iphash
|
||||
|
||||
enth = HAVE_PIL and not self.args.no_thumb
|
||||
self.thumbcli = ThumbCli(hsrv.broker) if enth else None
|
||||
self.ico = Ico(self.args)
|
||||
enth = (HAVE_PIL or HAVE_VIPS or HAVE_FFMPEG) and not self.args.no_thumb
|
||||
self.thumbcli: Optional[ThumbCli] = ThumbCli(hsrv) if enth else None # mypy404
|
||||
self.ico: Ico = Ico(self.args) # mypy404
|
||||
|
||||
self.t0 = time.time()
|
||||
self.t0: float = time.time() # mypy404
|
||||
self.stopping = False
|
||||
self.nreq = 0
|
||||
self.nbyte = 0
|
||||
self.u2idx = None
|
||||
self.log_func = hsrv.log
|
||||
self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None
|
||||
self.nreq: int = 0 # mypy404
|
||||
self.nbyte: int = 0 # mypy404
|
||||
self.u2idx: Optional[U2idx] = None
|
||||
self.log_func: "Util.RootLogger" = hsrv.log # mypy404
|
||||
self.log_src: str = "httpconn" # mypy404
|
||||
self.lf_url: Optional[Pattern[str]] = (
|
||||
re.compile(self.args.lf_url) if self.args.lf_url else None
|
||||
) # mypy404
|
||||
self.set_rproxy()
|
||||
|
||||
def shutdown(self):
|
||||
def shutdown(self) -> None:
|
||||
self.stopping = True
|
||||
try:
|
||||
self.s.shutdown(socket.SHUT_RDWR)
|
||||
self.s.close()
|
||||
shut_socket(self.log, self.s, 1)
|
||||
except:
|
||||
pass
|
||||
|
||||
def set_rproxy(self, ip=None):
|
||||
def set_rproxy(self, ip: Optional[str] = None) -> str:
|
||||
if ip is None:
|
||||
color = 36
|
||||
ip = self.addr[0]
|
||||
@@ -70,35 +92,37 @@ class HttpConn(object):
|
||||
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
|
||||
return self.log_src
|
||||
|
||||
def respath(self, res_name):
|
||||
return os.path.join(E.mod, "web", res_name)
|
||||
def respath(self, res_name: str) -> str:
|
||||
return os.path.join(self.E.mod, "web", res_name)
|
||||
|
||||
def log(self, msg, c=0):
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func(self.log_src, msg, c)
|
||||
|
||||
def get_u2idx(self):
|
||||
def get_u2idx(self) -> U2idx:
|
||||
# one u2idx per tcp connection;
|
||||
# sqlite3 fully parallelizes under python threads
|
||||
if not self.u2idx:
|
||||
self.u2idx = U2idx(self)
|
||||
|
||||
return self.u2idx
|
||||
|
||||
def _detect_https(self):
|
||||
def _detect_https(self) -> bool:
|
||||
method = None
|
||||
if self.cert_path:
|
||||
try:
|
||||
method = self.s.recv(4, socket.MSG_PEEK)
|
||||
except socket.timeout:
|
||||
return
|
||||
return False
|
||||
except AttributeError:
|
||||
# jython does not support msg_peek; forget about https
|
||||
method = self.s.recv(4)
|
||||
self.sr = Unrecv(self.s)
|
||||
self.sr = Util.Unrecv(self.s, self.log)
|
||||
self.sr.buf = method
|
||||
|
||||
# jython used to do this, they stopped since it's broken
|
||||
# but reimplementing sendall is out of scope for now
|
||||
if not getattr(self.s, "sendall", None):
|
||||
self.s.sendall = self.s.send
|
||||
self.s.sendall = self.s.send # type: ignore
|
||||
|
||||
if len(method) != 4:
|
||||
err = "need at least 4 bytes in the first packet; got {}".format(
|
||||
@@ -108,17 +132,18 @@ class HttpConn(object):
|
||||
self.log(err)
|
||||
|
||||
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
||||
return
|
||||
return False
|
||||
|
||||
return method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]
|
||||
|
||||
def run(self):
|
||||
def run(self) -> None:
|
||||
self.sr = None
|
||||
if self.args.https_only:
|
||||
is_https = True
|
||||
elif self.args.http_only or not HAVE_SSL:
|
||||
is_https = False
|
||||
else:
|
||||
# raise Exception("asdf")
|
||||
is_https = self._detect_https()
|
||||
|
||||
if is_https:
|
||||
@@ -147,14 +172,15 @@ class HttpConn(object):
|
||||
self.s = ctx.wrap_socket(self.s, server_side=True)
|
||||
msg = [
|
||||
"\033[1;3{:d}m{}".format(c, s)
|
||||
for c, s in zip([0, 5, 0], self.s.cipher())
|
||||
for c, s in zip([0, 5, 0], self.s.cipher()) # type: ignore
|
||||
]
|
||||
self.log(" ".join(msg) + "\033[0m")
|
||||
|
||||
if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
|
||||
overlap = [y[::-1] for y in self.s.shared_ciphers()]
|
||||
lines = [str(x) for x in (["TLS cipher overlap:"] + overlap)]
|
||||
self.log("\n".join(lines))
|
||||
ciphers = self.s.shared_ciphers()
|
||||
assert ciphers
|
||||
overlap = [str(y[::-1]) for y in ciphers]
|
||||
self.log("TLS cipher overlap:" + "\n".join(overlap))
|
||||
for k, v in [
|
||||
["compression", self.s.compression()],
|
||||
["ALPN proto", self.s.selected_alpn_protocol()],
|
||||
@@ -165,11 +191,7 @@ class HttpConn(object):
|
||||
except Exception as ex:
|
||||
em = str(ex)
|
||||
|
||||
if "ALERT_BAD_CERTIFICATE" in em:
|
||||
# firefox-linux if there is no exception yet
|
||||
self.log("client rejected our certificate (nice)")
|
||||
|
||||
elif "ALERT_CERTIFICATE_UNKNOWN" in em:
|
||||
if "ALERT_CERTIFICATE_UNKNOWN" in em:
|
||||
# android-chrome keeps doing this
|
||||
pass
|
||||
|
||||
@@ -179,7 +201,7 @@ class HttpConn(object):
|
||||
return
|
||||
|
||||
if not self.sr:
|
||||
self.sr = Unrecv(self.s)
|
||||
self.sr = Util.Unrecv(self.s, self.log)
|
||||
|
||||
while not self.stopping:
|
||||
self.nreq += 1
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import math
|
||||
import base64
|
||||
import math
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
|
||||
import queue
|
||||
|
||||
try:
|
||||
import jinja2
|
||||
@@ -26,14 +28,27 @@ except ImportError:
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
from .__init__ import E, PY2, MACOS
|
||||
from .util import spack, min_ex, start_stackmon, start_log_thrs
|
||||
from .__init__ import MACOS, TYPE_CHECKING, EnvParams
|
||||
from .bos import bos
|
||||
from .httpconn import HttpConn
|
||||
from .util import (
|
||||
FHC,
|
||||
Garda,
|
||||
Magician,
|
||||
min_ex,
|
||||
shut_socket,
|
||||
spack,
|
||||
start_log_thrs,
|
||||
start_stackmon,
|
||||
)
|
||||
|
||||
if PY2:
|
||||
import Queue as queue
|
||||
else:
|
||||
import queue
|
||||
if TYPE_CHECKING:
|
||||
from .broker_util import BrokerCli
|
||||
|
||||
try:
|
||||
from typing import Any, Optional
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class HttpSrv(object):
|
||||
@@ -42,50 +57,61 @@ class HttpSrv(object):
|
||||
relying on MpSrv for performance (HttpSrv is just plain threads)
|
||||
"""
|
||||
|
||||
def __init__(self, broker, nid):
|
||||
def __init__(self, broker: "BrokerCli", nid: Optional[int]) -> None:
|
||||
self.broker = broker
|
||||
self.nid = nid
|
||||
self.args = broker.args
|
||||
self.E: EnvParams = self.args.E
|
||||
self.log = broker.log
|
||||
self.asrv = broker.asrv
|
||||
|
||||
self.name = "httpsrv" + ("-n{}-i{:x}".format(nid, os.getpid()) if nid else "")
|
||||
# redefine in case of multiprocessing
|
||||
socket.setdefaulttimeout(120)
|
||||
|
||||
nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
|
||||
self.magician = Magician()
|
||||
self.bans: dict[str, int] = {}
|
||||
self.gpwd = Garda(self.args.ban_pw)
|
||||
self.g404 = Garda(self.args.ban_404)
|
||||
|
||||
self.name = "hsrv" + nsuf
|
||||
self.mutex = threading.Lock()
|
||||
self.stopping = False
|
||||
|
||||
self.tp_nthr = 0 # actual
|
||||
self.tp_ncli = 0 # fading
|
||||
self.tp_time = None # latest worker collect
|
||||
self.tp_q = None if self.args.no_htp else queue.LifoQueue()
|
||||
self.tp_time = 0.0 # latest worker collect
|
||||
self.tp_q: Optional[queue.LifoQueue[Any]] = (
|
||||
None if self.args.no_htp else queue.LifoQueue()
|
||||
)
|
||||
self.t_periodic: Optional[threading.Thread] = None
|
||||
|
||||
self.srvs = []
|
||||
self.u2fh = FHC()
|
||||
self.srvs: list[socket.socket] = []
|
||||
self.ncli = 0 # exact
|
||||
self.clients = {} # laggy
|
||||
self.clients: set[HttpConn] = set() # laggy
|
||||
self.nclimax = 0
|
||||
self.cb_ts = 0
|
||||
self.cb_v = 0
|
||||
self.cb_ts = 0.0
|
||||
self.cb_v = ""
|
||||
|
||||
env = jinja2.Environment()
|
||||
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
|
||||
env.loader = jinja2.FileSystemLoader(os.path.join(self.E.mod, "web"))
|
||||
self.j2 = {
|
||||
x: env.get_template(x + ".html")
|
||||
for x in ["splash", "browser", "browser2", "msg", "md", "mde"]
|
||||
for x in ["splash", "browser", "browser2", "msg", "md", "mde", "cf"]
|
||||
}
|
||||
zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
|
||||
self.prism = os.path.exists(zs)
|
||||
|
||||
cert_path = os.path.join(E.cfg, "cert.pem")
|
||||
if os.path.exists(cert_path):
|
||||
cert_path = os.path.join(self.E.cfg, "cert.pem")
|
||||
if bos.path.exists(cert_path):
|
||||
self.cert_path = cert_path
|
||||
else:
|
||||
self.cert_path = None
|
||||
self.cert_path = ""
|
||||
|
||||
if self.tp_q:
|
||||
self.start_threads(4)
|
||||
|
||||
name = "httpsrv-scaler" + ("-{}".format(nid) if nid else "")
|
||||
t = threading.Thread(target=self.thr_scaler, name=name)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
if nid:
|
||||
if self.args.stackmon:
|
||||
start_stackmon(self.args.stackmon, nid)
|
||||
@@ -93,7 +119,19 @@ class HttpSrv(object):
|
||||
if self.args.log_thrs:
|
||||
start_log_thrs(self.log, self.args.log_thrs, nid)
|
||||
|
||||
def start_threads(self, n):
|
||||
self.th_cfg: dict[str, Any] = {}
|
||||
t = threading.Thread(target=self.post_init, name="hsrv-init2")
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
def post_init(self) -> None:
|
||||
try:
|
||||
x = self.broker.ask("thumbsrv.getcfg")
|
||||
self.th_cfg = x.get()
|
||||
except:
|
||||
pass
|
||||
|
||||
def start_threads(self, n: int) -> None:
|
||||
self.tp_nthr += n
|
||||
if self.args.log_htp:
|
||||
self.log(self.name, "workers += {} = {}".format(n, self.tp_nthr), 6)
|
||||
@@ -106,23 +144,36 @@ class HttpSrv(object):
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def stop_threads(self, n):
|
||||
def stop_threads(self, n: int) -> None:
|
||||
self.tp_nthr -= n
|
||||
if self.args.log_htp:
|
||||
self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6)
|
||||
|
||||
assert self.tp_q
|
||||
for _ in range(n):
|
||||
self.tp_q.put(None)
|
||||
|
||||
def thr_scaler(self):
|
||||
def periodic(self) -> None:
|
||||
while True:
|
||||
time.sleep(2 if self.tp_ncli else 30)
|
||||
time.sleep(2 if self.tp_ncli or self.ncli else 10)
|
||||
with self.mutex:
|
||||
self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
|
||||
if self.tp_nthr > self.tp_ncli + 8:
|
||||
self.stop_threads(4)
|
||||
self.u2fh.clean()
|
||||
if self.tp_q:
|
||||
self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
|
||||
if self.tp_nthr > self.tp_ncli + 8:
|
||||
self.stop_threads(4)
|
||||
|
||||
if not self.ncli and not self.u2fh.cache and self.tp_nthr <= 8:
|
||||
self.t_periodic = None
|
||||
return
|
||||
|
||||
def listen(self, sck: socket.socket, nlisteners: int) -> None:
|
||||
if self.args.j != 1:
|
||||
# lost in the pickle; redefine
|
||||
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
sck.settimeout(None) # < does not inherit, ^ does
|
||||
|
||||
def listen(self, sck, nlisteners):
|
||||
ip, port = sck.getsockname()
|
||||
self.srvs.append(sck)
|
||||
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
|
||||
@@ -134,12 +185,18 @@ class HttpSrv(object):
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
def thr_listen(self, srv_sck):
|
||||
def thr_listen(self, srv_sck: socket.socket) -> None:
|
||||
"""listens on a shared tcp server"""
|
||||
ip, port = srv_sck.getsockname()
|
||||
fno = srv_sck.fileno()
|
||||
msg = "subscribed @ {}:{} f{}".format(ip, port, fno)
|
||||
msg = "subscribed @ {}:{} f{} p{}".format(ip, port, fno, os.getpid())
|
||||
self.log(self.name, msg)
|
||||
|
||||
def fun() -> None:
|
||||
self.broker.say("cb_httpsrv_up")
|
||||
|
||||
threading.Thread(target=fun, name="sig-hsrv-up1").start()
|
||||
|
||||
while not self.stopping:
|
||||
if self.args.log_conn:
|
||||
self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30")
|
||||
@@ -160,36 +217,47 @@ class HttpSrv(object):
|
||||
continue
|
||||
|
||||
if self.args.log_conn:
|
||||
m = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
|
||||
t = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
|
||||
"-" * 3, ip, port % 8, port
|
||||
)
|
||||
self.log("%s %s" % addr, m, c="1;30")
|
||||
self.log("%s %s" % addr, t, c="1;30")
|
||||
|
||||
self.accept(sck, addr)
|
||||
|
||||
def accept(self, sck, addr):
|
||||
def accept(self, sck: socket.socket, addr: tuple[str, int]) -> None:
|
||||
"""takes an incoming tcp connection and creates a thread to handle it"""
|
||||
now = time.time()
|
||||
|
||||
if now - (self.tp_time or now) > 300:
|
||||
t = "httpserver threadpool died: tpt {:.2f}, now {:.2f}, nthr {}, ncli {}"
|
||||
self.log(self.name, t.format(self.tp_time, now, self.tp_nthr, self.ncli), 1)
|
||||
self.tp_time = 0
|
||||
self.tp_q = None
|
||||
|
||||
if self.tp_q:
|
||||
self.tp_q.put((sck, addr))
|
||||
with self.mutex:
|
||||
self.ncli += 1
|
||||
self.tp_time = self.tp_time or now
|
||||
self.tp_ncli = max(self.tp_ncli, self.ncli + 1)
|
||||
if self.tp_nthr < self.ncli + 4:
|
||||
self.start_threads(8)
|
||||
return
|
||||
|
||||
if not self.args.no_htp:
|
||||
m = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n"
|
||||
self.log(self.name, m, 1)
|
||||
|
||||
with self.mutex:
|
||||
self.ncli += 1
|
||||
if not self.t_periodic:
|
||||
name = "hsrv-pt"
|
||||
if self.nid:
|
||||
name += "-{}".format(self.nid)
|
||||
|
||||
thr = threading.Thread(target=self.periodic, name=name)
|
||||
self.t_periodic = thr
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
if self.tp_q:
|
||||
self.tp_time = self.tp_time or now
|
||||
self.tp_ncli = max(self.tp_ncli, self.ncli)
|
||||
if self.tp_nthr < self.ncli + 4:
|
||||
self.start_threads(8)
|
||||
|
||||
self.tp_q.put((sck, addr))
|
||||
return
|
||||
|
||||
if not self.args.no_htp:
|
||||
t = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n"
|
||||
self.log(self.name, t, 1)
|
||||
|
||||
thr = threading.Thread(
|
||||
target=self.thr_client,
|
||||
@@ -199,14 +267,15 @@ class HttpSrv(object):
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def thr_poolw(self):
|
||||
def thr_poolw(self) -> None:
|
||||
assert self.tp_q
|
||||
while True:
|
||||
task = self.tp_q.get()
|
||||
if not task:
|
||||
break
|
||||
|
||||
with self.mutex:
|
||||
self.tp_time = None
|
||||
self.tp_time = 0
|
||||
|
||||
try:
|
||||
sck, addr = task
|
||||
@@ -216,10 +285,13 @@ class HttpSrv(object):
|
||||
)
|
||||
self.thr_client(sck, addr)
|
||||
me.name = self.name + "-poolw"
|
||||
except:
|
||||
self.log(self.name, "thr_client: " + min_ex(), 3)
|
||||
except Exception as ex:
|
||||
if str(ex).startswith("client d/c "):
|
||||
self.log(self.name, "thr_client: " + str(ex), 6)
|
||||
else:
|
||||
self.log(self.name, "thr_client: " + min_ex(), 3)
|
||||
|
||||
def shutdown(self):
|
||||
def shutdown(self) -> None:
|
||||
self.stopping = True
|
||||
for srv in self.srvs:
|
||||
try:
|
||||
@@ -227,12 +299,12 @@ class HttpSrv(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
clients = list(self.clients.keys())
|
||||
thrs = []
|
||||
clients = list(self.clients)
|
||||
for cli in clients:
|
||||
try:
|
||||
cli.shutdown()
|
||||
except:
|
||||
pass
|
||||
t = threading.Thread(target=cli.shutdown)
|
||||
thrs.append(t)
|
||||
t.start()
|
||||
|
||||
if self.tp_q:
|
||||
self.stop_threads(self.tp_nthr)
|
||||
@@ -241,15 +313,16 @@ class HttpSrv(object):
|
||||
if self.tp_q.empty():
|
||||
break
|
||||
|
||||
for t in thrs:
|
||||
t.join()
|
||||
|
||||
self.log(self.name, "ok bye")
|
||||
|
||||
def thr_client(self, sck, addr):
|
||||
def thr_client(self, sck: socket.socket, addr: tuple[str, int]) -> None:
|
||||
"""thread managing one tcp client"""
|
||||
sck.settimeout(120)
|
||||
|
||||
cli = HttpConn(sck, addr, self)
|
||||
with self.mutex:
|
||||
self.clients[cli] = 0
|
||||
self.clients.add(cli)
|
||||
|
||||
fno = sck.fileno()
|
||||
try:
|
||||
@@ -273,8 +346,7 @@ class HttpSrv(object):
|
||||
|
||||
try:
|
||||
fno = sck.fileno()
|
||||
sck.shutdown(socket.SHUT_RDWR)
|
||||
sck.close()
|
||||
shut_socket(cli.log, sck)
|
||||
except (OSError, socket.error) as ex:
|
||||
if not MACOS:
|
||||
self.log(
|
||||
@@ -292,10 +364,10 @@ class HttpSrv(object):
|
||||
raise
|
||||
finally:
|
||||
with self.mutex:
|
||||
del self.clients[cli]
|
||||
self.clients.remove(cli)
|
||||
self.ncli -= 1
|
||||
|
||||
def cachebuster(self):
|
||||
def cachebuster(self) -> str:
|
||||
if time.time() - self.cb_ts < 1:
|
||||
return self.cb_v
|
||||
|
||||
@@ -303,11 +375,11 @@ class HttpSrv(object):
|
||||
if time.time() - self.cb_ts < 1:
|
||||
return self.cb_v
|
||||
|
||||
v = E.t0
|
||||
v = self.E.t0
|
||||
try:
|
||||
with os.scandir(os.path.join(E.mod, "web")) as dh:
|
||||
with os.scandir(os.path.join(self.E.mod, "web")) as dh:
|
||||
for fh in dh:
|
||||
inf = fh.stat(follow_symlinks=False)
|
||||
inf = fh.stat()
|
||||
v = max(v, inf.st_mtime)
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -1,33 +1,69 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import hashlib
|
||||
import argparse # typechk
|
||||
import colorsys
|
||||
import hashlib
|
||||
|
||||
from .__init__ import PY2
|
||||
from .th_srv import HAVE_PIL
|
||||
from .util import BytesIO
|
||||
|
||||
|
||||
class Ico(object):
|
||||
def __init__(self, args):
|
||||
def __init__(self, args: argparse.Namespace) -> None:
|
||||
self.args = args
|
||||
|
||||
def get(self, ext, as_thumb):
|
||||
def get(self, ext: str, as_thumb: bool, chrome: bool) -> tuple[str, bytes]:
|
||||
"""placeholder to make thumbnails not break"""
|
||||
|
||||
h = hashlib.md5(ext.encode("utf-8")).digest()[:2]
|
||||
zb = hashlib.sha1(ext.encode("utf-8")).digest()[2:4]
|
||||
if PY2:
|
||||
h = [ord(x) for x in h]
|
||||
zb = [ord(x) for x in zb]
|
||||
|
||||
c1 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 0.3)
|
||||
c2 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 1)
|
||||
c = list(c1) + list(c2)
|
||||
c = [int(x * 255) for x in c]
|
||||
c = "".join(["{:02x}".format(x) for x in c])
|
||||
c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3)
|
||||
c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 1)
|
||||
ci = [int(x * 255) for x in list(c1) + list(c2)]
|
||||
c = "".join(["{:02x}".format(x) for x in ci])
|
||||
|
||||
w = 100
|
||||
h = 30
|
||||
if not self.args.th_no_crop and as_thumb:
|
||||
w, h = self.args.th_size.split("x")
|
||||
h = int(100 / (float(w) / float(h)))
|
||||
sw, sh = self.args.th_size.split("x")
|
||||
h = int(100 / (float(sw) / float(sh)))
|
||||
w = 100
|
||||
|
||||
if chrome and as_thumb:
|
||||
# cannot handle more than ~2000 unique SVGs
|
||||
if HAVE_PIL:
|
||||
# svg: 3s, cache: 6s, this: 8s
|
||||
from PIL import Image, ImageDraw
|
||||
|
||||
h = int(64 * h / w)
|
||||
w = 64
|
||||
img = Image.new("RGB", (w, h), "#" + c[:6])
|
||||
pb = ImageDraw.Draw(img)
|
||||
tw, th = pb.textsize(ext)
|
||||
pb.text(((w - tw) // 2, (h - th) // 2), ext, fill="#" + c[6:])
|
||||
img = img.resize((w * 3, h * 3), Image.NEAREST)
|
||||
|
||||
buf = BytesIO()
|
||||
img.save(buf, format="PNG", compress_level=1)
|
||||
return "image/png", buf.getvalue()
|
||||
|
||||
elif False:
|
||||
# 48s, too slow
|
||||
import pyvips
|
||||
|
||||
h = int(192 * h / w)
|
||||
w = 192
|
||||
img = pyvips.Image.text(
|
||||
ext, width=w, height=h, dpi=192, align=pyvips.Align.CENTRE
|
||||
)
|
||||
img = img.ifthenelse(ci[3:], ci[:3], blend=True)
|
||||
# i = i.resize(3, kernel=pyvips.Kernel.NEAREST)
|
||||
buf = img.write_to_buffer(".png[compression=1]")
|
||||
return "image/png", buf
|
||||
|
||||
svg = """\
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
@@ -37,6 +73,6 @@ class Ico(object):
|
||||
fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
|
||||
</g></svg>
|
||||
"""
|
||||
svg = svg.format(h, c[:6], c[6:], ext).encode("utf-8")
|
||||
svg = svg.format(h, c[:6], c[6:], ext)
|
||||
|
||||
return ["image/svg+xml", svg]
|
||||
return "image/svg+xml", svg.encode("utf-8")
|
||||
|
||||
@@ -1,17 +1,26 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
import sys
|
||||
|
||||
from .__init__ import PY2, WINDOWS, unicode
|
||||
from .util import fsenc, fsdec, uncyg, REKOBO_LKEY
|
||||
from .__init__ import PY2, WINDOWS, E, unicode
|
||||
from .bos import bos
|
||||
from .util import REKOBO_LKEY, fsenc, min_ex, retchk, runcmd, uncyg
|
||||
|
||||
try:
|
||||
from typing import Any, Union
|
||||
|
||||
from .util import RootLogger
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def have_ff(cmd):
|
||||
def have_ff(cmd: str) -> bool:
|
||||
if PY2:
|
||||
print("# checking {}".format(cmd))
|
||||
cmd = (cmd + " -version").encode("ascii").split(b" ")
|
||||
@@ -29,13 +38,16 @@ HAVE_FFPROBE = have_ff("ffprobe")
|
||||
|
||||
|
||||
class MParser(object):
|
||||
def __init__(self, cmdline):
|
||||
def __init__(self, cmdline: str) -> None:
|
||||
self.tag, args = cmdline.split("=", 1)
|
||||
self.tags = self.tag.split(",")
|
||||
|
||||
self.timeout = 30
|
||||
self.timeout = 60
|
||||
self.force = False
|
||||
self.kill = "t" # tree; all children recursively
|
||||
self.capture = 3 # outputs to consume
|
||||
self.audio = "y"
|
||||
self.pri = 0 # priority; higher = later
|
||||
self.ext = []
|
||||
|
||||
while True:
|
||||
@@ -44,7 +56,7 @@ class MParser(object):
|
||||
if WINDOWS:
|
||||
bp = uncyg(bp)
|
||||
|
||||
if os.path.exists(bp):
|
||||
if bos.path.exists(bp):
|
||||
self.bin = bp
|
||||
return
|
||||
except:
|
||||
@@ -57,6 +69,14 @@ class MParser(object):
|
||||
self.audio = arg[1:] # [r]equire [n]ot [d]ontcare
|
||||
continue
|
||||
|
||||
if arg.startswith("k"):
|
||||
self.kill = arg[1:] # [t]ree [m]ain [n]one
|
||||
continue
|
||||
|
||||
if arg.startswith("c"):
|
||||
self.capture = int(arg[1:]) # 0=none 1=stdout 2=stderr 3=both
|
||||
continue
|
||||
|
||||
if arg == "f":
|
||||
self.force = True
|
||||
continue
|
||||
@@ -69,10 +89,16 @@ class MParser(object):
|
||||
self.ext.append(arg[1:])
|
||||
continue
|
||||
|
||||
if arg.startswith("p"):
|
||||
self.pri = int(arg[1:] or "1")
|
||||
continue
|
||||
|
||||
raise Exception()
|
||||
|
||||
|
||||
def ffprobe(abspath):
|
||||
def ffprobe(
|
||||
abspath: str, timeout: int = 60
|
||||
) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]:
|
||||
cmd = [
|
||||
b"ffprobe",
|
||||
b"-hide_banner",
|
||||
@@ -81,21 +107,20 @@ def ffprobe(abspath):
|
||||
b"--",
|
||||
fsenc(abspath),
|
||||
]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
r = p.communicate()
|
||||
txt = r[0].decode("utf-8", "replace")
|
||||
return parse_ffprobe(txt)
|
||||
rc, so, se = runcmd(cmd, timeout=timeout)
|
||||
retchk(rc, cmd, se)
|
||||
return parse_ffprobe(so)
|
||||
|
||||
|
||||
def parse_ffprobe(txt):
|
||||
def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]:
|
||||
"""ffprobe -show_format -show_streams"""
|
||||
streams = []
|
||||
fmt = {}
|
||||
g = None
|
||||
g = {}
|
||||
for ln in [x.rstrip("\r") for x in txt.split("\n")]:
|
||||
try:
|
||||
k, v = ln.split("=", 1)
|
||||
g[k] = v
|
||||
sk, sv = ln.split("=", 1)
|
||||
g[sk] = sv
|
||||
continue
|
||||
except:
|
||||
pass
|
||||
@@ -109,8 +134,8 @@ def parse_ffprobe(txt):
|
||||
fmt = g
|
||||
|
||||
streams = [fmt] + streams
|
||||
ret = {} # processed
|
||||
md = {} # raw tags
|
||||
ret: dict[str, Any] = {} # processed
|
||||
md: dict[str, list[Any]] = {} # raw tags
|
||||
|
||||
is_audio = fmt.get("format_name") in ["mp3", "ogg", "flac", "wav"]
|
||||
if fmt.get("filename", "").split(".")[-1].lower() in ["m4a", "aac"]:
|
||||
@@ -158,52 +183,55 @@ def parse_ffprobe(txt):
|
||||
]
|
||||
|
||||
if typ == "format":
|
||||
kvm = [["duration", ".dur"], ["bit_rate", ".q"]]
|
||||
kvm = [["duration", ".dur"], ["bit_rate", ".q"], ["format_name", "fmt"]]
|
||||
|
||||
for sk, rk in kvm:
|
||||
v = strm.get(sk)
|
||||
if v is None:
|
||||
v1 = strm.get(sk)
|
||||
if v1 is None:
|
||||
continue
|
||||
|
||||
if rk.startswith("."):
|
||||
try:
|
||||
v = float(v)
|
||||
zf = float(v1)
|
||||
v2 = ret.get(rk)
|
||||
if v2 is None or v > v2:
|
||||
ret[rk] = v
|
||||
if v2 is None or zf > v2:
|
||||
ret[rk] = zf
|
||||
except:
|
||||
# sqlite doesnt care but the code below does
|
||||
if v not in ["N/A"]:
|
||||
ret[rk] = v
|
||||
if v1 not in ["N/A"]:
|
||||
ret[rk] = v1
|
||||
else:
|
||||
ret[rk] = v
|
||||
ret[rk] = v1
|
||||
|
||||
if ret.get("vc") == "ansi": # shellscript
|
||||
return {}, {}
|
||||
|
||||
for strm in streams:
|
||||
for k, v in strm.items():
|
||||
if not k.startswith("TAG:"):
|
||||
for sk, sv in strm.items():
|
||||
if not sk.startswith("TAG:"):
|
||||
continue
|
||||
|
||||
k = k[4:].strip()
|
||||
v = v.strip()
|
||||
if k and v and k not in md:
|
||||
md[k] = [v]
|
||||
sk = sk[4:].strip()
|
||||
sv = sv.strip()
|
||||
if sk and sv and sk not in md:
|
||||
md[sk] = [sv]
|
||||
|
||||
for k in [".q", ".vq", ".aq"]:
|
||||
if k in ret:
|
||||
ret[k] /= 1000 # bit_rate=320000
|
||||
for sk in [".q", ".vq", ".aq"]:
|
||||
if sk in ret:
|
||||
ret[sk] /= 1000 # bit_rate=320000
|
||||
|
||||
for k in [".q", ".vq", ".aq", ".resw", ".resh"]:
|
||||
if k in ret:
|
||||
ret[k] = int(ret[k])
|
||||
for sk in [".q", ".vq", ".aq", ".resw", ".resh"]:
|
||||
if sk in ret:
|
||||
ret[sk] = int(ret[sk])
|
||||
|
||||
if ".fps" in ret:
|
||||
fps = ret[".fps"]
|
||||
if "/" in fps:
|
||||
fa, fb = fps.split("/")
|
||||
fps = int(fa) * 1.0 / int(fb)
|
||||
try:
|
||||
fps = int(fa) * 1.0 / int(fb)
|
||||
except:
|
||||
fps = 9001
|
||||
|
||||
if fps < 1000 and fmt.get("format_name") not in ["image2", "png_pipe"]:
|
||||
ret[".fps"] = round(fps, 3)
|
||||
@@ -216,33 +244,32 @@ def parse_ffprobe(txt):
|
||||
if ".q" in ret:
|
||||
del ret[".q"]
|
||||
|
||||
if "fmt" in ret:
|
||||
ret["fmt"] = ret["fmt"].split(",")[0]
|
||||
|
||||
if ".resw" in ret and ".resh" in ret:
|
||||
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])
|
||||
|
||||
ret = {k: [0, v] for k, v in ret.items()}
|
||||
zd = {k: (0, v) for k, v in ret.items()}
|
||||
|
||||
return ret, md
|
||||
return zd, md
|
||||
|
||||
|
||||
class MTag(object):
|
||||
def __init__(self, log_func, args):
|
||||
def __init__(self, log_func: "RootLogger", args: argparse.Namespace) -> None:
|
||||
self.log_func = log_func
|
||||
self.args = args
|
||||
self.usable = True
|
||||
self.prefer_mt = not args.no_mtag_ff
|
||||
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
|
||||
self.can_ffprobe = (
|
||||
HAVE_FFPROBE
|
||||
and not args.no_mtag_ff
|
||||
and (not WINDOWS or sys.version_info >= (3, 8))
|
||||
)
|
||||
self.can_ffprobe = HAVE_FFPROBE and not args.no_mtag_ff
|
||||
mappings = args.mtm
|
||||
or_ffprobe = " or FFprobe"
|
||||
|
||||
if self.backend == "mutagen":
|
||||
self.get = self.get_mutagen
|
||||
try:
|
||||
import mutagen
|
||||
import mutagen # noqa: F401 # pylint: disable=unused-import,import-outside-toplevel
|
||||
except:
|
||||
self.log("could not load Mutagen, trying FFprobe instead", c=3)
|
||||
self.backend = "ffprobe"
|
||||
@@ -259,11 +286,6 @@ class MTag(object):
|
||||
msg = "found FFprobe but it was disabled by --no-mtag-ff"
|
||||
self.log(msg, c=3)
|
||||
|
||||
elif WINDOWS and sys.version_info < (3, 8):
|
||||
or_ffprobe = " or python >= 3.8"
|
||||
msg = "found FFprobe but your python is too old; need 3.8 or newer"
|
||||
self.log(msg, c=1)
|
||||
|
||||
if not self.usable:
|
||||
msg = "need Mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
|
||||
pybin = os.path.basename(sys.executable)
|
||||
@@ -339,31 +361,33 @@ class MTag(object):
|
||||
}
|
||||
# self.get = self.compare
|
||||
|
||||
def log(self, msg, c=0):
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func("mtag", msg, c)
|
||||
|
||||
def normalize_tags(self, ret, md):
|
||||
for k, v in dict(md).items():
|
||||
if not v:
|
||||
def normalize_tags(
|
||||
self, parser_output: dict[str, tuple[int, Any]], md: dict[str, list[Any]]
|
||||
) -> dict[str, Union[str, float]]:
|
||||
for sk, tv in dict(md).items():
|
||||
if not tv:
|
||||
continue
|
||||
|
||||
k = k.lower().split("::")[0].strip()
|
||||
mk = self.rmap.get(k)
|
||||
if not mk:
|
||||
sk = sk.lower().split("::")[0].strip()
|
||||
key_mapping = self.rmap.get(sk)
|
||||
if not key_mapping:
|
||||
continue
|
||||
|
||||
pref, mk = mk
|
||||
if mk not in ret or ret[mk][0] > pref:
|
||||
ret[mk] = [pref, v[0]]
|
||||
priority, alias = key_mapping
|
||||
if alias not in parser_output or parser_output[alias][0] > priority:
|
||||
parser_output[alias] = (priority, tv[0])
|
||||
|
||||
# take first value
|
||||
ret = {k: unicode(v[1]).strip() for k, v in ret.items()}
|
||||
# take first value (lowest priority / most preferred)
|
||||
ret = {sk: unicode(tv[1]).strip() for sk, tv in parser_output.items()}
|
||||
|
||||
# track 3/7 => track 3
|
||||
for k, v in ret.items():
|
||||
if k[0] == ".":
|
||||
v = v.split("/")[0].strip().lstrip("0")
|
||||
ret[k] = v or 0
|
||||
for sk, tv in ret.items():
|
||||
if sk[0] == ".":
|
||||
sv = str(tv).split("/")[0].strip().lstrip("0")
|
||||
ret[sk] = sv or 0
|
||||
|
||||
# normalize key notation to rkeobo
|
||||
okey = ret.get("key")
|
||||
@@ -373,7 +397,7 @@ class MTag(object):
|
||||
|
||||
return ret
|
||||
|
||||
def compare(self, abspath):
|
||||
def compare(self, abspath: str) -> dict[str, Union[str, float]]:
|
||||
if abspath.endswith(".au"):
|
||||
return {}
|
||||
|
||||
@@ -411,17 +435,26 @@ class MTag(object):
|
||||
|
||||
return r1
|
||||
|
||||
def get_mutagen(self, abspath):
|
||||
def get_mutagen(self, abspath: str) -> dict[str, Union[str, float]]:
|
||||
ret: dict[str, tuple[int, Any]] = {}
|
||||
|
||||
if not bos.path.isfile(abspath):
|
||||
return {}
|
||||
|
||||
import mutagen
|
||||
|
||||
try:
|
||||
md = mutagen.File(fsenc(abspath), easy=True)
|
||||
x = md.info.length
|
||||
except Exception as ex:
|
||||
if not md.info.length and not md.info.codec:
|
||||
raise Exception()
|
||||
except:
|
||||
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
|
||||
|
||||
sz = os.path.getsize(fsenc(abspath))
|
||||
ret = {".q": [0, int((sz / md.info.length) / 128)]}
|
||||
sz = bos.path.getsize(abspath)
|
||||
try:
|
||||
ret[".q"] = (0, int((sz / md.info.length) / 128))
|
||||
except:
|
||||
pass
|
||||
|
||||
for attr, k, norm in [
|
||||
["codec", "ac", unicode],
|
||||
@@ -433,7 +466,15 @@ class MTag(object):
|
||||
try:
|
||||
v = getattr(md.info, attr)
|
||||
except:
|
||||
continue
|
||||
if k != "ac":
|
||||
continue
|
||||
|
||||
try:
|
||||
v = str(md.info).split(".")[1]
|
||||
if v.startswith("ogg"):
|
||||
v = v[3:]
|
||||
except:
|
||||
continue
|
||||
|
||||
if not v:
|
||||
continue
|
||||
@@ -444,45 +485,74 @@ class MTag(object):
|
||||
if k == "ac" and v.startswith("mp4a.40."):
|
||||
v = "aac"
|
||||
|
||||
ret[k] = [0, norm(v)]
|
||||
ret[k] = (0, norm(v))
|
||||
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_ffprobe(self, abspath):
|
||||
ret, md = ffprobe(abspath)
|
||||
def get_ffprobe(self, abspath: str) -> dict[str, Union[str, float]]:
|
||||
if not bos.path.isfile(abspath):
|
||||
return {}
|
||||
|
||||
ret, md = ffprobe(abspath, self.args.mtag_to)
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_bin(self, parsers, abspath):
|
||||
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||
pypath = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||
pypath = str(os.pathsep.join(pypath))
|
||||
def get_bin(
|
||||
self, parsers: dict[str, MParser], abspath: str, oth_tags: dict[str, Any]
|
||||
) -> dict[str, Any]:
|
||||
if not bos.path.isfile(abspath):
|
||||
return {}
|
||||
|
||||
env = os.environ.copy()
|
||||
env["PYTHONPATH"] = pypath
|
||||
try:
|
||||
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||
zsl = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||
pypath = str(os.pathsep.join(zsl))
|
||||
env["PYTHONPATH"] = pypath
|
||||
except:
|
||||
if not E.ox:
|
||||
raise
|
||||
|
||||
ret = {}
|
||||
for tagname, mp in parsers.items():
|
||||
ret: dict[str, Any] = {}
|
||||
for tagname, parser in sorted(parsers.items(), key=lambda x: (x[1].pri, x[0])):
|
||||
try:
|
||||
cmd = [sys.executable, mp.bin, abspath]
|
||||
args = {"env": env, "timeout": mp.timeout}
|
||||
cmd = [parser.bin, abspath]
|
||||
if parser.bin.endswith(".py"):
|
||||
cmd = [sys.executable] + cmd
|
||||
|
||||
args = {
|
||||
"env": env,
|
||||
"timeout": parser.timeout,
|
||||
"kill": parser.kill,
|
||||
"capture": parser.capture,
|
||||
}
|
||||
|
||||
if parser.pri:
|
||||
zd = oth_tags.copy()
|
||||
zd.update(ret)
|
||||
args["sin"] = json.dumps(zd).encode("utf-8", "replace")
|
||||
|
||||
if WINDOWS:
|
||||
args["creationflags"] = 0x4000
|
||||
else:
|
||||
cmd = ["nice"] + cmd
|
||||
|
||||
cmd = [fsenc(x) for x in cmd]
|
||||
v = sp.check_output(cmd, **args).strip()
|
||||
bcmd = [fsenc(x) for x in cmd]
|
||||
rc, v, err = runcmd(bcmd, **args) # type: ignore
|
||||
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
|
||||
v = v.strip()
|
||||
if not v:
|
||||
continue
|
||||
|
||||
if "," not in tagname:
|
||||
ret[tagname] = v.decode("utf-8")
|
||||
ret[tagname] = v
|
||||
else:
|
||||
v = json.loads(v)
|
||||
zj = json.loads(v)
|
||||
for tag in tagname.split(","):
|
||||
if tag and tag in v:
|
||||
ret[tag] = v[tag]
|
||||
if tag and tag in zj:
|
||||
ret[tag] = zj[tag]
|
||||
except:
|
||||
pass
|
||||
if self.args.mtag_v:
|
||||
t = "mtag error: tagname {}, parser {}, file {} => {}"
|
||||
self.log(t.format(tagname, parser.bin, abspath, min_ex()))
|
||||
|
||||
return ret
|
||||
|
||||
@@ -1,23 +1,32 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import tarfile
|
||||
import threading
|
||||
|
||||
from .sutil import errdesc
|
||||
from .util import Queue, fsenc
|
||||
from queue import Queue
|
||||
|
||||
from .bos import bos
|
||||
from .sutil import StreamArc, errdesc
|
||||
from .util import fsenc, min_ex
|
||||
|
||||
try:
|
||||
from typing import Any, Generator, Optional
|
||||
|
||||
from .util import NamedLogger
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class QFile(object):
|
||||
class QFile(object): # inherit io.StringIO for painful typing
|
||||
"""file-like object which buffers writes into a queue"""
|
||||
|
||||
def __init__(self):
|
||||
self.q = Queue(64)
|
||||
self.bq = []
|
||||
def __init__(self) -> None:
|
||||
self.q: Queue[Optional[bytes]] = Queue(64)
|
||||
self.bq: list[bytes] = []
|
||||
self.nq = 0
|
||||
|
||||
def write(self, buf):
|
||||
def write(self, buf: Optional[bytes]) -> None:
|
||||
if buf is None or self.nq >= 240 * 1024:
|
||||
self.q.put(b"".join(self.bq))
|
||||
self.bq = []
|
||||
@@ -30,40 +39,47 @@ class QFile(object):
|
||||
self.nq += len(buf)
|
||||
|
||||
|
||||
class StreamTar(object):
|
||||
class StreamTar(StreamArc):
|
||||
"""construct in-memory tar file from the given path"""
|
||||
|
||||
def __init__(self, log, fgen, **kwargs):
|
||||
def __init__(
|
||||
self,
|
||||
log: "NamedLogger",
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
**kwargs: Any
|
||||
):
|
||||
super(StreamTar, self).__init__(log, fgen)
|
||||
|
||||
self.ci = 0
|
||||
self.co = 0
|
||||
self.qfile = QFile()
|
||||
self.log = log
|
||||
self.fgen = fgen
|
||||
self.errf = None
|
||||
self.errf: dict[str, Any] = {}
|
||||
|
||||
# python 3.8 changed to PAX_FORMAT as default,
|
||||
# waste of space and don't care about the new features
|
||||
fmt = tarfile.GNU_FORMAT
|
||||
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt)
|
||||
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt) # type: ignore
|
||||
|
||||
w = threading.Thread(target=self._gen, name="star-gen")
|
||||
w.daemon = True
|
||||
w.start()
|
||||
|
||||
def gen(self):
|
||||
while True:
|
||||
buf = self.qfile.q.get()
|
||||
if not buf:
|
||||
break
|
||||
def gen(self) -> Generator[Optional[bytes], None, None]:
|
||||
try:
|
||||
while True:
|
||||
buf = self.qfile.q.get()
|
||||
if not buf:
|
||||
break
|
||||
|
||||
self.co += len(buf)
|
||||
yield buf
|
||||
self.co += len(buf)
|
||||
yield buf
|
||||
|
||||
yield None
|
||||
if self.errf:
|
||||
os.unlink(self.errf["ap"])
|
||||
yield None
|
||||
finally:
|
||||
if self.errf:
|
||||
bos.unlink(self.errf["ap"])
|
||||
|
||||
def ser(self, f):
|
||||
def ser(self, f: dict[str, Any]) -> None:
|
||||
name = f["vp"]
|
||||
src = f["ap"]
|
||||
fsi = f["st"]
|
||||
@@ -76,20 +92,21 @@ class StreamTar(object):
|
||||
inf.gid = 0
|
||||
|
||||
self.ci += inf.size
|
||||
with open(fsenc(src), "rb", 512 * 1024) as f:
|
||||
self.tar.addfile(inf, f)
|
||||
with open(fsenc(src), "rb", 512 * 1024) as fo:
|
||||
self.tar.addfile(inf, fo)
|
||||
|
||||
def _gen(self):
|
||||
def _gen(self) -> None:
|
||||
errors = []
|
||||
for f in self.fgen:
|
||||
if "err" in f:
|
||||
errors.append([f["vp"], f["err"]])
|
||||
errors.append((f["vp"], f["err"]))
|
||||
continue
|
||||
|
||||
try:
|
||||
self.ser(f)
|
||||
except Exception as ex:
|
||||
errors.append([f["vp"], repr(ex)])
|
||||
except:
|
||||
ex = min_ex(5, True).replace("\n", "\n-- ")
|
||||
errors.append((f["vp"], ex))
|
||||
|
||||
if errors:
|
||||
self.errf, txt = errdesc(errors)
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
This is Victor Stinner's pure-Python implementation of PEP 383: the "surrogateescape" error
|
||||
handler of Python 3.
|
||||
@@ -10,23 +12,28 @@ Original source: misc/python/surrogateescape.py in https://bitbucket.org/haypo/m
|
||||
|
||||
# This code is released under the Python license and the BSD 2-clause license
|
||||
|
||||
import platform
|
||||
import codecs
|
||||
import platform
|
||||
import sys
|
||||
|
||||
PY3 = sys.version_info[0] > 2
|
||||
WINDOWS = platform.system() == "Windows"
|
||||
FS_ERRORS = "surrogateescape"
|
||||
|
||||
try:
|
||||
from typing import Any
|
||||
except:
|
||||
pass
|
||||
|
||||
def u(text):
|
||||
|
||||
def u(text: Any) -> str:
|
||||
if PY3:
|
||||
return text
|
||||
else:
|
||||
return text.decode("unicode_escape")
|
||||
|
||||
|
||||
def b(data):
|
||||
def b(data: Any) -> bytes:
|
||||
if PY3:
|
||||
return data.encode("latin1")
|
||||
else:
|
||||
@@ -41,7 +48,7 @@ else:
|
||||
bytes_chr = chr
|
||||
|
||||
|
||||
def surrogateescape_handler(exc):
|
||||
def surrogateescape_handler(exc: Any) -> tuple[str, int]:
|
||||
"""
|
||||
Pure Python implementation of the PEP 383: the "surrogateescape" error
|
||||
handler of Python 3. Undecodable bytes will be replaced by a Unicode
|
||||
@@ -72,7 +79,7 @@ class NotASurrogateError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def replace_surrogate_encode(mystring):
|
||||
def replace_surrogate_encode(mystring: str) -> str:
|
||||
"""
|
||||
Returns a (unicode) string, not the more logical bytes, because the codecs
|
||||
register_error functionality expects this.
|
||||
@@ -98,7 +105,7 @@ def replace_surrogate_encode(mystring):
|
||||
return str().join(decoded)
|
||||
|
||||
|
||||
def replace_surrogate_decode(mybytes):
|
||||
def replace_surrogate_decode(mybytes: bytes) -> str:
|
||||
"""
|
||||
Returns a (unicode) string
|
||||
"""
|
||||
@@ -119,7 +126,7 @@ def replace_surrogate_decode(mybytes):
|
||||
return str().join(decoded)
|
||||
|
||||
|
||||
def encodefilename(fn):
|
||||
def encodefilename(fn: str) -> bytes:
|
||||
if FS_ENCODING == "ascii":
|
||||
# ASCII encoder of Python 2 expects that the error handler returns a
|
||||
# Unicode string encodable to ASCII, whereas our surrogateescape error
|
||||
@@ -159,7 +166,7 @@ def encodefilename(fn):
|
||||
return fn.encode(FS_ENCODING, FS_ERRORS)
|
||||
|
||||
|
||||
def decodefilename(fn):
|
||||
def decodefilename(fn: bytes) -> str:
|
||||
return fn.decode(FS_ENCODING, FS_ERRORS)
|
||||
|
||||
|
||||
@@ -171,7 +178,7 @@ FS_ENCODING = sys.getfilesystemencoding()
|
||||
|
||||
if WINDOWS and not PY3:
|
||||
# py2 thinks win* is mbcs, probably a bug? anyways this works
|
||||
FS_ENCODING = 'utf-8'
|
||||
FS_ENCODING = "utf-8"
|
||||
|
||||
|
||||
# normalize the filesystem encoding name.
|
||||
@@ -179,7 +186,7 @@ if WINDOWS and not PY3:
|
||||
FS_ENCODING = codecs.lookup(FS_ENCODING).name
|
||||
|
||||
|
||||
def register_surrogateescape():
|
||||
def register_surrogateescape() -> None:
|
||||
"""
|
||||
Registers the surrogateescape error handler on Python 2 (only)
|
||||
"""
|
||||
|
||||
@@ -1,13 +1,34 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import time
|
||||
import tempfile
|
||||
from datetime import datetime
|
||||
|
||||
from .bos import bos
|
||||
|
||||
def errdesc(errors):
|
||||
try:
|
||||
from typing import Any, Generator, Optional
|
||||
|
||||
from .util import NamedLogger
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class StreamArc(object):
|
||||
def __init__(
|
||||
self,
|
||||
log: "NamedLogger",
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
**kwargs: Any
|
||||
):
|
||||
self.log = log
|
||||
self.fgen = fgen
|
||||
|
||||
def gen(self) -> Generator[Optional[bytes], None, None]:
|
||||
pass
|
||||
|
||||
|
||||
def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
|
||||
report = ["copyparty failed to add the following files to the archive:", ""]
|
||||
|
||||
for fn, err in errors:
|
||||
@@ -17,12 +38,11 @@ def errdesc(errors):
|
||||
tf_path = tf.name
|
||||
tf.write("\r\n".join(report).encode("utf-8", "replace"))
|
||||
|
||||
dt = datetime.utcfromtimestamp(time.time())
|
||||
dt = dt.strftime("%Y-%m%d-%H%M%S")
|
||||
dt = datetime.utcnow().strftime("%Y-%m%d-%H%M%S")
|
||||
|
||||
os.chmod(tf_path, 0o444)
|
||||
bos.chmod(tf_path, 0o444)
|
||||
return {
|
||||
"vp": "archive-errors-{}.txt".format(dt),
|
||||
"ap": tf_path,
|
||||
"st": os.stat(tf_path),
|
||||
"st": bos.stat(tf_path),
|
||||
}, report
|
||||
|
||||
@@ -1,48 +1,103 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import argparse
|
||||
import base64
|
||||
import calendar
|
||||
import gzip
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import re
|
||||
import shlex
|
||||
import string
|
||||
import signal
|
||||
import socket
|
||||
import string
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
import calendar
|
||||
|
||||
from .__init__ import E, PY2, WINDOWS, MACOS, VT100, unicode
|
||||
from .util import mp, start_log_thrs, start_stackmon, min_ex
|
||||
try:
|
||||
from types import FrameType
|
||||
|
||||
import typing
|
||||
from typing import Any, Optional, Union
|
||||
except:
|
||||
pass
|
||||
|
||||
from .__init__ import ANYWIN, MACOS, PY2, VT100, WINDOWS, EnvParams, unicode
|
||||
from .authsrv import AuthSrv
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
|
||||
from .tcpsrv import TcpSrv
|
||||
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
|
||||
from .up2k import Up2k
|
||||
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP
|
||||
from .util import (
|
||||
VERSIONS,
|
||||
HMaccas,
|
||||
alltrace,
|
||||
ansi_re,
|
||||
min_ex,
|
||||
mp,
|
||||
start_log_thrs,
|
||||
start_stackmon,
|
||||
)
|
||||
|
||||
|
||||
class SvcHub(object):
|
||||
"""
|
||||
Hosts all services which cannot be parallelized due to reliance on monolithic resources.
|
||||
Creates a Broker which does most of the heavy stuff; hosted services can use this to perform work:
|
||||
hub.broker.put(want_reply, destination, args_list).
|
||||
hub.broker.<say|ask>(destination, args_list).
|
||||
|
||||
Either BrokerThr (plain threads) or BrokerMP (multiprocessing) is used depending on configuration.
|
||||
Nothing is returned synchronously; if you want any value returned from the call,
|
||||
put() can return a queue (if want_reply=True) which has a blocking get() with the response.
|
||||
"""
|
||||
|
||||
def __init__(self, args, argv, printed):
|
||||
def __init__(self, args: argparse.Namespace, argv: list[str], printed: str) -> None:
|
||||
self.args = args
|
||||
self.argv = argv
|
||||
self.logf = None
|
||||
self.E: EnvParams = args.E
|
||||
self.logf: Optional[typing.TextIO] = None
|
||||
self.logf_base_fn = ""
|
||||
self.stop_req = False
|
||||
self.stopping = False
|
||||
self.stopped = False
|
||||
self.reload_req = False
|
||||
self.reloading = False
|
||||
self.stop_cond = threading.Condition()
|
||||
self.nsigs = 3
|
||||
self.retcode = 0
|
||||
self.httpsrv_up = 0
|
||||
|
||||
self.ansi_re = re.compile("\033\\[[^m]*m")
|
||||
self.log_mutex = threading.Lock()
|
||||
self.next_day = 0
|
||||
self.tstack = 0.0
|
||||
|
||||
self.iphash = HMaccas(os.path.join(self.E.cfg, "iphash"), 8)
|
||||
|
||||
if args.sss or args.s >= 3:
|
||||
args.ss = True
|
||||
args.lo = args.lo or "cpp-%Y-%m%d-%H%M%S.txt.xz"
|
||||
args.ls = args.ls or "**,*,ln,p,r"
|
||||
|
||||
if args.ss or args.s >= 2:
|
||||
args.s = True
|
||||
args.no_logues = True
|
||||
args.no_readme = True
|
||||
args.unpost = 0
|
||||
args.no_del = True
|
||||
args.no_mv = True
|
||||
args.hardlink = True
|
||||
args.vague_403 = True
|
||||
args.ban_404 = "50,60,1440"
|
||||
args.nih = True
|
||||
|
||||
if args.s:
|
||||
args.dotpart = True
|
||||
args.no_thumb = True
|
||||
args.no_mtag_ff = True
|
||||
args.no_robots = True
|
||||
args.force_js = True
|
||||
|
||||
self.log = self._log_disabled if args.q else self._log_enabled
|
||||
if args.lo:
|
||||
@@ -54,41 +109,129 @@ class SvcHub(object):
|
||||
if args.log_thrs:
|
||||
start_log_thrs(self.log, args.log_thrs, 0)
|
||||
|
||||
if not args.use_fpool and args.j != 1:
|
||||
args.no_fpool = True
|
||||
t = "multithreading enabled with -j {}, so disabling fpool -- this can reduce upload performance on some filesystems"
|
||||
self.log("root", t.format(args.j))
|
||||
|
||||
if not args.no_fpool and args.j != 1:
|
||||
t = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
|
||||
if ANYWIN:
|
||||
t = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
|
||||
args.no_fpool = True
|
||||
|
||||
self.log("root", t, c=3)
|
||||
|
||||
bri = "zy"[args.theme % 2 :][:1]
|
||||
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
|
||||
args.theme = "{0}{1} {0} {1}".format(ch, bri)
|
||||
|
||||
if not args.hardlink and args.never_symlink:
|
||||
args.no_dedup = True
|
||||
|
||||
if args.log_fk:
|
||||
args.log_fk = re.compile(args.log_fk)
|
||||
|
||||
# initiate all services to manage
|
||||
self.asrv = AuthSrv(self.args, self.log, False)
|
||||
self.asrv = AuthSrv(self.args, self.log)
|
||||
if args.ls:
|
||||
self.asrv.dbg_ls()
|
||||
|
||||
self.tcpsrv = TcpSrv(self)
|
||||
self.up2k = Up2k(self)
|
||||
|
||||
decs = {k: 1 for k in self.args.th_dec.split(",")}
|
||||
if not HAVE_VIPS:
|
||||
decs.pop("vips", None)
|
||||
if not HAVE_PIL:
|
||||
decs.pop("pil", None)
|
||||
if not HAVE_FFMPEG or not HAVE_FFPROBE:
|
||||
decs.pop("ff", None)
|
||||
|
||||
self.args.th_dec = list(decs.keys())
|
||||
self.thumbsrv = None
|
||||
if not args.no_thumb:
|
||||
if HAVE_PIL:
|
||||
if not HAVE_WEBP:
|
||||
args.th_no_webp = True
|
||||
msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old"
|
||||
self.log("thumb", msg, c=3)
|
||||
t = ", ".join(self.args.th_dec) or "(None available)"
|
||||
self.log("thumb", "decoder preference: {}".format(t))
|
||||
|
||||
if "pil" in self.args.th_dec and not HAVE_WEBP:
|
||||
msg = "disabling webp thumbnails because either libwebp is not available or your Pillow is too old"
|
||||
self.log("thumb", msg, c=3)
|
||||
|
||||
if self.args.th_dec:
|
||||
self.thumbsrv = ThumbSrv(self)
|
||||
else:
|
||||
msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n"
|
||||
self.log(
|
||||
"thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3
|
||||
)
|
||||
msg = "need either Pillow, pyvips, or FFmpeg to create thumbnails; for example:\n{0}{1} -m pip install --user Pillow\n{0}{1} -m pip install --user pyvips\n{0}apt install ffmpeg"
|
||||
msg = msg.format(" " * 37, os.path.basename(sys.executable))
|
||||
self.log("thumb", msg, c=3)
|
||||
|
||||
if not args.no_acode and args.no_thumb:
|
||||
msg = "setting --no-acode because --no-thumb (sorry)"
|
||||
self.log("thumb", msg, c=6)
|
||||
args.no_acode = True
|
||||
|
||||
if not args.no_acode and (not HAVE_FFMPEG or not HAVE_FFPROBE):
|
||||
msg = "setting --no-acode because either FFmpeg or FFprobe is not available"
|
||||
self.log("thumb", msg, c=6)
|
||||
args.no_acode = True
|
||||
|
||||
args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)
|
||||
|
||||
if args.ftp or args.ftps:
|
||||
from .ftpd import Ftpd
|
||||
|
||||
self.ftpd = Ftpd(self)
|
||||
|
||||
# decide which worker impl to use
|
||||
if self.check_mp_enable():
|
||||
from .broker_mp import BrokerMp as Broker
|
||||
else:
|
||||
self.log("root", "cannot efficiently use multiple CPU cores")
|
||||
from .broker_thr import BrokerThr as Broker
|
||||
from .broker_thr import BrokerThr as Broker # type: ignore
|
||||
|
||||
self.broker = Broker(self)
|
||||
|
||||
def _logname(self):
|
||||
dt = datetime.utcfromtimestamp(time.time())
|
||||
fn = self.args.lo
|
||||
def thr_httpsrv_up(self) -> None:
|
||||
time.sleep(1 if self.args.ign_ebind_all else 5)
|
||||
expected = self.broker.num_workers * self.tcpsrv.nsrv
|
||||
failed = expected - self.httpsrv_up
|
||||
if not failed:
|
||||
return
|
||||
|
||||
if self.args.ign_ebind_all:
|
||||
if not self.tcpsrv.srv:
|
||||
for _ in range(self.broker.num_workers):
|
||||
self.broker.say("cb_httpsrv_up")
|
||||
return
|
||||
|
||||
if self.args.ign_ebind and self.tcpsrv.srv:
|
||||
return
|
||||
|
||||
t = "{}/{} workers failed to start"
|
||||
t = t.format(failed, expected)
|
||||
self.log("root", t, 1)
|
||||
|
||||
self.retcode = 1
|
||||
self.sigterm()
|
||||
|
||||
def sigterm(self) -> None:
|
||||
os.kill(os.getpid(), signal.SIGTERM)
|
||||
|
||||
def cb_httpsrv_up(self) -> None:
|
||||
self.httpsrv_up += 1
|
||||
if self.httpsrv_up != self.broker.num_workers:
|
||||
return
|
||||
|
||||
time.sleep(0.1) # purely cosmetic dw
|
||||
self.log("root", "workers OK\n")
|
||||
self.up2k.init_vols()
|
||||
|
||||
thr = threading.Thread(target=self.sd_notify, name="sd-notify")
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def _logname(self) -> str:
|
||||
dt = datetime.utcnow()
|
||||
fn = str(self.args.lo)
|
||||
for fs in "YmdHMS":
|
||||
fs = "%" + fs
|
||||
if fs in fn:
|
||||
@@ -96,7 +239,7 @@ class SvcHub(object):
|
||||
|
||||
return fn
|
||||
|
||||
def _setup_logfile(self, printed):
|
||||
def _setup_logfile(self, printed: str) -> None:
|
||||
base_fn = fn = sel_fn = self._logname()
|
||||
if fn != self.args.lo:
|
||||
ctr = 0
|
||||
@@ -118,8 +261,6 @@ class SvcHub(object):
|
||||
|
||||
lh = codecs.open(fn, "w", encoding="utf-8", errors="replace")
|
||||
|
||||
lh.base_fn = base_fn
|
||||
|
||||
argv = [sys.executable] + self.argv
|
||||
if hasattr(shlex, "quote"):
|
||||
argv = [shlex.quote(x) for x in argv]
|
||||
@@ -128,57 +269,117 @@ class SvcHub(object):
|
||||
|
||||
msg = "[+] opened logfile [{}]\n".format(fn)
|
||||
printed += msg
|
||||
lh.write("t0: {:.3f}\nargv: {}\n\n{}".format(E.t0, " ".join(argv), printed))
|
||||
lh.write("t0: {:.3f}\nargv: {}\n\n{}".format(self.E.t0, " ".join(argv), printed))
|
||||
self.logf = lh
|
||||
self.logf_base_fn = base_fn
|
||||
print(msg, end="")
|
||||
|
||||
def run(self):
|
||||
def run(self) -> None:
|
||||
self.tcpsrv.run()
|
||||
|
||||
thr = threading.Thread(target=self.sd_notify, name="sd-notify")
|
||||
thr = threading.Thread(target=self.thr_httpsrv_up, name="sig-hsrv-up2")
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
thr = threading.Thread(target=self.stop_thr, name="svchub-sig")
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
sigs = [signal.SIGINT, signal.SIGTERM]
|
||||
if not ANYWIN:
|
||||
sigs.append(signal.SIGUSR1)
|
||||
|
||||
for sig in [signal.SIGINT, signal.SIGTERM]:
|
||||
for sig in sigs:
|
||||
signal.signal(sig, self.signal_handler)
|
||||
|
||||
try:
|
||||
while not self.stop_req:
|
||||
time.sleep(9001)
|
||||
except:
|
||||
pass
|
||||
# macos hangs after shutdown on sigterm with while-sleep,
|
||||
# windows cannot ^c stop_cond (and win10 does the macos thing but winxp is fine??)
|
||||
# linux is fine with both,
|
||||
# never lucky
|
||||
if ANYWIN:
|
||||
# msys-python probably fine but >msys-python
|
||||
thr = threading.Thread(target=self.stop_thr, name="svchub-sig")
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
self.shutdown()
|
||||
try:
|
||||
while not self.stop_req:
|
||||
time.sleep(1)
|
||||
except:
|
||||
pass
|
||||
|
||||
def stop_thr(self):
|
||||
self.shutdown()
|
||||
# cant join; eats signals on win10
|
||||
while not self.stopped:
|
||||
time.sleep(0.1)
|
||||
else:
|
||||
self.stop_thr()
|
||||
|
||||
def reload(self) -> str:
|
||||
if self.reloading:
|
||||
return "cannot reload; already in progress"
|
||||
|
||||
self.reloading = True
|
||||
t = threading.Thread(target=self._reload, name="reloading")
|
||||
t.daemon = True
|
||||
t.start()
|
||||
return "reload initiated"
|
||||
|
||||
def _reload(self) -> None:
|
||||
self.log("root", "reload scheduled")
|
||||
with self.up2k.mutex:
|
||||
self.asrv.reload()
|
||||
self.up2k.reload()
|
||||
self.broker.reload()
|
||||
|
||||
self.reloading = False
|
||||
|
||||
def stop_thr(self) -> None:
|
||||
while not self.stop_req:
|
||||
with self.stop_cond:
|
||||
self.stop_cond.wait(9001)
|
||||
|
||||
if self.reload_req:
|
||||
self.reload_req = False
|
||||
self.reload()
|
||||
|
||||
self.shutdown()
|
||||
|
||||
def signal_handler(self):
|
||||
def signal_handler(self, sig: int, frame: Optional[FrameType]) -> None:
|
||||
if self.stopping:
|
||||
if self.nsigs <= 0:
|
||||
try:
|
||||
threading.Thread(target=self.pr, args=("OMBO BREAKER",)).start()
|
||||
time.sleep(0.1)
|
||||
except:
|
||||
pass
|
||||
|
||||
if ANYWIN:
|
||||
os.system("taskkill /f /pid {}".format(os.getpid()))
|
||||
else:
|
||||
os.kill(os.getpid(), signal.SIGKILL)
|
||||
else:
|
||||
self.nsigs -= 1
|
||||
return
|
||||
|
||||
if not ANYWIN and sig == signal.SIGUSR1:
|
||||
self.reload_req = True
|
||||
else:
|
||||
self.stop_req = True
|
||||
|
||||
with self.stop_cond:
|
||||
self.stop_cond.notify_all()
|
||||
|
||||
def shutdown(self) -> None:
|
||||
if self.stopping:
|
||||
return
|
||||
|
||||
# start_log_thrs(print, 0.1, 1)
|
||||
|
||||
self.stopping = True
|
||||
self.stop_req = True
|
||||
with self.stop_cond:
|
||||
self.stop_cond.notify_all()
|
||||
|
||||
def shutdown(self):
|
||||
if self.stopping:
|
||||
return
|
||||
|
||||
self.stopping = True
|
||||
self.stop_req = True
|
||||
ret = 1
|
||||
try:
|
||||
with self.log_mutex:
|
||||
print("OPYTHAT")
|
||||
|
||||
self.pr("OPYTHAT")
|
||||
self.tcpsrv.shutdown()
|
||||
self.broker.shutdown()
|
||||
self.up2k.shutdown()
|
||||
@@ -191,33 +392,43 @@ class SvcHub(object):
|
||||
break
|
||||
|
||||
if n == 3:
|
||||
print("waiting for thumbsrv (10sec)...")
|
||||
self.pr("waiting for thumbsrv (10sec)...")
|
||||
|
||||
print("nailed it", end="")
|
||||
self.pr("nailed it", end="")
|
||||
ret = self.retcode
|
||||
except:
|
||||
self.pr("\033[31m[ error during shutdown ]\n{}\033[0m".format(min_ex()))
|
||||
raise
|
||||
finally:
|
||||
print("\033[0m")
|
||||
if self.args.wintitle:
|
||||
print("\033]0;\033\\", file=sys.stderr, end="")
|
||||
sys.stderr.flush()
|
||||
|
||||
self.pr("\033[0m")
|
||||
if self.logf:
|
||||
self.logf.close()
|
||||
|
||||
def _log_disabled(self, src, msg, c=0):
|
||||
self.stopped = True
|
||||
sys.exit(ret)
|
||||
|
||||
def _log_disabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||
if not self.logf:
|
||||
return
|
||||
|
||||
with self.log_mutex:
|
||||
ts = datetime.utcfromtimestamp(time.time())
|
||||
ts = ts.strftime("%Y-%m%d-%H%M%S.%f")[:-3]
|
||||
ts = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")[:-3]
|
||||
self.logf.write("@{} [{}] {}\n".format(ts, src, msg))
|
||||
|
||||
now = time.time()
|
||||
if now >= self.next_day:
|
||||
self._set_next_day()
|
||||
|
||||
def _set_next_day(self):
|
||||
if self.next_day and self.logf and self.logf.base_fn != self._logname():
|
||||
def _set_next_day(self) -> None:
|
||||
if self.next_day and self.logf and self.logf_base_fn != self._logname():
|
||||
self.logf.close()
|
||||
self._setup_logfile("")
|
||||
|
||||
dt = datetime.utcfromtimestamp(time.time())
|
||||
dt = datetime.utcnow()
|
||||
|
||||
# unix timestamp of next 00:00:00 (leap-seconds safe)
|
||||
day_now = dt.day
|
||||
@@ -227,7 +438,7 @@ class SvcHub(object):
|
||||
dt = dt.replace(hour=0, minute=0, second=0)
|
||||
self.next_day = calendar.timegm(dt.utctimetuple())
|
||||
|
||||
def _log_enabled(self, src, msg, c=0):
|
||||
def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||
"""handles logging from all components"""
|
||||
with self.log_mutex:
|
||||
now = time.time()
|
||||
@@ -240,12 +451,12 @@ class SvcHub(object):
|
||||
if not VT100:
|
||||
fmt = "{} {:21} {}\n"
|
||||
if "\033" in msg:
|
||||
msg = self.ansi_re.sub("", msg)
|
||||
msg = ansi_re.sub("", msg)
|
||||
if "\033" in src:
|
||||
src = self.ansi_re.sub("", src)
|
||||
src = ansi_re.sub("", src)
|
||||
elif c:
|
||||
if isinstance(c, int):
|
||||
msg = "\033[3{}m{}".format(c, msg)
|
||||
msg = "\033[3{}m{}\033[0m".format(c, msg)
|
||||
elif "\033" not in c:
|
||||
msg = "\033[{}m{}\033[0m".format(c, msg)
|
||||
else:
|
||||
@@ -264,7 +475,11 @@ class SvcHub(object):
|
||||
if self.logf:
|
||||
self.logf.write(msg)
|
||||
|
||||
def check_mp_support(self):
|
||||
def pr(self, *a: Any, **ka: Any) -> None:
|
||||
with self.log_mutex:
|
||||
print(*a, **ka)
|
||||
|
||||
def check_mp_support(self) -> str:
|
||||
vmin = sys.version_info[1]
|
||||
if WINDOWS:
|
||||
msg = "need python 3.3 or newer for multiprocessing;"
|
||||
@@ -278,21 +493,24 @@ class SvcHub(object):
|
||||
return msg
|
||||
|
||||
try:
|
||||
x = mp.Queue(1)
|
||||
x.put(["foo", "bar"])
|
||||
x: mp.Queue[tuple[str, str]] = mp.Queue(1)
|
||||
x.put(("foo", "bar"))
|
||||
if x.get()[0] != "foo":
|
||||
raise Exception()
|
||||
except:
|
||||
return "multiprocessing is not supported on your platform;"
|
||||
|
||||
return None
|
||||
return ""
|
||||
|
||||
def check_mp_enable(self):
|
||||
def check_mp_enable(self) -> bool:
|
||||
if self.args.j == 1:
|
||||
self.log("root", "multiprocessing disabled by argument -j 1;")
|
||||
return False
|
||||
|
||||
if mp.cpu_count() <= 1:
|
||||
try:
|
||||
if mp.cpu_count() <= 1:
|
||||
raise Exception()
|
||||
except:
|
||||
self.log("svchub", "only one CPU detected; multiprocessing disabled")
|
||||
return False
|
||||
|
||||
try:
|
||||
@@ -307,23 +525,37 @@ class SvcHub(object):
|
||||
return True
|
||||
else:
|
||||
self.log("svchub", err)
|
||||
self.log("svchub", "cannot efficiently use multiple CPU cores")
|
||||
return False
|
||||
|
||||
def sd_notify(self):
|
||||
def sd_notify(self) -> None:
|
||||
try:
|
||||
addr = os.getenv("NOTIFY_SOCKET")
|
||||
if not addr:
|
||||
zb = os.getenv("NOTIFY_SOCKET")
|
||||
if not zb:
|
||||
return
|
||||
|
||||
addr = unicode(addr)
|
||||
addr = unicode(zb)
|
||||
if addr.startswith("@"):
|
||||
addr = "\0" + addr[1:]
|
||||
|
||||
m = "".join(x for x in addr if x in string.printable)
|
||||
self.log("sd_notify", m)
|
||||
t = "".join(x for x in addr if x in string.printable)
|
||||
self.log("sd_notify", t)
|
||||
|
||||
sck = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
|
||||
sck.connect(addr)
|
||||
sck.sendall(b"READY=1")
|
||||
except:
|
||||
self.log("sd_notify", min_ex())
|
||||
|
||||
def log_stacks(self) -> None:
|
||||
td = time.time() - self.tstack
|
||||
if td < 300:
|
||||
self.log("stacks", "cooldown {}".format(td))
|
||||
return
|
||||
|
||||
self.tstack = time.time()
|
||||
zs = "{}\n{}".format(VERSIONS, alltrace())
|
||||
zb = zs.encode("utf-8", "replace")
|
||||
zb = gzip.compress(zb)
|
||||
zs = base64.b64encode(zb).decode("ascii")
|
||||
self.log("stacks", zs)
|
||||
|
||||
@@ -1,16 +1,23 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import calendar
|
||||
import time
|
||||
import zlib
|
||||
from datetime import datetime
|
||||
|
||||
from .sutil import errdesc
|
||||
from .util import yieldfile, sanitize_fn, spack, sunpack
|
||||
from .bos import bos
|
||||
from .sutil import StreamArc, errdesc
|
||||
from .util import min_ex, sanitize_fn, spack, sunpack, yieldfile
|
||||
|
||||
try:
|
||||
from typing import Any, Generator, Optional
|
||||
|
||||
from .util import NamedLogger
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def dostime2unix(buf):
|
||||
def dostime2unix(buf: bytes) -> int:
|
||||
t, d = sunpack(b"<HH", buf)
|
||||
|
||||
ts = (t & 0x1F) * 2
|
||||
@@ -25,27 +32,38 @@ def dostime2unix(buf):
|
||||
tf = "{:04d}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}"
|
||||
iso = tf.format(*tt)
|
||||
|
||||
dt = datetime.strptime(iso, "%Y-%m-%d %H:%M:%S")
|
||||
return int(dt.timestamp())
|
||||
dt = time.strptime(iso, "%Y-%m-%d %H:%M:%S")
|
||||
return int(calendar.timegm(dt))
|
||||
|
||||
|
||||
def unixtime2dos(ts):
|
||||
tt = time.gmtime(ts)
|
||||
def unixtime2dos(ts: int) -> bytes:
|
||||
tt = time.gmtime(ts + 1)
|
||||
dy, dm, dd, th, tm, ts = list(tt)[:6]
|
||||
|
||||
bd = ((dy - 1980) << 9) + (dm << 5) + dd
|
||||
bt = (th << 11) + (tm << 5) + ts // 2
|
||||
return spack(b"<HH", bt, bd)
|
||||
try:
|
||||
return spack(b"<HH", bt, bd)
|
||||
except:
|
||||
return b"\x00\x00\x21\x00"
|
||||
|
||||
|
||||
def gen_fdesc(sz, crc32, z64):
|
||||
def gen_fdesc(sz: int, crc32: int, z64: bool) -> bytes:
|
||||
ret = b"\x50\x4b\x07\x08"
|
||||
fmt = b"<LQQ" if z64 else b"<LLL"
|
||||
ret += spack(fmt, crc32, sz, sz)
|
||||
return ret
|
||||
|
||||
|
||||
def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
|
||||
def gen_hdr(
|
||||
h_pos: Optional[int],
|
||||
fn: str,
|
||||
sz: int,
|
||||
lastmod: int,
|
||||
utf8: bool,
|
||||
icrc32: int,
|
||||
pre_crc: bool,
|
||||
) -> bytes:
|
||||
"""
|
||||
does regular file headers
|
||||
and the central directory meme if h_pos is set
|
||||
@@ -64,8 +82,8 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
|
||||
# confusingly this doesn't bump if h_pos
|
||||
req_ver = b"\x2d\x00" if z64 else b"\x0a\x00"
|
||||
|
||||
if crc32:
|
||||
crc32 = spack(b"<L", crc32)
|
||||
if icrc32:
|
||||
crc32 = spack(b"<L", icrc32)
|
||||
else:
|
||||
crc32 = b"\x00" * 4
|
||||
|
||||
@@ -73,7 +91,7 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
|
||||
# 4b magic, 2b min-ver
|
||||
ret = b"\x50\x4b\x03\x04" + req_ver
|
||||
else:
|
||||
# 4b magic, 2b spec-ver, 2b min-ver
|
||||
# 4b magic, 2b spec-ver (1b compat, 1b os (00 dos, 03 unix)), 2b min-ver
|
||||
ret = b"\x50\x4b\x01\x02\x1e\x03" + req_ver
|
||||
|
||||
ret += b"\x00" if pre_crc else b"\x08" # streaming
|
||||
@@ -92,30 +110,43 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
|
||||
fn = sanitize_fn(fn, "/", [])
|
||||
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
|
||||
|
||||
# add ntfs (0x24) and/or unix (0x10) extrafields for utc, add z64 if requested
|
||||
z64_len = len(z64v) * 8 + 4 if z64v else 0
|
||||
ret += spack(b"<HH", len(bfn), z64_len)
|
||||
ret += spack(b"<HH", len(bfn), 0x10 + z64_len)
|
||||
|
||||
if h_pos is not None:
|
||||
# 2b comment, 2b diskno
|
||||
ret += b"\x00" * 4
|
||||
|
||||
# 2b internal.attr, 4b external.attr
|
||||
# infozip-macos: 0100 0000 a481 file:644
|
||||
# infozip-macos: 0100 0100 0080 file:000
|
||||
ret += b"\x01\x00\x00\x00\xa4\x81"
|
||||
# infozip-macos: 0100 0000 a481 (spec-ver 1e03) file:644
|
||||
# infozip-macos: 0100 0100 0080 (spec-ver 1e03) file:000
|
||||
# win10-zip: 0000 2000 0000 (spec-ver xx00) FILE_ATTRIBUTE_ARCHIVE
|
||||
ret += b"\x00\x00\x00\x00\xa4\x81" # unx
|
||||
# ret += b"\x00\x00\x20\x00\x00\x00" # fat
|
||||
|
||||
# 4b local-header-ofs
|
||||
ret += spack(b"<L", min(h_pos, 0xFFFFFFFF))
|
||||
|
||||
ret += bfn
|
||||
|
||||
# ntfs: type 0a, size 20, rsvd, attr1, len 18, mtime, atime, ctime
|
||||
# b"\xa3\x2f\x82\x41\x55\x68\xd8\x01" 1652616838.798941100 ~5.861518 132970904387989411 ~58615181
|
||||
# nt = int((lastmod + 11644473600) * 10000000)
|
||||
# ret += spack(b"<HHLHHQQQ", 0xA, 0x20, 0, 1, 0x18, nt, nt, nt)
|
||||
|
||||
# unix: type 0d, size 0c, atime, mtime, uid, gid
|
||||
ret += spack(b"<HHLLHH", 0xD, 0xC, int(lastmod), int(lastmod), 1000, 1000)
|
||||
|
||||
if z64v:
|
||||
ret += spack(b"<HH" + b"Q" * len(z64v), 1, len(z64v) * 8, *z64v)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def gen_ecdr(items, cdir_pos, cdir_end):
|
||||
def gen_ecdr(
|
||||
items: list[tuple[str, int, int, int, int]], cdir_pos: int, cdir_end: int
|
||||
) -> tuple[bytes, bool]:
|
||||
"""
|
||||
summary of all file headers,
|
||||
usually the zipfile footer unless something clamps
|
||||
@@ -140,10 +171,12 @@ def gen_ecdr(items, cdir_pos, cdir_end):
|
||||
# 2b comment length
|
||||
ret += b"\x00\x00"
|
||||
|
||||
return [ret, need_64]
|
||||
return ret, need_64
|
||||
|
||||
|
||||
def gen_ecdr64(items, cdir_pos, cdir_end):
|
||||
def gen_ecdr64(
|
||||
items: list[tuple[str, int, int, int, int]], cdir_pos: int, cdir_end: int
|
||||
) -> bytes:
|
||||
"""
|
||||
z64 end of central directory
|
||||
added when numfiles or a headerptr clamps
|
||||
@@ -167,7 +200,7 @@ def gen_ecdr64(items, cdir_pos, cdir_end):
|
||||
return ret
|
||||
|
||||
|
||||
def gen_ecdr64_loc(ecdr64_pos):
|
||||
def gen_ecdr64_loc(ecdr64_pos: int) -> bytes:
|
||||
"""
|
||||
z64 end of central directory locator
|
||||
points to ecdr64
|
||||
@@ -182,31 +215,36 @@ def gen_ecdr64_loc(ecdr64_pos):
|
||||
return ret
|
||||
|
||||
|
||||
class StreamZip(object):
|
||||
def __init__(self, log, fgen, utf8=False, pre_crc=False):
|
||||
self.log = log
|
||||
self.fgen = fgen
|
||||
class StreamZip(StreamArc):
|
||||
def __init__(
|
||||
self,
|
||||
log: "NamedLogger",
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
utf8: bool = False,
|
||||
pre_crc: bool = False,
|
||||
) -> None:
|
||||
super(StreamZip, self).__init__(log, fgen)
|
||||
|
||||
self.utf8 = utf8
|
||||
self.pre_crc = pre_crc
|
||||
|
||||
self.pos = 0
|
||||
self.items = []
|
||||
self.items: list[tuple[str, int, int, int, int]] = []
|
||||
|
||||
def _ct(self, buf):
|
||||
def _ct(self, buf: bytes) -> bytes:
|
||||
self.pos += len(buf)
|
||||
return buf
|
||||
|
||||
def ser(self, f):
|
||||
def ser(self, f: dict[str, Any]) -> Generator[bytes, None, None]:
|
||||
name = f["vp"]
|
||||
src = f["ap"]
|
||||
st = f["st"]
|
||||
|
||||
sz = st.st_size
|
||||
ts = st.st_mtime + 1
|
||||
ts = st.st_mtime
|
||||
|
||||
crc = None
|
||||
crc = 0
|
||||
if self.pre_crc:
|
||||
crc = 0
|
||||
for buf in yieldfile(src):
|
||||
crc = zlib.crc32(buf, crc)
|
||||
|
||||
@@ -216,7 +254,6 @@ class StreamZip(object):
|
||||
buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc)
|
||||
yield self._ct(buf)
|
||||
|
||||
crc = crc or 0
|
||||
for buf in yieldfile(src):
|
||||
if not self.pre_crc:
|
||||
crc = zlib.crc32(buf, crc)
|
||||
@@ -225,7 +262,7 @@ class StreamZip(object):
|
||||
|
||||
crc &= 0xFFFFFFFF
|
||||
|
||||
self.items.append([name, sz, ts, crc, h_pos])
|
||||
self.items.append((name, sz, ts, crc, h_pos))
|
||||
|
||||
z64 = sz >= 4 * 1024 * 1024 * 1024
|
||||
|
||||
@@ -233,42 +270,46 @@ class StreamZip(object):
|
||||
buf = gen_fdesc(sz, crc, z64)
|
||||
yield self._ct(buf)
|
||||
|
||||
def gen(self):
|
||||
def gen(self) -> Generator[bytes, None, None]:
|
||||
errors = []
|
||||
for f in self.fgen:
|
||||
if "err" in f:
|
||||
errors.append([f["vp"], f["err"]])
|
||||
continue
|
||||
try:
|
||||
for f in self.fgen:
|
||||
if "err" in f:
|
||||
errors.append((f["vp"], f["err"]))
|
||||
continue
|
||||
|
||||
try:
|
||||
for x in self.ser(f):
|
||||
try:
|
||||
for x in self.ser(f):
|
||||
yield x
|
||||
except GeneratorExit:
|
||||
raise
|
||||
except:
|
||||
ex = min_ex(5, True).replace("\n", "\n-- ")
|
||||
errors.append((f["vp"], ex))
|
||||
|
||||
if errors:
|
||||
errf, txt = errdesc(errors)
|
||||
self.log("\n".join(([repr(errf)] + txt[1:])))
|
||||
for x in self.ser(errf):
|
||||
yield x
|
||||
except Exception as ex:
|
||||
errors.append([f["vp"], repr(ex)])
|
||||
|
||||
if errors:
|
||||
errf, txt = errdesc(errors)
|
||||
self.log("\n".join(([repr(errf)] + txt[1:])))
|
||||
for x in self.ser(errf):
|
||||
yield x
|
||||
cdir_pos = self.pos
|
||||
for name, sz, ts, crc, h_pos in self.items:
|
||||
buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
|
||||
yield self._ct(buf)
|
||||
cdir_end = self.pos
|
||||
|
||||
cdir_pos = self.pos
|
||||
for name, sz, ts, crc, h_pos in self.items:
|
||||
buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
|
||||
yield self._ct(buf)
|
||||
cdir_end = self.pos
|
||||
_, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||
if need_64:
|
||||
ecdir64_pos = self.pos
|
||||
buf = gen_ecdr64(self.items, cdir_pos, cdir_end)
|
||||
yield self._ct(buf)
|
||||
|
||||
_, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||
if need_64:
|
||||
ecdir64_pos = self.pos
|
||||
buf = gen_ecdr64(self.items, cdir_pos, cdir_end)
|
||||
yield self._ct(buf)
|
||||
buf = gen_ecdr64_loc(ecdir64_pos)
|
||||
yield self._ct(buf)
|
||||
|
||||
buf = gen_ecdr64_loc(ecdir64_pos)
|
||||
yield self._ct(buf)
|
||||
|
||||
ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||
yield self._ct(ecdr)
|
||||
|
||||
if errors:
|
||||
os.unlink(errf["ap"])
|
||||
ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||
yield self._ct(ecdr)
|
||||
finally:
|
||||
if errors:
|
||||
bos.unlink(errf["ap"])
|
||||
|
||||
@@ -1,11 +1,17 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
|
||||
from .__init__ import ANYWIN, MACOS, TYPE_CHECKING, unicode
|
||||
from .util import chkcmd
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
|
||||
class TcpSrv(object):
|
||||
"""
|
||||
@@ -13,12 +19,37 @@ class TcpSrv(object):
|
||||
which then uses the least busy HttpSrv to handle it
|
||||
"""
|
||||
|
||||
def __init__(self, hub):
|
||||
def __init__(self, hub: "SvcHub"):
|
||||
self.hub = hub
|
||||
self.args = hub.args
|
||||
self.log = hub.log
|
||||
|
||||
# mp-safe since issue6056
|
||||
socket.setdefaulttimeout(120)
|
||||
|
||||
self.stopping = False
|
||||
self.srv: list[socket.socket] = []
|
||||
self.nsrv = 0
|
||||
ok: dict[str, list[int]] = {}
|
||||
for ip in self.args.i:
|
||||
ok[ip] = []
|
||||
for port in self.args.p:
|
||||
self.nsrv += 1
|
||||
try:
|
||||
self._listen(ip, port)
|
||||
ok[ip].append(port)
|
||||
except Exception as ex:
|
||||
if self.args.ign_ebind or self.args.ign_ebind_all:
|
||||
t = "could not listen on {}:{}: {}"
|
||||
self.log("tcpsrv", t.format(ip, port, ex), c=3)
|
||||
else:
|
||||
raise
|
||||
|
||||
if not self.srv and not self.args.ign_ebind_all:
|
||||
raise Exception("could not listen on any of the given interfaces")
|
||||
|
||||
if self.nsrv != len(self.srv):
|
||||
self.log("tcpsrv", "")
|
||||
|
||||
ip = "127.0.0.1"
|
||||
eps = {ip: "local only"}
|
||||
@@ -29,27 +60,64 @@ class TcpSrv(object):
|
||||
for x in nonlocals:
|
||||
eps[x] = "external"
|
||||
|
||||
msgs = []
|
||||
title_tab: dict[str, dict[str, int]] = {}
|
||||
title_vars = [x[1:] for x in self.args.wintitle.split(" ") if x.startswith("$")]
|
||||
t = "available @ {}://{}:{}/ (\033[33m{}\033[0m)"
|
||||
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
|
||||
for port in sorted(self.args.p):
|
||||
self.log(
|
||||
"tcpsrv",
|
||||
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
|
||||
ip, port, desc
|
||||
),
|
||||
)
|
||||
if port not in ok.get(ip, ok.get("0.0.0.0", [])):
|
||||
continue
|
||||
|
||||
self.srv = []
|
||||
for ip in self.args.i:
|
||||
for port in self.args.p:
|
||||
self.srv.append(self._listen(ip, port))
|
||||
proto = " http"
|
||||
if self.args.http_only:
|
||||
pass
|
||||
elif self.args.https_only or port == 443:
|
||||
proto = "https"
|
||||
|
||||
def _listen(self, ip, port):
|
||||
msgs.append(t.format(proto, ip, port, desc))
|
||||
|
||||
if not self.args.wintitle:
|
||||
continue
|
||||
|
||||
if port in [80, 443]:
|
||||
ep = ip
|
||||
else:
|
||||
ep = "{}:{}".format(ip, port)
|
||||
|
||||
hits = []
|
||||
if "pub" in title_vars and "external" in unicode(desc):
|
||||
hits.append(("pub", ep))
|
||||
|
||||
if "pub" in title_vars or "all" in title_vars:
|
||||
hits.append(("all", ep))
|
||||
|
||||
for var in title_vars:
|
||||
if var.startswith("ip-") and ep.startswith(var[3:]):
|
||||
hits.append((var, ep))
|
||||
|
||||
for tk, tv in hits:
|
||||
try:
|
||||
title_tab[tk][tv] = 1
|
||||
except:
|
||||
title_tab[tk] = {tv: 1}
|
||||
|
||||
if msgs:
|
||||
msgs[-1] += "\n"
|
||||
for t in msgs:
|
||||
self.log("tcpsrv", t)
|
||||
|
||||
if self.args.wintitle:
|
||||
self._set_wintitle(title_tab)
|
||||
|
||||
def _listen(self, ip: str, port: int) -> None:
|
||||
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
srv.settimeout(None) # < does not inherit, ^ does
|
||||
try:
|
||||
srv.bind((ip, port))
|
||||
return srv
|
||||
self.srv.append(srv)
|
||||
except (OSError, socket.error) as ex:
|
||||
if ex.errno in [98, 48]:
|
||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||
@@ -59,19 +127,19 @@ class TcpSrv(object):
|
||||
raise
|
||||
raise Exception(e)
|
||||
|
||||
def run(self):
|
||||
def run(self) -> None:
|
||||
for srv in self.srv:
|
||||
srv.listen(self.args.nc)
|
||||
ip, port = srv.getsockname()
|
||||
fno = srv.fileno()
|
||||
msg = "listening @ {}:{} f{}".format(ip, port, fno)
|
||||
msg = "listening @ {}:{} f{} p{}".format(ip, port, fno, os.getpid())
|
||||
self.log("tcpsrv", msg)
|
||||
if self.args.q:
|
||||
print(msg)
|
||||
|
||||
self.hub.broker.put(False, "listen", srv)
|
||||
self.hub.broker.say("listen", srv)
|
||||
|
||||
def shutdown(self):
|
||||
def shutdown(self) -> None:
|
||||
self.stopping = True
|
||||
try:
|
||||
for srv in self.srv:
|
||||
@@ -81,25 +149,148 @@ class TcpSrv(object):
|
||||
|
||||
self.log("tcpsrv", "ok bye")
|
||||
|
||||
def detect_interfaces(self, listen_ips):
|
||||
eps = {}
|
||||
|
||||
# get all ips and their interfaces
|
||||
def ips_linux_ifconfig(self) -> dict[str, str]:
|
||||
# for termux
|
||||
try:
|
||||
ip_addr, _ = chkcmd("ip", "addr")
|
||||
txt, _ = chkcmd(["ifconfig"])
|
||||
except:
|
||||
ip_addr = None
|
||||
return {}
|
||||
|
||||
if ip_addr:
|
||||
r = re.compile(r"^\s+inet ([^ ]+)/.* (.*)")
|
||||
for ln in ip_addr.split("\n"):
|
||||
try:
|
||||
ip, dev = r.match(ln.rstrip()).groups()
|
||||
for lip in listen_ips:
|
||||
if lip in ["0.0.0.0", ip]:
|
||||
eps[ip] = dev
|
||||
except:
|
||||
pass
|
||||
eps: dict[str, str] = {}
|
||||
dev = None
|
||||
ip = None
|
||||
up = None
|
||||
for ln in (txt + "\n").split("\n"):
|
||||
if not ln.strip() and dev and ip:
|
||||
eps[ip] = dev + ("" if up else ", \033[31mLINK-DOWN")
|
||||
dev = ip = up = None
|
||||
continue
|
||||
|
||||
if ln == ln.lstrip():
|
||||
dev = re.split(r"[: ]", ln)[0]
|
||||
|
||||
if "UP" in re.split(r"[<>, \t]", ln):
|
||||
up = True
|
||||
|
||||
m = re.match(r"^\s+inet\s+([^ ]+)", ln)
|
||||
if m:
|
||||
ip = m.group(1)
|
||||
|
||||
return eps
|
||||
|
||||
def ips_linux(self) -> dict[str, str]:
|
||||
try:
|
||||
txt, _ = chkcmd(["ip", "addr"])
|
||||
except:
|
||||
return self.ips_linux_ifconfig()
|
||||
|
||||
r = re.compile(r"^\s+inet ([^ ]+)/.* (.*)")
|
||||
ri = re.compile(r"^\s*[0-9]+\s*:.*")
|
||||
up = False
|
||||
eps: dict[str, str] = {}
|
||||
for ln in txt.split("\n"):
|
||||
if ri.match(ln):
|
||||
up = "UP" in re.split("[>,< ]", ln)
|
||||
|
||||
try:
|
||||
ip, dev = r.match(ln.rstrip()).groups() # type: ignore
|
||||
eps[ip] = dev + ("" if up else ", \033[31mLINK-DOWN")
|
||||
except:
|
||||
pass
|
||||
|
||||
return eps
|
||||
|
||||
def ips_macos(self) -> dict[str, str]:
|
||||
eps: dict[str, str] = {}
|
||||
try:
|
||||
txt, _ = chkcmd(["ifconfig"])
|
||||
except:
|
||||
return eps
|
||||
|
||||
rdev = re.compile(r"^([^ ]+):")
|
||||
rip = re.compile(r"^\tinet ([0-9\.]+) ")
|
||||
dev = "UNKNOWN"
|
||||
for ln in txt.split("\n"):
|
||||
m = rdev.match(ln)
|
||||
if m:
|
||||
dev = m.group(1)
|
||||
|
||||
m = rip.match(ln)
|
||||
if m:
|
||||
eps[m.group(1)] = dev
|
||||
dev = "UNKNOWN"
|
||||
|
||||
return eps
|
||||
|
||||
def ips_windows_ipconfig(self) -> tuple[dict[str, str], set[str]]:
|
||||
eps: dict[str, str] = {}
|
||||
offs: set[str] = set()
|
||||
try:
|
||||
txt, _ = chkcmd(["ipconfig"])
|
||||
except:
|
||||
return eps, offs
|
||||
|
||||
rdev = re.compile(r"(^[^ ].*):$")
|
||||
rip = re.compile(r"^ +IPv?4? [^:]+: *([0-9\.]{7,15})$")
|
||||
roff = re.compile(r".*: Media disconnected$")
|
||||
dev = None
|
||||
for ln in txt.replace("\r", "").split("\n"):
|
||||
m = rdev.match(ln)
|
||||
if m:
|
||||
if dev and dev not in eps.values():
|
||||
offs.add(dev)
|
||||
|
||||
dev = m.group(1).split(" adapter ", 1)[-1]
|
||||
|
||||
if dev and roff.match(ln):
|
||||
offs.add(dev)
|
||||
dev = None
|
||||
|
||||
m = rip.match(ln)
|
||||
if m and dev:
|
||||
eps[m.group(1)] = dev
|
||||
dev = None
|
||||
|
||||
if dev and dev not in eps.values():
|
||||
offs.add(dev)
|
||||
|
||||
return eps, offs
|
||||
|
||||
def ips_windows_netsh(self) -> dict[str, str]:
|
||||
eps: dict[str, str] = {}
|
||||
try:
|
||||
txt, _ = chkcmd("netsh interface ip show address".split())
|
||||
except:
|
||||
return eps
|
||||
|
||||
rdev = re.compile(r'.* "([^"]+)"$')
|
||||
rip = re.compile(r".* IP\b.*: +([0-9\.]{7,15})$")
|
||||
dev = None
|
||||
for ln in txt.replace("\r", "").split("\n"):
|
||||
m = rdev.match(ln)
|
||||
if m:
|
||||
dev = m.group(1)
|
||||
|
||||
m = rip.match(ln)
|
||||
if m and dev:
|
||||
eps[m.group(1)] = dev
|
||||
|
||||
return eps
|
||||
|
||||
def detect_interfaces(self, listen_ips: list[str]) -> dict[str, str]:
|
||||
if MACOS:
|
||||
eps = self.ips_macos()
|
||||
elif ANYWIN:
|
||||
eps, off = self.ips_windows_ipconfig() # sees more interfaces + link state
|
||||
eps.update(self.ips_windows_netsh()) # has better names
|
||||
for k, v in eps.items():
|
||||
if v in off:
|
||||
eps[k] += ", \033[31mLINK-DOWN"
|
||||
else:
|
||||
eps = self.ips_linux()
|
||||
|
||||
if "0.0.0.0" not in listen_ips:
|
||||
eps = {k: v for k, v in eps.items() if k in listen_ips}
|
||||
|
||||
default_route = None
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
@@ -113,7 +304,6 @@ class TcpSrv(object):
|
||||
]:
|
||||
try:
|
||||
s.connect((ip, 1))
|
||||
# raise OSError(13, "a")
|
||||
default_route = s.getsockname()[0]
|
||||
break
|
||||
except (OSError, socket.error) as ex:
|
||||
@@ -133,3 +323,26 @@ class TcpSrv(object):
|
||||
eps[default_route] = desc
|
||||
|
||||
return eps
|
||||
|
||||
def _set_wintitle(self, vs: dict[str, dict[str, int]]) -> None:
|
||||
vs["all"] = vs.get("all", {"Local-Only": 1})
|
||||
vs["pub"] = vs.get("pub", vs["all"])
|
||||
|
||||
vs2 = {}
|
||||
for k, eps in vs.items():
|
||||
vs2[k] = {
|
||||
ep: 1
|
||||
for ep in eps.keys()
|
||||
if ":" not in ep or ep.split(":")[0] not in eps
|
||||
}
|
||||
|
||||
title = ""
|
||||
vs = vs2
|
||||
for p in self.args.wintitle.split(" "):
|
||||
if p.startswith("$"):
|
||||
p = " and ".join(sorted(vs.get(p[1:], {"(None)": 1}).keys()))
|
||||
|
||||
title += "{} ".format(p)
|
||||
|
||||
print("\033]0;{}\033\\".format(title), file=sys.stderr, end="")
|
||||
sys.stderr.flush()
|
||||
|
||||
@@ -3,53 +3,133 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .th_srv import HAVE_WEBP, thumb_path
|
||||
from .util import Cooldown
|
||||
from .th_srv import thumb_path, THUMBABLE, FMT_FF
|
||||
|
||||
try:
|
||||
from typing import Optional, Union
|
||||
except:
|
||||
pass
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .httpsrv import HttpSrv
|
||||
|
||||
|
||||
class ThumbCli(object):
|
||||
def __init__(self, broker):
|
||||
self.broker = broker
|
||||
self.args = broker.args
|
||||
self.asrv = broker.asrv
|
||||
def __init__(self, hsrv: "HttpSrv") -> None:
|
||||
self.broker = hsrv.broker
|
||||
self.log_func = hsrv.log
|
||||
self.args = hsrv.args
|
||||
self.asrv = hsrv.asrv
|
||||
|
||||
# cache on both sides for less broker spam
|
||||
self.cooldown = Cooldown(self.args.th_poke)
|
||||
|
||||
def get(self, ptop, rem, mtime, fmt):
|
||||
try:
|
||||
c = hsrv.th_cfg
|
||||
except:
|
||||
c = {k: {} for k in ["thumbable", "pil", "vips", "ffi", "ffv", "ffa"]}
|
||||
|
||||
self.thumbable = c["thumbable"]
|
||||
self.fmt_pil = c["pil"]
|
||||
self.fmt_vips = c["vips"]
|
||||
self.fmt_ffi = c["ffi"]
|
||||
self.fmt_ffv = c["ffv"]
|
||||
self.fmt_ffa = c["ffa"]
|
||||
|
||||
# defer args.th_ff_jpg, can change at runtime
|
||||
d = next((x for x in self.args.th_dec if x in ("vips", "pil")), None)
|
||||
self.can_webp = HAVE_WEBP or d == "vips"
|
||||
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func("thumbcli", msg, c)
|
||||
|
||||
def get(self, dbv: VFS, rem: str, mtime: float, fmt: str) -> Optional[str]:
|
||||
ptop = dbv.realpath
|
||||
ext = rem.rsplit(".")[-1].lower()
|
||||
if ext not in THUMBABLE:
|
||||
if ext not in self.thumbable or "dthumb" in dbv.flags:
|
||||
return None
|
||||
|
||||
is_vid = ext in FMT_FF
|
||||
if is_vid and self.args.no_vthumb:
|
||||
is_vid = ext in self.fmt_ffv
|
||||
if is_vid and "dvthumb" in dbv.flags:
|
||||
return None
|
||||
|
||||
want_opus = fmt in ("opus", "caf")
|
||||
is_au = ext in self.fmt_ffa
|
||||
if is_au:
|
||||
if want_opus:
|
||||
if self.args.no_acode:
|
||||
return None
|
||||
else:
|
||||
if "dathumb" in dbv.flags:
|
||||
return None
|
||||
elif want_opus:
|
||||
return None
|
||||
|
||||
is_img = not is_vid and not is_au
|
||||
if is_img and "dithumb" in dbv.flags:
|
||||
return None
|
||||
|
||||
preferred = self.args.th_dec[0] if self.args.th_dec else ""
|
||||
|
||||
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg", "png"]:
|
||||
return os.path.join(ptop, rem)
|
||||
|
||||
if fmt == "j" and self.args.th_no_jpg:
|
||||
fmt = "w"
|
||||
|
||||
if fmt == "w":
|
||||
if self.args.th_no_webp or (is_vid and self.args.th_ff_jpg):
|
||||
if (
|
||||
self.args.th_no_webp
|
||||
or (is_img and not self.can_webp)
|
||||
or (self.args.th_ff_jpg and (not is_img or preferred == "ff"))
|
||||
):
|
||||
fmt = "j"
|
||||
|
||||
histpath = self.asrv.vfs.histtab[ptop]
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
return None
|
||||
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt)
|
||||
tpaths = [tpath]
|
||||
if fmt == "w":
|
||||
# also check for jpg (maybe webp is unavailable)
|
||||
tpaths.append(tpath.rsplit(".", 1)[0] + ".jpg")
|
||||
|
||||
ret = None
|
||||
try:
|
||||
st = os.stat(tpath)
|
||||
if st.st_size:
|
||||
ret = tpath
|
||||
else:
|
||||
return None
|
||||
except:
|
||||
pass
|
||||
abort = False
|
||||
for tp in tpaths:
|
||||
try:
|
||||
st = bos.stat(tp)
|
||||
if st.st_size:
|
||||
ret = tpath = tp
|
||||
fmt = ret.rsplit(".")[1]
|
||||
else:
|
||||
abort = True
|
||||
except:
|
||||
pass
|
||||
|
||||
if ret:
|
||||
tdir = os.path.dirname(tpath)
|
||||
if self.cooldown.poke(tdir):
|
||||
self.broker.put(False, "thumbsrv.poke", tdir)
|
||||
self.broker.say("thumbsrv.poke", tdir)
|
||||
|
||||
if want_opus:
|
||||
# audio files expire individually
|
||||
if self.cooldown.poke(tpath):
|
||||
self.broker.say("thumbsrv.poke", tpath)
|
||||
|
||||
return ret
|
||||
|
||||
x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt)
|
||||
return x.get()
|
||||
if abort:
|
||||
return None
|
||||
|
||||
if not bos.path.getsize(os.path.join(ptop, rem)):
|
||||
return None
|
||||
|
||||
x = self.broker.ask("thumbsrv.get", ptop, rem, mtime, fmt)
|
||||
return x.get() # type: ignore
|
||||
|
||||
@@ -1,18 +1,28 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import time
|
||||
import shutil
|
||||
import base64
|
||||
import hashlib
|
||||
import threading
|
||||
import os
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
import threading
|
||||
import time
|
||||
|
||||
from .__init__ import PY2, unicode
|
||||
from .util import fsenc, runcmd, Queue, Cooldown, BytesIO, min_ex
|
||||
from queue import Queue
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .bos import bos
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
||||
from .util import BytesIO, Cooldown, Pebkac, fsenc, min_ex, runcmd, statdir, vsplit
|
||||
|
||||
try:
|
||||
from typing import Optional, Union
|
||||
except:
|
||||
pass
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
HAVE_PIL = False
|
||||
HAVE_HEIF = False
|
||||
@@ -20,7 +30,7 @@ HAVE_AVIF = False
|
||||
HAVE_WEBP = False
|
||||
|
||||
try:
|
||||
from PIL import Image, ImageOps
|
||||
from PIL import ExifTags, Image, ImageOps
|
||||
|
||||
HAVE_PIL = True
|
||||
try:
|
||||
@@ -38,7 +48,7 @@ try:
|
||||
pass
|
||||
|
||||
try:
|
||||
import pillow_avif
|
||||
import pillow_avif # noqa: F401 # pylint: disable=unused-import
|
||||
|
||||
HAVE_AVIF = True
|
||||
except:
|
||||
@@ -46,39 +56,18 @@ try:
|
||||
except:
|
||||
pass
|
||||
|
||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||
# ffmpeg -formats
|
||||
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
|
||||
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
|
||||
|
||||
if HAVE_HEIF:
|
||||
FMT_PIL += " heif heifs heic heics"
|
||||
|
||||
if HAVE_AVIF:
|
||||
FMT_PIL += " avif avifs"
|
||||
|
||||
FMT_PIL, FMT_FF = [{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FF]]
|
||||
try:
|
||||
HAVE_VIPS = True
|
||||
import pyvips
|
||||
except:
|
||||
HAVE_VIPS = False
|
||||
|
||||
|
||||
THUMBABLE = {}
|
||||
|
||||
if HAVE_PIL:
|
||||
THUMBABLE.update(FMT_PIL)
|
||||
|
||||
if HAVE_FFMPEG and HAVE_FFPROBE:
|
||||
THUMBABLE.update(FMT_FF)
|
||||
|
||||
|
||||
def thumb_path(histpath, rem, mtime, fmt):
|
||||
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
|
||||
# base16 = 16 = 256
|
||||
# b64-lc = 38 = 1444
|
||||
# base64 = 64 = 4096
|
||||
try:
|
||||
rd, fn = rem.rsplit("/", 1)
|
||||
except:
|
||||
rd = ""
|
||||
fn = rem
|
||||
|
||||
rd, fn = vsplit(rem)
|
||||
if rd:
|
||||
h = hashlib.sha512(fsenc(rd)).digest()
|
||||
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||
@@ -90,13 +79,17 @@ def thumb_path(histpath, rem, mtime, fmt):
|
||||
h = hashlib.sha512(fsenc(fn)).digest()
|
||||
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||
|
||||
return "{}/th/{}/{}.{:x}.{}".format(
|
||||
histpath, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
|
||||
)
|
||||
if fmt in ("opus", "caf"):
|
||||
cat = "ac"
|
||||
else:
|
||||
fmt = "webp" if fmt == "w" else "png" if fmt == "p" else "jpg"
|
||||
cat = "th"
|
||||
|
||||
return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt)
|
||||
|
||||
|
||||
class ThumbSrv(object):
|
||||
def __init__(self, hub):
|
||||
def __init__(self, hub: "SvcHub") -> None:
|
||||
self.hub = hub
|
||||
self.asrv = hub.asrv
|
||||
self.args = hub.args
|
||||
@@ -107,18 +100,20 @@ class ThumbSrv(object):
|
||||
self.poke_cd = Cooldown(self.args.th_poke)
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.busy = {}
|
||||
self.busy: dict[str, list[threading.Condition]] = {}
|
||||
self.stopping = False
|
||||
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
self.q = Queue(self.nthr * 4)
|
||||
self.nthr = max(1, self.args.th_mt)
|
||||
|
||||
self.q: Queue[Optional[tuple[str, str]]] = Queue(self.nthr * 4)
|
||||
for n in range(self.nthr):
|
||||
t = threading.Thread(
|
||||
thr = threading.Thread(
|
||||
target=self.worker, name="thumb-{}-{}".format(n, self.nthr)
|
||||
)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE):
|
||||
want_ff = not self.args.no_vthumb or not self.args.no_athumb
|
||||
if want_ff and (not HAVE_FFMPEG or not HAVE_FFPROBE):
|
||||
missing = []
|
||||
if not HAVE_FFMPEG:
|
||||
missing.append("FFmpeg")
|
||||
@@ -126,29 +121,64 @@ class ThumbSrv(object):
|
||||
if not HAVE_FFPROBE:
|
||||
missing.append("FFprobe")
|
||||
|
||||
msg = "cannot create video thumbnails because some of the required programs are not available: "
|
||||
msg = "cannot create audio/video thumbnails because some of the required programs are not available: "
|
||||
msg += ", ".join(missing)
|
||||
self.log(msg, c=3)
|
||||
|
||||
if self.args.th_clean:
|
||||
t = threading.Thread(target=self.cleaner, name="thumb-cleaner")
|
||||
t = threading.Thread(target=self.cleaner, name="thumb.cln")
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
def log(self, msg, c=0):
|
||||
self.fmt_pil, self.fmt_vips, self.fmt_ffi, self.fmt_ffv, self.fmt_ffa = [
|
||||
set(y.split(","))
|
||||
for y in [
|
||||
self.args.th_r_pil,
|
||||
self.args.th_r_vips,
|
||||
self.args.th_r_ffi,
|
||||
self.args.th_r_ffv,
|
||||
self.args.th_r_ffa,
|
||||
]
|
||||
]
|
||||
|
||||
if not HAVE_HEIF:
|
||||
for f in "heif heifs heic heics".split(" "):
|
||||
self.fmt_pil.discard(f)
|
||||
|
||||
if not HAVE_AVIF:
|
||||
for f in "avif avifs".split(" "):
|
||||
self.fmt_pil.discard(f)
|
||||
|
||||
self.thumbable: set[str] = set()
|
||||
|
||||
if "pil" in self.args.th_dec:
|
||||
self.thumbable |= self.fmt_pil
|
||||
|
||||
if "vips" in self.args.th_dec:
|
||||
self.thumbable |= self.fmt_vips
|
||||
|
||||
if "ff" in self.args.th_dec:
|
||||
for zss in [self.fmt_ffi, self.fmt_ffv, self.fmt_ffa]:
|
||||
self.thumbable |= zss
|
||||
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func("thumb", msg, c)
|
||||
|
||||
def shutdown(self):
|
||||
def shutdown(self) -> None:
|
||||
self.stopping = True
|
||||
for _ in range(self.nthr):
|
||||
self.q.put(None)
|
||||
|
||||
def stopped(self):
|
||||
def stopped(self) -> bool:
|
||||
with self.mutex:
|
||||
return not self.nthr
|
||||
|
||||
def get(self, ptop, rem, mtime, fmt):
|
||||
histpath = self.asrv.vfs.histtab[ptop]
|
||||
def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
return None
|
||||
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt)
|
||||
abspath = os.path.join(ptop, rem)
|
||||
cond = threading.Condition(self.mutex)
|
||||
@@ -159,13 +189,10 @@ class ThumbSrv(object):
|
||||
self.log("wait {}".format(tpath))
|
||||
except:
|
||||
thdir = os.path.dirname(tpath)
|
||||
try:
|
||||
os.makedirs(thdir)
|
||||
except:
|
||||
pass
|
||||
bos.makedirs(thdir)
|
||||
|
||||
inf_path = os.path.join(thdir, "dir.txt")
|
||||
if not os.path.exists(inf_path):
|
||||
if not bos.path.exists(inf_path):
|
||||
with open(inf_path, "wb") as f:
|
||||
f.write(fsenc(os.path.dirname(abspath)))
|
||||
|
||||
@@ -173,7 +200,7 @@ class ThumbSrv(object):
|
||||
do_conv = True
|
||||
|
||||
if do_conv:
|
||||
self.q.put([abspath, tpath])
|
||||
self.q.put((abspath, tpath))
|
||||
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
|
||||
|
||||
while not self.stopping:
|
||||
@@ -185,15 +212,26 @@ class ThumbSrv(object):
|
||||
cond.wait(3)
|
||||
|
||||
try:
|
||||
st = os.stat(tpath)
|
||||
st = bos.stat(tpath)
|
||||
if st.st_size:
|
||||
self.poke(tpath)
|
||||
return tpath
|
||||
except:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
def worker(self):
|
||||
def getcfg(self) -> dict[str, set[str]]:
|
||||
return {
|
||||
"thumbable": self.thumbable,
|
||||
"pil": self.fmt_pil,
|
||||
"vips": self.fmt_vips,
|
||||
"ffi": self.fmt_ffi,
|
||||
"ffv": self.fmt_ffv,
|
||||
"ffa": self.fmt_ffa,
|
||||
}
|
||||
|
||||
def worker(self) -> None:
|
||||
while not self.stopping:
|
||||
task = self.q.get()
|
||||
if not task:
|
||||
@@ -201,21 +239,47 @@ class ThumbSrv(object):
|
||||
|
||||
abspath, tpath = task
|
||||
ext = abspath.split(".")[-1].lower()
|
||||
png_ok = False
|
||||
fun = None
|
||||
if not os.path.exists(tpath):
|
||||
if ext in FMT_PIL:
|
||||
fun = self.conv_pil
|
||||
elif ext in FMT_FF:
|
||||
fun = self.conv_ffmpeg
|
||||
if not bos.path.exists(tpath):
|
||||
for lib in self.args.th_dec:
|
||||
if fun:
|
||||
break
|
||||
elif lib == "pil" and ext in self.fmt_pil:
|
||||
fun = self.conv_pil
|
||||
elif lib == "vips" and ext in self.fmt_vips:
|
||||
fun = self.conv_vips
|
||||
elif lib == "ff" and ext in self.fmt_ffi or ext in self.fmt_ffv:
|
||||
fun = self.conv_ffmpeg
|
||||
elif lib == "ff" and ext in self.fmt_ffa:
|
||||
if tpath.endswith(".opus") or tpath.endswith(".caf"):
|
||||
fun = self.conv_opus
|
||||
elif tpath.endswith(".png"):
|
||||
fun = self.conv_waves
|
||||
png_ok = True
|
||||
else:
|
||||
fun = self.conv_spec
|
||||
|
||||
if not png_ok and tpath.endswith(".png"):
|
||||
raise Pebkac(400, "png only allowed for waveforms")
|
||||
|
||||
if fun:
|
||||
try:
|
||||
fun(abspath, tpath)
|
||||
except:
|
||||
msg = "{} failed on {}\n{}"
|
||||
self.log(msg.format(fun.__name__, abspath, min_ex()), 3)
|
||||
with open(tpath, "wb") as _:
|
||||
pass
|
||||
except Exception as ex:
|
||||
msg = "{} could not create thumbnail of {}\n{}"
|
||||
msg = msg.format(fun.__name__, abspath, min_ex())
|
||||
c: Union[str, int] = 1 if "<Signals.SIG" in msg else "1;30"
|
||||
self.log(msg, c)
|
||||
if getattr(ex, "returncode", 0) != 321:
|
||||
with open(tpath, "wb") as _:
|
||||
pass
|
||||
else:
|
||||
# ffmpeg may spawn empty files on windows
|
||||
try:
|
||||
os.unlink(tpath)
|
||||
except:
|
||||
pass
|
||||
|
||||
with self.mutex:
|
||||
subs = self.busy[tpath]
|
||||
@@ -228,21 +292,38 @@ class ThumbSrv(object):
|
||||
with self.mutex:
|
||||
self.nthr -= 1
|
||||
|
||||
def conv_pil(self, abspath, tpath):
|
||||
with Image.open(fsenc(abspath)) as im:
|
||||
crop = not self.args.th_no_crop
|
||||
res2 = self.res
|
||||
if crop:
|
||||
res2 = (res2[0] * 2, res2[1] * 2)
|
||||
def fancy_pillow(self, im: "Image.Image") -> "Image.Image":
|
||||
# exif_transpose is expensive (loads full image + unconditional copy)
|
||||
r = max(*self.res) * 2
|
||||
im.thumbnail((r, r), resample=Image.LANCZOS)
|
||||
try:
|
||||
k = next(k for k, v in ExifTags.TAGS.items() if v == "Orientation")
|
||||
exif = im.getexif()
|
||||
rot = int(exif[k])
|
||||
del exif[k]
|
||||
except:
|
||||
rot = 1
|
||||
|
||||
rots = {8: Image.ROTATE_90, 3: Image.ROTATE_180, 6: Image.ROTATE_270}
|
||||
if rot in rots:
|
||||
im = im.transpose(rots[rot])
|
||||
|
||||
if self.args.th_no_crop:
|
||||
im.thumbnail(self.res, resample=Image.LANCZOS)
|
||||
else:
|
||||
iw, ih = im.size
|
||||
dw, dh = self.res
|
||||
res = (min(iw, dw), min(ih, dh))
|
||||
im = ImageOps.fit(im, res, method=Image.LANCZOS)
|
||||
|
||||
return im
|
||||
|
||||
def conv_pil(self, abspath: str, tpath: str) -> None:
|
||||
with Image.open(fsenc(abspath)) as im:
|
||||
try:
|
||||
im.thumbnail(res2, resample=Image.LANCZOS)
|
||||
if crop:
|
||||
iw, ih = im.size
|
||||
dw, dh = self.res
|
||||
res = (min(iw, dw), min(ih, dh))
|
||||
im = ImageOps.fit(im, res, method=Image.LANCZOS)
|
||||
except:
|
||||
im = self.fancy_pillow(im)
|
||||
except Exception as ex:
|
||||
self.log("fancy_pillow {}".format(ex), "1;30")
|
||||
im.thumbnail(self.res)
|
||||
|
||||
fmts = ["RGB", "L"]
|
||||
@@ -257,24 +338,44 @@ class ThumbSrv(object):
|
||||
fmts += ["RGBA", "LA"]
|
||||
args["method"] = 6
|
||||
else:
|
||||
pass # default q = 75
|
||||
# default q = 75
|
||||
args["progressive"] = True
|
||||
|
||||
if im.mode not in fmts:
|
||||
# print("conv {}".format(im.mode))
|
||||
im = im.convert("RGB")
|
||||
|
||||
im.save(tpath, quality=40, method=6)
|
||||
im.save(tpath, **args)
|
||||
|
||||
def conv_ffmpeg(self, abspath, tpath):
|
||||
ret, _ = ffprobe(abspath)
|
||||
def conv_vips(self, abspath: str, tpath: str) -> None:
|
||||
crops = ["centre", "none"]
|
||||
if self.args.th_no_crop:
|
||||
crops = ["none"]
|
||||
|
||||
ext = abspath.rsplit(".")[-1]
|
||||
if ext in ["h264", "h265"]:
|
||||
seek = []
|
||||
w, h = self.res
|
||||
kw = {"height": h, "size": "down", "intent": "relative"}
|
||||
|
||||
for c in crops:
|
||||
try:
|
||||
kw["crop"] = c
|
||||
img = pyvips.Image.thumbnail(abspath, w, **kw)
|
||||
break
|
||||
except:
|
||||
pass
|
||||
|
||||
img.write_to_file(tpath, Q=40)
|
||||
|
||||
def conv_ffmpeg(self, abspath: str, tpath: str) -> None:
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
if not ret:
|
||||
return
|
||||
|
||||
ext = abspath.rsplit(".")[-1].lower()
|
||||
if ext in ["h264", "h265"] or ext in self.fmt_ffi:
|
||||
seek: list[bytes] = []
|
||||
else:
|
||||
dur = ret[".dur"][1] if ".dur" in ret else 4
|
||||
seek = "{:.0f}".format(dur / 3)
|
||||
seek = [b"-ss", seek.encode("utf-8")]
|
||||
seek = [b"-ss", "{:.0f}".format(dur / 3).encode("utf-8")]
|
||||
|
||||
scale = "scale={0}:{1}:force_original_aspect_ratio="
|
||||
if self.args.th_no_crop:
|
||||
@@ -282,7 +383,7 @@ class ThumbSrv(object):
|
||||
else:
|
||||
scale += "increase,crop={0}:{1},setsar=1:1"
|
||||
|
||||
scale = scale.format(*list(self.res)).encode("utf-8")
|
||||
bscale = scale.format(*list(self.res)).encode("utf-8")
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
@@ -293,8 +394,10 @@ class ThumbSrv(object):
|
||||
cmd += seek
|
||||
cmd += [
|
||||
b"-i", fsenc(abspath),
|
||||
b"-vf", scale,
|
||||
b"-vframes", b"1",
|
||||
b"-map", b"0:v:0",
|
||||
b"-vf", bscale,
|
||||
b"-frames:v", b"1",
|
||||
b"-metadata:s:v:0", b"rotate=0",
|
||||
]
|
||||
# fmt: on
|
||||
|
||||
@@ -312,27 +415,185 @@ class ThumbSrv(object):
|
||||
]
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
|
||||
ret, sout, serr = runcmd(*cmd)
|
||||
if ret != 0:
|
||||
msg = ["ff: {}".format(x) for x in serr.split("\n")]
|
||||
self.log("FFmpeg failed:\n" + "\n".join(msg), c="1;30")
|
||||
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
|
||||
def _run_ff(self, cmd: list[bytes]) -> None:
|
||||
# self.log((b" ".join(cmd)).decode("utf-8"))
|
||||
ret, _, serr = runcmd(cmd, timeout=self.args.th_convt)
|
||||
if not ret:
|
||||
return
|
||||
|
||||
def poke(self, tdir):
|
||||
c: Union[str, int] = "1;30"
|
||||
t = "FFmpeg failed (probably a corrupt video file):\n"
|
||||
if (
|
||||
(not self.args.th_ff_jpg or time.time() - int(self.args.th_ff_jpg) < 60)
|
||||
and cmd[-1].lower().endswith(b".webp")
|
||||
and (
|
||||
"Error selecting an encoder" in serr
|
||||
or "Automatic encoder selection failed" in serr
|
||||
or "Default encoder for format webp" in serr
|
||||
or "Please choose an encoder manually" in serr
|
||||
)
|
||||
):
|
||||
self.args.th_ff_jpg = time.time()
|
||||
t = "FFmpeg failed because it was compiled without libwebp; enabling --th-ff-jpg to force jpeg output:\n"
|
||||
ret = 321
|
||||
c = 1
|
||||
|
||||
if (
|
||||
not self.args.th_ff_swr or time.time() - int(self.args.th_ff_swr) < 60
|
||||
) and (
|
||||
"Requested resampling engine is unavailable" in serr
|
||||
or "output pad on Parsed_aresample_" in serr
|
||||
):
|
||||
self.args.th_ff_swr = time.time()
|
||||
t = "FFmpeg failed because it was compiled without libsox; enabling --th-ff-swr to force swr resampling:\n"
|
||||
ret = 321
|
||||
c = 1
|
||||
|
||||
lines = serr.strip("\n").split("\n")
|
||||
if len(lines) > 50:
|
||||
lines = lines[:25] + ["[...]"] + lines[-25:]
|
||||
|
||||
txt = "\n".join(["ff: " + str(x) for x in lines])
|
||||
if len(txt) > 5000:
|
||||
txt = txt[:2500] + "...\nff: [...]\nff: ..." + txt[-2500:]
|
||||
|
||||
self.log(t + txt, c=c)
|
||||
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
|
||||
|
||||
def conv_waves(self, abspath: str, tpath: str) -> None:
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
flt = (
|
||||
b"[0:a:0]"
|
||||
b"compand=.3|.3:1|1:-90/-60|-60/-40|-40/-30|-20/-20:6:0:-90:0.2"
|
||||
b",volume=2"
|
||||
b",showwavespic=s=2048x64:colors=white"
|
||||
b",convolution=1 1 1 1 1 1 1 1 1:1 1 1 1 1 1 1 1 1:1 1 1 1 1 1 1 1 1:1 -1 1 -1 5 -1 1 -1 1" # idk what im doing but it looks ok
|
||||
)
|
||||
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath),
|
||||
b"-filter_complex", flt,
|
||||
b"-frames:v", b"1",
|
||||
]
|
||||
# fmt: on
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
|
||||
def conv_spec(self, abspath: str, tpath: str) -> None:
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
fc = "[0:a:0]aresample=48000{},showspectrumpic=s=640x512,crop=780:544:70:50[o]"
|
||||
|
||||
if self.args.th_ff_swr:
|
||||
fco = ":filter_size=128:cutoff=0.877"
|
||||
else:
|
||||
fco = ":resampler=soxr"
|
||||
|
||||
fc = fc.format(fco)
|
||||
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath),
|
||||
b"-filter_complex", fc.encode("utf-8"),
|
||||
b"-map", b"[o]",
|
||||
b"-frames:v", b"1",
|
||||
]
|
||||
# fmt: on
|
||||
|
||||
if tpath.endswith(".jpg"):
|
||||
cmd += [
|
||||
b"-q:v",
|
||||
b"6", # default=??
|
||||
]
|
||||
else:
|
||||
cmd += [
|
||||
b"-q:v",
|
||||
b"50", # default=75
|
||||
b"-compression_level:v",
|
||||
b"6", # default=4, 0=fast, 6=max
|
||||
]
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
|
||||
def conv_opus(self, abspath: str, tpath: str) -> None:
|
||||
if self.args.no_acode:
|
||||
raise Exception("disabled in server config")
|
||||
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
|
||||
want_caf = tpath.endswith(".caf")
|
||||
tmp_opus = tpath
|
||||
if want_caf:
|
||||
tmp_opus = tpath.rsplit(".", 1)[0] + ".opus"
|
||||
|
||||
if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)):
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath),
|
||||
b"-map_metadata", b"-1",
|
||||
b"-map", b"0:a:0",
|
||||
b"-c:a", b"libopus",
|
||||
b"-b:a", b"128k",
|
||||
fsenc(tmp_opus)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
|
||||
if want_caf:
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath if src_opus else tmp_opus),
|
||||
b"-map_metadata", b"-1",
|
||||
b"-map", b"0:a:0",
|
||||
b"-c:a", b"copy",
|
||||
b"-f", b"caf",
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
|
||||
def poke(self, tdir: str) -> None:
|
||||
if not self.poke_cd.poke(tdir):
|
||||
return
|
||||
|
||||
ts = int(time.time())
|
||||
try:
|
||||
p1 = os.path.dirname(tdir)
|
||||
p2 = os.path.dirname(p1)
|
||||
for dp in [tdir, p1, p2]:
|
||||
os.utime(fsenc(dp), (ts, ts))
|
||||
for _ in range(4):
|
||||
bos.utime(tdir, (ts, ts))
|
||||
tdir = os.path.dirname(tdir)
|
||||
except:
|
||||
pass
|
||||
|
||||
def cleaner(self):
|
||||
def cleaner(self) -> None:
|
||||
interval = self.args.th_clean
|
||||
while True:
|
||||
time.sleep(interval)
|
||||
@@ -347,30 +608,42 @@ class ThumbSrv(object):
|
||||
|
||||
self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
|
||||
|
||||
def clean(self, histpath):
|
||||
thumbpath = os.path.join(histpath, "th")
|
||||
def clean(self, histpath: str) -> int:
|
||||
ret = 0
|
||||
for cat in ["th", "ac"]:
|
||||
top = os.path.join(histpath, cat)
|
||||
if not bos.path.isdir(top):
|
||||
continue
|
||||
|
||||
ret += self._clean(cat, top)
|
||||
|
||||
return ret
|
||||
|
||||
def _clean(self, cat: str, thumbpath: str) -> int:
|
||||
# self.log("cln {}".format(thumbpath))
|
||||
maxage = self.args.th_maxage
|
||||
exts = ["jpg", "webp"] if cat == "th" else ["opus", "caf"]
|
||||
maxage = getattr(self.args, cat + "_maxage")
|
||||
now = time.time()
|
||||
prev_b64 = None
|
||||
prev_fp = None
|
||||
prev_fp = ""
|
||||
try:
|
||||
ents = os.listdir(thumbpath)
|
||||
t1 = statdir(self.log_func, not self.args.no_scandir, False, thumbpath)
|
||||
ents = sorted(list(t1))
|
||||
except:
|
||||
return 0
|
||||
|
||||
ndirs = 0
|
||||
for f in sorted(ents):
|
||||
for f, inf in ents:
|
||||
fp = os.path.join(thumbpath, f)
|
||||
cmp = fp.lower().replace("\\", "/")
|
||||
|
||||
# "top" or b64 prefix/full (a folder)
|
||||
if len(f) <= 3 or len(f) == 24:
|
||||
age = now - os.path.getmtime(fp)
|
||||
age = now - inf.st_mtime
|
||||
if age > maxage:
|
||||
with self.mutex:
|
||||
safe = True
|
||||
for k in self.busy.keys():
|
||||
for k in self.busy:
|
||||
if k.lower().replace("\\", "/").startswith(cmp):
|
||||
safe = False
|
||||
break
|
||||
@@ -380,16 +653,15 @@ class ThumbSrv(object):
|
||||
self.log("rm -rf [{}]".format(fp))
|
||||
shutil.rmtree(fp, ignore_errors=True)
|
||||
else:
|
||||
ndirs += self.clean(fp)
|
||||
ndirs += self._clean(cat, fp)
|
||||
|
||||
continue
|
||||
|
||||
# thumb file
|
||||
try:
|
||||
b64, ts, ext = f.split(".")
|
||||
if len(b64) != 24 or len(ts) != 8 or ext not in ["jpg", "webp"]:
|
||||
if len(b64) != 24 or len(ts) != 8 or ext not in exts:
|
||||
raise Exception()
|
||||
|
||||
ts = int(ts, 16)
|
||||
except:
|
||||
if f != "dir.txt":
|
||||
self.log("foreign file in thumbs dir: [{}]".format(fp), 1)
|
||||
@@ -398,7 +670,11 @@ class ThumbSrv(object):
|
||||
|
||||
if b64 == prev_b64:
|
||||
self.log("rm replaced [{}]".format(fp))
|
||||
os.unlink(prev_fp)
|
||||
bos.unlink(prev_fp)
|
||||
|
||||
if cat != "th" and inf.st_mtime + maxage < now:
|
||||
self.log("rm expired [{}]".format(fp))
|
||||
bos.unlink(fp)
|
||||
|
||||
prev_b64 = b64
|
||||
prev_fp = fp
|
||||
|
||||
@@ -1,26 +1,37 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import calendar
|
||||
import os
|
||||
import time
|
||||
import re
|
||||
import threading
|
||||
from datetime import datetime
|
||||
import time
|
||||
from operator import itemgetter
|
||||
|
||||
from .__init__ import unicode
|
||||
from .util import s3dec, Pebkac, min_ex
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING, unicode
|
||||
from .bos import bos
|
||||
from .up2k import up2k_wark_from_hashlist
|
||||
from .util import HAVE_SQLITE3, Pebkac, absreal, gen_filekey, min_ex, quotep, s3dec
|
||||
|
||||
if HAVE_SQLITE3:
|
||||
import sqlite3
|
||||
|
||||
try:
|
||||
HAVE_SQLITE3 = True
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
except:
|
||||
HAVE_SQLITE3 = False
|
||||
pass
|
||||
|
||||
try:
|
||||
from typing import Any, Optional, Union
|
||||
except:
|
||||
pass
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .httpconn import HttpConn
|
||||
|
||||
|
||||
class U2idx(object):
|
||||
def __init__(self, conn):
|
||||
def __init__(self, conn: "HttpConn") -> None:
|
||||
self.log_func = conn.log_func
|
||||
self.asrv = conn.asrv
|
||||
self.args = conn.args
|
||||
@@ -30,17 +41,21 @@ class U2idx(object):
|
||||
self.log("your python does not have sqlite3; searching will be disabled")
|
||||
return
|
||||
|
||||
self.cur = {}
|
||||
self.mem_cur = sqlite3.connect(":memory:")
|
||||
self.active_id = ""
|
||||
self.active_cur: Optional["sqlite3.Cursor"] = None
|
||||
self.cur: dict[str, "sqlite3.Cursor"] = {}
|
||||
self.mem_cur = sqlite3.connect(":memory:").cursor()
|
||||
self.mem_cur.execute(r"create table a (b text)")
|
||||
|
||||
self.p_end = None
|
||||
self.p_dur = 0
|
||||
self.p_end = 0.0
|
||||
self.p_dur = 0.0
|
||||
|
||||
def log(self, msg, c=0):
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func("u2idx", msg, c)
|
||||
|
||||
def fsearch(self, vols, body):
|
||||
def fsearch(
|
||||
self, vols: list[tuple[str, str, dict[str, Any]]], body: dict[str, Any]
|
||||
) -> list[dict[str, Any]]:
|
||||
"""search by up2k hashlist"""
|
||||
if not HAVE_SQLITE3:
|
||||
return []
|
||||
@@ -49,15 +64,15 @@ class U2idx(object):
|
||||
fhash = body["hash"]
|
||||
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
|
||||
|
||||
uq = "where substr(w,1,16) = ? and w = ?"
|
||||
uv = [wark[:16], wark]
|
||||
uq = "substr(w,1,16) = ? and w = ?"
|
||||
uv: list[Union[str, int]] = [wark[:16], wark]
|
||||
|
||||
try:
|
||||
return self.run_query(vols, uq, uv)[0]
|
||||
return self.run_query(vols, uq, uv, True, False, 99999)[0]
|
||||
except:
|
||||
raise Pebkac(500, min_ex())
|
||||
|
||||
def get_cur(self, ptop):
|
||||
def get_cur(self, ptop: str) -> Optional["sqlite3.Cursor"]:
|
||||
if not HAVE_SQLITE3:
|
||||
return None
|
||||
|
||||
@@ -65,33 +80,54 @@ class U2idx(object):
|
||||
if cur:
|
||||
return cur
|
||||
|
||||
histpath = self.asrv.vfs.histtab[ptop]
|
||||
db_path = os.path.join(histpath, "up2k.db")
|
||||
if not os.path.exists(db_path):
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
return None
|
||||
|
||||
cur = sqlite3.connect(db_path, 2).cursor()
|
||||
db_path = os.path.join(histpath, "up2k.db")
|
||||
if not bos.path.exists(db_path):
|
||||
return None
|
||||
|
||||
cur = None
|
||||
if ANYWIN:
|
||||
uri = ""
|
||||
try:
|
||||
uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
|
||||
cur = sqlite3.connect(uri, 2, uri=True).cursor()
|
||||
self.log("ro: {}".format(db_path))
|
||||
except:
|
||||
self.log("could not open read-only: {}\n{}".format(uri, min_ex()))
|
||||
|
||||
if not cur:
|
||||
# on windows, this steals the write-lock from up2k.deferred_init --
|
||||
# seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
|
||||
cur = sqlite3.connect(db_path, 2).cursor()
|
||||
self.log("opened {}".format(db_path))
|
||||
|
||||
self.cur[ptop] = cur
|
||||
return cur
|
||||
|
||||
def search(self, vols, uq):
|
||||
def search(
|
||||
self, vols: list[tuple[str, str, dict[str, Any]]], uq: str, lim: int
|
||||
) -> tuple[list[dict[str, Any]], list[str]]:
|
||||
"""search by query params"""
|
||||
if not HAVE_SQLITE3:
|
||||
return []
|
||||
return [], []
|
||||
|
||||
q = ""
|
||||
va = []
|
||||
joins = ""
|
||||
v: Union[str, int] = ""
|
||||
va: list[Union[str, int]] = []
|
||||
have_up = False # query has up.* operands
|
||||
have_mt = False
|
||||
is_key = True
|
||||
is_size = False
|
||||
is_date = False
|
||||
field_end = "" # closing parenthesis or whatever
|
||||
kw_key = ["(", ")", "and ", "or ", "not "]
|
||||
kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "]
|
||||
ptn_mt = re.compile(r"^\.?[a-z]+$")
|
||||
mt_ctr = 0
|
||||
mt_keycmp = "substr(up.w,1,16)"
|
||||
mt_keycmp2 = None
|
||||
ptn_lc = re.compile(r" (mt[0-9]+\.v) ([=<!>]+) \? $")
|
||||
ptn_mt = re.compile(r"^\.?[a-z_-]+$")
|
||||
ptn_lc = re.compile(r" (mt\.v) ([=<!>]+) \? \) $")
|
||||
ptn_lcv = re.compile(r"[a-zA-Z]")
|
||||
|
||||
while True:
|
||||
@@ -111,35 +147,47 @@ class U2idx(object):
|
||||
if ok:
|
||||
continue
|
||||
|
||||
v, uq = (uq + " ").split(" ", 1)
|
||||
if uq.startswith('"'):
|
||||
v, uq = uq[1:].split('"', 1)
|
||||
while v.endswith("\\"):
|
||||
v2, uq = uq.split('"', 1)
|
||||
v = v[:-1] + '"' + v2
|
||||
uq = uq.strip()
|
||||
else:
|
||||
v, uq = (uq + " ").split(" ", 1)
|
||||
v = v.replace('\\"', '"')
|
||||
|
||||
if is_key:
|
||||
is_key = False
|
||||
|
||||
if v == "size":
|
||||
v = "up.sz"
|
||||
is_size = True
|
||||
have_up = True
|
||||
|
||||
elif v == "date":
|
||||
v = "up.mt"
|
||||
is_date = True
|
||||
have_up = True
|
||||
|
||||
elif v == "path":
|
||||
v = "up.rd"
|
||||
v = "trim(?||up.rd,'/')"
|
||||
va.append("\nrd")
|
||||
have_up = True
|
||||
|
||||
elif v == "name":
|
||||
v = "up.fn"
|
||||
have_up = True
|
||||
|
||||
elif v == "tags" or ptn_mt.match(v):
|
||||
mt_ctr += 1
|
||||
mt_keycmp2 = "mt{}.w".format(mt_ctr)
|
||||
joins += "inner join mt mt{} on {} = {} ".format(
|
||||
mt_ctr, mt_keycmp, mt_keycmp2
|
||||
)
|
||||
mt_keycmp = mt_keycmp2
|
||||
have_mt = True
|
||||
field_end = ") "
|
||||
if v == "tags":
|
||||
v = "mt{0}.v".format(mt_ctr)
|
||||
vq = "mt.v"
|
||||
else:
|
||||
v = "+mt{0}.k = '{1}' and mt{0}.v".format(mt_ctr, v)
|
||||
vq = "+mt.k = '{}' and mt.v".format(v)
|
||||
|
||||
v = "exists(select 1 from mt where mt.w = mtw and " + vq
|
||||
|
||||
else:
|
||||
raise Pebkac(400, "invalid key [" + v + "]")
|
||||
@@ -152,18 +200,17 @@ class U2idx(object):
|
||||
|
||||
if is_date:
|
||||
is_date = False
|
||||
v = v.upper().rstrip("Z").replace(",", " ").replace("T", " ")
|
||||
while " " in v:
|
||||
v = v.replace(" ", " ")
|
||||
|
||||
v = re.sub(r"[tzTZ, ]+", " ", v).strip()
|
||||
for fmt in [
|
||||
"%Y-%m-%d %H:%M:%S",
|
||||
"%Y-%m-%d %H:%M",
|
||||
"%Y-%m-%d %H",
|
||||
"%Y-%m-%d",
|
||||
"%Y-%m",
|
||||
"%Y",
|
||||
]:
|
||||
try:
|
||||
v = datetime.strptime(v, fmt).timestamp()
|
||||
v = calendar.timegm(time.strptime(str(v), fmt))
|
||||
break
|
||||
except:
|
||||
pass
|
||||
@@ -185,28 +232,41 @@ class U2idx(object):
|
||||
va.append(v)
|
||||
is_key = True
|
||||
|
||||
if field_end:
|
||||
q += field_end
|
||||
field_end = ""
|
||||
|
||||
# lowercase tag searches
|
||||
m = ptn_lc.search(q)
|
||||
if not m or not ptn_lcv.search(unicode(v)):
|
||||
zs = unicode(v)
|
||||
if not m or not ptn_lcv.search(zs):
|
||||
continue
|
||||
|
||||
va.pop()
|
||||
va.append(v.lower())
|
||||
va.append(zs.lower())
|
||||
q = q[: m.start()]
|
||||
|
||||
field, oper = m.groups()
|
||||
if oper in ["=", "=="]:
|
||||
q += " {} like ? ".format(field)
|
||||
q += " {} like ? ) ".format(field)
|
||||
else:
|
||||
q += " lower({}) {} ? ".format(field, oper)
|
||||
q += " lower({}) {} ? ) ".format(field, oper)
|
||||
|
||||
try:
|
||||
return self.run_query(vols, joins + "where " + q, va)
|
||||
return self.run_query(vols, q, va, have_up, have_mt, lim)
|
||||
except Exception as ex:
|
||||
raise Pebkac(500, repr(ex))
|
||||
|
||||
def run_query(self, vols, uq, uv):
|
||||
done_flag = []
|
||||
def run_query(
|
||||
self,
|
||||
vols: list[tuple[str, str, dict[str, Any]]],
|
||||
uq: str,
|
||||
uv: list[Union[str, int]],
|
||||
have_up: bool,
|
||||
have_mt: bool,
|
||||
lim: int,
|
||||
) -> tuple[list[dict[str, Any]], list[str]]:
|
||||
done_flag: list[bool] = []
|
||||
self.active_id = "{:.6f}_{}".format(
|
||||
time.time(), threading.current_thread().ident
|
||||
)
|
||||
@@ -222,16 +282,17 @@ class U2idx(object):
|
||||
thr.start()
|
||||
|
||||
if not uq or not uv:
|
||||
q = "select * from up"
|
||||
v = ()
|
||||
uq = "select * from up"
|
||||
uv = []
|
||||
elif have_mt:
|
||||
uq = "select up.*, substr(up.w,1,16) mtw from up where " + uq
|
||||
else:
|
||||
q = "select up.* from up " + uq
|
||||
v = tuple(uv)
|
||||
uq = "select up.* from up where " + uq
|
||||
|
||||
self.log("qs: {!r} {!r}".format(q, v))
|
||||
self.log("qs: {!r} {!r}".format(uq, uv))
|
||||
|
||||
ret = []
|
||||
lim = 1000
|
||||
lim = min(lim, int(self.args.srch_hits))
|
||||
taglist = {}
|
||||
for (vtop, ptop, flags) in vols:
|
||||
cur = self.get_cur(ptop)
|
||||
@@ -240,25 +301,49 @@ class U2idx(object):
|
||||
|
||||
self.active_cur = cur
|
||||
|
||||
vuv = []
|
||||
for v in uv:
|
||||
if v == "\nrd":
|
||||
v = vtop + "/"
|
||||
|
||||
vuv.append(v)
|
||||
|
||||
sret = []
|
||||
c = cur.execute(q, v)
|
||||
fk = flags.get("fk")
|
||||
c = cur.execute(uq, tuple(vuv))
|
||||
for hit in c:
|
||||
w, ts, sz, rd, fn = hit
|
||||
w, ts, sz, rd, fn, ip, at = hit[:7]
|
||||
lim -= 1
|
||||
if lim <= 0:
|
||||
if lim < 0:
|
||||
break
|
||||
|
||||
if rd.startswith("//") or fn.startswith("//"):
|
||||
rd, fn = s3dec(rd, fn)
|
||||
|
||||
rp = "/".join([x for x in [vtop, rd, fn] if x])
|
||||
if not fk:
|
||||
suf = ""
|
||||
else:
|
||||
try:
|
||||
ap = absreal(os.path.join(ptop, rd, fn))
|
||||
inf = bos.stat(ap)
|
||||
except:
|
||||
continue
|
||||
|
||||
suf = (
|
||||
"?k="
|
||||
+ gen_filekey(
|
||||
self.args.fk_salt, ap, sz, 0 if ANYWIN else inf.st_ino
|
||||
)[:fk]
|
||||
)
|
||||
|
||||
rp = quotep("/".join([x for x in [vtop, rd, fn] if x])) + suf
|
||||
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
|
||||
|
||||
for hit in sret:
|
||||
w = hit["w"]
|
||||
del hit["w"]
|
||||
tags = {}
|
||||
q2 = "select k, v from mt where w = ? and k != 'x'"
|
||||
q2 = "select k, v from mt where w = ? and +k != 'x'"
|
||||
for k, v2 in cur.execute(q2, (w,)):
|
||||
taglist[k] = True
|
||||
tags[k] = v2
|
||||
@@ -269,21 +354,26 @@ class U2idx(object):
|
||||
# print("[{}] {}".format(ptop, sret))
|
||||
|
||||
done_flag.append(True)
|
||||
self.active_id = None
|
||||
self.active_id = ""
|
||||
|
||||
# undupe hits from multiple metadata keys
|
||||
if len(ret) > 1:
|
||||
ret = [ret[0]] + [
|
||||
y for x, y in zip(ret[:-1], ret[1:]) if x["rp"] != y["rp"]
|
||||
y
|
||||
for x, y in zip(ret[:-1], ret[1:])
|
||||
if x["rp"].split("?")[0] != y["rp"].split("?")[0]
|
||||
]
|
||||
|
||||
ret.sort(key=itemgetter("rp"))
|
||||
|
||||
return ret, list(taglist.keys())
|
||||
|
||||
def terminator(self, identifier, done_flag):
|
||||
def terminator(self, identifier: str, done_flag: list[bool]) -> None:
|
||||
for _ in range(self.timeout):
|
||||
time.sleep(1)
|
||||
if done_flag:
|
||||
return
|
||||
|
||||
if identifier == self.active_id:
|
||||
assert self.active_cur
|
||||
self.active_cur.connection.interrupt()
|
||||
|
||||
2622
copyparty/up2k.py
2622
copyparty/up2k.py
File diff suppressed because it is too large
Load Diff
1643
copyparty/util.py
1643
copyparty/util.py
File diff suppressed because it is too large
Load Diff
@@ -17,12 +17,11 @@ window.baguetteBox = (function () {
|
||||
titleTag: false,
|
||||
async: false,
|
||||
preload: 2,
|
||||
animation: 'slideIn',
|
||||
afterShow: null,
|
||||
afterHide: null,
|
||||
onChange: null,
|
||||
},
|
||||
overlay, slider, btnPrev, btnNext, btnHelp, btnVmode, btnClose,
|
||||
overlay, slider, btnPrev, btnNext, btnHelp, btnAnim, btnRotL, btnRotR, btnSel, btnFull, btnVmode, btnClose,
|
||||
currentGallery = [],
|
||||
currentIndex = 0,
|
||||
isOverlayVisible = false,
|
||||
@@ -30,13 +29,17 @@ window.baguetteBox = (function () {
|
||||
touchFlag = false, // busy
|
||||
re_i = /.+\.(gif|jpe?g|png|webp)(\?|$)/i,
|
||||
re_v = /.+\.(webm|mp4)(\?|$)/i,
|
||||
anims = ['slideIn', 'fadeIn', 'none'],
|
||||
data = {}, // all galleries
|
||||
imagesElements = [],
|
||||
documentLastFocus = null,
|
||||
isFullscreen = false,
|
||||
vmute = false,
|
||||
vloop = false,
|
||||
vnext = false,
|
||||
vloop = sread('vmode') == 'L',
|
||||
vnext = sread('vmode') == 'C',
|
||||
loopA = null,
|
||||
loopB = null,
|
||||
url_ts = null,
|
||||
resume_mp = false;
|
||||
|
||||
var onFSC = function (e) {
|
||||
@@ -49,7 +52,7 @@ window.baguetteBox = (function () {
|
||||
};
|
||||
|
||||
var touchstartHandler = function (e) {
|
||||
touch.count++;
|
||||
touch.count = e.touches.length;
|
||||
if (touch.count > 1)
|
||||
touch.multitouch = true;
|
||||
|
||||
@@ -72,8 +75,11 @@ window.baguetteBox = (function () {
|
||||
hideOverlay();
|
||||
}
|
||||
};
|
||||
var touchendHandler = function () {
|
||||
var touchendHandler = function (e) {
|
||||
touch.count--;
|
||||
if (e && e.touches)
|
||||
touch.count = e.touches.length;
|
||||
|
||||
if (touch.count <= 0)
|
||||
touch.multitouch = false;
|
||||
|
||||
@@ -175,6 +181,11 @@ window.baguetteBox = (function () {
|
||||
'<button id="bbox-next" class="bbox-btn" type="button" aria-label="Next">></button>' +
|
||||
'<div id="bbox-btns">' +
|
||||
'<button id="bbox-help" type="button">?</button>' +
|
||||
'<button id="bbox-anim" type="button" tt="a">-</button>' +
|
||||
'<button id="bbox-rotl" type="button">↶</button>' +
|
||||
'<button id="bbox-rotr" type="button">↷</button>' +
|
||||
'<button id="bbox-tsel" type="button">sel</button>' +
|
||||
'<button id="bbox-full" type="button">⛶</button>' +
|
||||
'<button id="bbox-vmode" type="button" tt="a"></button>' +
|
||||
'<button id="bbox-close" type="button" aria-label="Close">X</button>' +
|
||||
'</div></div>'
|
||||
@@ -187,9 +198,13 @@ window.baguetteBox = (function () {
|
||||
btnPrev = ebi('bbox-prev');
|
||||
btnNext = ebi('bbox-next');
|
||||
btnHelp = ebi('bbox-help');
|
||||
btnAnim = ebi('bbox-anim');
|
||||
btnRotL = ebi('bbox-rotl');
|
||||
btnRotR = ebi('bbox-rotr');
|
||||
btnSel = ebi('bbox-tsel');
|
||||
btnFull = ebi('bbox-full');
|
||||
btnVmode = ebi('bbox-vmode');
|
||||
btnClose = ebi('bbox-close');
|
||||
bindEvents();
|
||||
}
|
||||
|
||||
function halp() {
|
||||
@@ -203,22 +218,27 @@ window.baguetteBox = (function () {
|
||||
['right, L', 'next file'],
|
||||
['home', 'first file'],
|
||||
['end', 'last file'],
|
||||
['R', 'rotate (shift=ccw)'],
|
||||
['F', 'toggle fullscreen'],
|
||||
['S', 'toggle file selection'],
|
||||
['space, P, K', 'video: play / pause'],
|
||||
['U', 'video: seek 10sec back'],
|
||||
['P', 'video: seek 10sec ahead'],
|
||||
['0..9', 'video: seek 0%..90%'],
|
||||
['M', 'video: toggle mute'],
|
||||
['R', 'video: toggle loop'],
|
||||
['V', 'video: toggle loop'],
|
||||
['C', 'video: toggle auto-next'],
|
||||
['F', 'video: toggle fullscreen'],
|
||||
['<code>[</code>, <code>]</code>', 'video: loop start / end'],
|
||||
],
|
||||
d = mknod('table'),
|
||||
d = mknod('table', 'bbox-halp'),
|
||||
html = ['<tbody>'];
|
||||
|
||||
for (var a = 0; a < list.length; a++)
|
||||
html.push('<tr><td>' + list[a][0] + '</td><td>' + list[a][1] + '</td></tr>');
|
||||
|
||||
html.push('<tr><td colspan="2">tap middle of img to hide btns</td></tr>');
|
||||
html.push('<tr><td colspan="2">tap left/right sides for prev/next</td></tr>');
|
||||
d.innerHTML = html.join('\n') + '</tbody>';
|
||||
d.setAttribute('id', 'bbox-halp');
|
||||
d.onclick = function () {
|
||||
overlay.removeChild(d);
|
||||
};
|
||||
@@ -226,10 +246,10 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function keyDownHandler(e) {
|
||||
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
|
||||
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing || modal.busy)
|
||||
return;
|
||||
|
||||
var k = e.code + '', v = vid();
|
||||
var k = e.code + '', v = vid(), pos = -1;
|
||||
|
||||
if (k == "ArrowLeft" || k == "KeyJ")
|
||||
showPreviousImage();
|
||||
@@ -245,11 +265,13 @@ window.baguetteBox = (function () {
|
||||
playpause();
|
||||
else if (k == "KeyU" || k == "KeyO")
|
||||
relseek(k == "KeyU" ? -10 : 10);
|
||||
else if (k.indexOf('Digit') === 0)
|
||||
vid().currentTime = vid().duration * parseInt(k.slice(-1)) * 0.1;
|
||||
else if (k == "KeyM" && v) {
|
||||
v.muted = vmute = !vmute;
|
||||
mp_ctl();
|
||||
}
|
||||
else if (k == "KeyR" && v) {
|
||||
else if (k == "KeyV" && v) {
|
||||
vloop = !vloop;
|
||||
vnext = vnext && !vloop;
|
||||
setVmode();
|
||||
@@ -260,13 +282,27 @@ window.baguetteBox = (function () {
|
||||
setVmode();
|
||||
}
|
||||
else if (k == "KeyF")
|
||||
try {
|
||||
if (isFullscreen)
|
||||
document.exitFullscreen();
|
||||
else
|
||||
v.requestFullscreen();
|
||||
}
|
||||
catch (ex) { }
|
||||
tglfull();
|
||||
else if (k == "KeyS")
|
||||
tglsel();
|
||||
else if (k == "KeyR")
|
||||
rotn(e.shiftKey ? -1 : 1);
|
||||
else if (k == "KeyY")
|
||||
dlpic();
|
||||
else if (k == "BracketLeft")
|
||||
setloop(1);
|
||||
else if (k == "BracketRight")
|
||||
setloop(2);
|
||||
}
|
||||
|
||||
function anim() {
|
||||
var i = (anims.indexOf(options.animation) + 1) % anims.length,
|
||||
o = options;
|
||||
swrite('ganim', anims[i]);
|
||||
options = {};
|
||||
setOptions(o);
|
||||
if (tt.en)
|
||||
tt.show.bind(this)();
|
||||
}
|
||||
|
||||
function setVmode() {
|
||||
@@ -279,7 +315,7 @@ window.baguetteBox = (function () {
|
||||
if (vloop) {
|
||||
lbl = 'Loop';
|
||||
msg += 'repeat it';
|
||||
tts = '$NHotkey: R';
|
||||
tts = '$NHotkey: V';
|
||||
}
|
||||
else if (vnext) {
|
||||
lbl = 'Cont';
|
||||
@@ -293,6 +329,7 @@ window.baguetteBox = (function () {
|
||||
btnVmode.setAttribute('aria-label', msg);
|
||||
btnVmode.setAttribute('tt', msg + tts);
|
||||
btnVmode.textContent = lbl;
|
||||
swrite('vmode', lbl[0]);
|
||||
|
||||
v.loop = vloop
|
||||
if (vloop && v.paused)
|
||||
@@ -314,6 +351,60 @@ window.baguetteBox = (function () {
|
||||
tt.show.bind(this)();
|
||||
}
|
||||
|
||||
function findfile() {
|
||||
var thumb = currentGallery[currentIndex].imageElement,
|
||||
name = vsplit(thumb.href)[1].split('?')[0],
|
||||
files = msel.getall();
|
||||
|
||||
for (var a = 0; a < files.length; a++)
|
||||
if (vsplit(files[a].vp)[1] == name)
|
||||
return [name, a, files, ebi(files[a].id)];
|
||||
}
|
||||
|
||||
function tglfull() {
|
||||
try {
|
||||
if (isFullscreen)
|
||||
document.exitFullscreen();
|
||||
else
|
||||
(vid() || ebi('bbox-overlay')).requestFullscreen();
|
||||
}
|
||||
catch (ex) { alert(ex); }
|
||||
}
|
||||
|
||||
function tglsel() {
|
||||
var o = findfile()[3];
|
||||
clmod(o.closest('tr'), 'sel', 't');
|
||||
msel.selui();
|
||||
selbg();
|
||||
}
|
||||
|
||||
function dlpic() {
|
||||
var url = findfile()[3].href;
|
||||
url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache';
|
||||
dl_file(url);
|
||||
}
|
||||
|
||||
function selbg() {
|
||||
var img = vidimg(),
|
||||
thumb = currentGallery[currentIndex].imageElement,
|
||||
name = vsplit(thumb.href)[1].split('?')[0],
|
||||
files = msel.getsel(),
|
||||
sel = false;
|
||||
|
||||
for (var a = 0; a < files.length; a++)
|
||||
if (vsplit(files[a].vp)[1] == name)
|
||||
sel = true;
|
||||
|
||||
ebi('bbox-overlay').style.background = sel ?
|
||||
'rgba(153,34,85,0.7)' : '';
|
||||
|
||||
img.style.borderRadius = sel ? '1em' : '';
|
||||
btnSel.style.color = sel ? '#fff' : '';
|
||||
btnSel.style.background = sel ? '#d48' : '';
|
||||
btnSel.style.textShadow = sel ? '1px 1px 0 #b38' : '';
|
||||
btnSel.style.boxShadow = sel ? '.15em .15em 0 #502' : '';
|
||||
}
|
||||
|
||||
function keyUpHandler(e) {
|
||||
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
|
||||
return;
|
||||
@@ -342,12 +433,20 @@ window.baguetteBox = (function () {
|
||||
var nonPassiveEvent = passiveSupp ? { passive: true } : null;
|
||||
|
||||
function bindEvents() {
|
||||
bind(document, 'keydown', keyDownHandler);
|
||||
bind(document, 'keyup', keyUpHandler);
|
||||
bind(document, 'fullscreenchange', onFSC);
|
||||
bind(overlay, 'click', overlayClickHandler);
|
||||
bind(btnPrev, 'click', showPreviousImage);
|
||||
bind(btnNext, 'click', showNextImage);
|
||||
bind(btnClose, 'click', hideOverlay);
|
||||
bind(btnVmode, 'click', tglVmode);
|
||||
bind(btnHelp, 'click', halp);
|
||||
bind(btnAnim, 'click', anim);
|
||||
bind(btnRotL, 'click', rotl);
|
||||
bind(btnRotR, 'click', rotr);
|
||||
bind(btnSel, 'click', tglsel);
|
||||
bind(btnFull, 'click', tglfull);
|
||||
bind(slider, 'contextmenu', contextmenuHandler);
|
||||
bind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
|
||||
bind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
|
||||
@@ -356,17 +455,26 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function unbindEvents() {
|
||||
unbind(document, 'keydown', keyDownHandler);
|
||||
unbind(document, 'keyup', keyUpHandler);
|
||||
unbind(document, 'fullscreenchange', onFSC);
|
||||
unbind(overlay, 'click', overlayClickHandler);
|
||||
unbind(btnPrev, 'click', showPreviousImage);
|
||||
unbind(btnNext, 'click', showNextImage);
|
||||
unbind(btnClose, 'click', hideOverlay);
|
||||
unbind(btnVmode, 'click', tglVmode);
|
||||
unbind(btnHelp, 'click', halp);
|
||||
unbind(btnAnim, 'click', anim);
|
||||
unbind(btnRotL, 'click', rotl);
|
||||
unbind(btnRotR, 'click', rotr);
|
||||
unbind(btnSel, 'click', tglsel);
|
||||
unbind(btnFull, 'click', tglfull);
|
||||
unbind(slider, 'contextmenu', contextmenuHandler);
|
||||
unbind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
|
||||
unbind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
|
||||
unbind(overlay, 'touchend', touchendHandler);
|
||||
unbind(document, 'focus', trapFocusInsideOverlay, true);
|
||||
timer.rm(rotn);
|
||||
}
|
||||
|
||||
function prepareOverlay(gallery, userOptions) {
|
||||
@@ -381,9 +489,8 @@ window.baguetteBox = (function () {
|
||||
var imagesFiguresIds = [];
|
||||
var imagesCaptionsIds = [];
|
||||
for (var i = 0, fullImage; i < gallery.length; i++) {
|
||||
fullImage = mknod('div');
|
||||
fullImage = mknod('div', 'baguette-img-' + i);
|
||||
fullImage.className = 'full-image';
|
||||
fullImage.id = 'baguette-img-' + i;
|
||||
imagesElements.push(fullImage);
|
||||
|
||||
imagesFiguresIds.push('bbox-figure-' + i);
|
||||
@@ -403,7 +510,12 @@ window.baguetteBox = (function () {
|
||||
if (typeof newOptions[item] !== 'undefined')
|
||||
options[item] = newOptions[item];
|
||||
}
|
||||
slider.style.transition = (options.animation === 'fadeIn' ? 'opacity .4s ease' :
|
||||
|
||||
var an = options.animation = sread('ganim') || anims[ANIM ? 0 : 2];
|
||||
btnAnim.textContent = ['⇄', '⮺', '⚡'][anims.indexOf(an)];
|
||||
btnAnim.setAttribute('tt', 'animation: ' + an);
|
||||
|
||||
slider.style.transition = (options.animation === 'fadeIn' ? 'opacity .3s ease' :
|
||||
options.animation === 'slideIn' ? '' : 'none');
|
||||
|
||||
if (options.buttons === 'auto' && ('ontouchstart' in window || currentGallery.length === 1))
|
||||
@@ -420,9 +532,7 @@ window.baguetteBox = (function () {
|
||||
if (overlay.style.display === 'block')
|
||||
return;
|
||||
|
||||
bind(document, 'keydown', keyDownHandler);
|
||||
bind(document, 'keyup', keyUpHandler);
|
||||
bind(document, 'fullscreenchange', onFSC);
|
||||
bindEvents();
|
||||
currentIndex = chosenImageIndex;
|
||||
touch = {
|
||||
count: 0,
|
||||
@@ -434,6 +544,10 @@ window.baguetteBox = (function () {
|
||||
preloadPrev(currentIndex);
|
||||
});
|
||||
|
||||
clmod(ebi('bbox-btns'), 'off');
|
||||
clmod(btnPrev, 'off');
|
||||
clmod(btnNext, 'off');
|
||||
|
||||
updateOffset();
|
||||
overlay.style.display = 'block';
|
||||
// Fade in overlay
|
||||
@@ -446,9 +560,10 @@ window.baguetteBox = (function () {
|
||||
options.afterShow();
|
||||
}, 50);
|
||||
|
||||
if (options.onChange)
|
||||
if (options.onChange && !url_ts)
|
||||
options.onChange(currentIndex, imagesElements.length);
|
||||
|
||||
url_ts = null;
|
||||
documentLastFocus = document.activeElement;
|
||||
btnClose.focus();
|
||||
isOverlayVisible = true;
|
||||
@@ -464,9 +579,14 @@ window.baguetteBox = (function () {
|
||||
if (overlay.style.display === 'none')
|
||||
return;
|
||||
|
||||
unbind(document, 'keydown', keyDownHandler);
|
||||
unbind(document, 'keyup', keyUpHandler);
|
||||
unbind(document, 'fullscreenchange', onFSC);
|
||||
sethash('');
|
||||
unbindEvents();
|
||||
try {
|
||||
document.exitFullscreen();
|
||||
isFullscreen = false;
|
||||
}
|
||||
catch (ex) { }
|
||||
|
||||
// Fade out and hide the overlay
|
||||
overlay.className = '';
|
||||
setTimeout(function () {
|
||||
@@ -474,9 +594,7 @@ window.baguetteBox = (function () {
|
||||
if (options.bodyClass && document.body.classList)
|
||||
document.body.classList.remove(options.bodyClass);
|
||||
|
||||
var h = ebi('bbox-halp');
|
||||
if (h)
|
||||
h.parentNode.removeChild(h);
|
||||
qsr('#bbox-halp');
|
||||
|
||||
if (options.afterHide)
|
||||
options.afterHide();
|
||||
@@ -514,16 +632,14 @@ window.baguetteBox = (function () {
|
||||
if (is_vid && index != currentIndex)
|
||||
return; // no preload
|
||||
|
||||
var figure = mknod('figure');
|
||||
figure.id = 'bbox-figure-' + index;
|
||||
var figure = mknod('figure', 'bbox-figure-' + index);
|
||||
figure.innerHTML = '<div class="bbox-spinner">' +
|
||||
'<div class="bbox-double-bounce1"></div>' +
|
||||
'<div class="bbox-double-bounce2"></div>' +
|
||||
'</div>';
|
||||
|
||||
if (options.captions && imageCaption) {
|
||||
var figcaption = mknod('figcaption');
|
||||
figcaption.id = 'bbox-figcaption-' + index;
|
||||
var figcaption = mknod('figcaption', 'bbox-figcaption-' + index);
|
||||
figcaption.innerHTML = imageCaption;
|
||||
figure.appendChild(figcaption);
|
||||
}
|
||||
@@ -534,8 +650,7 @@ window.baguetteBox = (function () {
|
||||
|
||||
image.addEventListener(is_vid ? 'loadedmetadata' : 'load', function () {
|
||||
// Remove loader element
|
||||
var spinner = QS('#baguette-img-' + index + ' .bbox-spinner');
|
||||
figure.removeChild(spinner);
|
||||
qsr('#baguette-img-' + index + ' .bbox-spinner');
|
||||
if (!options.async && callback)
|
||||
callback();
|
||||
});
|
||||
@@ -584,18 +699,12 @@ window.baguetteBox = (function () {
|
||||
showOverlay(index);
|
||||
return true;
|
||||
}
|
||||
if (index < 0) {
|
||||
if (options.animation)
|
||||
bounceAnimation('left');
|
||||
|
||||
return false;
|
||||
}
|
||||
if (index >= imagesElements.length) {
|
||||
if (options.animation)
|
||||
bounceAnimation('right');
|
||||
if (index < 0)
|
||||
return bounceAnimation('left');
|
||||
|
||||
return false;
|
||||
}
|
||||
if (index >= imagesElements.length)
|
||||
return bounceAnimation('right');
|
||||
|
||||
var v = vid();
|
||||
if (v) {
|
||||
@@ -617,13 +726,104 @@ window.baguetteBox = (function () {
|
||||
return true;
|
||||
}
|
||||
|
||||
var prev_cw = 0, prev_ch = 0, unrot_timer = null;
|
||||
function rotn(n) {
|
||||
var el = vidimg(),
|
||||
orot = parseInt(el.getAttribute('rot') || 0),
|
||||
frot = orot + (n || 0) * 90;
|
||||
|
||||
if (!frot && !orot)
|
||||
return; // reflow noop
|
||||
|
||||
var co = ebi('bbox-overlay'),
|
||||
cw = co.clientWidth,
|
||||
ch = co.clientHeight;
|
||||
|
||||
if (!n && prev_cw === cw && prev_ch === ch)
|
||||
return; // reflow noop
|
||||
|
||||
prev_cw = cw;
|
||||
prev_ch = ch;
|
||||
var rot = frot,
|
||||
iw = el.naturalWidth || el.videoWidth,
|
||||
ih = el.naturalHeight || el.videoHeight,
|
||||
magic = 4, // idk, works in enough browsers
|
||||
dl = el.closest('div').querySelector('figcaption a'),
|
||||
vw = cw,
|
||||
vh = ch - dl.offsetHeight + magic,
|
||||
pmag = Math.min(1, Math.min(vw / ih, vh / iw)),
|
||||
wmag = Math.min(1, Math.min(vw / iw, vh / ih));
|
||||
|
||||
while (rot < 0) rot += 360;
|
||||
while (rot >= 360) rot -= 360;
|
||||
var q = rot == 90 || rot == 270 ? 1 : 0,
|
||||
mag = q ? pmag : wmag;
|
||||
|
||||
el.style.cssText = 'max-width:none; max-height:none; position:absolute; display:block; margin:0';
|
||||
if (!orot) {
|
||||
el.style.width = iw * wmag + 'px';
|
||||
el.style.height = ih * wmag + 'px';
|
||||
el.style.left = (vw - iw * wmag) / 2 + 'px';
|
||||
el.style.top = (vh - ih * wmag) / 2 - magic + 'px';
|
||||
q = el.offsetHeight;
|
||||
}
|
||||
el.style.width = iw * mag + 'px';
|
||||
el.style.height = ih * mag + 'px';
|
||||
el.style.left = (vw - iw * mag) / 2 + 'px';
|
||||
el.style.top = (vh - ih * mag) / 2 - magic + 'px';
|
||||
el.style.transform = 'rotate(' + frot + 'deg)';
|
||||
el.setAttribute('rot', frot);
|
||||
timer.add(rotn);
|
||||
if (!rot) {
|
||||
clearTimeout(unrot_timer);
|
||||
unrot_timer = setTimeout(unrot, 300);
|
||||
}
|
||||
}
|
||||
function rotl() {
|
||||
rotn(-1);
|
||||
}
|
||||
function rotr() {
|
||||
rotn(1);
|
||||
}
|
||||
function unrot() {
|
||||
var el = vidimg(),
|
||||
orot = el.getAttribute('rot'),
|
||||
rot = parseInt(orot || 0);
|
||||
|
||||
while (rot < 0) rot += 360;
|
||||
while (rot >= 360) rot -= 360;
|
||||
if (rot || orot === null)
|
||||
return;
|
||||
|
||||
clmod(el, 'nt', 1);
|
||||
el.removeAttribute('rot');
|
||||
el.removeAttribute("style");
|
||||
rot = el.offsetHeight;
|
||||
clmod(el, 'nt');
|
||||
timer.rm(rotn);
|
||||
}
|
||||
|
||||
function vid() {
|
||||
return imagesElements[currentIndex].querySelector('video');
|
||||
}
|
||||
|
||||
function vidimg() {
|
||||
return imagesElements[currentIndex].querySelector('img, video');
|
||||
}
|
||||
|
||||
function playvid(play) {
|
||||
if (vid())
|
||||
vid()[play ? 'play' : 'pause']();
|
||||
if (!play) {
|
||||
timer.rm(loopchk);
|
||||
loopA = loopB = null;
|
||||
}
|
||||
|
||||
var v = vid();
|
||||
if (!v)
|
||||
return;
|
||||
|
||||
v[play ? 'play' : 'pause']();
|
||||
if (play && loopA !== null && v.currentTime < loopA)
|
||||
v.currentTime = loopA;
|
||||
}
|
||||
|
||||
function playpause() {
|
||||
@@ -642,6 +842,38 @@ window.baguetteBox = (function () {
|
||||
showNextImage();
|
||||
}
|
||||
|
||||
function setloop(side) {
|
||||
var v = vid();
|
||||
if (!v)
|
||||
return;
|
||||
|
||||
var t = v.currentTime;
|
||||
if (side == 1) loopA = t;
|
||||
if (side == 2) loopB = t;
|
||||
if (side)
|
||||
toast.inf(5, 'Loop' + (side == 1 ? 'A' : 'B') + ': ' + f2f(t, 2));
|
||||
|
||||
if (loopB !== null) {
|
||||
timer.add(loopchk);
|
||||
sethash(window.location.hash.slice(1).split('&')[0] + '&t=' + (loopA || 0) + '-' + loopB);
|
||||
}
|
||||
}
|
||||
|
||||
function loopchk() {
|
||||
if (loopB === null)
|
||||
return;
|
||||
|
||||
var v = vid();
|
||||
if (!v || v.paused || v.currentTime < loopB)
|
||||
return;
|
||||
|
||||
v.currentTime = loopA || 0;
|
||||
}
|
||||
|
||||
function urltime(txt) {
|
||||
url_ts = txt;
|
||||
}
|
||||
|
||||
function mp_ctl() {
|
||||
var v = vid();
|
||||
if (!vmute && v && mp.au && !mp.au.paused) {
|
||||
@@ -655,22 +887,29 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function bounceAnimation(direction) {
|
||||
slider.className = 'bounce-from-' + direction;
|
||||
slider.className = options.animation == 'slideIn' ? 'bounce-from-' + direction : 'eog';
|
||||
setTimeout(function () {
|
||||
slider.className = '';
|
||||
}, 400);
|
||||
}, 300);
|
||||
return false;
|
||||
}
|
||||
|
||||
function updateOffset() {
|
||||
var offset = -currentIndex * 100 + '%';
|
||||
var offset = -currentIndex * 100 + '%',
|
||||
xform = slider.style.perspective !== undefined;
|
||||
|
||||
if (options.animation === 'fadeIn') {
|
||||
slider.style.opacity = 0;
|
||||
setTimeout(function () {
|
||||
slider.style.transform = 'translate3d(' + offset + ',0,0)';
|
||||
xform ?
|
||||
slider.style.transform = 'translate3d(' + offset + ',0,0)' :
|
||||
slider.style.left = offset;
|
||||
slider.style.opacity = 1;
|
||||
}, 400);
|
||||
}, 100);
|
||||
} else {
|
||||
slider.style.transform = 'translate3d(' + offset + ',0,0)';
|
||||
xform ?
|
||||
slider.style.transform = 'translate3d(' + offset + ',0,0)' :
|
||||
slider.style.left = offset;
|
||||
}
|
||||
playvid(false);
|
||||
var v = vid();
|
||||
@@ -678,9 +917,53 @@ window.baguetteBox = (function () {
|
||||
playvid(true);
|
||||
v.muted = vmute;
|
||||
v.loop = vloop;
|
||||
if (url_ts) {
|
||||
var seek = ('' + url_ts).split('-');
|
||||
v.currentTime = seek[0];
|
||||
if (seek.length > 1) {
|
||||
loopA = parseFloat(seek[0]);
|
||||
loopB = parseFloat(seek[1]);
|
||||
setloop();
|
||||
}
|
||||
}
|
||||
}
|
||||
selbg();
|
||||
mp_ctl();
|
||||
setVmode();
|
||||
|
||||
var el = vidimg();
|
||||
if (el.getAttribute('rot'))
|
||||
timer.add(rotn);
|
||||
else
|
||||
timer.rm(rotn);
|
||||
|
||||
var ctime = 0;
|
||||
el.onclick = v ? null : function (e) {
|
||||
var rc = e.target.getBoundingClientRect(),
|
||||
x = e.clientX - rc.left,
|
||||
fx = x / (rc.right - rc.left);
|
||||
|
||||
if (fx < 0.3)
|
||||
return showPreviousImage();
|
||||
|
||||
if (fx > 0.7)
|
||||
return showNextImage();
|
||||
|
||||
clmod(ebi('bbox-btns'), 'off', 't');
|
||||
clmod(btnPrev, 'off', 't');
|
||||
clmod(btnNext, 'off', 't');
|
||||
|
||||
if (Date.now() - ctime <= 500)
|
||||
tglfull();
|
||||
|
||||
ctime = Date.now();
|
||||
};
|
||||
|
||||
var prev = QS('.full-image.vis');
|
||||
if (prev)
|
||||
clmod(prev, 'vis');
|
||||
|
||||
clmod(el.closest('div'), 'vis', 1);
|
||||
}
|
||||
|
||||
function preloadNext(index) {
|
||||
@@ -712,8 +995,6 @@ window.baguetteBox = (function () {
|
||||
function destroyPlugin() {
|
||||
unbindEvents();
|
||||
clearCachedData();
|
||||
unbind(document, 'keydown', keyDownHandler);
|
||||
unbind(document, 'keyup', keyUpHandler);
|
||||
document.getElementsByTagName('body')[0].removeChild(ebi('bbox-overlay'));
|
||||
data = {};
|
||||
currentGallery = [];
|
||||
@@ -726,6 +1007,7 @@ window.baguetteBox = (function () {
|
||||
showNext: showNextImage,
|
||||
showPrevious: showPreviousImage,
|
||||
relseek: relseek,
|
||||
urltime: urltime,
|
||||
playpause: playpause,
|
||||
hide: hideOverlay,
|
||||
destroy: destroyPlugin
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -3,13 +3,14 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>⇆🎉 {{ title }}</title>
|
||||
<title>{{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css?_={{ ts }}">
|
||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css?_={{ ts }}">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/browser.css?_={{ ts }}">
|
||||
{%- if css %}
|
||||
<link rel="stylesheet" type="text/css" media="screen" href="{{ css }}?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ css }}?_={{ ts }}">
|
||||
{%- endif %}
|
||||
</head>
|
||||
|
||||
@@ -18,9 +19,9 @@
|
||||
|
||||
<div id="op_search" class="opview">
|
||||
{%- if have_tags_idx %}
|
||||
<div id="srch_form" class="tags"></div>
|
||||
<div id="srch_form" class="tags opbox"></div>
|
||||
{%- else %}
|
||||
<div id="srch_form"></div>
|
||||
<div id="srch_form" class="opbox"></div>
|
||||
{%- endif %}
|
||||
<div id="srch_q"></div>
|
||||
</div>
|
||||
@@ -31,15 +32,16 @@
|
||||
<div id="u2err"></div>
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
<input type="hidden" name="act" value="bput" />
|
||||
<input type="file" name="f" multiple><br />
|
||||
<input type="file" name="f" multiple /><br />
|
||||
<input type="submit" value="start upload">
|
||||
</form>
|
||||
<a id="bbsw" href="?b=u"><br />switch to basic browser</a>
|
||||
</div>
|
||||
|
||||
<div id="op_mkdir" class="opview opbox act">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
<input type="hidden" name="act" value="mkdir" />
|
||||
📂<input type="text" name="name" size="30">
|
||||
📂<input type="text" name="name" class="i">
|
||||
<input type="submit" value="make directory">
|
||||
</form>
|
||||
</div>
|
||||
@@ -47,24 +49,26 @@
|
||||
<div id="op_new_md" class="opview opbox">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
<input type="hidden" name="act" value="new_md" />
|
||||
📝<input type="text" name="name" size="30">
|
||||
📝<input type="text" name="name" class="i">
|
||||
<input type="submit" value="new markdown doc">
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="op_msg" class="opview opbox act">
|
||||
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
📟<input type="text" name="msg" size="30">
|
||||
<input type="submit" value="send msg to server log">
|
||||
📟<input type="text" name="msg" class="i">
|
||||
<input type="submit" value="send msg to srv log">
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="op_unpost" class="opview opbox"></div>
|
||||
|
||||
<div id="op_up2k" class="opview"></div>
|
||||
|
||||
<div id="op_cfg" class="opview opbox opwide"></div>
|
||||
|
||||
<h1 id="path">
|
||||
<a href="#" id="entree" tt="show directory tree$NHotkey: B">🌲</a>
|
||||
<a href="#" id="entree">🌲</a>
|
||||
{%- for n in vpnodes %}
|
||||
<a href="/{{ n[0] }}">{{ n[1] }}</a>
|
||||
{%- endfor %}
|
||||
@@ -74,6 +78,12 @@
|
||||
|
||||
<div id="wrap">
|
||||
|
||||
{%- if doc %}
|
||||
<div id="bdoc"><pre>{{ doc|e }}</pre></div>
|
||||
{%- else %}
|
||||
<div id="bdoc"></div>
|
||||
{%- endif %}
|
||||
|
||||
<div id="pro" class="logue">{{ logues[0] }}</div>
|
||||
|
||||
<table id="files">
|
||||
@@ -110,7 +120,9 @@
|
||||
|
||||
<div id="epi" class="logue">{{ logues[1] }}</div>
|
||||
|
||||
<h2><a href="/?h">control-panel</a></h2>
|
||||
<h2><a href="/?h" id="goh">control-panel</a></h2>
|
||||
|
||||
<a href="#" id="repl">π</a>
|
||||
|
||||
</div>
|
||||
|
||||
@@ -121,15 +133,39 @@
|
||||
<div id="widget"></div>
|
||||
|
||||
<script>
|
||||
var perms = {{ perms }},
|
||||
tag_order_cfg = {{ tag_order }},
|
||||
var acct = "{{ acct }}",
|
||||
perms = {{ perms }},
|
||||
themes = {{ themes }},
|
||||
dtheme = "{{ dtheme }}",
|
||||
srvinf = "{{ srv_info }}",
|
||||
lang = "{{ lang }}",
|
||||
dfavico = "{{ favico }}",
|
||||
def_hcols = {{ def_hcols|tojson }},
|
||||
have_up2k_idx = {{ have_up2k_idx|tojson }},
|
||||
have_tags_idx = {{ have_tags_idx|tojson }},
|
||||
have_zip = {{ have_zip|tojson }};
|
||||
have_acode = {{ have_acode|tojson }},
|
||||
have_mv = {{ have_mv|tojson }},
|
||||
have_del = {{ have_del|tojson }},
|
||||
have_unpost = {{ have_unpost }},
|
||||
have_zip = {{ have_zip|tojson }},
|
||||
lifetime = {{ lifetime }},
|
||||
turbolvl = {{ turbolvl }},
|
||||
u2sort = "{{ u2sort }}",
|
||||
have_emp = {{ have_emp|tojson }},
|
||||
txt_ext = "{{ txt_ext }}",
|
||||
{% if no_prism %}no_prism = 1,{% endif %}
|
||||
readme = {{ readme|tojson }},
|
||||
ls0 = {{ ls0|tojson }};
|
||||
|
||||
document.documentElement.className = localStorage.theme || dtheme;
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/baguettebox.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/browser.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/up2k.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}?_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -6,6 +6,7 @@
|
||||
<title>{{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
{{ html_head }}
|
||||
<style>
|
||||
html{font-family:sans-serif}
|
||||
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
|
||||
@@ -44,7 +45,9 @@
|
||||
<tr><td></td><td><a href="../{{ url_suf }}">parent folder</a></td><td>-</td><td>-</td></tr>
|
||||
|
||||
{%- for f in files %}
|
||||
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}{{ url_suf }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.dt }}</td></tr>
|
||||
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}{{
|
||||
'&' + url_suf[1:] if url_suf[:1] == '?' and '?' in f.href else url_suf
|
||||
}}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.dt }}</td></tr>
|
||||
{%- endfor %}
|
||||
|
||||
</tbody>
|
||||
|
||||
27
copyparty/web/cf.html
Normal file
27
copyparty/web/cf.html
Normal file
@@ -0,0 +1,27 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ svcname }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="box" style="opacity: 0; font-family: sans-serif">
|
||||
<h3>please press F5 to reload the page</h3>
|
||||
<p>sorry for the inconvenience</p>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
setTimeout(function() {
|
||||
document.getElementById('box').style.opacity = 1;
|
||||
}, 500);
|
||||
|
||||
parent.toast.ok(30, parent.L.cf_ok);
|
||||
parent.qsr('#cf_frame');
|
||||
</script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
@@ -13,8 +13,7 @@ audio_eq.apply = function () {
|
||||
|
||||
var can = ebi('fft_can');
|
||||
if (!can) {
|
||||
can = mknod('canvas');
|
||||
can.setAttribute('id', 'fft_can');
|
||||
can = mknod('canvas', 'fft_can');
|
||||
can.style.cssText = 'position:absolute;left:0;bottom:5em;width:' + w + 'px;height:' + h + 'px;z-index:9001';
|
||||
document.body.appendChild(can);
|
||||
can.width = w;
|
||||
|
||||
@@ -1,53 +1,16 @@
|
||||
@font-face {
|
||||
font-family: 'scp';
|
||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
|
||||
}
|
||||
html, body {
|
||||
color: #333;
|
||||
background: #eee;
|
||||
font-family: sans-serif;
|
||||
line-height: 1.5em;
|
||||
}
|
||||
#tt {
|
||||
position: fixed;
|
||||
max-width: 34em;
|
||||
background: #222;
|
||||
border: 0 solid #777;
|
||||
overflow: hidden;
|
||||
margin-top: 1em;
|
||||
padding: 0 1.3em;
|
||||
height: 0;
|
||||
opacity: .1;
|
||||
transition: opacity 0.14s, height 0.14s, padding 0.14s;
|
||||
box-shadow: 0 .2em .5em #222;
|
||||
border-radius: .4em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#tt.b {
|
||||
padding: 0 2em;
|
||||
border-radius: .5em;
|
||||
box-shadow: 0 .2em 1em #000;
|
||||
}
|
||||
#tt.show {
|
||||
padding: 1em 1.3em;
|
||||
border-width: .4em 0;
|
||||
height: auto;
|
||||
opacity: 1;
|
||||
}
|
||||
#tt.show.b {
|
||||
padding: 1.5em 2em;
|
||||
border-width: .5em 0;
|
||||
}
|
||||
#tt code {
|
||||
background: #3c3c3c;
|
||||
padding: .1em .3em;
|
||||
border-top: 1px solid #777;
|
||||
border-radius: .3em;
|
||||
font-family: monospace, monospace;
|
||||
line-height: 1.7em;
|
||||
}
|
||||
#tt em {
|
||||
color: #f6a;
|
||||
#repl {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
right: .5em;
|
||||
border: none;
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
#mtw {
|
||||
display: none;
|
||||
@@ -56,122 +19,12 @@ html, body {
|
||||
margin: 0 auto;
|
||||
padding: 0 1.5em;
|
||||
}
|
||||
pre, code, a {
|
||||
color: #480;
|
||||
background: #f7f7f7;
|
||||
border: .07em solid #ddd;
|
||||
border-radius: .2em;
|
||||
padding: .1em .3em;
|
||||
margin: 0 .1em;
|
||||
#toast {
|
||||
bottom: auto;
|
||||
top: 1.4em;
|
||||
}
|
||||
code {
|
||||
font-size: .96em;
|
||||
}
|
||||
pre, code {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
pre {
|
||||
counter-reset: precode;
|
||||
}
|
||||
pre code {
|
||||
counter-increment: precode;
|
||||
display: inline-block;
|
||||
margin: 0 -.3em;
|
||||
padding: .4em .5em;
|
||||
border: none;
|
||||
border-bottom: 1px solid #cdc;
|
||||
min-width: calc(100% - .6em);
|
||||
line-height: 1.1em;
|
||||
}
|
||||
pre code:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
pre code::before {
|
||||
content: counter(precode);
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
display: inline-block;
|
||||
text-align: right;
|
||||
font-size: .75em;
|
||||
color: #48a;
|
||||
width: 4em;
|
||||
padding-right: 1.5em;
|
||||
margin-left: -5.5em;
|
||||
}
|
||||
pre code:hover {
|
||||
background: #fec;
|
||||
color: #360;
|
||||
}
|
||||
h1, h2 {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
h1 {
|
||||
font-size: 1.7em;
|
||||
text-align: center;
|
||||
border: 1em solid #777;
|
||||
border-width: .05em 0;
|
||||
margin: 3em 0;
|
||||
}
|
||||
h2 {
|
||||
font-size: 1.5em;
|
||||
font-weight: normal;
|
||||
background: #f7f7f7;
|
||||
border-top: .07em solid #fff;
|
||||
border-bottom: .07em solid #bbb;
|
||||
border-radius: .5em .5em 0 0;
|
||||
padding-left: .4em;
|
||||
margin-top: 3em;
|
||||
}
|
||||
h3 {
|
||||
border-bottom: .1em solid #999;
|
||||
}
|
||||
h1 a, h3 a, h5 a,
|
||||
h2 a, h4 a, h6 a {
|
||||
color: inherit;
|
||||
display: block;
|
||||
background: none;
|
||||
border: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
#mp ul,
|
||||
#mp ol {
|
||||
border-left: .3em solid #ddd;
|
||||
}
|
||||
#m>ul,
|
||||
#m>ol {
|
||||
border-color: #bbb;
|
||||
}
|
||||
#mp ul>li {
|
||||
list-style-type: disc;
|
||||
}
|
||||
#mp ul>li,
|
||||
#mp ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
}
|
||||
blockquote {
|
||||
font-family: serif;
|
||||
background: #f7f7f7;
|
||||
border: .07em dashed #ccc;
|
||||
padding: 0 2em;
|
||||
margin: 1em 0;
|
||||
}
|
||||
small {
|
||||
opacity: .8;
|
||||
a {
|
||||
text-decoration: none;
|
||||
}
|
||||
#toc {
|
||||
margin: 0 1em;
|
||||
@@ -207,7 +60,7 @@ small {
|
||||
z-index: 99;
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
font-family: monospace, monospace;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
font-weight: bold;
|
||||
font-size: 1.3em;
|
||||
line-height: .1em;
|
||||
@@ -219,14 +72,6 @@ small {
|
||||
color: #6b3;
|
||||
text-shadow: .02em 0 0 #6b3;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
margin: 1em 0;
|
||||
}
|
||||
th, td {
|
||||
padding: .2em .5em;
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
blink {
|
||||
animation: blinker .7s cubic-bezier(.9, 0, .1, 1) infinite;
|
||||
}
|
||||
@@ -239,6 +84,36 @@ blink {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
.mdo pre {
|
||||
counter-reset: precode;
|
||||
}
|
||||
.mdo pre code {
|
||||
counter-increment: precode;
|
||||
display: inline-block;
|
||||
border: none;
|
||||
border-bottom: 1px solid #cdc;
|
||||
min-width: calc(100% - .6em);
|
||||
}
|
||||
.mdo pre code:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
.mdo pre code::before {
|
||||
content: counter(precode);
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
display: inline-block;
|
||||
text-align: right;
|
||||
font-size: .75em;
|
||||
color: #48a;
|
||||
width: 4em;
|
||||
padding-right: 1.5em;
|
||||
margin-left: -5.5em;
|
||||
}
|
||||
|
||||
|
||||
@media screen {
|
||||
html, body {
|
||||
margin: 0;
|
||||
@@ -255,34 +130,6 @@ blink {
|
||||
#mp {
|
||||
max-width: 52em;
|
||||
margin-bottom: 6em;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
word-wrap: break-word; /*ie*/
|
||||
}
|
||||
a {
|
||||
color: #fff;
|
||||
background: #39b;
|
||||
text-decoration: none;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
border-bottom: .07em solid #079;
|
||||
}
|
||||
h2 {
|
||||
color: #fff;
|
||||
background: #555;
|
||||
margin-top: 2em;
|
||||
border-bottom: .22em solid #999;
|
||||
border-top: none;
|
||||
}
|
||||
h1 {
|
||||
color: #fff;
|
||||
background: #444;
|
||||
font-weight: normal;
|
||||
border-top: .4em solid #fb0;
|
||||
border-bottom: .4em solid #777;
|
||||
border-radius: 0 1em 0 1em;
|
||||
margin: 3em 0 1em 0;
|
||||
padding: .5em 0;
|
||||
}
|
||||
#mn {
|
||||
padding: 1.3em 0 .7em 1em;
|
||||
@@ -314,7 +161,7 @@ blink {
|
||||
height: 1.05em;
|
||||
margin: -.2em .3em -.2em -.4em;
|
||||
display: inline-block;
|
||||
border: 1px solid rgba(0,0,0,0.2);
|
||||
border: 1px solid rgba(154,154,154,0.6);
|
||||
border-width: .2em .2em 0 0;
|
||||
transform: rotate(45deg);
|
||||
}
|
||||
@@ -335,6 +182,8 @@ blink {
|
||||
color: #444;
|
||||
background: none;
|
||||
text-decoration: underline;
|
||||
margin: 0 .1em;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
}
|
||||
#mh a:hover {
|
||||
@@ -363,100 +212,52 @@ blink {
|
||||
#toolsbox a+a {
|
||||
text-decoration: none;
|
||||
}
|
||||
#lno {
|
||||
position: absolute;
|
||||
right: 0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark,
|
||||
html.dark body {
|
||||
html.z,
|
||||
html.z body {
|
||||
background: #222;
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark #toc a {
|
||||
html.z #toc a {
|
||||
color: #ccc;
|
||||
border-left: .4em solid #444;
|
||||
border-bottom: .1em solid #333;
|
||||
}
|
||||
html.dark #toc a.act {
|
||||
html.z #toc a.act {
|
||||
color: #fff;
|
||||
border-left: .4em solid #3ad;
|
||||
}
|
||||
html.dark #toc li {
|
||||
html.z #toc li {
|
||||
border-width: 0;
|
||||
}
|
||||
html.dark #mp a {
|
||||
background: #057;
|
||||
}
|
||||
html.dark #mp h1 a, html.dark #mp h4 a,
|
||||
html.dark #mp h2 a, html.dark #mp h5 a,
|
||||
html.dark #mp h3 a, html.dark #mp h6 a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
color: #8c0;
|
||||
background: #1a1a1a;
|
||||
border: .07em solid #333;
|
||||
}
|
||||
html.dark #mp ul,
|
||||
html.dark #mp ol {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark #m>ul,
|
||||
html.dark #m>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
html.dark h1 {
|
||||
background: #383838;
|
||||
border-top: .4em solid #b80;
|
||||
border-bottom: .4em solid #4c4c4c;
|
||||
}
|
||||
html.dark h2 {
|
||||
background: #444;
|
||||
border-bottom: .22em solid #555;
|
||||
}
|
||||
html.dark td,
|
||||
html.dark th {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark blockquote {
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
html.dark #mn a:not(:last-child)::after {
|
||||
border-color: rgba(255,255,255,0.3);
|
||||
}
|
||||
html.dark #mn a {
|
||||
html.z #mn a {
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark #mn {
|
||||
html.z #mn {
|
||||
border-bottom: 1px solid #333;
|
||||
}
|
||||
html.dark #mn,
|
||||
html.dark #mh {
|
||||
html.z #mn,
|
||||
html.z #mh {
|
||||
background: #222;
|
||||
}
|
||||
html.dark #mh a {
|
||||
html.z #mh a {
|
||||
color: #ccc;
|
||||
background: none;
|
||||
}
|
||||
html.dark #mh a:hover {
|
||||
html.z #mh a:hover {
|
||||
background: #333;
|
||||
color: #fff;
|
||||
}
|
||||
html.dark #toolsbox {
|
||||
html.z #toolsbox {
|
||||
background: #222;
|
||||
}
|
||||
html.dark #toolsbox.open {
|
||||
html.z #toolsbox.open {
|
||||
box-shadow: 0 .2em .2em #069;
|
||||
border-radius: 0 0 .4em .4em;
|
||||
}
|
||||
@@ -503,24 +304,24 @@ blink {
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark #toc {
|
||||
|
||||
html.z #toc {
|
||||
background: #282828;
|
||||
border-top: 1px solid #2c2c2c;
|
||||
box-shadow: 0 0 1em #181818;
|
||||
}
|
||||
html.dark #toc,
|
||||
html.dark #mw {
|
||||
html.z #toc,
|
||||
html.z #mw {
|
||||
scrollbar-color: #b80 #282828;
|
||||
}
|
||||
html.dark #toc::-webkit-scrollbar-track {
|
||||
html.z #toc::-webkit-scrollbar-track {
|
||||
background: #282828;
|
||||
}
|
||||
html.dark #toc::-webkit-scrollbar {
|
||||
html.z #toc::-webkit-scrollbar {
|
||||
background: #282828;
|
||||
width: .8em;
|
||||
}
|
||||
html.dark #toc::-webkit-scrollbar-thumb {
|
||||
html.z #toc::-webkit-scrollbar-thumb {
|
||||
background: #b80;
|
||||
}
|
||||
}
|
||||
@@ -537,12 +338,15 @@ blink {
|
||||
mso-footer-margin: .6in;
|
||||
mso-paper-source: 0;
|
||||
}
|
||||
a {
|
||||
.mdo a {
|
||||
color: #079;
|
||||
text-decoration: none;
|
||||
border-bottom: .07em solid #4ac;
|
||||
padding: 0 .3em;
|
||||
}
|
||||
#repl {
|
||||
display: none;
|
||||
}
|
||||
#toc>ul {
|
||||
border-left: .1em solid #84c4dd;
|
||||
}
|
||||
@@ -567,18 +371,20 @@ blink {
|
||||
a[ctr]::before {
|
||||
content: attr(ctr) '. ';
|
||||
}
|
||||
h1 {
|
||||
.mdo h1 {
|
||||
margin: 2em 0;
|
||||
}
|
||||
h2 {
|
||||
.mdo h2 {
|
||||
margin: 2em 0 0 0;
|
||||
}
|
||||
h1, h2, h3 {
|
||||
.mdo h1,
|
||||
.mdo h2,
|
||||
.mdo h3 {
|
||||
page-break-inside: avoid;
|
||||
}
|
||||
h1::after,
|
||||
h2::after,
|
||||
h3::after {
|
||||
.mdo h1::after,
|
||||
.mdo h2::after,
|
||||
.mdo h3::after {
|
||||
content: 'orz';
|
||||
color: transparent;
|
||||
display: block;
|
||||
@@ -586,20 +392,20 @@ blink {
|
||||
padding: 4em 0 0 0;
|
||||
margin: 0 0 -5em 0;
|
||||
}
|
||||
p {
|
||||
.mdo p {
|
||||
page-break-inside: avoid;
|
||||
}
|
||||
table {
|
||||
.mdo table {
|
||||
page-break-inside: auto;
|
||||
}
|
||||
tr {
|
||||
.mdo tr {
|
||||
page-break-inside: avoid;
|
||||
page-break-after: auto;
|
||||
}
|
||||
thead {
|
||||
.mdo thead {
|
||||
display: table-header-group;
|
||||
}
|
||||
tfoot {
|
||||
.mdo tfoot {
|
||||
display: table-footer-group;
|
||||
}
|
||||
#mp a.vis::after {
|
||||
@@ -607,31 +413,32 @@ blink {
|
||||
border-bottom: 1px solid #bbb;
|
||||
color: #444;
|
||||
}
|
||||
blockquote {
|
||||
.mdo blockquote {
|
||||
border-color: #555;
|
||||
}
|
||||
code {
|
||||
.mdo code {
|
||||
border-color: #bbb;
|
||||
}
|
||||
pre, pre code {
|
||||
.mdo pre,
|
||||
.mdo pre code {
|
||||
border-color: #999;
|
||||
}
|
||||
pre code::before {
|
||||
.mdo pre code::before {
|
||||
color: #058;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark a {
|
||||
|
||||
html.z .mdo a {
|
||||
color: #000;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
html.z .mdo pre,
|
||||
html.z .mdo code {
|
||||
color: #240;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
html.z .mdo p>em,
|
||||
html.z .mdo li>em,
|
||||
html.z .mdo td>em {
|
||||
color: #940;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,20 +1,22 @@
|
||||
<!DOCTYPE html><html><head>
|
||||
<meta charset="utf-8">
|
||||
<title>📝🎉 {{ title }}</title> <!-- 📜 -->
|
||||
<title>📝 {{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||
<link href="/.cpr/md.css?_={{ ts }}" rel="stylesheet">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/md.css?_={{ ts }}">
|
||||
{%- if edit %}
|
||||
<link href="/.cpr/md2.css?_={{ ts }}" rel="stylesheet">
|
||||
<link rel="stylesheet" href="/.cpr/md2.css?_={{ ts }}">
|
||||
{%- endif %}
|
||||
</head>
|
||||
<body>
|
||||
<div id="mn">navbar</div>
|
||||
<div id="mn"></div>
|
||||
<div id="mh">
|
||||
<a id="lightswitch" href="#">go dark</a>
|
||||
<a id="navtoggle" href="#">hide nav</a>
|
||||
{%- if edit %}
|
||||
<a id="save" href="?edit" tt="Hotkey: ctrl-s">save</a>
|
||||
<a id="save" href="{{ arg_base }}edit" tt="Hotkey: ctrl-s">save</a>
|
||||
<a id="sbs" href="#" tt="editor and preview side by side">sbs</a>
|
||||
<a id="nsbs" href="#" tt="switch between editor and preview$NHotkey: ctrl-e">editor</a>
|
||||
<div id="toolsbox">
|
||||
@@ -25,10 +27,11 @@
|
||||
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
|
||||
<a id="help" href="#">help</a>
|
||||
</div>
|
||||
<span id="lno">L#</span>
|
||||
{%- else %}
|
||||
<a href="?edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
|
||||
<a href="?edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
||||
<a href="?raw">view raw</a>
|
||||
<a href="{{ arg_base }}edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
|
||||
<a href="{{ arg_base }}edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
||||
<a href="{{ arg_base }}raw">view raw</a>
|
||||
{%- endif %}
|
||||
</div>
|
||||
<div id="toc"></div>
|
||||
@@ -42,8 +45,9 @@
|
||||
if you're still reading this, check that javascript is allowed
|
||||
</div>
|
||||
</div>
|
||||
<div id="mp"></div>
|
||||
<div id="mp" class="mdo"></div>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
|
||||
{%- if edit %}
|
||||
<div id="helpbox">
|
||||
@@ -123,30 +127,32 @@ write markdown (most html is 🙆 too)
|
||||
|
||||
<script>
|
||||
|
||||
var last_modified = {{ lastmod }};
|
||||
var last_modified = {{ lastmod }},
|
||||
have_emp = {{ have_emp|tojson }},
|
||||
dfavico = "{{ favico }}";
|
||||
|
||||
var md_opt = {
|
||||
link_md_as_html: false,
|
||||
allow_plugins: {{ md_plug }},
|
||||
modpoll_freq: {{ md_chk_rate }}
|
||||
};
|
||||
|
||||
(function () {
|
||||
var btn = document.getElementById("lightswitch");
|
||||
var toggle = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
var dark = !document.documentElement.getAttribute("class");
|
||||
document.documentElement.setAttribute("class", dark ? "dark" : "");
|
||||
btn.innerHTML = "go " + (dark ? "light" : "dark");
|
||||
if (window.localStorage)
|
||||
localStorage.setItem('lightmode', dark ? 0 : 1);
|
||||
};
|
||||
btn.onclick = toggle;
|
||||
if (window.localStorage && localStorage.getItem('lightmode') != 1)
|
||||
toggle();
|
||||
var l = localStorage,
|
||||
drk = l.light != 1,
|
||||
btn = document.getElementById("lightswitch"),
|
||||
f = function (e) {
|
||||
if (e) { e.preventDefault(); drk = !drk; }
|
||||
document.documentElement.className = drk? "z":"y";
|
||||
btn.innerHTML = "go " + (drk ? "light":"dark");
|
||||
l.light = drk? 0:1;
|
||||
};
|
||||
|
||||
btn.onclick = f;
|
||||
f();
|
||||
})();
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/md.js?_={{ ts }}"></script>
|
||||
{%- if edit %}
|
||||
|
||||
@@ -20,27 +20,6 @@ var dbg = function () { };
|
||||
// dbg = console.log
|
||||
|
||||
|
||||
// plugins
|
||||
var md_plug = {};
|
||||
|
||||
|
||||
function hesc(txt) {
|
||||
return txt.replace(/&/g, "&").replace(/</g, "<").replace(/>/g, ">");
|
||||
}
|
||||
|
||||
|
||||
function cls(dom, name, add) {
|
||||
var re = new RegExp('(^| )' + name + '( |$)');
|
||||
var lst = (dom.getAttribute('class') + '').replace(re, "$1$2").replace(/ /, "");
|
||||
dom.setAttribute('class', lst + (add ? ' ' + name : ''));
|
||||
}
|
||||
|
||||
|
||||
function statify(obj) {
|
||||
return JSON.parse(JSON.stringify(obj));
|
||||
}
|
||||
|
||||
|
||||
// dodge browser issues
|
||||
(function () {
|
||||
var ua = navigator.userAgent;
|
||||
@@ -56,20 +35,34 @@ function statify(obj) {
|
||||
|
||||
// add navbar
|
||||
(function () {
|
||||
var n = document.location + '';
|
||||
n = n.substr(n.indexOf('//') + 2).split('?')[0].split('/');
|
||||
n[0] = 'top';
|
||||
var loc = [];
|
||||
var nav = [];
|
||||
for (var a = 0; a < n.length; a++) {
|
||||
if (a > 0)
|
||||
loc.push(n[a]);
|
||||
|
||||
var dec = hesc(uricom_dec(n[a])[0]);
|
||||
|
||||
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
||||
var parts = get_evpath().split('/'), link = '', o;
|
||||
for (var a = 0, aa = parts.length - 2; a <= aa; a++) {
|
||||
link += parts[a] + (a < aa ? '/' : '');
|
||||
o = mknod('a');
|
||||
o.setAttribute('href', link);
|
||||
o.textContent = uricom_dec(parts[a]) || 'top';
|
||||
dom_nav.appendChild(o);
|
||||
}
|
||||
dom_nav.innerHTML = nav.join('');
|
||||
})();
|
||||
|
||||
|
||||
// image load handler
|
||||
var img_load = (function () {
|
||||
var r = {};
|
||||
r.callbacks = [];
|
||||
|
||||
function fire() {
|
||||
for (var a = 0; a < r.callbacks.length; a++)
|
||||
r.callbacks[a]();
|
||||
}
|
||||
|
||||
var timeout = null;
|
||||
r.done = function () {
|
||||
clearTimeout(timeout);
|
||||
timeout = setTimeout(fire, 500);
|
||||
};
|
||||
|
||||
return r;
|
||||
})();
|
||||
|
||||
|
||||
@@ -88,13 +81,13 @@ function copydom(src, dst, lv) {
|
||||
|
||||
var rpl = [];
|
||||
for (var a = sc.length - 1; a >= 0; a--) {
|
||||
var st = sc[a].tagName,
|
||||
dt = dc[a].tagName;
|
||||
var st = sc[a].tagName || sc[a].nodeType,
|
||||
dt = dc[a].tagName || dc[a].nodeType;
|
||||
|
||||
if (st !== dt) {
|
||||
dbg("replace L%d (%d/%d) type %s/%s", lv, a, sc.length, st, dt);
|
||||
rpl.push(a);
|
||||
continue;
|
||||
dst.innerHTML = src.innerHTML;
|
||||
return;
|
||||
}
|
||||
|
||||
var sa = sc[a].attributes || [],
|
||||
@@ -143,8 +136,11 @@ function copydom(src, dst, lv) {
|
||||
// repl is reversed; build top-down
|
||||
var nbytes = 0;
|
||||
for (var a = rpl.length - 1; a >= 0; a--) {
|
||||
var html = sc[rpl[a]].outerHTML;
|
||||
dc[rpl[a]].outerHTML = html;
|
||||
var i = rpl[a],
|
||||
prop = sc[i].nodeType == 1 ? 'outerHTML' : 'nodeValue';
|
||||
|
||||
var html = sc[i][prop];
|
||||
dc[i][prop] = html;
|
||||
nbytes += html.length;
|
||||
}
|
||||
if (nbytes > 0)
|
||||
@@ -160,11 +156,8 @@ function copydom(src, dst, lv) {
|
||||
}
|
||||
|
||||
|
||||
function md_plug_err(ex, js) {
|
||||
var errbox = ebi('md_errbox');
|
||||
if (errbox)
|
||||
errbox.parentNode.removeChild(errbox);
|
||||
|
||||
md_plug_err = function (ex, js) {
|
||||
qsr('#md_errbox');
|
||||
if (!ex)
|
||||
return;
|
||||
|
||||
@@ -176,16 +169,15 @@ function md_plug_err(ex, js) {
|
||||
var lns = js.split('\n');
|
||||
if (ln < lns.length) {
|
||||
o = mknod('span');
|
||||
o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block';
|
||||
o.style.cssText = "color:#ac2;font-size:.9em;font-family:'scp',monospace,monospace;display:block";
|
||||
o.textContent = lns[ln - 1];
|
||||
}
|
||||
}
|
||||
errbox = mknod('div');
|
||||
errbox.setAttribute('id', 'md_errbox');
|
||||
var errbox = mknod('div', 'md_errbox');
|
||||
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
|
||||
errbox.textContent = msg;
|
||||
errbox.onclick = function () {
|
||||
alert('' + ex.stack);
|
||||
modal.alert('<pre>' + esc(ex.stack) + '</pre>');
|
||||
};
|
||||
if (o) {
|
||||
errbox.appendChild(o);
|
||||
@@ -200,50 +192,12 @@ function md_plug_err(ex, js) {
|
||||
}
|
||||
|
||||
|
||||
function load_plug(md_text, plug_type) {
|
||||
if (!md_opt.allow_plugins)
|
||||
return md_text;
|
||||
|
||||
var find = '\n```copyparty_' + plug_type + '\n';
|
||||
var ofs = md_text.indexOf(find);
|
||||
if (ofs === -1)
|
||||
return md_text;
|
||||
|
||||
var ofs2 = md_text.indexOf('\n```', ofs + 1);
|
||||
if (ofs2 == -1)
|
||||
return md_text;
|
||||
|
||||
var js = md_text.slice(ofs + find.length, ofs2 + 1);
|
||||
var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
|
||||
|
||||
var old_plug = md_plug[plug_type];
|
||||
if (!old_plug || old_plug[1] != js) {
|
||||
js = 'const x = { ' + js + ' }; x;';
|
||||
try {
|
||||
var x = eval(js);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug[plug_type] = null;
|
||||
md_plug_err(ex, js);
|
||||
return md;
|
||||
}
|
||||
if (x['ctor']) {
|
||||
x['ctor']();
|
||||
delete x['ctor'];
|
||||
}
|
||||
md_plug[plug_type] = [x, js];
|
||||
}
|
||||
|
||||
return md;
|
||||
}
|
||||
|
||||
|
||||
function convert_markdown(md_text, dest_dom) {
|
||||
md_text = md_text.replace(/\r/g, '');
|
||||
|
||||
md_plug_err(null);
|
||||
md_text = load_plug(md_text, 'pre');
|
||||
md_text = load_plug(md_text, 'post');
|
||||
md_text = load_md_plug(md_text, 'pre');
|
||||
md_text = load_md_plug(md_text, 'post');
|
||||
|
||||
var marked_opts = {
|
||||
//headerPrefix: 'h-',
|
||||
@@ -251,12 +205,12 @@ function convert_markdown(md_text, dest_dom) {
|
||||
gfm: true
|
||||
};
|
||||
|
||||
var ext = md_plug['pre'];
|
||||
var ext = md_plug.pre;
|
||||
if (ext)
|
||||
Object.assign(marked_opts, ext[0]);
|
||||
|
||||
try {
|
||||
var md_html = marked(md_text, marked_opts);
|
||||
var md_html = marked.parse(md_text, marked_opts);
|
||||
}
|
||||
catch (ex) {
|
||||
if (ext)
|
||||
@@ -264,7 +218,14 @@ function convert_markdown(md_text, dest_dom) {
|
||||
|
||||
throw ex;
|
||||
}
|
||||
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
||||
var md_dom = dest_dom;
|
||||
try {
|
||||
md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
||||
}
|
||||
catch (ex) {
|
||||
md_dom.innerHTML = md_html;
|
||||
window.copydom = noop;
|
||||
}
|
||||
|
||||
var nodes = md_dom.getElementsByTagName('a');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
@@ -274,7 +235,7 @@ function convert_markdown(md_text, dest_dom) {
|
||||
if (!txt)
|
||||
nodes[a].textContent = href;
|
||||
else if (href !== txt)
|
||||
nodes[a].setAttribute('class', 'vis');
|
||||
nodes[a].className = 'vis';
|
||||
}
|
||||
|
||||
// todo-lists (should probably be a marked extension)
|
||||
@@ -290,7 +251,7 @@ function convert_markdown(md_text, dest_dom) {
|
||||
var clas = done ? 'done' : 'pend';
|
||||
var char = done ? 'Y' : 'N';
|
||||
|
||||
dom_li.setAttribute('class', 'task-list-item');
|
||||
dom_li.className = 'task-list-item';
|
||||
dom_li.style.listStyleType = 'none';
|
||||
var html = dom_li.innerHTML;
|
||||
dom_li.innerHTML =
|
||||
@@ -345,7 +306,7 @@ function convert_markdown(md_text, dest_dom) {
|
||||
el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>';
|
||||
}
|
||||
|
||||
ext = md_plug['post'];
|
||||
ext = md_plug.post;
|
||||
if (ext && ext[0].render)
|
||||
try {
|
||||
ext[0].render(md_dom);
|
||||
@@ -356,6 +317,10 @@ function convert_markdown(md_text, dest_dom) {
|
||||
|
||||
copydom(md_dom, dest_dom, 0);
|
||||
|
||||
var imgs = dest_dom.getElementsByTagName('img');
|
||||
for (var a = 0, aa = imgs.length; a < aa; a++)
|
||||
imgs[a].onload = img_load.done;
|
||||
|
||||
if (ext && ext[0].render2)
|
||||
try {
|
||||
ext[0].render2(dest_dom);
|
||||
@@ -367,8 +332,7 @@ function convert_markdown(md_text, dest_dom) {
|
||||
|
||||
|
||||
function init_toc() {
|
||||
var loader = ebi('ml');
|
||||
loader.parentNode.removeChild(loader);
|
||||
qsr('#ml');
|
||||
|
||||
var anchors = []; // list of toc entries, complex objects
|
||||
var anchor = null; // current toc node
|
||||
@@ -431,7 +395,7 @@ function init_toc() {
|
||||
|
||||
// collect vertical position of all toc items (headers in document)
|
||||
function freshen_offsets() {
|
||||
var top = window.pageYOffset || document.documentElement.scrollTop;
|
||||
var top = yscroll();
|
||||
for (var a = anchors.length - 1; a >= 0; a--) {
|
||||
var y = top + anchors[a].elm.getBoundingClientRect().top;
|
||||
y = Math.round(y * 10.0) / 10;
|
||||
@@ -447,7 +411,7 @@ function init_toc() {
|
||||
if (anchors.length == 0)
|
||||
return;
|
||||
|
||||
var ptop = window.pageYOffset || document.documentElement.scrollTop;
|
||||
var ptop = yscroll();
|
||||
var hit = anchors.length - 1;
|
||||
for (var a = 0; a < anchors.length; a++) {
|
||||
if (anchors[a].y >= ptop - 8) { //???
|
||||
@@ -461,11 +425,11 @@ function init_toc() {
|
||||
for (var a = 0; a < anchors.length; a++) {
|
||||
if (anchors[a].active) {
|
||||
anchors[a].active = false;
|
||||
links[a].setAttribute('class', '');
|
||||
links[a].className = '';
|
||||
}
|
||||
}
|
||||
anchors[hit].active = true;
|
||||
links[hit].setAttribute('class', 'act');
|
||||
links[hit].className = 'act';
|
||||
}
|
||||
|
||||
var pane_height = parseInt(getComputedStyle(dom_toc).height);
|
||||
@@ -490,13 +454,16 @@ function init_toc() {
|
||||
// "main" :p
|
||||
convert_markdown(dom_src.value, dom_pre);
|
||||
var toc = init_toc();
|
||||
img_load.callbacks = [toc.refresh];
|
||||
|
||||
|
||||
// scroll handler
|
||||
var redraw = (function () {
|
||||
var sbs = false;
|
||||
var sbs = true;
|
||||
function onresize() {
|
||||
sbs = window.matchMedia('(min-width: 64em)').matches;
|
||||
if (window.matchMedia)
|
||||
sbs = window.matchMedia('(min-width: 64em)').matches;
|
||||
|
||||
var y = (dom_hbar.offsetTop + dom_hbar.offsetHeight) + 'px';
|
||||
if (sbs) {
|
||||
dom_toc.style.top = y;
|
||||
|
||||
@@ -36,6 +36,11 @@
|
||||
width: 55em;
|
||||
width: min(55em, calc(100% - 2em));
|
||||
}
|
||||
#mtw.single.editor,
|
||||
#mw.single.editor {
|
||||
width: calc(100% - 1em);
|
||||
left: .5em;
|
||||
}
|
||||
|
||||
|
||||
#mp {
|
||||
@@ -50,7 +55,7 @@
|
||||
outline: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
font-family: 'consolas', monospace, monospace;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
@@ -61,7 +66,7 @@
|
||||
position: relative;
|
||||
scrollbar-color: #eb0 #f7f7f7;
|
||||
}
|
||||
html.dark #mt {
|
||||
html.z #mt {
|
||||
color: #eee;
|
||||
background: #222;
|
||||
border: 1px solid #777;
|
||||
@@ -77,20 +82,17 @@ html.dark #mt {
|
||||
background: #f97;
|
||||
border-radius: .15em;
|
||||
}
|
||||
html.dark #save.force-save {
|
||||
html.z #save.force-save {
|
||||
color: #fca;
|
||||
background: #720;
|
||||
}
|
||||
#save.disabled {
|
||||
opacity: .4;
|
||||
}
|
||||
#helpbox,
|
||||
#toast {
|
||||
#helpbox {
|
||||
background: #f7f7f7;
|
||||
border-radius: .4em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#helpbox {
|
||||
display: none;
|
||||
position: fixed;
|
||||
padding: 2em;
|
||||
@@ -105,21 +107,9 @@ html.dark #save.force-save {
|
||||
#helpclose {
|
||||
display: block;
|
||||
}
|
||||
html.dark #helpbox {
|
||||
html.z #helpbox {
|
||||
box-shadow: 0 .5em 2em #444;
|
||||
}
|
||||
html.dark #helpbox,
|
||||
html.dark #toast {
|
||||
background: #222;
|
||||
border: 1px solid #079;
|
||||
border-width: 1px 0;
|
||||
}
|
||||
#toast {
|
||||
font-weight: bold;
|
||||
text-align: center;
|
||||
padding: .6em 0;
|
||||
position: fixed;
|
||||
top: 30%;
|
||||
transition: opacity 0.2s ease-in-out;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
@@ -16,8 +16,7 @@ var dom_sbs = ebi('sbs');
|
||||
var dom_nsbs = ebi('nsbs');
|
||||
var dom_tbox = ebi('toolsbox');
|
||||
var dom_ref = (function () {
|
||||
var d = mknod('div');
|
||||
d.setAttribute('id', 'mtr');
|
||||
var d = mknod('div', 'mtr');
|
||||
dom_swrap.appendChild(d);
|
||||
d = ebi('mtr');
|
||||
// hide behind the textarea (offsetTop is not computed if display:none)
|
||||
@@ -98,7 +97,7 @@ var draw_md = (function () {
|
||||
var src = dom_src.value;
|
||||
convert_markdown(src, dom_pre);
|
||||
|
||||
var lines = hesc(src).replace(/\r/g, "").split('\n');
|
||||
var lines = esc(src).replace(/\r/g, "").split('\n');
|
||||
nlines = lines.length;
|
||||
var html = [];
|
||||
for (var a = 0; a < lines.length; a++)
|
||||
@@ -108,7 +107,7 @@ var draw_md = (function () {
|
||||
map_src = genmap(dom_ref, map_src);
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
|
||||
cls(ebi('save'), 'disabled', src == server_md);
|
||||
clmod(ebi('save'), 'disabled', src == server_md);
|
||||
|
||||
var t1 = Date.now();
|
||||
delay = t1 - t0 > 100 ? 25 : 1;
|
||||
@@ -127,6 +126,12 @@ var draw_md = (function () {
|
||||
})();
|
||||
|
||||
|
||||
// discard TOC callback, just regen editor scroll map
|
||||
img_load.callbacks = [function () {
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
}];
|
||||
|
||||
|
||||
// resize handler
|
||||
redraw = (function () {
|
||||
function onresize() {
|
||||
@@ -136,19 +141,18 @@ redraw = (function () {
|
||||
dom_ref.style.width = getComputedStyle(dom_src).offsetWidth + 'px';
|
||||
map_src = genmap(dom_ref, map_src);
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
dbg(document.body.clientWidth + 'x' + document.body.clientHeight);
|
||||
}
|
||||
function setsbs() {
|
||||
dom_wrap.setAttribute('class', '');
|
||||
dom_swrap.setAttribute('class', '');
|
||||
dom_wrap.className = '';
|
||||
dom_swrap.className = '';
|
||||
onresize();
|
||||
}
|
||||
function modetoggle() {
|
||||
var mode = dom_nsbs.innerHTML;
|
||||
dom_nsbs.innerHTML = mode == 'editor' ? 'preview' : 'editor';
|
||||
mode += ' single';
|
||||
dom_wrap.setAttribute('class', mode);
|
||||
dom_swrap.setAttribute('class', mode);
|
||||
dom_wrap.className = mode;
|
||||
dom_swrap.className = mode;
|
||||
onresize();
|
||||
}
|
||||
|
||||
@@ -225,51 +229,44 @@ redraw = (function () {
|
||||
|
||||
// modification checker
|
||||
function Modpoll() {
|
||||
this.skip_one = true;
|
||||
this.disabled = false;
|
||||
|
||||
this.periodic = function () {
|
||||
var that = this;
|
||||
setTimeout(function () {
|
||||
that.periodic();
|
||||
}, 1000 * md_opt.modpoll_freq);
|
||||
var r = {
|
||||
skip_one: true,
|
||||
disabled: false
|
||||
};
|
||||
|
||||
r.periodic = function () {
|
||||
var skip = null;
|
||||
|
||||
if (ebi('toast'))
|
||||
if (toast.visible)
|
||||
skip = 'toast';
|
||||
|
||||
else if (this.skip_one)
|
||||
else if (r.skip_one)
|
||||
skip = 'saved';
|
||||
|
||||
else if (this.disabled)
|
||||
else if (r.disabled)
|
||||
skip = 'disabled';
|
||||
|
||||
if (skip) {
|
||||
console.log('modpoll skip, ' + skip);
|
||||
this.skip_one = false;
|
||||
r.skip_one = false;
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('modpoll...');
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.modpoll = this;
|
||||
var xhr = new XHR();
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = this.cb;
|
||||
xhr.onload = xhr.onerror = r.cb;
|
||||
xhr.send();
|
||||
}
|
||||
};
|
||||
|
||||
this.cb = function () {
|
||||
if (this.modpoll.disabled || this.modpoll.skip_one) {
|
||||
r.cb = function () {
|
||||
if (r.disabled || r.skip_one) {
|
||||
console.log('modpoll abort');
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200) {
|
||||
console.log('modpoll err ' + this.status + ": " + this.responseText);
|
||||
return;
|
||||
@@ -283,33 +280,32 @@ function Modpoll() {
|
||||
|
||||
if (server_ref != server_now) {
|
||||
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
|
||||
this.modpoll.disabled = true;
|
||||
r.disabled = true;
|
||||
var msg = [
|
||||
"The document has changed on the server.<br />" +
|
||||
"The document has changed on the server.",
|
||||
"The changes will NOT be loaded into your editor automatically.",
|
||||
|
||||
"Press F5 or CTRL-R to refresh the page,<br />" +
|
||||
"",
|
||||
"Press F5 or CTRL-R to refresh the page,",
|
||||
"replacing your document with the server copy.",
|
||||
|
||||
"You can click this message to ignore and contnue."
|
||||
"",
|
||||
"You can close this message to ignore and contnue."
|
||||
];
|
||||
return toast(false, "box-shadow:0 1em 2em rgba(64,64,64,0.8);font-weight:normal",
|
||||
36, "<p>" + msg.join('</p>\n<p>') + '</p>');
|
||||
return toast.warn(0, msg.join('\n'));
|
||||
}
|
||||
|
||||
console.log('modpoll eq');
|
||||
}
|
||||
};
|
||||
|
||||
if (md_opt.modpoll_freq > 0)
|
||||
this.periodic();
|
||||
setInterval(r.periodic, 1000 * md_opt.modpoll_freq);
|
||||
|
||||
return this;
|
||||
return r;
|
||||
}
|
||||
var modpoll = new Modpoll();
|
||||
|
||||
|
||||
window.onbeforeunload = function (e) {
|
||||
if ((ebi("save").getAttribute('class') + '').indexOf('disabled') >= 0)
|
||||
if ((ebi("save").className + '').indexOf('disabled') >= 0)
|
||||
return; //nice (todo)
|
||||
|
||||
e.preventDefault(); //ff
|
||||
@@ -321,59 +317,55 @@ window.onbeforeunload = function (e) {
|
||||
function save(e) {
|
||||
if (e) e.preventDefault();
|
||||
var save_btn = ebi("save"),
|
||||
save_cls = save_btn.getAttribute('class') + '';
|
||||
save_cls = save_btn.className + '';
|
||||
|
||||
if (save_cls.indexOf('disabled') >= 0) {
|
||||
toast(true, ";font-size:2em;color:#c90", 9, "no changes");
|
||||
return;
|
||||
}
|
||||
if (save_cls.indexOf('disabled') >= 0)
|
||||
return toast.inf(2, "no changes");
|
||||
|
||||
var force = (save_cls.indexOf('force-save') >= 0);
|
||||
if (force && !confirm('confirm that you wish to lose the changes made on the server since you opened this document')) {
|
||||
alert('ok, aborted');
|
||||
return;
|
||||
function save2() {
|
||||
var txt = dom_src.value,
|
||||
fd = new FormData();
|
||||
|
||||
fd.append("act", "tput");
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XHR();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onload = xhr.onerror = save_cb;
|
||||
xhr.btn = save_btn;
|
||||
xhr.txt = txt;
|
||||
|
||||
modpoll.skip_one = true; // skip one iteration while we save
|
||||
xhr.send(fd);
|
||||
}
|
||||
|
||||
var txt = dom_src.value;
|
||||
|
||||
var fd = new FormData();
|
||||
fd.append("act", "tput");
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_cb;
|
||||
xhr.btn = save_btn;
|
||||
xhr.txt = txt;
|
||||
|
||||
modpoll.skip_one = true; // skip one iteration while we save
|
||||
xhr.send(fd);
|
||||
if (!force)
|
||||
save2();
|
||||
else
|
||||
modal.confirm('confirm that you wish to lose the changes made on the server since you opened this document', save2, function () {
|
||||
toast.inf(3, 'aborted');
|
||||
});
|
||||
}
|
||||
|
||||
function save_cb() {
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200) {
|
||||
alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return;
|
||||
}
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
|
||||
var r;
|
||||
try {
|
||||
r = JSON.parse(this.responseText);
|
||||
}
|
||||
catch (ex) {
|
||||
alert('Failed to parse reply from server:\n\n' + this.responseText);
|
||||
return;
|
||||
return toast.err(0, 'Failed to parse reply from server:\n\n' + this.responseText);
|
||||
}
|
||||
|
||||
if (!r.ok) {
|
||||
if (!this.btn.classList.contains('force-save')) {
|
||||
this.btn.classList.add('force-save');
|
||||
if (!clgot(this.btn, 'force-save')) {
|
||||
clmod(this.btn, 'force-save', 1);
|
||||
var msg = [
|
||||
'This file has been modified since you started editing it!\n',
|
||||
'if you really want to overwrite, press save again.\n',
|
||||
@@ -383,15 +375,13 @@ function save_cb() {
|
||||
r.lastmod + ' lastmod on the server now,',
|
||||
r.now + ' server time now,\n',
|
||||
];
|
||||
alert(msg.join('\n'));
|
||||
return toast.err(0, msg.join('\n'));
|
||||
}
|
||||
else {
|
||||
alert('Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
|
||||
}
|
||||
return;
|
||||
else
|
||||
return toast.err(0, 'Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
|
||||
}
|
||||
|
||||
this.btn.classList.remove('force-save');
|
||||
clmod(this.btn, 'force-save');
|
||||
//alert('save OK -- wrote ' + r.size + ' bytes.\n\nsha512: ' + r.sha512);
|
||||
|
||||
run_savechk(r.lastmod, this.txt, this.btn, 0);
|
||||
@@ -400,10 +390,10 @@ function save_cb() {
|
||||
function run_savechk(lastmod, txt, btn, ntry) {
|
||||
// download the saved doc from the server and compare
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
|
||||
var xhr = new XMLHttpRequest();
|
||||
var xhr = new XHR();
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = savechk_cb;
|
||||
xhr.onload = xhr.onerror = savechk_cb;
|
||||
xhr.lastmod = lastmod;
|
||||
xhr.txt = txt;
|
||||
xhr.btn = btn;
|
||||
@@ -412,13 +402,8 @@ function run_savechk(lastmod, txt, btn, ntry) {
|
||||
}
|
||||
|
||||
function savechk_cb() {
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200) {
|
||||
alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return;
|
||||
}
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
|
||||
var doc1 = this.txt.replace(/\r\n/g, "\n");
|
||||
var doc2 = this.responseText.replace(/\r\n/g, "\n");
|
||||
@@ -431,58 +416,22 @@ function savechk_cb() {
|
||||
}, 100);
|
||||
return;
|
||||
}
|
||||
alert(
|
||||
modal.alert(
|
||||
'Error! The document on the server does not appear to have saved correctly (your editor contents and the server copy is not identical). Place the document on your clipboard for now and check the server logs for hints\n\n' +
|
||||
'Length: yours=' + doc1.length + ', server=' + doc2.length
|
||||
);
|
||||
alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
|
||||
alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
|
||||
modal.alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
|
||||
modal.alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
|
||||
return;
|
||||
}
|
||||
|
||||
last_modified = this.lastmod;
|
||||
server_md = this.txt;
|
||||
draw_md();
|
||||
toast(true, ";font-size:6em;font-family:serif;color:#9b4", 4,
|
||||
'OK✔️<span style="font-size:.2em;color:#999;position:absolute">' + this.ntry + '</span>');
|
||||
|
||||
toast.ok(2, 'save OK' + (this.ntry ? '\nattempt ' + this.ntry : ''));
|
||||
modpoll.disabled = false;
|
||||
}
|
||||
|
||||
function toast(autoclose, style, width, msg) {
|
||||
var ok = ebi("toast");
|
||||
if (ok)
|
||||
ok.parentNode.removeChild(ok);
|
||||
|
||||
style = "width:" + width + "em;left:calc(50% - " + (width / 2) + "em);" + style;
|
||||
ok = mknod('div');
|
||||
ok.setAttribute('id', 'toast');
|
||||
ok.setAttribute('style', style);
|
||||
ok.innerHTML = msg;
|
||||
var parent = ebi('m');
|
||||
document.documentElement.appendChild(ok);
|
||||
|
||||
var hide = function (delay) {
|
||||
delay = delay || 0;
|
||||
|
||||
setTimeout(function () {
|
||||
ok.style.opacity = 0;
|
||||
}, delay);
|
||||
|
||||
setTimeout(function () {
|
||||
if (ok.parentNode)
|
||||
ok.parentNode.removeChild(ok);
|
||||
}, delay + 250);
|
||||
}
|
||||
|
||||
ok.onclick = function () {
|
||||
hide(0);
|
||||
};
|
||||
|
||||
if (autoclose)
|
||||
hide(500);
|
||||
}
|
||||
|
||||
|
||||
// firefox bug: initial selection offset isn't cleared properly through js
|
||||
var ff_clearsel = (function () {
|
||||
@@ -559,6 +508,20 @@ function setsel(s) {
|
||||
}
|
||||
|
||||
|
||||
// cut/copy current line
|
||||
function md_cut(cut) {
|
||||
var s = linebounds();
|
||||
if (s.car != s.cdr)
|
||||
return;
|
||||
|
||||
dom_src.setSelectionRange(s.n1, s.n2 + 1, 'forward');
|
||||
setTimeout(function () {
|
||||
var i = cut ? s.n1 : s.car;
|
||||
dom_src.setSelectionRange(i, i, 'forward');
|
||||
}, 1);
|
||||
}
|
||||
|
||||
|
||||
// indent/dedent
|
||||
function md_indent(dedent) {
|
||||
var s = getsel(),
|
||||
@@ -719,7 +682,7 @@ function reLastIndexOf(txt, ptn, end) {
|
||||
// table formatter
|
||||
function fmt_table(e) {
|
||||
if (e) e.preventDefault();
|
||||
//dom_tbox.setAttribute('class', '');
|
||||
//dom_tbox.className = '';
|
||||
|
||||
var txt = dom_src.value,
|
||||
ofs = dom_src.selectionStart,
|
||||
@@ -761,7 +724,7 @@ function fmt_table(e) {
|
||||
|
||||
var ind2 = tab[a].match(re_ind)[0];
|
||||
if (ind != ind2 && a != 1) // the table can be a list entry or something, ignore [0]
|
||||
return alert(err + 'indentation mismatch on row#2 and ' + row_name + ',\n' + tab[a]);
|
||||
return toast.err(7, err + 'indentation mismatch on row#2 and ' + row_name + ',\n' + tab[a]);
|
||||
|
||||
var t = tab[a].slice(ind.length);
|
||||
t = t.replace(re_lpipe, "");
|
||||
@@ -771,7 +734,7 @@ function fmt_table(e) {
|
||||
if (a == 0)
|
||||
ncols = tab[a].length;
|
||||
else if (ncols < tab[a].length)
|
||||
return alert(err + 'num.columns(' + row_name + ') exceeding row#2; ' + ncols + ' < ' + tab[a].length);
|
||||
return toast.err(7, err + 'num.columns(' + row_name + ') exceeding row#2; ' + ncols + ' < ' + tab[a].length);
|
||||
|
||||
// if row has less columns than row2, fill them in
|
||||
while (tab[a].length < ncols)
|
||||
@@ -788,7 +751,7 @@ function fmt_table(e) {
|
||||
for (var col = 0; col < tab[1].length; col++) {
|
||||
var m = tab[1][col].match(re_align);
|
||||
if (!m)
|
||||
return alert(err + 'invalid column specification, row#2, col ' + (col + 1) + ', [' + tab[1][col] + ']');
|
||||
return toast.err(7, err + 'invalid column specification, row#2, col ' + (col + 1) + ', [' + tab[1][col] + ']');
|
||||
|
||||
if (m[2]) {
|
||||
if (m[1])
|
||||
@@ -870,16 +833,15 @@ function fmt_table(e) {
|
||||
// show unicode
|
||||
function mark_uni(e) {
|
||||
if (e) e.preventDefault();
|
||||
dom_tbox.setAttribute('class', '');
|
||||
dom_tbox.className = '';
|
||||
|
||||
var txt = dom_src.value,
|
||||
ptn = new RegExp('([^' + js_uni_whitelist + ']+)', 'g'),
|
||||
mod = txt.replace(/\r/g, "").replace(ptn, "\u2588\u2770$1\u2771");
|
||||
|
||||
if (txt == mod) {
|
||||
alert('no results; no modifications were made');
|
||||
return;
|
||||
}
|
||||
if (txt == mod)
|
||||
return toast.inf(5, 'no results; no modifications were made');
|
||||
|
||||
dom_src.value = mod;
|
||||
}
|
||||
|
||||
@@ -893,10 +855,9 @@ function iter_uni(e) {
|
||||
re = new RegExp('([^' + js_uni_whitelist + ']+)'),
|
||||
m = re.exec(txt.slice(ofs));
|
||||
|
||||
if (!m) {
|
||||
alert('no more hits from cursor onwards');
|
||||
return;
|
||||
}
|
||||
if (!m)
|
||||
return toast.inf(5, 'no more hits from cursor onwards');
|
||||
|
||||
ofs += m.index;
|
||||
|
||||
dom_src.setSelectionRange(ofs, ofs + m[0].length, "forward");
|
||||
@@ -911,15 +872,47 @@ function iter_uni(e) {
|
||||
function cfg_uni(e) {
|
||||
if (e) e.preventDefault();
|
||||
|
||||
var reply = prompt("unicode whitelist", esc_uni_whitelist);
|
||||
if (reply === null)
|
||||
return;
|
||||
|
||||
esc_uni_whitelist = reply;
|
||||
js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
|
||||
modal.prompt("unicode whitelist", esc_uni_whitelist, function (reply) {
|
||||
esc_uni_whitelist = reply;
|
||||
js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
|
||||
}, null);
|
||||
}
|
||||
|
||||
|
||||
var set_lno = (function () {
|
||||
var t = null,
|
||||
pi = null,
|
||||
pv = null,
|
||||
lno = ebi('lno');
|
||||
|
||||
function poke() {
|
||||
clearTimeout(t);
|
||||
t = setTimeout(fire, 20);
|
||||
}
|
||||
|
||||
function fire() {
|
||||
try {
|
||||
clearTimeout(t);
|
||||
|
||||
var i = dom_src.selectionStart;
|
||||
if (i === pi)
|
||||
return;
|
||||
|
||||
var v = 'L' + dom_src.value.slice(0, i).split('\n').length;
|
||||
if (v != pv)
|
||||
lno.innerHTML = v;
|
||||
|
||||
pi = i;
|
||||
pv = v;
|
||||
}
|
||||
catch (e) { }
|
||||
}
|
||||
|
||||
timer.add(fire);
|
||||
return poke;
|
||||
})();
|
||||
|
||||
|
||||
// hotkeys / toolbar
|
||||
(function () {
|
||||
function keydown(ev) {
|
||||
@@ -938,6 +931,8 @@ function cfg_uni(e) {
|
||||
if (document.activeElement != dom_src)
|
||||
return true;
|
||||
|
||||
set_lno();
|
||||
|
||||
if (ctrl(ev)) {
|
||||
if (ev.code == "KeyH" || kc == 72) {
|
||||
md_header(ev.shiftKey);
|
||||
@@ -973,6 +968,10 @@ function cfg_uni(e) {
|
||||
md_p_jump(dn);
|
||||
return false;
|
||||
}
|
||||
if (ev.code == "KeyX" || ev.code == "KeyC") {
|
||||
md_cut(ev.code == "KeyX");
|
||||
return true; //sic
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (ev.code == "Tab" || kc == 9) {
|
||||
@@ -998,14 +997,14 @@ function cfg_uni(e) {
|
||||
|
||||
ebi('tools').onclick = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
var is_open = dom_tbox.getAttribute('class') != 'open';
|
||||
dom_tbox.setAttribute('class', is_open ? 'open' : '');
|
||||
var is_open = dom_tbox.className != 'open';
|
||||
dom_tbox.className = is_open ? 'open' : '';
|
||||
};
|
||||
|
||||
|
||||
ebi('help').onclick = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
dom_tbox.setAttribute('class', '');
|
||||
dom_tbox.className = '';
|
||||
|
||||
var dom = ebi('helpbox');
|
||||
var dtxt = dom.getElementsByTagName('textarea');
|
||||
@@ -1132,9 +1131,9 @@ action_stack = (function () {
|
||||
ref = newtxt;
|
||||
dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length);
|
||||
if (hist.un.length > 0)
|
||||
dbg(statify(hist.un.slice(-1)[0]));
|
||||
dbg(jcp(hist.un.slice(-1)[0]));
|
||||
if (hist.re.length > 0)
|
||||
dbg(statify(hist.re.slice(-1)[0]));
|
||||
dbg(jcp(hist.re.slice(-1)[0]));
|
||||
}
|
||||
|
||||
return {
|
||||
|
||||
@@ -7,6 +7,8 @@ html .editor-toolbar>button.active { border-color: rgba(0,0,0,0.4); background:
|
||||
html .editor-toolbar>i.separator { border-left: 1px solid #ccc; }
|
||||
html .editor-toolbar.disabled-for-preview>button:not(.no-disable) { opacity: .35 }
|
||||
|
||||
|
||||
|
||||
html {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
@@ -18,6 +20,22 @@ html, body {
|
||||
background: #f7f7f7;
|
||||
color: #333;
|
||||
}
|
||||
#toast {
|
||||
bottom: auto;
|
||||
top: 1.4em;
|
||||
}
|
||||
#repl {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
right: .5em;
|
||||
border: none;
|
||||
color: inherit;
|
||||
background: none;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
|
||||
|
||||
#mn {
|
||||
font-weight: normal;
|
||||
margin: 1.3em 0 .7em 1em;
|
||||
@@ -59,253 +77,79 @@ html .editor-toolbar>button.disabled {
|
||||
html .editor-toolbar>button.save.force-save {
|
||||
background: #f97;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* copied from md.css for now */
|
||||
.mdo pre,
|
||||
.mdo code,
|
||||
.mdo a {
|
||||
color: #480;
|
||||
background: #f7f7f7;
|
||||
border: .07em solid #ddd;
|
||||
border-radius: .2em;
|
||||
padding: .1em .3em;
|
||||
margin: 0 .1em;
|
||||
}
|
||||
.mdo code {
|
||||
font-size: .96em;
|
||||
}
|
||||
.mdo pre,
|
||||
.mdo code {
|
||||
font-family: monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
.mdo pre code {
|
||||
display: block;
|
||||
margin: 0 -.3em;
|
||||
padding: .4em .5em;
|
||||
line-height: 1.1em;
|
||||
}
|
||||
.mdo a {
|
||||
color: #fff;
|
||||
background: #39b;
|
||||
text-decoration: none;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
border-bottom: .07em solid #079;
|
||||
}
|
||||
.mdo h2 {
|
||||
color: #fff;
|
||||
background: #555;
|
||||
margin-top: 2em;
|
||||
border-bottom: .22em solid #999;
|
||||
border-top: none;
|
||||
}
|
||||
.mdo h1 {
|
||||
color: #fff;
|
||||
background: #444;
|
||||
font-weight: normal;
|
||||
border-top: .4em solid #fb0;
|
||||
border-bottom: .4em solid #777;
|
||||
border-radius: 0 1em 0 1em;
|
||||
margin: 3em 0 1em 0;
|
||||
padding: .5em 0;
|
||||
}
|
||||
h1, h2 {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
h1 {
|
||||
font-size: 1.7em;
|
||||
text-align: center;
|
||||
border: 1em solid #777;
|
||||
border-width: .05em 0;
|
||||
margin: 3em 0;
|
||||
}
|
||||
h2 {
|
||||
font-size: 1.5em;
|
||||
font-weight: normal;
|
||||
background: #f7f7f7;
|
||||
border-top: .07em solid #fff;
|
||||
border-bottom: .07em solid #bbb;
|
||||
border-radius: .5em .5em 0 0;
|
||||
padding-left: .4em;
|
||||
margin-top: 3em;
|
||||
}
|
||||
.mdo ul,
|
||||
.mdo ol {
|
||||
border-left: .3em solid #ddd;
|
||||
}
|
||||
.mdo>ul,
|
||||
.mdo>ol {
|
||||
border-color: #bbb;
|
||||
}
|
||||
.mdo ul>li {
|
||||
list-style-type: disc;
|
||||
}
|
||||
.mdo ul>li,
|
||||
.mdo ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
}
|
||||
blockquote {
|
||||
font-family: serif;
|
||||
background: #f7f7f7;
|
||||
border: .07em dashed #ccc;
|
||||
padding: 0 2em;
|
||||
margin: 1em 0;
|
||||
}
|
||||
small {
|
||||
opacity: .8;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
td {
|
||||
padding: .2em .5em;
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
th {
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* mde support */
|
||||
.mdo {
|
||||
padding: 1em;
|
||||
background: #f7f7f7;
|
||||
}
|
||||
html.dark .mdo {
|
||||
background: #1c1c1c;
|
||||
}
|
||||
.CodeMirror {
|
||||
background: #f7f7f7;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* darkmode */
|
||||
html.dark .mdo,
|
||||
html.dark .CodeMirror {
|
||||
html.z .mdo,
|
||||
html.z .CodeMirror {
|
||||
border-color: #222;
|
||||
}
|
||||
html.dark,
|
||||
html.dark body,
|
||||
html.dark .CodeMirror {
|
||||
html.z,
|
||||
html.z body,
|
||||
html.z .CodeMirror {
|
||||
background: #222;
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark .CodeMirror-cursor {
|
||||
html.z .CodeMirror-cursor {
|
||||
border-color: #fff;
|
||||
}
|
||||
html.dark .CodeMirror-selected {
|
||||
html.z .CodeMirror-selected {
|
||||
box-shadow: 0 0 1px #0cf inset;
|
||||
}
|
||||
html.dark .CodeMirror-selected,
|
||||
html.dark .CodeMirror-selectedtext {
|
||||
html.z .CodeMirror-selected,
|
||||
html.z .CodeMirror-selectedtext {
|
||||
border-radius: .1em;
|
||||
background: #246;
|
||||
color: #fff;
|
||||
}
|
||||
html.dark .mdo a {
|
||||
background: #057;
|
||||
}
|
||||
html.dark .mdo h1 a, html.dark .mdo h4 a,
|
||||
html.dark .mdo h2 a, html.dark .mdo h5 a,
|
||||
html.dark .mdo h3 a, html.dark .mdo h6 a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
color: #8c0;
|
||||
background: #1a1a1a;
|
||||
border: .07em solid #333;
|
||||
}
|
||||
html.dark .mdo ul,
|
||||
html.dark .mdo ol {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark .mdo>ul,
|
||||
html.dark .mdo>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
html.dark h1 {
|
||||
background: #383838;
|
||||
border-top: .4em solid #b80;
|
||||
border-bottom: .4em solid #4c4c4c;
|
||||
}
|
||||
html.dark h2 {
|
||||
background: #444;
|
||||
border-bottom: .22em solid #555;
|
||||
}
|
||||
html.dark td,
|
||||
html.dark th {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark blockquote {
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark #mn a {
|
||||
html.z #mn a {
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark #mn a:not(:last-child):after {
|
||||
html.z #mn a:not(:last-child):after {
|
||||
border-color: rgba(255,255,255,0.3);
|
||||
}
|
||||
html.dark .editor-toolbar {
|
||||
html.z .editor-toolbar {
|
||||
border-color: #2c2c2c;
|
||||
background: #1c1c1c;
|
||||
}
|
||||
html.dark .editor-toolbar>i.separator {
|
||||
html.z .editor-toolbar>i.separator {
|
||||
border-left: 1px solid #444;
|
||||
border-right: 1px solid #111;
|
||||
}
|
||||
html.dark .editor-toolbar>button {
|
||||
html.z .editor-toolbar>button {
|
||||
margin-left: -1px; border: 1px solid rgba(255,255,255,0.1);
|
||||
color: #aaa;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark .editor-toolbar>button:hover {
|
||||
html.z .editor-toolbar>button:hover {
|
||||
color: #333;
|
||||
}
|
||||
html.dark .editor-toolbar>button.active {
|
||||
html.z .editor-toolbar>button.active {
|
||||
color: #333;
|
||||
border-color: #ec1;
|
||||
background: #c90;
|
||||
}
|
||||
html.dark .editor-toolbar::after,
|
||||
html.dark .editor-toolbar::before {
|
||||
html.z .editor-toolbar::after,
|
||||
html.z .editor-toolbar::before {
|
||||
background: none;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
/* ui.css overrides */
|
||||
.mdo {
|
||||
padding: 1em;
|
||||
background: #f7f7f7;
|
||||
}
|
||||
html.z .mdo {
|
||||
background: #1c1c1c;
|
||||
}
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
<!DOCTYPE html><html><head>
|
||||
<meta charset="utf-8">
|
||||
<title>📝🎉 {{ title }}</title>
|
||||
<title>📝 {{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||
<link href="/.cpr/mde.css?_={{ ts }}" rel="stylesheet">
|
||||
<link href="/.cpr/deps/mini-fa.css?_={{ ts }}" rel="stylesheet">
|
||||
<link href="/.cpr/deps/easymde.css?_={{ ts }}" rel="stylesheet">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/mde.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/deps/mini-fa.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/deps/easymde.css?_={{ ts }}">
|
||||
</head>
|
||||
<body>
|
||||
<div id="mw">
|
||||
@@ -20,30 +22,33 @@
|
||||
<textarea id="mt" style="display:none" autocomplete="off">{{ md }}</textarea>
|
||||
</div>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
<script>
|
||||
|
||||
var last_modified = {{ lastmod }};
|
||||
var last_modified = {{ lastmod }},
|
||||
have_emp = {{ have_emp|tojson }},
|
||||
dfavico = "{{ favico }}";
|
||||
|
||||
var md_opt = {
|
||||
link_md_as_html: false,
|
||||
allow_plugins: {{ md_plug }},
|
||||
modpoll_freq: {{ md_chk_rate }}
|
||||
};
|
||||
|
||||
var lightswitch = (function () {
|
||||
var fun = function () {
|
||||
var dark = !document.documentElement.getAttribute("class");
|
||||
document.documentElement.setAttribute("class", dark ? "dark" : "");
|
||||
if (window.localStorage)
|
||||
localStorage.setItem('lightmode', dark ? 0 : 1);
|
||||
};
|
||||
if (window.localStorage && localStorage.getItem('lightmode') != 1)
|
||||
fun();
|
||||
|
||||
return fun;
|
||||
var l = localStorage,
|
||||
drk = l.light != 1,
|
||||
f = function (e) {
|
||||
if (e) drk = !drk;
|
||||
document.documentElement.className = drk? "z":"y";
|
||||
l.light = drk? 0:1;
|
||||
};
|
||||
f();
|
||||
return f;
|
||||
})();
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/mde.js?_={{ ts }}"></script>
|
||||
</body></html>
|
||||
|
||||
@@ -15,7 +15,7 @@ var dom_md = ebi('mt');
|
||||
if (a > 0)
|
||||
loc.push(n[a]);
|
||||
|
||||
var dec = uricom_dec(n[a])[0].replace(/&/g, "&").replace(/</g, "<").replace(/>/g, ">");
|
||||
var dec = uricom_dec(n[a]).replace(/&/g, "&").replace(/</g, "<").replace(/>/g, ">");
|
||||
|
||||
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
||||
}
|
||||
@@ -65,8 +65,7 @@ var mde = (function () {
|
||||
mde.codemirror.on("change", function () {
|
||||
md_changed(mde);
|
||||
});
|
||||
var loader = ebi('ml');
|
||||
loader.parentNode.removeChild(loader);
|
||||
qsr('#ml');
|
||||
return mde;
|
||||
})();
|
||||
|
||||
@@ -75,7 +74,7 @@ function set_jumpto() {
|
||||
}
|
||||
|
||||
function jumpto(ev) {
|
||||
var tgt = ev.target || ev.srcElement;
|
||||
var tgt = ev.target;
|
||||
var ln = null;
|
||||
while (tgt && !ln) {
|
||||
ln = tgt.getAttribute('data-ln');
|
||||
@@ -96,65 +95,58 @@ function md_changed(mde, on_srv) {
|
||||
var md_now = mde.value();
|
||||
var save_btn = QS('.editor-toolbar button.save');
|
||||
|
||||
if (md_now == window.md_saved)
|
||||
save_btn.classList.add('disabled');
|
||||
else
|
||||
save_btn.classList.remove('disabled');
|
||||
|
||||
clmod(save_btn, 'disabled', md_now == window.md_saved);
|
||||
set_jumpto();
|
||||
}
|
||||
|
||||
function save(mde) {
|
||||
var save_btn = QS('.editor-toolbar button.save');
|
||||
if (save_btn.classList.contains('disabled')) {
|
||||
alert('there is nothing to save');
|
||||
return;
|
||||
}
|
||||
var force = save_btn.classList.contains('force-save');
|
||||
if (force && !confirm('confirm that you wish to lose the changes made on the server since you opened this document')) {
|
||||
alert('ok, aborted');
|
||||
return;
|
||||
if (clgot(save_btn, 'disabled'))
|
||||
return toast.inf(2, 'no changes');
|
||||
|
||||
var force = clgot(save_btn, 'force-save');
|
||||
function save2() {
|
||||
var txt = mde.value();
|
||||
|
||||
var fd = new FormData();
|
||||
fd.append("act", "tput");
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XHR();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onload = xhr.onerror = save_cb;
|
||||
xhr.btn = save_btn;
|
||||
xhr.mde = mde;
|
||||
xhr.txt = txt;
|
||||
xhr.send(fd);
|
||||
}
|
||||
|
||||
var txt = mde.value();
|
||||
|
||||
var fd = new FormData();
|
||||
fd.append("act", "tput");
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_cb;
|
||||
xhr.btn = save_btn;
|
||||
xhr.mde = mde;
|
||||
xhr.txt = txt;
|
||||
xhr.send(fd);
|
||||
if (!force)
|
||||
save2();
|
||||
else
|
||||
modal.confirm('confirm that you wish to lose the changes made on the server since you opened this document', save2, function () {
|
||||
toast.inf(3, 'aborted');
|
||||
});
|
||||
}
|
||||
|
||||
function save_cb() {
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200) {
|
||||
alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return;
|
||||
}
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
|
||||
var r;
|
||||
try {
|
||||
r = JSON.parse(this.responseText);
|
||||
}
|
||||
catch (ex) {
|
||||
alert('Failed to parse reply from server:\n\n' + this.responseText);
|
||||
return;
|
||||
return toast.err(0, 'Failed to parse reply from server:\n\n' + this.responseText);
|
||||
}
|
||||
|
||||
if (!r.ok) {
|
||||
if (!this.btn.classList.contains('force-save')) {
|
||||
this.btn.classList.add('force-save');
|
||||
if (!clgot(this.btn, 'force-save')) {
|
||||
clmod(this.btn, 'force-save', 1);
|
||||
var msg = [
|
||||
'This file has been modified since you started editing it!\n',
|
||||
'if you really want to overwrite, press save again.\n',
|
||||
@@ -164,23 +156,21 @@ function save_cb() {
|
||||
r.lastmod + ' lastmod on the server now,',
|
||||
r.now + ' server time now,\n',
|
||||
];
|
||||
alert(msg.join('\n'));
|
||||
return toast.err(0, msg.join('\n'));
|
||||
}
|
||||
else {
|
||||
alert('Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
|
||||
}
|
||||
return;
|
||||
else
|
||||
return toast.err(0, 'Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
|
||||
}
|
||||
|
||||
this.btn.classList.remove('force-save');
|
||||
clmod(this.btn, 'force-save');
|
||||
//alert('save OK -- wrote ' + r.size + ' bytes.\n\nsha512: ' + r.sha512);
|
||||
|
||||
// download the saved doc from the server and compare
|
||||
var url = (document.location + '').split('?')[0] + '?raw';
|
||||
var xhr = new XMLHttpRequest();
|
||||
var xhr = new XHR();
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_chk;
|
||||
xhr.onload = xhr.onerror = save_chk;
|
||||
xhr.btn = this.save_btn;
|
||||
xhr.mde = this.mde;
|
||||
xhr.txt = this.txt;
|
||||
@@ -189,38 +179,23 @@ function save_cb() {
|
||||
}
|
||||
|
||||
function save_chk() {
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200) {
|
||||
alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return;
|
||||
}
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
|
||||
var doc1 = this.txt.replace(/\r\n/g, "\n");
|
||||
var doc2 = this.responseText.replace(/\r\n/g, "\n");
|
||||
if (doc1 != doc2) {
|
||||
alert(
|
||||
modal.alert(
|
||||
'Error! The document on the server does not appear to have saved correctly (your editor contents and the server copy is not identical). Place the document on your clipboard for now and check the server logs for hints\n\n' +
|
||||
'Length: yours=' + doc1.length + ', server=' + doc2.length
|
||||
);
|
||||
alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
|
||||
alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
|
||||
modal.alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
|
||||
modal.alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
|
||||
return;
|
||||
}
|
||||
|
||||
last_modified = this.lastmod;
|
||||
md_changed(this.mde, true);
|
||||
|
||||
var ok = mknod('div');
|
||||
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
|
||||
ok.innerHTML = 'OK✔️';
|
||||
var parent = ebi('m');
|
||||
document.documentElement.appendChild(ok);
|
||||
setTimeout(function () {
|
||||
ok.style.opacity = 0;
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
ok.parentNode.removeChild(ok);
|
||||
}, 750);
|
||||
toast.ok(2, 'save OK' + (this.ntry ? '\nattempt ' + this.ntry : ''));
|
||||
}
|
||||
|
||||
@@ -11,14 +11,12 @@ html {
|
||||
background: #333;
|
||||
font-family: sans-serif;
|
||||
text-shadow: 1px 1px 0px #000;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
html, body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
body {
|
||||
padding-bottom: 5em;
|
||||
}
|
||||
#box {
|
||||
padding: .5em 1em;
|
||||
background: #2c2c2c;
|
||||
@@ -28,4 +26,4 @@ pre {
|
||||
}
|
||||
a {
|
||||
color: #fc5;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,48 +2,49 @@
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>copyparty</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/msg.css?_={{ ts }}">
|
||||
<meta charset="utf-8">
|
||||
<title>{{ svcname }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="box">
|
||||
|
||||
{%- if h1 %}
|
||||
<h1>{{ h1 }}</h1>
|
||||
{%- endif %}
|
||||
|
||||
{%- if h2 %}
|
||||
<h2>{{ h2 }}</h2>
|
||||
{%- endif %}
|
||||
|
||||
{%- if p %}
|
||||
<p>{{ p }}</p>
|
||||
{%- endif %}
|
||||
<div id="box">
|
||||
|
||||
{%- if pre %}
|
||||
<pre>{{ pre }}</pre>
|
||||
{%- endif %}
|
||||
{%- if h1 %}
|
||||
<h1>{{ h1 }}</h1>
|
||||
{%- endif %}
|
||||
|
||||
{%- if html %}
|
||||
{{ html }}
|
||||
{%- endif %}
|
||||
{%- if h2 %}
|
||||
<h2>{{ h2 }}</h2>
|
||||
{%- endif %}
|
||||
|
||||
{%- if click %}
|
||||
<script>document.getElementsByTagName("a")[0].click()</script>
|
||||
{%- endif %}
|
||||
</div>
|
||||
{%- if p %}
|
||||
<p>{{ p }}</p>
|
||||
{%- endif %}
|
||||
|
||||
{%- if redir %}
|
||||
<script>
|
||||
setTimeout(function() {
|
||||
window.location.replace("{{ redir }}");
|
||||
}, 1000);
|
||||
</script>
|
||||
{%- endif %}
|
||||
{%- if pre %}
|
||||
<pre>{{ pre }}</pre>
|
||||
{%- endif %}
|
||||
|
||||
{%- if html %}
|
||||
{{ html }}
|
||||
{%- endif %}
|
||||
|
||||
{%- if click %}
|
||||
<script>document.getElementsByTagName("a")[0].click()</script>
|
||||
{%- endif %}
|
||||
</div>
|
||||
|
||||
{%- if redir %}
|
||||
<script>
|
||||
setTimeout(function() {
|
||||
window.location.replace("{{ redir }}");
|
||||
}, 1000);
|
||||
</script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
|
||||
</html>
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user