mirror of https://github.com/9001/copyparty.git, synced 2025-10-24 16:43:55 +00:00
Compare commits
517 Commits
c9c4aac6cf … 46752f758a
.github/ISSUE_TEMPLATE/bug_report.md (vendored, new file, 40 lines)
@@ -0,0 +1,40 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: '9001'

---

NOTE:
all of the below are optional, consider them as inspiration, delete and rewrite at will, thx md


**Describe the bug**
a description of what the bug is

**To Reproduce**
List of steps to reproduce the issue, or, if it's hard to reproduce, then at least a detailed explanation of what you did to run into it

**Expected behavior**
a description of what you expected to happen

**Screenshots**
if applicable, add screenshots to help explain your problem, such as the kickass crashpage :^)

**Server details**
if the issue is possibly on the server-side, then mention some of the following:
* server OS / version:
* python version:
* copyparty arguments:
* filesystem (`lsblk -f` on linux):

**Client details**
if the issue is possibly on the client-side, then mention some of the following:
* the device type and model:
* OS version:
* browser version:

**Additional context**
any other context about the problem here
.github/ISSUE_TEMPLATE/feature_request.md (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: '9001'

---

all of the below are optional, consider them as inspiration, delete and rewrite at will

**is your feature request related to a problem? Please describe.**
a description of what the problem is, for example, `I'm always frustrated when [...]` or `Why is it not possible to [...]`

**Describe the idea / solution you'd like**
a description of what you want to happen

**Describe any alternatives you've considered**
a description of any alternative solutions or features you've considered

**Additional context**
add any other context or screenshots about the feature request here
.github/ISSUE_TEMPLATE/something-else.md (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
---
name: Something else
about: "┐(゚∀゚)┌"
title: ''
labels: ''
assignees: ''

---

.github/branch-rename.md (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
modernize your local checkout of the repo like so,
```sh
git branch -m master hovudstraum
git fetch origin
git branch -u origin/hovudstraum hovudstraum
git remote set-head origin -a
```
.gitignore (vendored, 5 lines changed)
@@ -9,6 +9,7 @@ buildenv/
build/
dist/
sfx/
py2/
.venv/

# ide
@@ -20,3 +21,7 @@ sfx/

# derived
copyparty/web/deps/
srv/

# state/logs
up.*.txt
.hist/
.vscode/launch.json (vendored, 2 lines changed)
@@ -17,7 +17,7 @@
"-mtp",
".bpm=f,bin/mtag/audio-bpm.py",
"-aed:wark",
"-vsrv::r:aed:cnodupe",
"-vsrv::r:rw,ed:c,dupe",
"-vdist:dist:r"
]
},

.vscode/settings.json (vendored, 1 line changed)
@@ -55,4 +55,5 @@
"py27"
],
"python.linting.enabled": true,
"python.pythonPath": "/usr/bin/python3"
}
CODE_OF_CONDUCT.md (new file, 24 lines)
@@ -0,0 +1,24 @@
in the words of Abraham Lincoln:

> Be excellent to each other... and... PARTY ON, DUDES!

more specifically I'll paraphrase some examples from a german automotive corporation as they cover all the bases without being too wordy

## Examples of unacceptable behavior
* intimidation, harassment, trolling
* insulting, derogatory, harmful or prejudicial comments
* posting private information without permission
* political or personal attacks

## Examples of expected behavior
* being nice, friendly, welcoming, inclusive, mindful and empathetic
* acting considerate, modest, respectful
* using polite and inclusive language
* criticize constructively and accept constructive criticism
* respect different points of view

## finally and even more specifically,
* parse opinions and feedback objectively without prejudice
* it's the message that matters, not who said it

aaand that's how you say `be nice` in a way that fills half a floppy w
CONTRIBUTING.md (new file, 3 lines)
@@ -0,0 +1,3 @@
* do something cool

really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight
bin/README.md:
@@ -1,3 +1,16 @@
# [`up2k.py`](up2k.py)
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
* file uploads, file-search, autoresume of aborted/broken uploads
* faster than browsers
* if something breaks just restart it


# [`partyjournal.py`](partyjournal.py)
produces a chronological list of all uploads by collecting info from up2k databases and the filesystem
* outputs a standalone html file
* optional mapping from IP-addresses to nicknames


# [`copyparty-fuse.py`](copyparty-fuse.py)
* mount a copyparty server as a local filesystem (read-only)
* **supports Windows!** -- expect `194 MiB/s` sequential read
@@ -47,6 +60,7 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
* copyparty can Popen programs like these during file indexing to collect additional metadata


# [`dbtool.py`](dbtool.py)
upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty is incompatible with the old DB and automatically rebuilds the DB from scratch, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db

@@ -63,6 +77,7 @@ cd /mnt/nas/music/.hist
```


# [`prisonparty.sh`](prisonparty.sh)
* run copyparty in a chroot, preventing any accidental file access
* creates bindmounts for /bin, /lib, and so on, see `sysdirs=`
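for context, an `-mtp` parser like the ones referenced above is just a standalone program: copyparty hands it the file path as `argv[1]` and whatever it prints to stdout becomes the tag value. a rough sketch of a hypothetical plugin (`linecount.py` is not part of the repo, and the volume-flag syntax in the comment is only inferred from the examples in this diff):

```python
#!/usr/bin/env python3
# hypothetical -mtp parser (not part of copyparty): counts lines in the
# uploaded file and prints that as the tag value; registration would look
# something like  -mtp lines=ad,bin/mtag/linecount.py  going by the examples above
import sys

try:
    from copyparty.util import fsenc  # same optional import as the bundled plugins
except:
    def fsenc(p):
        return p.encode("utf-8")


def main():
    n = 0
    with open(fsenc(sys.argv[1]), "rb") as f:
        for _ in f:
            n += 1
    print(n)  # stdout becomes the value of the "lines" tag


if __name__ == "__main__":
    main()
```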
@@ -22,7 +22,7 @@ dependencies:

note:
you probably want to run this on windows clients:
https://github.com/9001/copyparty/blob/master/contrib/explorer-nothumbs-nofoldertypes.reg
https://github.com/9001/copyparty/blob/hovudstraum/contrib/explorer-nothumbs-nofoldertypes.reg

get server cert:
awk '/-BEGIN CERTIFICATE-/ {a=1} a; /-END CERTIFICATE-/{exit}' <(openssl s_client -connect 127.0.0.1:3923 </dev/null 2>/dev/null) >cert.pem
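the same certificate can also be grabbed with Python's standard library if openssl/awk are not at hand (a sketch, assuming copyparty is serving TLS on 127.0.0.1:3923 as in the command above):

```python
import ssl

# fetch the server's certificate as PEM, same result as the awk/openssl pipeline
pem = ssl.get_server_certificate(("127.0.0.1", 3923))
with open("cert.pem", "w") as f:
    f.write(pem)
```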
@@ -71,7 +71,7 @@ except:
|
||||
elif MACOS:
|
||||
libfuse = "install https://osxfuse.github.io/"
|
||||
else:
|
||||
libfuse = "apt install libfuse\n modprobe fuse"
|
||||
libfuse = "apt install libfuse3-3\n modprobe fuse"
|
||||
|
||||
print(
|
||||
"\n could not import fuse; these may help:"
|
||||
@@ -393,15 +393,16 @@ class Gateway(object):
|
||||
|
||||
rsp = json.loads(rsp.decode("utf-8"))
|
||||
ret = []
|
||||
for is_dir, nodes in [[True, rsp["dirs"]], [False, rsp["files"]]]:
|
||||
for statfun, nodes in [
|
||||
[self.stat_dir, rsp["dirs"]],
|
||||
[self.stat_file, rsp["files"]],
|
||||
]:
|
||||
for n in nodes:
|
||||
fname = unquote(n["href"]).rstrip(b"/")
|
||||
fname = fname.decode("wtf-8")
|
||||
fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8")
|
||||
if bad_good:
|
||||
fname = enwin(fname)
|
||||
|
||||
fun = self.stat_dir if is_dir else self.stat_file
|
||||
ret.append([fname, fun(n["ts"], n["sz"]), 0])
|
||||
ret.append([fname, statfun(n["ts"], n["sz"]), 0])
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
@@ -11,14 +11,18 @@ import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import json
|
||||
import stat
|
||||
import errno
|
||||
import struct
|
||||
import codecs
|
||||
import platform
|
||||
import threading
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
try:
|
||||
import fuse
|
||||
@@ -38,7 +42,7 @@ except:
|
||||
mount a copyparty server (local or remote) as a filesystem
|
||||
|
||||
usage:
|
||||
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas
|
||||
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas
|
||||
|
||||
dependencies:
|
||||
sudo apk add fuse-dev python3-dev
|
||||
@@ -50,6 +54,10 @@ fork of copyparty-fuse.py based on fuse-python which
|
||||
"""
|
||||
|
||||
|
||||
WINDOWS = sys.platform == "win32"
|
||||
MACOS = platform.system() == "Darwin"
|
||||
|
||||
|
||||
def threadless_log(msg):
|
||||
print(msg + "\n", end="")
|
||||
|
||||
@@ -93,6 +101,41 @@ def html_dec(txt):
|
||||
)
|
||||
|
||||
|
||||
def register_wtf8():
|
||||
def wtf8_enc(text):
|
||||
return str(text).encode("utf-8", "surrogateescape"), len(text)
|
||||
|
||||
def wtf8_dec(binary):
|
||||
return bytes(binary).decode("utf-8", "surrogateescape"), len(binary)
|
||||
|
||||
def wtf8_search(encoding_name):
|
||||
return codecs.CodecInfo(wtf8_enc, wtf8_dec, name="wtf-8")
|
||||
|
||||
codecs.register(wtf8_search)
|
||||
|
||||
|
||||
bad_good = {}
|
||||
good_bad = {}
|
||||
|
||||
|
||||
def enwin(txt):
|
||||
return "".join([bad_good.get(x, x) for x in txt])
|
||||
|
||||
for bad, good in bad_good.items():
|
||||
txt = txt.replace(bad, good)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
def dewin(txt):
|
||||
return "".join([good_bad.get(x, x) for x in txt])
|
||||
|
||||
for bad, good in bad_good.items():
|
||||
txt = txt.replace(good, bad)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
class CacheNode(object):
|
||||
def __init__(self, tag, data):
|
||||
self.tag = tag
|
||||
@@ -115,8 +158,9 @@ class Stat(fuse.Stat):
|
||||
|
||||
|
||||
class Gateway(object):
|
||||
def __init__(self, base_url):
|
||||
def __init__(self, base_url, pw):
|
||||
self.base_url = base_url
|
||||
self.pw = pw
|
||||
|
||||
ui = urllib.parse.urlparse(base_url)
|
||||
self.web_root = ui.path.strip("/")
|
||||
@@ -135,8 +179,7 @@ class Gateway(object):
|
||||
self.conns = {}
|
||||
|
||||
def quotep(self, path):
|
||||
# TODO: mojibake support
|
||||
path = path.encode("utf-8", "ignore")
|
||||
path = path.encode("wtf-8")
|
||||
return quote(path, safe="/")
|
||||
|
||||
def getconn(self, tid=None):
|
||||
@@ -159,20 +202,29 @@ class Gateway(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
def sendreq(self, *args, **kwargs):
|
||||
def sendreq(self, *args, **ka):
|
||||
tid = get_tid()
|
||||
if self.pw:
|
||||
ck = "cppwd=" + self.pw
|
||||
try:
|
||||
ka["headers"]["Cookie"] = ck
|
||||
except:
|
||||
ka["headers"] = {"Cookie": ck}
|
||||
try:
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
c.request(*list(args), **ka)
|
||||
return c.getresponse()
|
||||
except:
|
||||
self.closeconn(tid)
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
c.request(*list(args), **ka)
|
||||
return c.getresponse()
|
||||
|
||||
def listdir(self, path):
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
|
||||
if bad_good:
|
||||
path = dewin(path)
|
||||
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots&ls"
|
||||
r = self.sendreq("GET", web_path)
|
||||
if r.status != 200:
|
||||
self.closeconn()
|
||||
@@ -182,9 +234,12 @@ class Gateway(object):
|
||||
)
|
||||
)
|
||||
|
||||
return self.parse_html(r)
|
||||
return self.parse_jls(r)
|
||||
|
||||
def download_file_range(self, path, ofs1, ofs2):
|
||||
if bad_good:
|
||||
path = dewin(path)
|
||||
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
|
||||
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
|
||||
log("downloading {}".format(hdr_range))
|
||||
@@ -200,40 +255,27 @@ class Gateway(object):
|
||||
|
||||
return r.read()
|
||||
|
||||
def parse_html(self, datasrc):
|
||||
ret = []
|
||||
remainder = b""
|
||||
ptn = re.compile(
|
||||
r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
|
||||
)
|
||||
|
||||
def parse_jls(self, datasrc):
|
||||
rsp = b""
|
||||
while True:
|
||||
buf = remainder + datasrc.read(4096)
|
||||
# print('[{}]'.format(buf.decode('utf-8')))
|
||||
buf = datasrc.read(1024 * 32)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
remainder = b""
|
||||
endpos = buf.rfind(b"\n")
|
||||
if endpos >= 0:
|
||||
remainder = buf[endpos + 1 :]
|
||||
buf = buf[:endpos]
|
||||
rsp += buf
|
||||
|
||||
lines = buf.decode("utf-8").split("\n")
|
||||
for line in lines:
|
||||
m = ptn.match(line)
|
||||
if not m:
|
||||
# print(line)
|
||||
continue
|
||||
rsp = json.loads(rsp.decode("utf-8"))
|
||||
ret = []
|
||||
for statfun, nodes in [
|
||||
[self.stat_dir, rsp["dirs"]],
|
||||
[self.stat_file, rsp["files"]],
|
||||
]:
|
||||
for n in nodes:
|
||||
fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8")
|
||||
if bad_good:
|
||||
fname = enwin(fname)
|
||||
|
||||
ftype, fname, fsize, fdate = m.groups()
|
||||
fname = html_dec(fname)
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
sz = int(fsize)
|
||||
if ftype == "-":
|
||||
ret.append([fname, self.stat_file(ts, sz), 0])
|
||||
else:
|
||||
ret.append([fname, self.stat_dir(ts, sz), 0])
|
||||
ret.append([fname, statfun(n["ts"], n["sz"]), 0])
|
||||
|
||||
return ret
|
||||
|
||||
@@ -262,6 +304,7 @@ class CPPF(Fuse):
|
||||
Fuse.__init__(self, *args, **kwargs)
|
||||
|
||||
self.url = None
|
||||
self.pw = None
|
||||
|
||||
self.dircache = []
|
||||
self.dircache_mtx = threading.Lock()
|
||||
@@ -271,7 +314,7 @@ class CPPF(Fuse):
|
||||
|
||||
def init2(self):
|
||||
# TODO figure out how python-fuse wanted this to go
|
||||
self.gw = Gateway(self.url) # .decode('utf-8'))
|
||||
self.gw = Gateway(self.url, self.pw) # .decode('utf-8'))
|
||||
info("up")
|
||||
|
||||
def clean_dircache(self):
|
||||
@@ -536,6 +579,8 @@ class CPPF(Fuse):
|
||||
|
||||
def getattr(self, path):
|
||||
log("getattr [{}]".format(path))
|
||||
if WINDOWS:
|
||||
path = enwin(path) # windows occasionally decodes f0xx to xx
|
||||
|
||||
path = path.strip("/")
|
||||
try:
|
||||
@@ -568,9 +613,25 @@ class CPPF(Fuse):
|
||||
|
||||
def main():
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
register_wtf8()
|
||||
if WINDOWS:
|
||||
os.system("rem")
|
||||
|
||||
for ch in '<>:"\\|?*':
|
||||
# microsoft maps illegal characters to f0xx
|
||||
# (e000 to f8ff is basic-plane private-use)
|
||||
bad_good[ch] = chr(ord(ch) + 0xF000)
|
||||
|
||||
for n in range(0, 0x100):
|
||||
# map surrogateescape to another private-use area
|
||||
bad_good[chr(n + 0xDC00)] = chr(n + 0xF100)
|
||||
|
||||
for k, v in bad_good.items():
|
||||
good_bad[v] = k
|
||||
|
||||
server = CPPF()
|
||||
server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
|
||||
server.parser.add_option(mountopt="pw", metavar="PASSWORD", default=None)
|
||||
server.parse(values=server, errex=1)
|
||||
if not server.url or not str(server.url).startswith("http"):
|
||||
print("\nerror:")
|
||||
@@ -578,7 +639,7 @@ def main():
|
||||
print(" need argument: mount-path")
|
||||
print("example:")
|
||||
print(
|
||||
" ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas"
|
||||
" ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
bin/mtag/README.md:
@@ -1,10 +1,22 @@
standalone programs which take an audio file as argument

**NOTE:** these all require `-e2ts` to be functional, meaning you need to do at least one of these: `apt install ffmpeg` or `pip3 install mutagen`

some of these rely on libraries which are not MIT-compatible

* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3

these invoke standalone programs which are GPL or similar, so they are legally fine for most purposes:

* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
* [image-noexif.py](./image-noexif.py) removes exif tags from images; uses exiftool (GPLv1 or artistic-license)

these do not have any problematic dependencies at all:

* [cksum.py](./cksum.py) computes various checksums
* [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
* [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty


# dependencies
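the `exe.py` bullet above mentions retrieving multiple tags from a single parser; as `cksum.py` further down in this diff also demonstrates, that just means printing one JSON object where each key is a tag name. a hypothetical sketch, not part of the repo:

```python
#!/usr/bin/env python3
# hypothetical multi-tag -mtp parser: prints a single JSON object and each
# key/value pair becomes one tag on the uploaded file
import os
import sys
import json


def main():
    path = sys.argv[1]
    tags = {
        "fsize": os.path.getsize(path),                         # file size in bytes
        "fext": os.path.splitext(path)[1].lstrip(".").lower(),  # file extension
    }
    print(json.dumps(tags, indent=4))


if __name__ == "__main__":
    main()
```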
@@ -19,17 +19,18 @@ dep: ffmpeg
|
||||
def det(tf):
|
||||
# fmt: off
|
||||
sp.check_call([
|
||||
"ffmpeg",
|
||||
"-nostdin",
|
||||
"-hide_banner",
|
||||
"-v", "fatal",
|
||||
"-ss", "13",
|
||||
"-y", "-i", fsenc(sys.argv[1]),
|
||||
"-ac", "1",
|
||||
"-ar", "22050",
|
||||
"-t", "300",
|
||||
"-f", "f32le",
|
||||
tf
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-hide_banner",
|
||||
b"-v", b"fatal",
|
||||
b"-ss", b"13",
|
||||
b"-y", b"-i", fsenc(sys.argv[1]),
|
||||
b"-map", b"0:a:0",
|
||||
b"-ac", b"1",
|
||||
b"-ar", b"22050",
|
||||
b"-t", b"300",
|
||||
b"-f", b"f32le",
|
||||
fsenc(tf)
|
||||
])
|
||||
# fmt: on
|
||||
|
||||
|
||||
@@ -23,14 +23,15 @@ dep: ffmpeg
|
||||
def det(tf):
|
||||
# fmt: off
|
||||
sp.check_call([
|
||||
"ffmpeg",
|
||||
"-nostdin",
|
||||
"-hide_banner",
|
||||
"-v", "fatal",
|
||||
"-y", "-i", fsenc(sys.argv[1]),
|
||||
"-t", "300",
|
||||
"-sample_fmt", "s16",
|
||||
tf
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-hide_banner",
|
||||
b"-v", b"fatal",
|
||||
b"-y", b"-i", fsenc(sys.argv[1]),
|
||||
b"-map", b"0:a:0",
|
||||
b"-t", b"300",
|
||||
b"-sample_fmt", b"s16",
|
||||
fsenc(tf)
|
||||
])
|
||||
# fmt: on
|
||||
|
||||
|
||||
89
bin/mtag/cksum.py
Executable file
89
bin/mtag/cksum.py
Executable file
@@ -0,0 +1,89 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import json
|
||||
import zlib
|
||||
import struct
|
||||
import base64
|
||||
import hashlib
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p
|
||||
|
||||
|
||||
"""
|
||||
calculates various checksums for uploads,
|
||||
usage: -mtp crc32,md5,sha1,sha256b=bin/mtag/cksum.py
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
config = "crc32 md5 md5b sha1 sha1b sha256 sha256b sha512/240 sha512b/240"
|
||||
# b suffix = base64 encoded
|
||||
# slash = truncate to n bits
|
||||
|
||||
known = {
|
||||
"md5": hashlib.md5,
|
||||
"sha1": hashlib.sha1,
|
||||
"sha256": hashlib.sha256,
|
||||
"sha512": hashlib.sha512,
|
||||
}
|
||||
config = config.split()
|
||||
hashers = {
|
||||
k: v()
|
||||
for k, v in known.items()
|
||||
if k in [x.split("/")[0].rstrip("b") for x in known]
|
||||
}
|
||||
crc32 = 0 if "crc32" in config else None
|
||||
|
||||
with open(fsenc(sys.argv[1]), "rb", 512 * 1024) as f:
|
||||
while True:
|
||||
buf = f.read(64 * 1024)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
for x in hashers.values():
|
||||
x.update(buf)
|
||||
|
||||
if crc32 is not None:
|
||||
crc32 = zlib.crc32(buf, crc32)
|
||||
|
||||
ret = {}
|
||||
for s in config:
|
||||
alg = s.split("/")[0]
|
||||
b64 = alg.endswith("b")
|
||||
alg = alg.rstrip("b")
|
||||
if alg in hashers:
|
||||
v = hashers[alg].digest()
|
||||
elif alg == "crc32":
|
||||
v = crc32
|
||||
if v < 0:
|
||||
v &= 2 ** 32 - 1
|
||||
v = struct.pack(">L", v)
|
||||
else:
|
||||
raise Exception("what is {}".format(s))
|
||||
|
||||
if "/" in s:
|
||||
v = v[: int(int(s.split("/")[1]) / 8)]
|
||||
|
||||
if b64:
|
||||
v = base64.b64encode(v).decode("ascii").rstrip("=")
|
||||
else:
|
||||
try:
|
||||
v = v.hex()
|
||||
except:
|
||||
import binascii
|
||||
|
||||
v = binascii.hexlify(v)
|
||||
|
||||
ret[s] = v
|
||||
|
||||
print(json.dumps(ret, indent=4))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
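to spell out the config tokens used in `cksum.py` above: a trailing `b` means unpadded base64 instead of hex, and `/N` truncates the digest to the first N bits, so `sha512b/240` is the first 30 bytes of the sha512 digest, base64-encoded without padding. a standalone snippet reproducing that interpretation (not part of the plugin):

```python
import base64
import hashlib

data = b"hello"
dg = hashlib.sha512(data).digest()

# "sha512/240": hex, truncated to 240 bits (30 bytes)
sha512_240 = dg[: 240 // 8].hex()

# "sha512b/240": same truncation, then base64 with the padding stripped
sha512b_240 = base64.b64encode(dg[: 240 // 8]).decode("ascii").rstrip("=")

print(sha512_240)
print(sha512b_240)
```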
93
bin/mtag/image-noexif.py
Normal file
93
bin/mtag/image-noexif.py
Normal file
@@ -0,0 +1,93 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
remove exif tags from uploaded images
|
||||
|
||||
dependencies:
|
||||
exiftool
|
||||
|
||||
about:
|
||||
creates a "noexif" subfolder and puts exif-stripped copies of each image there,
|
||||
the reason for the subfolder is to avoid issues with the up2k.db / deduplication:
|
||||
|
||||
if the original image is modified in-place, then copyparty will keep the original
|
||||
hash in up2k.db for a while (until the next volume rescan), so if the image is
|
||||
reuploaded after a rescan then the upload will be renamed and kept as a dupe
|
||||
|
||||
alternatively you could switch the logic around, making a copy of the original
|
||||
image into a subfolder named "exif" and modify the original in-place, but then
|
||||
up2k.db will be out of sync until the next rescan, so any additional uploads
|
||||
of the same image will get symlinked (deduplicated) to the modified copy
|
||||
instead of the original in "exif"
|
||||
|
||||
or maybe delete the original image after processing, that would kinda work too
|
||||
|
||||
example copyparty config to use this:
|
||||
-v/mnt/nas/pics:pics:rwmd,ed:c,e2ts,mte=+noexif:c,mtp=noexif=ejpg,ejpeg,ad,bin/mtag/image-noexif.py
|
||||
|
||||
explained:
|
||||
for realpath /mnt/nas/pics (served at /pics) with read-write-modify-delete for ed,
|
||||
enable file analysis on upload (e2ts),
|
||||
append "noexif" to the list of known tags (mtp),
|
||||
and use mtp plugin "bin/mtag/image-noexif.py" to provide that tag,
|
||||
do this on all uploads with the file extension "jpg" or "jpeg",
|
||||
ad = parse file regardless if FFmpeg thinks it is audio or not
|
||||
|
||||
PS: this requires e2ts to be functional,
|
||||
meaning you need to do at least one of these:
|
||||
* apt install ffmpeg
|
||||
* pip3 install mutagen
|
||||
and your python must have sqlite3 support compiled in
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import filecmp
|
||||
import subprocess as sp
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
def main():
|
||||
cwd, fn = os.path.split(sys.argv[1])
|
||||
if os.path.basename(cwd) == "noexif":
|
||||
return
|
||||
|
||||
os.chdir(cwd)
|
||||
f1 = fsenc(fn)
|
||||
f2 = os.path.join(b"noexif", f1)
|
||||
cmd = [
|
||||
b"exiftool",
|
||||
b"-exif:all=",
|
||||
b"-iptc:all=",
|
||||
b"-xmp:all=",
|
||||
b"-P",
|
||||
b"-o",
|
||||
b"noexif/",
|
||||
b"--",
|
||||
f1,
|
||||
]
|
||||
sp.check_output(cmd)
|
||||
if not os.path.exists(f2):
|
||||
print("failed")
|
||||
return
|
||||
|
||||
if filecmp.cmp(f1, f2, shallow=False):
|
||||
print("clean")
|
||||
else:
|
||||
print("exif")
|
||||
|
||||
# lastmod = os.path.getmtime(f1)
|
||||
# times = (int(time.time()), int(lastmod))
|
||||
# os.utime(f2, times)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -4,7 +4,8 @@ set -e
|
||||
|
||||
# install dependencies for audio-*.py
|
||||
#
|
||||
# linux: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf
|
||||
# linux/alpine: requires gcc g++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-dev py3-{wheel,pip} py3-numpy{,-dev}
|
||||
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3,libsndfile1}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
|
||||
# win64: requires msys2-mingw64 environment
|
||||
# macos: requires macports
|
||||
#
|
||||
@@ -100,8 +101,11 @@ export -f dl_files
|
||||
|
||||
|
||||
github_tarball() {
|
||||
rm -rf g
|
||||
mkdir g
|
||||
cd g
|
||||
dl_text "$1" |
|
||||
tee json |
|
||||
tee ../json |
|
||||
(
|
||||
# prefer jq if available
|
||||
jq -r '.tarball_url' ||
|
||||
@@ -110,8 +114,11 @@ github_tarball() {
|
||||
awk -F\" '/"tarball_url": "/ {print$4}'
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
head -n 1 |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
mv * ../tgz
|
||||
cd ..
|
||||
}
|
||||
|
||||
|
||||
@@ -126,6 +133,7 @@ gitlab_tarball() {
|
||||
tr \" '\n' | grep -E '\.tar\.gz$' | head -n 1
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
head -n 1 |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
tee links |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
@@ -137,10 +145,17 @@ install_keyfinder() {
|
||||
# use msys2 in mingw-w64 mode
|
||||
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python}
|
||||
|
||||
github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest
|
||||
[ -e $HOME/pe/keyfinder ] && {
|
||||
echo found a keyfinder build in ~/pe, skipping
|
||||
return
|
||||
}
|
||||
|
||||
tar -xf mixxxdj-libkeyfinder-*
|
||||
rm -- *.tar.gz
|
||||
cd "$td"
|
||||
github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest
|
||||
ls -al
|
||||
|
||||
tar -xf tgz
|
||||
rm tgz
|
||||
cd mixxxdj-libkeyfinder*
|
||||
|
||||
h="$HOME"
|
||||
@@ -207,6 +222,22 @@ install_vamp() {
|
||||
|
||||
$pybin -m pip install --user vamp
|
||||
|
||||
cd "$td"
|
||||
echo '#include <vamp-sdk/Plugin.h>' | gcc -x c -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
|
||||
printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2588/vamp-plugin-sdk-2.9.0.tar.gz)
|
||||
sha512sum -c <(
|
||||
echo "7ef7f837d19a08048b059e0da408373a7964ced452b290fae40b85d6d70ca9000bcfb3302cd0b4dc76cf2a848528456f78c1ce1ee0c402228d812bd347b6983b -"
|
||||
) <vamp-plugin-sdk-2.9.0.tar.gz
|
||||
tar -xf vamp-plugin-sdk-2.9.0.tar.gz
|
||||
rm -- *.tar.gz
|
||||
ls -al
|
||||
cd vamp-plugin-sdk-*
|
||||
./configure --prefix=$HOME/pe/vamp-sdk
|
||||
make -j1 install
|
||||
}
|
||||
|
||||
cd "$td"
|
||||
have_beatroot || {
|
||||
printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n'
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/885/beatroot-vamp-v1.0.tar.gz)
|
||||
@@ -214,8 +245,11 @@ install_vamp() {
|
||||
echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -"
|
||||
) <beatroot-vamp-v1.0.tar.gz
|
||||
tar -xf beatroot-vamp-v1.0.tar.gz
|
||||
rm -- *.tar.gz
|
||||
cd beatroot-vamp-v1.0
|
||||
make -f Makefile.linux -j4
|
||||
[ -e ~/pe/vamp-sdk ] &&
|
||||
sed -ri 's`^(CFLAGS :=.*)`\1 -I'$HOME'/pe/vamp-sdk/include`' Makefile.linux
|
||||
make -f Makefile.linux -j4 LDFLAGS=-L$HOME/pe/vamp-sdk/lib
|
||||
# /home/ed/vamp /home/ed/.vamp /usr/local/lib/vamp
|
||||
mkdir ~/vamp
|
||||
cp -pv beatroot-vamp.* ~/vamp/
|
||||
@@ -229,6 +263,7 @@ install_vamp() {
|
||||
|
||||
# not in use because it kinda segfaults, also no windows support
|
||||
install_soundtouch() {
|
||||
cd "$td"
|
||||
gitlab_tarball https://gitlab.com/api/v4/projects/soundtouch%2Fsoundtouch/releases
|
||||
|
||||
tar -xvf soundtouch-*
|
||||
|
||||
@@ -13,7 +13,7 @@ try:
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
"""
|
||||
@@ -24,13 +24,13 @@ dep: ffmpeg
|
||||
def det():
|
||||
# fmt: off
|
||||
cmd = [
|
||||
"ffmpeg",
|
||||
"-nostdin",
|
||||
"-hide_banner",
|
||||
"-v", "fatal",
|
||||
"-i", fsenc(sys.argv[1]),
|
||||
"-f", "framemd5",
|
||||
"-"
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-hide_banner",
|
||||
b"-v", b"fatal",
|
||||
b"-i", fsenc(sys.argv[1]),
|
||||
b"-f", b"framemd5",
|
||||
b"-"
|
||||
]
|
||||
# fmt: on
|
||||
|
||||
|
||||
bin/mtag/res/twitter-unmute.user.js (new file, 21 lines)
@@ -0,0 +1,21 @@
// ==UserScript==
// @name twitter-unmute
// @namespace http://ocv.me/
// @version 0.1
// @description memes
// @author ed <irc.rizon.net>
// @match https://twitter.com/*
// @icon https://www.google.com/s2/favicons?domain=twitter.com
// @grant GM_addStyle
// ==/UserScript==

function grunnur() {
    setInterval(function () {
        //document.querySelector('div[aria-label="Unmute"]').click();
        document.querySelector('video').muted = false;
    }, 200);
}

var scr = document.createElement('script');
scr.textContent = '(' + grunnur.toString() + ')();';
(document.head || document.getElementsByTagName('head')[0]).appendChild(scr);
139
bin/mtag/very-bad-idea.py
Executable file
139
bin/mtag/very-bad-idea.py
Executable file
@@ -0,0 +1,139 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
use copyparty as a chromecast replacement:
|
||||
* post a URL and it will open in the default browser
|
||||
* upload a file and it will open in the default application
|
||||
* the `key` command simulates keyboard input
|
||||
* the `x` command executes other xdotool commands
|
||||
* the `c` command executes arbitrary unix commands
|
||||
|
||||
the android app makes it a breeze to post pics and links:
|
||||
https://github.com/9001/party-up/releases
|
||||
(iOS devices have to rely on the web-UI)
|
||||
|
||||
goes without saying, but this is HELLA DANGEROUS,
|
||||
GIVES RCE TO ANYONE WHO HAVE UPLOAD PERMISSIONS
|
||||
|
||||
example copyparty config to use this:
|
||||
--urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,bin/mtag/very-bad-idea.py
|
||||
|
||||
recommended deps:
|
||||
apt install xdotool libnotify-bin
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js
|
||||
|
||||
and you probably want `twitter-unmute.user.js` from the res folder
|
||||
|
||||
|
||||
-----------------------------------------------------------------------
|
||||
-- startup script:
|
||||
-----------------------------------------------------------------------
|
||||
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# create qr code
|
||||
ip=$(ip r | awk '/^default/{print$(NF-2)}'); echo http://$ip:3923/ | qrencode -o - -s 4 >/dev/shm/cpp-qr.png
|
||||
/usr/bin/feh -x /dev/shm/cpp-qr.png &
|
||||
|
||||
# reposition and make topmost (with janky raspbian support)
|
||||
( sleep 0.5
|
||||
xdotool search --name cpp-qr.png windowactivate --sync windowmove 1780 0
|
||||
wmctrl -r :ACTIVE: -b toggle,above || true
|
||||
|
||||
ps aux | grep -E 'sleep[ ]7\.27' ||
|
||||
while true; do
|
||||
w=$(xdotool getactivewindow)
|
||||
xdotool search --name cpp-qr.png windowactivate windowraise windowfocus
|
||||
xdotool windowactivate $w
|
||||
xdotool windowfocus $w
|
||||
sleep 7.27 || break
|
||||
done &
|
||||
xeyes # distraction window to prevent ^w from closing the qr-code
|
||||
) &
|
||||
|
||||
# bail if copyparty is already running
|
||||
ps aux | grep -E '[3] copy[p]arty' && exit 0
|
||||
|
||||
# dumb chrome wrapper to allow autoplay
|
||||
cat >/usr/local/bin/chromium-browser <<'EOF'
|
||||
#!/bin/bash
|
||||
set -e
|
||||
/usr/bin/chromium-browser --autoplay-policy=no-user-gesture-required "$@"
|
||||
EOF
|
||||
chmod 755 /usr/local/bin/chromium-browser
|
||||
|
||||
# start the server (note: replace `-v.::rw:` with `-v.::r:` to disallow retrieving uploaded stuff)
|
||||
cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,very-bad-idea.py
|
||||
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import subprocess as sp
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
def main():
|
||||
fp = os.path.abspath(sys.argv[1])
|
||||
with open(fp, "rb") as f:
|
||||
txt = f.read(4096)
|
||||
|
||||
if txt.startswith(b"msg="):
|
||||
open_post(txt)
|
||||
else:
|
||||
open_url(fp)
|
||||
|
||||
|
||||
def open_post(txt):
|
||||
txt = unquote(txt.replace(b"+", b" ")).decode("utf-8")[4:]
|
||||
try:
|
||||
k, v = txt.split(" ", 1)
|
||||
except:
|
||||
open_url(txt)
|
||||
|
||||
if k == "key":
|
||||
sp.call(["xdotool", "key"] + v.split(" "))
|
||||
elif k == "x":
|
||||
sp.call(["xdotool"] + v.split(" "))
|
||||
elif k == "c":
|
||||
env = os.environ.copy()
|
||||
while " " in v:
|
||||
v1, v2 = v.split(" ", 1)
|
||||
if "=" not in v1:
|
||||
break
|
||||
|
||||
ek, ev = v1.split("=", 1)
|
||||
env[ek] = ev
|
||||
v = v2
|
||||
|
||||
sp.call(v.split(" "), env=env)
|
||||
else:
|
||||
open_url(txt)
|
||||
|
||||
|
||||
def open_url(txt):
|
||||
ext = txt.rsplit(".")[-1].lower()
|
||||
sp.call(["notify-send", "--", txt])
|
||||
if ext not in ["jpg", "jpeg", "png", "gif", "webp"]:
|
||||
# sp.call(["wmctrl", "-c", ":ACTIVE:"]) # closes the active window correctly
|
||||
sp.call(["killall", "vlc"])
|
||||
sp.call(["killall", "mpv"])
|
||||
sp.call(["killall", "feh"])
|
||||
time.sleep(0.5)
|
||||
for _ in range(20):
|
||||
sp.call(["xdotool", "key", "ctrl+w"]) # closes the open tab correctly
|
||||
# else:
|
||||
# sp.call(["xdotool", "getactivewindow", "windowminimize"]) # minimizes the focused window
|
||||
|
||||
# close any error messages:
|
||||
sp.call(["xdotool", "search", "--name", "Error", "windowclose"])
|
||||
# sp.call(["xdotool", "key", "ctrl+alt+d"]) # doesnt work at all
|
||||
# sp.call(["xdotool", "keydown", "--delay", "100", "ctrl+alt+d"])
|
||||
# sp.call(["xdotool", "keyup", "ctrl+alt+d"])
|
||||
sp.call(["xdg-open", txt])
|
||||
|
||||
|
||||
main()
|
||||
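to illustrate what `open_post` above actually receives: the message function posts `application/x-www-form-urlencoded`, so a command like `key ctrl+w` arrives in the `put-*.bin` file as `msg=key+ctrl%2Bw`, and decoding it mirrors the plugin's own parsing (standalone sketch):

```python
from urllib.parse import unquote_to_bytes as unquote

raw = b"msg=key+ctrl%2Bw"  # example urlencoded form body as saved by --urlform save,get

# same steps as open_post: '+' back to spaces, percent-decode, drop the "msg=" prefix
txt = unquote(raw.replace(b"+", b" ")).decode("utf-8")[4:]  # -> "key ctrl+w"
cmd, args = txt.split(" ", 1)                               # -> ("key", "ctrl+w")
print(cmd, args)  # this pair is what gets dispatched to xdotool
```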
bin/mtag/wget.py (new file, 85 lines)
@@ -0,0 +1,85 @@
#!/usr/bin/env python3

"""
use copyparty as a file downloader by POSTing URLs as
application/x-www-form-urlencoded (for example using the
message/pager function on the website)

example copyparty config to use this:
--urlform save,get -vsrv/wget:wget:rwmd,ed:c,e2ts,mtp=title=ebin,t300,ad,bin/mtag/wget.py

explained:
for realpath srv/wget (served at /wget) with read-write-modify-delete for ed,
enable file analysis on upload (e2ts),
use mtp plugin "bin/mtag/wget.py" to provide metadata tag "title",
do this on all uploads with the file extension "bin",
t300 = 300 seconds timeout for each download,
ad = parse file regardless if FFmpeg thinks it is audio or not

PS: this requires e2ts to be functional,
meaning you need to do at least one of these:
* apt install ffmpeg
* pip3 install mutagen
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
def main():
|
||||
fp = os.path.abspath(sys.argv[1])
|
||||
fdir = os.path.dirname(fp)
|
||||
fname = os.path.basename(fp)
|
||||
if not fname.startswith("put-") or not fname.endswith(".bin"):
|
||||
raise Exception("not a post file")
|
||||
|
||||
buf = b""
|
||||
with open(fp, "rb") as f:
|
||||
while True:
|
||||
b = f.read(4096)
|
||||
buf += b
|
||||
if len(buf) > 4096:
|
||||
raise Exception("too big")
|
||||
|
||||
if not b:
|
||||
break
|
||||
|
||||
if not buf:
|
||||
raise Exception("file is empty")
|
||||
|
||||
buf = unquote(buf.replace(b"+", b" "))
|
||||
url = buf.decode("utf-8")
|
||||
|
||||
if not url.startswith("msg="):
|
||||
raise Exception("does not start with msg=")
|
||||
|
||||
url = url[4:]
|
||||
if "://" not in url:
|
||||
url = "https://" + url
|
||||
|
||||
os.chdir(fdir)
|
||||
|
||||
name = url.split("?")[0].split("/")[-1]
|
||||
tfn = "-- DOWNLOADING " + name
|
||||
open(tfn, "wb").close()
|
||||
|
||||
cmd = ["wget", "--trust-server-names", "--", url]
|
||||
|
||||
try:
|
||||
sp.check_call(cmd)
|
||||
|
||||
# OPTIONAL:
|
||||
# on success, delete the .bin file which contains the URL
|
||||
os.unlink(fp)
|
||||
except:
|
||||
open("-- FAILED TO DONWLOAD " + name, "wb").close()
|
||||
|
||||
os.unlink(tfn)
|
||||
print(url)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
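for reference, a URL can be posted to such a volume from a script too; a sketch using `requests` (hypothetical host and volume name, and it assumes the plain `msg` form field shown in the docstring above):

```python
import requests

# hypothetical server and volume; the wget.py plugin then downloads the URL server-side
requests.post(
    "http://127.0.0.1:3923/wget/",
    data={"msg": "https://example.com/some-file.iso"},
    timeout=10,
)
```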
@@ -17,7 +17,7 @@ it's probably best to use this through a config file; see res/yt-ipr.conf

but if you want to use plain arguments instead then:
-v srv/ytm:ytm:w:rw,ed
:c,e2ts:c,e2dsa
:c,e2ts,e2dsa
:c,sz=16k-1m:c,maxn=10,300:c,rotf=%Y-%m/%d-%H
:c,mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
:c,mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires

177
bin/partyjournal.py
Executable file
177
bin/partyjournal.py
Executable file
@@ -0,0 +1,177 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
partyjournal.py: chronological history of uploads
|
||||
2021-12-31, v0.1, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/partyjournal.py
|
||||
|
||||
produces a chronological list of all uploads,
|
||||
by collecting info from up2k databases and the filesystem
|
||||
|
||||
specify subnet `192.168.1.*` with argument `.=192.168.1.`,
|
||||
affecting all successive mappings
|
||||
|
||||
usage:
|
||||
./partyjournal.py > partyjournal.html .=192.168.1. cart=125 steen=114 steen=131 sleepy=121 fscarlet=144 ed=101 ed=123
|
||||
|
||||
"""
|
||||
|
||||
import sys
|
||||
import base64
|
||||
import sqlite3
|
||||
import argparse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
FS_ENCODING = sys.getfilesystemencoding()
|
||||
|
||||
|
||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||
pass
|
||||
|
||||
|
||||
##
|
||||
## snibbed from copyparty
|
||||
|
||||
|
||||
def s3dec(v):
|
||||
if not v.startswith("//"):
|
||||
return v
|
||||
|
||||
v = base64.urlsafe_b64decode(v.encode("ascii")[2:])
|
||||
return v.decode(FS_ENCODING, "replace")
|
||||
|
||||
|
||||
def quotep(txt):
|
||||
btxt = txt.encode("utf-8", "replace")
|
||||
quot1 = quote(btxt, safe=b"/")
|
||||
quot1 = quot1.encode("ascii")
|
||||
quot2 = quot1.replace(b" ", b"+")
|
||||
return quot2.decode("utf-8", "replace")
|
||||
|
||||
|
||||
def html_escape(s, quote=False, crlf=False):
|
||||
"""html.escape but also newlines"""
|
||||
s = s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
|
||||
if quote:
|
||||
s = s.replace('"', "&quot;").replace("'", "&#39;")
|
||||
if crlf:
|
||||
s = s.replace("\r", "&#13;").replace("\n", "&#10;")
|
||||
|
||||
return s
|
||||
|
||||
|
||||
## end snibs
|
||||
##
|
||||
|
||||
|
||||
def main():
|
||||
ap = argparse.ArgumentParser(formatter_class=APF)
|
||||
ap.add_argument("who", nargs="*")
|
||||
ar = ap.parse_args()
|
||||
|
||||
imap = {}
|
||||
subnet = ""
|
||||
for v in ar.who:
|
||||
if "=" not in v:
|
||||
raise Exception("bad who: " + v)
|
||||
|
||||
k, v = v.split("=")
|
||||
if k == ".":
|
||||
subnet = v
|
||||
continue
|
||||
|
||||
imap["{}{}".format(subnet, v)] = k
|
||||
|
||||
print(repr(imap), file=sys.stderr)
|
||||
|
||||
print(
|
||||
"""\
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head><meta charset="utf-8"><style>
|
||||
|
||||
html, body {
|
||||
color: #ccc;
|
||||
background: #222;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
a {
|
||||
color: #fc5;
|
||||
}
|
||||
td, th {
|
||||
padding: .2em .5em;
|
||||
border: 1px solid #999;
|
||||
border-width: 0 1px 1px 0;
|
||||
white-space: nowrap;
|
||||
}
|
||||
td:nth-child(1),
|
||||
td:nth-child(2),
|
||||
td:nth-child(3) {
|
||||
font-family: monospace, monospace;
|
||||
text-align: right;
|
||||
}
|
||||
tr:first-child {
|
||||
position: sticky;
|
||||
top: -1px;
|
||||
}
|
||||
th {
|
||||
background: #222;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
</style></head><body><table><tr>
|
||||
<th>wark</th>
|
||||
<th>time</th>
|
||||
<th>size</th>
|
||||
<th>who</th>
|
||||
<th>link</th>
|
||||
</tr>"""
|
||||
)
|
||||
|
||||
db_path = ".hist/up2k.db"
|
||||
conn = sqlite3.connect(db_path)
|
||||
q = r"pragma table_info(up)"
|
||||
inf = conn.execute(q).fetchall()
|
||||
cols = [x[1] for x in inf]
|
||||
print("<!-- " + str(cols) + " -->")
|
||||
# ['w', 'mt', 'sz', 'rd', 'fn', 'ip', 'at']
|
||||
|
||||
q = r"select * from up order by case when at > 0 then at else mt end"
|
||||
for w, mt, sz, rd, fn, ip, at in conn.execute(q):
|
||||
link = "/".join([s3dec(x) for x in [rd, fn] if x])
|
||||
if fn.startswith("put-") and sz < 4096:
|
||||
try:
|
||||
with open(link, "rb") as f:
|
||||
txt = f.read().decode("utf-8", "replace")
|
||||
except:
|
||||
continue
|
||||
|
||||
if txt.startswith("msg="):
|
||||
txt = txt.encode("utf-8", "replace")
|
||||
txt = unquote(txt.replace(b"+", b" "))
|
||||
link = txt.decode("utf-8")[4:]
|
||||
|
||||
sz = "{:,}".format(sz)
|
||||
v = [
|
||||
w[:16],
|
||||
datetime.utcfromtimestamp(at if at > 0 else mt).strftime(
|
||||
"%Y-%m-%d %H:%M:%S"
|
||||
),
|
||||
sz,
|
||||
imap.get(ip, ip),
|
||||
]
|
||||
|
||||
row = "<tr>\n "
|
||||
row += "\n ".join(["<td>{}</th>".format(x) for x in v])
|
||||
row += '\n <td><a href="{}">{}</a></td>'.format(link, html_escape(link))
|
||||
row += "\n</tr>"
|
||||
print(row)
|
||||
|
||||
print("</table></body></html>")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
bin/prisonparty.sh (67 lines changed, mode: normal file → executable file)
@@ -11,10 +11,16 @@ sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr )
help() { cat <<'EOF'

usage:
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- copyparty-sfx.py [...]"
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]"

example:
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- copyparty-sfx.py -v /mnt/nas/music::rwmd"
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd"

example for running straight from source (instead of using an sfx):
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd"

note that if you have python modules installed as --user (such as bpm/key detectors),
you should add /home/foo/.local as a VOLDIR

EOF
exit 1
@@ -35,10 +41,20 @@ while true; do
|
||||
vols+=( "$(realpath "$v")" )
|
||||
done
|
||||
pybin="$1"; shift
|
||||
pybin="$(realpath "$pybin")"
|
||||
pybin="$(command -v "$pybin")"
|
||||
pyarg=
|
||||
while true; do
|
||||
v="$1"
|
||||
[ "${v:0:1}" = - ] || break
|
||||
pyarg="$pyarg $v"
|
||||
shift
|
||||
done
|
||||
cpp="$1"; shift
|
||||
cpp="$(realpath "$cpp")"
|
||||
cppdir="$(dirname "$cpp")"
|
||||
[ -d "$cpp" ] && cppdir="$PWD" || {
|
||||
# sfx, not module
|
||||
cpp="$(realpath "$cpp")"
|
||||
cppdir="$(dirname "$cpp")"
|
||||
}
|
||||
trap - EXIT
|
||||
|
||||
|
||||
@@ -60,11 +76,10 @@ echo
|
||||
|
||||
# remove any trailing slashes
|
||||
jail="${jail%/}"
|
||||
cppdir="${cppdir%/}"
|
||||
|
||||
|
||||
# bind-mount system directories and volumes
|
||||
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | LC_ALL=C sort |
|
||||
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
|
||||
while IFS= read -r v; do
|
||||
[ -e "$v" ] || {
|
||||
# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v"
|
||||
@@ -72,6 +87,7 @@ while IFS= read -r v; do
|
||||
}
|
||||
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
|
||||
i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b)
|
||||
# echo "v [$v] i1 [$i1] i2 [$i2]"
|
||||
[ $i1 = $i2 ] && continue
|
||||
|
||||
mkdir -p "$jail$v"
|
||||
@@ -79,21 +95,34 @@ while IFS= read -r v; do
|
||||
done
|
||||
|
||||
|
||||
cln() {
|
||||
rv=$?
|
||||
# cleanup if not in use
|
||||
lsof "$jail" | grep -qF "$jail" &&
|
||||
echo "chroot is in use, will not cleanup" ||
|
||||
{
|
||||
mount | grep -F " on $jail" |
|
||||
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
||||
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
|
||||
}
|
||||
exit $rv
|
||||
}
|
||||
trap cln EXIT
|
||||
|
||||
|
||||
# create a tmp
|
||||
mkdir -p "$jail/tmp"
|
||||
chmod 777 "$jail/tmp"
|
||||
|
||||
|
||||
# run copyparty
|
||||
/sbin/chroot --userspec=$uid:$gid "$jail" "$pybin" "$cpp" "$@" && rv=0 || rv=$?
|
||||
|
||||
|
||||
# cleanup if not in use
|
||||
lsof "$jail" | grep -qF "$jail" &&
|
||||
echo "chroot is in use, will not cleanup" ||
|
||||
{
|
||||
mount | grep -qF " on $jail" |
|
||||
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
||||
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
|
||||
}
|
||||
exit $rv
|
||||
export HOME=$(getent passwd $uid | cut -d: -f6)
|
||||
export USER=$(getent passwd $uid | cut -d: -f1)
|
||||
export LOGNAME="$USER"
|
||||
#echo "pybin [$pybin]"
|
||||
#echo "pyarg [$pyarg]"
|
||||
#echo "cpp [$cpp]"
|
||||
chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
|
||||
p=$!
|
||||
trap 'kill $p' INT TERM
|
||||
wait
|
||||
|
||||
bin/up2k.py (new executable file, 830 lines)
@@ -0,0 +1,830 @@
#!/usr/bin/env python3
from __future__ import print_function, unicode_literals

"""
up2k.py: upload to copyparty
2021-11-28, v0.13, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py

- dependencies: requests
- supports python 2.6, 2.7, and 3.3 through 3.10

- almost zero error-handling
- but if something breaks just try again and it'll autoresume
"""
|
||||
import sys
|
||||
import stat
|
||||
import math
|
||||
import time
|
||||
import atexit
|
||||
import signal
|
||||
import base64
|
||||
import hashlib
|
||||
import argparse
|
||||
import platform
|
||||
import threading
|
||||
import requests
|
||||
import datetime
|
||||
|
||||
|
||||
# from copyparty/__init__.py
|
||||
PY2 = sys.version_info[0] == 2
|
||||
if PY2:
|
||||
from Queue import Queue
|
||||
from urllib import unquote
|
||||
from urllib import quote
|
||||
|
||||
sys.dont_write_bytecode = True
|
||||
bytes = str
|
||||
else:
|
||||
from queue import Queue
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
|
||||
unicode = str
|
||||
|
||||
VT100 = platform.system() != "Windows"
|
||||
|
||||
|
||||
req_ses = requests.Session()
|
||||
|
||||
|
||||
class File(object):
|
||||
"""an up2k upload task; represents a single file"""
|
||||
|
||||
def __init__(self, top, rel, size, lmod):
|
||||
self.top = top # type: bytes
|
||||
self.rel = rel.replace(b"\\", b"/") # type: bytes
|
||||
self.size = size # type: int
|
||||
self.lmod = lmod # type: float
|
||||
|
||||
self.abs = os.path.join(top, rel) # type: bytes
|
||||
self.name = self.rel.split(b"/")[-1].decode("utf-8", "replace") # type: str
|
||||
|
||||
# set by get_hashlist
|
||||
self.cids = [] # type: list[tuple[str, int, int]] # [ hash, ofs, sz ]
|
||||
self.kchunks = {} # type: dict[str, tuple[int, int]] # hash: [ ofs, sz ]
|
||||
|
||||
# set by handshake
|
||||
self.ucids = [] # type: list[str] # chunks which need to be uploaded
|
||||
self.wark = None # type: str
|
||||
self.url = None # type: str
|
||||
|
||||
# set by upload
|
||||
self.up_b = 0 # type: int
|
||||
self.up_c = 0 # type: int
|
||||
|
||||
# m = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
||||
# eprint(m.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
||||
|
||||
|
||||
class FileSlice(object):
|
||||
"""file-like object providing a fixed window into a file"""
|
||||
|
||||
def __init__(self, file, cid):
|
||||
# type: (File, str) -> FileSlice
|
||||
|
||||
self.car, self.len = file.kchunks[cid]
|
||||
self.cdr = self.car + self.len
|
||||
self.ofs = 0 # type: int
|
||||
self.f = open(file.abs, "rb", 512 * 1024)
|
||||
self.f.seek(self.car)
|
||||
|
||||
# https://stackoverflow.com/questions/4359495/what-is-exactly-a-file-like-object-in-python
|
||||
# IOBase, RawIOBase, BufferedIOBase
|
||||
funs = "close closed __enter__ __exit__ __iter__ isatty __next__ readable seekable writable"
|
||||
try:
|
||||
for fun in funs.split():
|
||||
setattr(self, fun, getattr(self.f, fun))
|
||||
except:
|
||||
pass # py27 probably
|
||||
|
||||
def tell(self):
|
||||
return self.ofs
|
||||
|
||||
def seek(self, ofs, wh=0):
|
||||
if wh == 1:
|
||||
ofs = self.ofs + ofs
|
||||
elif wh == 2:
|
||||
ofs = self.len + ofs # provided ofs is negative
|
||||
|
||||
if ofs < 0:
|
||||
ofs = 0
|
||||
elif ofs >= self.len:
|
||||
ofs = self.len - 1
|
||||
|
||||
self.ofs = ofs
|
||||
self.f.seek(self.car + ofs)
|
||||
|
||||
def read(self, sz):
|
||||
sz = min(sz, self.len - self.ofs)
|
||||
ret = self.f.read(sz)
|
||||
self.ofs += len(ret)
|
||||
return ret
|
||||
|
||||
|
||||
_print = print
|
||||
|
||||
|
||||
def eprint(*a, **ka):
|
||||
ka["file"] = sys.stderr
|
||||
ka["end"] = ""
|
||||
if not PY2:
|
||||
ka["flush"] = True
|
||||
|
||||
_print(*a, **ka)
|
||||
if PY2 or not VT100:
|
||||
sys.stderr.flush()
|
||||
|
||||
|
||||
def flushing_print(*a, **ka):
|
||||
_print(*a, **ka)
|
||||
if "flush" not in ka:
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
if not VT100:
|
||||
print = flushing_print
|
||||
|
||||
|
||||
def termsize():
|
||||
import os
|
||||
|
||||
env = os.environ
|
||||
|
||||
def ioctl_GWINSZ(fd):
|
||||
try:
|
||||
import fcntl, termios, struct, os
|
||||
|
||||
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
|
||||
except:
|
||||
return
|
||||
return cr
|
||||
|
||||
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
|
||||
if not cr:
|
||||
try:
|
||||
fd = os.open(os.ctermid(), os.O_RDONLY)
|
||||
cr = ioctl_GWINSZ(fd)
|
||||
os.close(fd)
|
||||
except:
|
||||
pass
|
||||
if not cr:
|
||||
try:
|
||||
cr = (env["LINES"], env["COLUMNS"])
|
||||
except:
|
||||
cr = (25, 80)
|
||||
return int(cr[1]), int(cr[0])
|
||||
|
||||
|
||||
class CTermsize(object):
|
||||
def __init__(self):
|
||||
self.ev = False
|
||||
self.margin = None
|
||||
self.g = None
|
||||
self.w, self.h = termsize()
|
||||
|
||||
try:
|
||||
signal.signal(signal.SIGWINCH, self.ev_sig)
|
||||
except:
|
||||
return
|
||||
|
||||
thr = threading.Thread(target=self.worker)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def worker(self):
|
||||
while True:
|
||||
time.sleep(0.5)
|
||||
if not self.ev:
|
||||
continue
|
||||
|
||||
self.ev = False
|
||||
self.w, self.h = termsize()
|
||||
|
||||
if self.margin is not None:
|
||||
self.scroll_region(self.margin)
|
||||
|
||||
def ev_sig(self, *a, **ka):
|
||||
self.ev = True
|
||||
|
||||
def scroll_region(self, margin):
|
||||
self.margin = margin
|
||||
if margin is None:
|
||||
self.g = None
|
||||
eprint("\033[s\033[r\033[u")
|
||||
else:
|
||||
self.g = 1 + self.h - margin
|
||||
m = "{0}\033[{1}A".format("\n" * margin, margin)
|
||||
eprint("{0}\033[s\033[1;{1}r\033[u".format(m, self.g - 1))
|
||||
|
||||
|
||||
ss = CTermsize()
|
||||
|
||||
|
||||
def _scd(err, top):
|
||||
"""non-recursive listing of directory contents, along with stat() info"""
|
||||
with os.scandir(top) as dh:
|
||||
for fh in dh:
|
||||
abspath = os.path.join(top, fh.name)
|
||||
try:
|
||||
yield [abspath, fh.stat()]
|
||||
except:
|
||||
err.append(abspath)
|
||||
|
||||
|
||||
def _lsd(err, top):
|
||||
"""non-recursive listing of directory contents, along with stat() info"""
|
||||
for name in os.listdir(top):
|
||||
abspath = os.path.join(top, name)
|
||||
try:
|
||||
yield [abspath, os.stat(abspath)]
|
||||
except:
|
||||
err.append(abspath)
|
||||
|
||||
|
||||
if hasattr(os, "scandir"):
|
||||
statdir = _scd
|
||||
else:
|
||||
statdir = _lsd
|
||||
|
||||
|
||||
def walkdir(err, top):
|
||||
"""recursive statdir"""
|
||||
for ap, inf in sorted(statdir(err, top)):
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
try:
|
||||
for x in walkdir(err, ap):
|
||||
yield x
|
||||
except:
|
||||
err.append(ap)
|
||||
else:
|
||||
yield ap, inf
|
||||
|
||||
|
||||
def walkdirs(err, tops):
|
||||
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
|
||||
sep = "{0}".format(os.sep).encode("ascii")
|
||||
for top in tops:
|
||||
if top[-1:] == sep:
|
||||
stop = top.rstrip(sep)
|
||||
else:
|
||||
stop = os.path.dirname(top)
|
||||
|
||||
if os.path.isdir(top):
|
||||
for ap, inf in walkdir(err, top):
|
||||
yield stop, ap[len(stop) :].lstrip(sep), inf
|
||||
else:
|
||||
d, n = top.rsplit(sep, 1)
|
||||
yield d, n, os.stat(top)
|
||||
|
||||
|
||||
# mostly from copyparty/util.py
|
||||
def quotep(btxt):
|
||||
quot1 = quote(btxt, safe=b"/")
|
||||
if not PY2:
|
||||
quot1 = quot1.encode("ascii")
|
||||
|
||||
return quot1.replace(b" ", b"+")
|
||||
|
||||
|
||||
# from copyparty/util.py
|
||||
def humansize(sz, terse=False):
|
||||
"""picks a sensible unit for the given extent"""
|
||||
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
|
||||
if sz < 1024:
|
||||
break
|
||||
|
||||
sz /= 1024.0
|
||||
|
||||
ret = " ".join([str(sz)[:4].rstrip("."), unit])
|
||||
|
||||
if not terse:
|
||||
return ret
|
||||
|
||||
return ret.replace("iB", "").replace(" ", "")
|
||||
|
||||
|
||||
# from copyparty/up2k.py
|
||||
def up2k_chunksize(filesize):
|
||||
"""gives The correct chunksize for up2k hashing"""
|
||||
chunksize = 1024 * 1024
|
||||
stepsize = 512 * 1024
|
||||
while True:
|
||||
for mul in [1, 2]:
|
||||
nchunks = math.ceil(filesize * 1.0 / chunksize)
|
||||
if nchunks <= 256 or chunksize >= 32 * 1024 * 1024:
|
||||
return chunksize
|
||||
|
||||
chunksize += stepsize
|
||||
stepsize *= mul
|
||||
|
||||
|
||||
# mostly from copyparty/up2k.py
|
||||
def get_hashlist(file, pcb):
|
||||
# type: (File, any) -> None
|
||||
"""generates the up2k hashlist from file contents, inserts it into `file`"""
|
||||
|
||||
chunk_sz = up2k_chunksize(file.size)
|
||||
file_rem = file.size
|
||||
file_ofs = 0
|
||||
ret = []
|
||||
with open(file.abs, "rb", 512 * 1024) as f:
|
||||
while file_rem > 0:
|
||||
hashobj = hashlib.sha512()
|
||||
chunk_sz = chunk_rem = min(chunk_sz, file_rem)
|
||||
while chunk_rem > 0:
|
||||
buf = f.read(min(chunk_rem, 64 * 1024))
|
||||
if not buf:
|
||||
raise Exception("EOF at " + str(f.tell()))
|
||||
|
||||
hashobj.update(buf)
|
||||
chunk_rem -= len(buf)
|
||||
|
||||
digest = hashobj.digest()[:33]
|
||||
digest = base64.urlsafe_b64encode(digest).decode("utf-8")
|
||||
|
||||
ret.append([digest, file_ofs, chunk_sz])
|
||||
file_ofs += chunk_sz
|
||||
file_rem -= chunk_sz
|
||||
|
||||
if pcb:
|
||||
pcb(file, file_ofs)
|
||||
|
||||
file.cids = ret
|
||||
file.kchunks = {}
|
||||
for k, v1, v2 in ret:
|
||||
file.kchunks[k] = [v1, v2]
|
||||
|
||||
|
||||
def handshake(req_ses, url, file, pw, search):
|
||||
# type: (requests.Session, str, File, any, bool) -> List[str]
|
||||
"""
|
||||
performs a handshake with the server; reply is:
|
||||
if search, a list of search results
|
||||
otherwise, a list of chunks to upload
|
||||
"""
|
||||
|
||||
req = {
|
||||
"hash": [x[0] for x in file.cids],
|
||||
"name": file.name,
|
||||
"lmod": file.lmod,
|
||||
"size": file.size,
|
||||
}
|
||||
if search:
|
||||
req["srch"] = 1
|
||||
|
||||
headers = {"Content-Type": "text/plain"} # wtf ed
|
||||
if pw:
|
||||
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||
|
||||
if file.url:
|
||||
url = file.url
|
||||
elif b"/" in file.rel:
|
||||
url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")
|
||||
|
||||
while True:
|
||||
try:
|
||||
r = req_ses.post(url, headers=headers, json=req)
|
||||
break
|
||||
except:
|
||||
eprint("handshake failed, retrying: {0}\n".format(file.name))
|
||||
time.sleep(1)
|
||||
|
||||
try:
|
||||
r = r.json()
|
||||
except:
|
||||
raise Exception(r.text)
|
||||
|
||||
if search:
|
||||
return r["hits"]
|
||||
|
||||
try:
|
||||
pre, url = url.split("://")
|
||||
pre += "://"
|
||||
except:
|
||||
pre = ""
|
||||
|
||||
file.url = pre + url.split("/")[0] + r["purl"]
|
||||
file.name = r["name"]
|
||||
file.wark = r["wark"]
|
||||
|
||||
return r["hash"]
|
||||
|
||||
|
||||
def upload(req_ses, file, cid, pw):
|
||||
# type: (requests.Session, File, str, any) -> None
|
||||
"""upload one specific chunk, `cid` (a chunk-hash)"""
|
||||
|
||||
headers = {
|
||||
"X-Up2k-Hash": cid,
|
||||
"X-Up2k-Wark": file.wark,
|
||||
"Content-Type": "application/octet-stream",
|
||||
}
|
||||
if pw:
|
||||
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||
|
||||
f = FileSlice(file, cid)
|
||||
try:
|
||||
r = req_ses.post(file.url, headers=headers, data=f)
|
||||
if not r:
|
||||
raise Exception(repr(r))
|
||||
|
||||
_ = r.content
|
||||
finally:
|
||||
f.f.close()
|
||||
|
||||
|
||||
class Daemon(threading.Thread):
|
||||
def __init__(self, *a, **ka):
|
||||
threading.Thread.__init__(self, *a, **ka)
|
||||
self.daemon = True
|
||||
|
||||
|
||||
class Ctl(object):
|
||||
"""
|
||||
this will be the coordinator which runs everything in parallel
|
||||
(hashing, handshakes, uploads) but right now it's p dumb
|
||||
"""
|
||||
|
||||
def __init__(self, ar):
|
||||
self.ar = ar
|
||||
ar.files = [
|
||||
os.path.abspath(os.path.realpath(x.encode("utf-8")))
|
||||
+ (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
|
||||
for x in ar.files
|
||||
]
|
||||
ar.url = ar.url.rstrip("/") + "/"
|
||||
if "://" not in ar.url:
|
||||
ar.url = "http://" + ar.url
|
||||
|
||||
eprint("\nscanning {0} locations\n".format(len(ar.files)))
|
||||
|
||||
nfiles = 0
|
||||
nbytes = 0
|
||||
err = []
|
||||
for _, _, inf in walkdirs(err, ar.files):
|
||||
nfiles += 1
|
||||
nbytes += inf.st_size
|
||||
|
||||
if err:
|
||||
eprint("\n# failed to access {0} paths:\n".format(len(err)))
|
||||
for x in err:
|
||||
eprint(x.decode("utf-8", "replace") + "\n")
|
||||
|
||||
eprint("^ failed to access those {0} paths ^\n\n".format(len(err)))
|
||||
if not ar.ok:
|
||||
eprint("aborting because --ok is not set\n")
|
||||
return
|
||||
|
||||
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
|
||||
self.nfiles = nfiles
|
||||
self.nbytes = nbytes
|
||||
|
||||
if ar.td:
|
||||
requests.packages.urllib3.disable_warnings()
|
||||
req_ses.verify = False
|
||||
if ar.te:
|
||||
req_ses.verify = ar.te
|
||||
|
||||
self.filegen = walkdirs([], ar.files)
|
||||
if ar.safe:
|
||||
self.safe()
|
||||
else:
|
||||
self.fancy()
|
||||
|
||||
def safe(self):
|
||||
"""minimal basic slow boring fallback codepath"""
|
||||
search = self.ar.s
|
||||
for nf, (top, rel, inf) in enumerate(self.filegen):
|
||||
file = File(top, rel, inf.st_size, inf.st_mtime)
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
|
||||
print("{0} {1}\n hash...".format(self.nfiles - nf, upath))
|
||||
get_hashlist(file, None)
|
||||
|
||||
burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
|
||||
while True:
|
||||
print(" hs...")
|
||||
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
|
||||
if search:
|
||||
if hs:
|
||||
for hit in hs:
|
||||
print(" found: {0}{1}".format(burl, hit["rp"]))
|
||||
else:
|
||||
print(" NOT found")
|
||||
break
|
||||
|
||||
file.ucids = hs
|
||||
if not hs:
|
||||
break
|
||||
|
||||
print("{0} {1}".format(self.nfiles - nf, upath))
|
||||
ncs = len(hs)
|
||||
for nc, cid in enumerate(hs):
|
||||
print(" {0} up {1}".format(ncs - nc, cid))
|
||||
upload(req_ses, file, cid, self.ar.a)
|
||||
|
||||
print(" ok!")
|
||||
|
||||
def fancy(self):
|
||||
self.hash_f = 0
|
||||
self.hash_c = 0
|
||||
self.hash_b = 0
|
||||
self.up_f = 0
|
||||
self.up_c = 0
|
||||
self.up_b = 0
|
||||
self.up_br = 0
|
||||
self.hasher_busy = 1
|
||||
self.handshaker_busy = 0
|
||||
self.uploader_busy = 0
|
||||
|
||||
self.t0 = time.time()
|
||||
self.t0_up = None
|
||||
self.spd = None
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.q_handshake = Queue() # type: Queue[File]
|
||||
self.q_recheck = Queue() # type: Queue[File] # partial upload exists [...]
|
||||
self.q_upload = Queue() # type: Queue[tuple[File, str]]
|
||||
|
||||
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
if VT100:
|
||||
atexit.register(self.cleanup_vt100)
|
||||
ss.scroll_region(3)
|
||||
|
||||
Daemon(target=self.hasher).start()
|
||||
for _ in range(self.ar.j):
|
||||
Daemon(target=self.handshaker).start()
|
||||
Daemon(target=self.uploader).start()
|
||||
|
||||
idles = 0
|
||||
while idles < 3:
|
||||
time.sleep(0.07)
|
||||
with self.mutex:
|
||||
if (
|
||||
self.q_handshake.empty()
|
||||
and self.q_upload.empty()
|
||||
and not self.hasher_busy
|
||||
and not self.handshaker_busy
|
||||
and not self.uploader_busy
|
||||
):
|
||||
idles += 1
|
||||
else:
|
||||
idles = 0
|
||||
|
||||
if VT100:
|
||||
maxlen = ss.w - len(str(self.nfiles)) - 14
|
||||
txt = "\033[s\033[{0}H".format(ss.g)
|
||||
for y, k, st, f in [
|
||||
[0, "hash", self.st_hash, self.hash_f],
|
||||
[1, "send", self.st_up, self.up_f],
|
||||
]:
|
||||
txt += "\033[{0}H{1}:".format(ss.g + y, k)
|
||||
file, arg = st
|
||||
if not file:
|
||||
txt += " {0}\033[K".format(arg)
|
||||
else:
|
||||
if y:
|
||||
p = 100 * file.up_b / file.size
|
||||
else:
|
||||
p = 100 * arg / file.size
|
||||
|
||||
name = file.abs.decode("utf-8", "replace")[-maxlen:]
|
||||
if "/" in name:
|
||||
name = "\033[36m{0}\033[0m/{1}".format(*name.rsplit("/", 1))
|
||||
|
||||
m = "{0:6.1f}% {1} {2}\033[K"
|
||||
txt += m.format(p, self.nfiles - f, name)
|
||||
|
||||
txt += "\033[{0}H ".format(ss.g + 2)
|
||||
else:
|
||||
txt = " "
|
||||
|
||||
if not self.up_br:
|
||||
spd = self.hash_b / (time.time() - self.t0)
|
||||
eta = (self.nbytes - self.hash_b) / (spd + 1)
|
||||
else:
|
||||
spd = self.up_br / (time.time() - self.t0_up)
|
||||
spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
|
||||
eta = (self.nbytes - self.up_b) / (spd + 1)
|
||||
|
||||
spd = humansize(spd)
|
||||
eta = str(datetime.timedelta(seconds=int(eta)))
|
||||
left = humansize(self.nbytes - self.up_b)
|
||||
tail = "\033[K\033[u" if VT100 else "\r"
|
||||
|
||||
m = "eta: {0} @ {1}/s, {2} left".format(eta, spd, left)
|
||||
eprint(txt + "\033]0;{0}\033\\\r{1}{2}".format(m, m, tail))
|
||||
|
||||
def cleanup_vt100(self):
|
||||
ss.scroll_region(None)
|
||||
eprint("\033[J\033]0;\033\\")
|
||||
|
||||
def cb_hasher(self, file, ofs):
|
||||
self.st_hash = [file, ofs]
|
||||
|
||||
def hasher(self):
|
||||
prd = None
|
||||
ls = {}
|
||||
for top, rel, inf in self.filegen:
|
||||
if self.ar.z:
|
||||
rd = os.path.dirname(rel)
|
||||
if prd != rd:
|
||||
prd = rd
|
||||
headers = {}
|
||||
if self.ar.a:
|
||||
headers["Cookie"] = "=".join(["cppwd", self.ar.a])
|
||||
|
||||
ls = {}
|
||||
try:
|
||||
print(" ls ~{0}".format(rd.decode("utf-8", "replace")))
|
||||
r = req_ses.get(
|
||||
self.ar.url.encode("utf-8") + quotep(rd) + b"?ls",
|
||||
headers=headers,
|
||||
)
|
||||
for f in r.json()["files"]:
|
||||
rfn = f["href"].split("?")[0].encode("utf-8", "replace")
|
||||
ls[unquote(rfn)] = f
|
||||
except:
|
||||
print(" mkdir ~{0}".format(rd.decode("utf-8", "replace")))
|
||||
|
||||
rf = ls.get(os.path.basename(rel), None)
|
||||
if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 1:
|
||||
self.nfiles -= 1
|
||||
self.nbytes -= inf.st_size
|
||||
continue
|
||||
|
||||
file = File(top, rel, inf.st_size, inf.st_mtime)
|
||||
while True:
|
||||
with self.mutex:
|
||||
if (
|
||||
self.hash_b - self.up_b < 1024 * 1024 * 128
|
||||
and self.hash_c - self.up_c < 64
|
||||
and (
|
||||
not self.ar.nh
|
||||
or (
|
||||
self.q_upload.empty()
|
||||
and self.q_handshake.empty()
|
||||
and not self.uploader_busy
|
||||
)
|
||||
)
|
||||
):
|
||||
break
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
get_hashlist(file, self.cb_hasher)
|
||||
with self.mutex:
|
||||
self.hash_f += 1
|
||||
self.hash_c += len(file.cids)
|
||||
self.hash_b += file.size
|
||||
|
||||
self.q_handshake.put(file)
|
||||
|
||||
self.hasher_busy = 0
|
||||
self.st_hash = [None, "(finished)"]
|
||||
|
||||
def handshaker(self):
|
||||
search = self.ar.s
|
||||
q = self.q_handshake
|
||||
burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
|
||||
while True:
|
||||
file = q.get()
|
||||
if not file:
|
||||
if q == self.q_handshake:
|
||||
q = self.q_recheck
|
||||
q.put(None)
|
||||
continue
|
||||
|
||||
self.q_upload.put(None)
|
||||
break
|
||||
|
||||
with self.mutex:
|
||||
self.handshaker_busy += 1
|
||||
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
|
||||
try:
|
||||
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
|
||||
except Exception as ex:
|
||||
if q == self.q_handshake and "<pre>partial upload exists" in str(ex):
|
||||
self.q_recheck.put(file)
|
||||
hs = []
|
||||
else:
|
||||
raise
|
||||
|
||||
if search:
|
||||
if hs:
|
||||
for hit in hs:
|
||||
m = "found: {0}\n {1}{2}\n"
|
||||
print(m.format(upath, burl, hit["rp"]), end="")
|
||||
else:
|
||||
print("NOT found: {0}\n".format(upath), end="")
|
||||
|
||||
with self.mutex:
|
||||
self.up_f += 1
|
||||
self.up_c += len(file.cids)
|
||||
self.up_b += file.size
|
||||
self.handshaker_busy -= 1
|
||||
|
||||
continue
|
||||
|
||||
with self.mutex:
|
||||
if not hs:
|
||||
# all chunks done
|
||||
self.up_f += 1
|
||||
self.up_c += len(file.cids) - file.up_c
|
||||
self.up_b += file.size - file.up_b
|
||||
|
||||
if hs and file.up_c:
|
||||
# some chunks failed
|
||||
self.up_c -= len(hs)
|
||||
file.up_c -= len(hs)
|
||||
for cid in hs:
|
||||
sz = file.kchunks[cid][1]
|
||||
self.up_b -= sz
|
||||
file.up_b -= sz
|
||||
|
||||
file.ucids = hs
|
||||
self.handshaker_busy -= 1
|
||||
|
||||
if not hs:
|
||||
kw = "uploaded" if file.up_b else " found"
|
||||
print("{0} {1}".format(kw, upath))
|
||||
for cid in hs:
|
||||
self.q_upload.put([file, cid])
|
||||
|
||||
def uploader(self):
|
||||
while True:
|
||||
task = self.q_upload.get()
|
||||
if not task:
|
||||
self.st_up = [None, "(finished)"]
|
||||
break
|
||||
|
||||
with self.mutex:
|
||||
self.uploader_busy += 1
|
||||
self.t0_up = self.t0_up or time.time()
|
||||
|
||||
file, cid = task
|
||||
try:
|
||||
upload(req_ses, file, cid, self.ar.a)
|
||||
except:
|
||||
eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
|
||||
pass # handshake will fix it
|
||||
|
||||
with self.mutex:
|
||||
sz = file.kchunks[cid][1]
|
||||
file.ucids = [x for x in file.ucids if x != cid]
|
||||
if not file.ucids:
|
||||
self.q_handshake.put(file)
|
||||
|
||||
self.st_up = [file, cid]
|
||||
file.up_b += sz
|
||||
self.up_b += sz
|
||||
self.up_br += sz
|
||||
file.up_c += 1
|
||||
self.up_c += 1
|
||||
self.uploader_busy -= 1
|
||||
|
||||
|
||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||
pass
|
||||
|
||||
|
||||
def main():
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
if not VT100:
|
||||
os.system("rem") # enables colors
|
||||
|
||||
# fmt: off
|
||||
ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
|
||||
NOTE:
|
||||
source file/folder selection uses rsync syntax, meaning that:
|
||||
"foo" uploads the entire folder to URL/foo/
|
||||
"foo/" uploads the CONTENTS of the folder into URL/
|
||||
""")
|
||||
|
||||
ap.add_argument("url", type=unicode, help="server url, including destination folder")
|
||||
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password")
|
||||
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
|
||||
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
|
||||
ap = app.add_argument_group("performance tweaks")
|
||||
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
|
||||
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
|
||||
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
|
||||
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
|
||||
ap = app.add_argument_group("tls")
|
||||
ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
|
||||
ap.add_argument("-td", action="store_true", help="disable certificate check")
|
||||
# fmt: on
|
||||
|
||||
Ctl(app.parse_args())
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
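For orientation, a hedged usage sketch of the new uploader; the server address, password and folder names are placeholders, only the flags come from the argparse definitions above.

# upload a folder; "music" (no trailing slash) creates music/ on the server, rsync-style
python3 up2k.py -a hunter2 -j 8 http://127.0.0.1:3923/inc/ music

# file-search only (no upload): report where identical files already exist on the server
python3 up2k.py -a hunter2 -s http://127.0.0.1:3923/inc/ music/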
24
bin/up2k.sh
Executable file → Normal file
@@ -8,7 +8,7 @@ set -e
|
||||
##
|
||||
## config
|
||||
|
||||
datalen=$((2*1024*1024*1024))
|
||||
datalen=$((128*1024*1024))
|
||||
target=127.0.0.1
|
||||
posturl=/inc
|
||||
passwd=wark
|
||||
@@ -37,10 +37,10 @@ gendata() {
|
||||
# pipe a chunk, get the base64 checksum
|
||||
gethash() {
|
||||
printf $(
|
||||
sha512sum | cut -c-64 |
|
||||
sha512sum | cut -c-66 |
|
||||
sed -r 's/ .*//;s/(..)/\\x\1/g'
|
||||
) |
|
||||
base64 -w0 | cut -c-43 |
|
||||
base64 -w0 | cut -c-44 |
|
||||
tr '+/' '-_'
|
||||
}
|
||||
|
||||
@@ -123,7 +123,7 @@ printf '\033[36m'
|
||||
{
|
||||
{
|
||||
cat <<EOF
|
||||
POST $posturl/handshake.php HTTP/1.1
|
||||
POST $posturl/ HTTP/1.1
|
||||
Connection: Close
|
||||
Cookie: cppwd=$passwd
|
||||
Content-Type: text/plain;charset=UTF-8
|
||||
@@ -145,14 +145,16 @@ printf '\033[0m\nwark: %s\n' $wark
|
||||
##
|
||||
## wait for signal to continue
|
||||
|
||||
w8=/dev/shm/$salt.w8
|
||||
touch $w8
|
||||
true || {
|
||||
w8=/dev/shm/$salt.w8
|
||||
touch $w8
|
||||
|
||||
echo "ready; rm -f $w8"
|
||||
echo "ready; rm -f $w8"
|
||||
|
||||
while [ -e $w8 ]; do
|
||||
sleep 0.2
|
||||
done
|
||||
while [ -e $w8 ]; do
|
||||
sleep 0.2
|
||||
done
|
||||
}
|
||||
|
||||
|
||||
##
|
||||
@@ -175,7 +177,7 @@ while [ $remains -gt 0 ]; do
|
||||
|
||||
{
|
||||
cat <<EOF
|
||||
POST $posturl/chunkpit.php HTTP/1.1
|
||||
POST $posturl/ HTTP/1.1
|
||||
Connection: Keep-Alive
|
||||
Cookie: cppwd=$passwd
|
||||
Content-Type: application/octet-stream
|
||||
|
||||
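The two cut-width changes above follow from the hashing scheme in up2k.py: the sha512 digest is truncated to 33 bytes, which is 66 hex characters of the sha512sum output and exactly 44 characters once base64-encoded (33 bytes is 11 groups of 3, so no padding), then made URL-safe with tr. A quick sanity check, as a hedged illustration:

# 33 bytes always base64-encode to 44 characters
head -c33 /dev/zero | base64 -w0 | tr -d '\n' | wc -c    # prints 44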
@@ -1,3 +1,6 @@
|
||||
### [`plugins/`](plugins/)
|
||||
* example extensions
|
||||
|
||||
### [`copyparty.bat`](copyparty.bat)
|
||||
* launches copyparty with no arguments (anon read+write within same folder)
|
||||
* intended for windows machines with no python.exe in PATH
|
||||
@@ -29,7 +32,9 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
|
||||
|
||||
# OS integration
|
||||
init-scripts to start copyparty as a service
|
||||
* [`systemd/copyparty.service`](systemd/copyparty.service)
|
||||
* [`systemd/copyparty.service`](systemd/copyparty.service) runs the sfx normally
|
||||
* [`rc/copyparty`](rc/copyparty) runs the sfx normally on freebsd (create a `copyparty` user first)
|
||||
* [`systemd/prisonparty.service`](systemd/prisonparty.service) runs the sfx in a chroot
|
||||
* [`openrc/copyparty`](openrc/copyparty)
|
||||
|
||||
# Reverse-proxy
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# ca-name and server-name
|
||||
# ca-name and server-fqdn
|
||||
ca_name="$1"
|
||||
srv_name="$2"
|
||||
srv_fqdn="$2"
|
||||
|
||||
[ -z "$srv_name" ] && {
|
||||
[ -z "$srv_fqdn" ] && {
|
||||
echo "need arg 1: ca name"
|
||||
echo "need arg 2: server name"
|
||||
echo "need arg 2: server fqdn"
|
||||
echo "optional arg 3: if set, write cert into copyparty cfg"
|
||||
exit 1
|
||||
}
|
||||
|
||||
@@ -31,15 +32,15 @@ EOF
|
||||
gen_srv() {
|
||||
(tee /dev/stderr <<EOF
|
||||
{"key": {"algo":"rsa", "size":4096},
|
||||
"names": [{"O":"$ca_name - $srv_name"}]}
|
||||
"names": [{"O":"$ca_name - $srv_fqdn"}]}
|
||||
EOF
|
||||
)|
|
||||
cfssl gencert -ca ca.pem -ca-key ca.key \
|
||||
-profile=www -hostname="$srv_name.$ca_name" - |
|
||||
cfssljson -bare "$srv_name"
|
||||
-profile=www -hostname="$srv_fqdn" - |
|
||||
cfssljson -bare "$srv_fqdn"
|
||||
|
||||
mv "$srv_name-key.pem" "$srv_name.key"
|
||||
rm "$srv_name.csr"
|
||||
mv "$srv_fqdn-key.pem" "$srv_fqdn.key"
|
||||
rm "$srv_fqdn.csr"
|
||||
}
|
||||
|
||||
|
||||
@@ -57,13 +58,13 @@ show() {
|
||||
awk '!o; {o=0} /[0-9a-f:]{16}/{o=1}'
|
||||
}
|
||||
show ca.pem
|
||||
show "$srv_name.pem"
|
||||
show "$srv_fqdn.pem"
|
||||
|
||||
|
||||
# write cert into copyparty config
|
||||
[ -z "$3" ] || {
|
||||
mkdir -p ~/.config/copyparty
|
||||
cat "$srv_name".{key,pem} ca.pem >~/.config/copyparty/cert.pem
|
||||
cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem
|
||||
}
|
||||
|
||||
|
||||
|
||||
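A hedged invocation sketch for the renamed arguments (the script filename and the example names are assumptions; the argument meanings are the ones printed by the usage text above):

# arg1 = ca name, arg2 = server fqdn, optional arg3 = also write the cert into ~/.config/copyparty
./cfssl.sh myca files.example.com y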
@@ -13,7 +13,7 @@
|
||||
|
||||
upstream cpp {
|
||||
server 127.0.0.1:3923;
|
||||
keepalive 120;
|
||||
keepalive 1;
|
||||
}
|
||||
server {
|
||||
listen 443 ssl;
|
||||
|
||||
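Since the upstream block now leans on connection reuse, note that nginx only honors upstream `keepalive` when the proxied requests are HTTP/1.1 with the Connection header cleared; a minimal location sketch (only the `cpp` upstream name comes from the config above, the rest is the standard nginx pairing):

location / {
    proxy_pass http://cpp;
    proxy_http_version 1.1;        # required for upstream keepalive
    proxy_set_header Connection "";
}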
25
contrib/plugins/README.md
Normal file
@@ -0,0 +1,25 @@
|
||||
# example resource files
|
||||
|
||||
can be provided to copyparty to tweak things
|
||||
|
||||
|
||||
|
||||
## example `.epilogue.html`
|
||||
save one of these as `.epilogue.html` inside a folder to customize it:
|
||||
|
||||
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
|
||||
|
||||
|
||||
|
||||
## example browser-css
|
||||
point `--css-browser` to one of these by URL:
|
||||
|
||||
* [`browser.css`](browser.css) changes the background
|
||||
* [`browser-icons.css`](browser-icons.css) adds filetype icons
|
||||
|
||||
|
||||
|
||||
## meadup.js
|
||||
|
||||
* turns copyparty into a chromecast, just more flexible (and probably way more buggy)
|
||||
* usage: put the js somewhere in the webroot and `--js-browser /memes/meadup.js`
|
||||
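A hedged example of wiring these plugins up (the volume and URLs are placeholders; the files must be reachable at those URLs inside a served volume):

# serve the current folder read-only, with the icon stylesheet and the onscreen-keyboard plugin
python3 -m copyparty -v .::r --css-browser /res/browser-icons.css --js-browser /memes/meadup.js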
71
contrib/plugins/browser-icons.css
Normal file
@@ -0,0 +1,71 @@
|
||||
/* video, alternative 1:
|
||||
top-left icon, just like the other formats
|
||||
=======================================================================
|
||||
|
||||
#ggrid>a:is(
|
||||
[href$=".mkv"i],
|
||||
[href$=".mp4"i],
|
||||
[href$=".webm"i],
|
||||
):before {
|
||||
content: '📺';
|
||||
}
|
||||
*/
|
||||
|
||||
|
||||
|
||||
/* video, alternative 2:
|
||||
play-icon in the middle of the thumbnail
|
||||
=======================================================================
|
||||
*/
|
||||
#ggrid>a:is(
|
||||
[href$=".mkv"i],
|
||||
[href$=".mp4"i],
|
||||
[href$=".webm"i],
|
||||
) {
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
}
|
||||
#ggrid>a:is(
|
||||
[href$=".mkv"i],
|
||||
[href$=".mp4"i],
|
||||
[href$=".webm"i],
|
||||
):before {
|
||||
content: '▶';
|
||||
opacity: .8;
|
||||
margin: 0;
|
||||
padding: 1em .5em 1em .7em;
|
||||
border-radius: 9em;
|
||||
line-height: 0;
|
||||
color: #fff;
|
||||
text-shadow: none;
|
||||
background: rgba(0, 0, 0, 0.7);
|
||||
left: calc(50% - 1em);
|
||||
top: calc(50% - 1.4em);
|
||||
}
|
||||
|
||||
|
||||
|
||||
/* audio */
|
||||
#ggrid>a:is(
|
||||
[href$=".mp3"i],
|
||||
[href$=".ogg"i],
|
||||
[href$=".opus"i],
|
||||
[href$=".flac"i],
|
||||
[href$=".m4a"i],
|
||||
[href$=".aac"i],
|
||||
):before {
|
||||
content: '🎵';
|
||||
}
|
||||
|
||||
|
||||
|
||||
/* image */
|
||||
#ggrid>a:is(
|
||||
[href$=".jpg"i],
|
||||
[href$=".jpeg"i],
|
||||
[href$=".png"i],
|
||||
[href$=".gif"i],
|
||||
[href$=".webp"i],
|
||||
):before {
|
||||
content: '🎨';
|
||||
}
|
||||
@@ -1,11 +1,11 @@
|
||||
html {
|
||||
background: #333 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed;
|
||||
background: #222 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed;
|
||||
}
|
||||
#files th {
|
||||
background: rgba(32, 32, 32, 0.9) !important;
|
||||
}
|
||||
#ops,
|
||||
#treeul,
|
||||
#tree,
|
||||
#files td {
|
||||
background: rgba(32, 32, 32, 0.3) !important;
|
||||
}
|
||||
@@ -17,8 +17,9 @@ html.light {
|
||||
html.light #files th {
|
||||
background: rgba(255, 255, 255, 0.9) !important;
|
||||
}
|
||||
html.light .logue,
|
||||
html.light #ops,
|
||||
html.light #treeul,
|
||||
html.light #tree,
|
||||
html.light #files td {
|
||||
background: rgba(248, 248, 248, 0.8) !important;
|
||||
}
|
||||
506
contrib/plugins/meadup.js
Normal file
@@ -0,0 +1,506 @@
|
||||
// USAGE:
|
||||
// place this file somewhere in the webroot and then
|
||||
// python3 -m copyparty --js-browser /memes/meadup.js
|
||||
//
|
||||
// FEATURES:
|
||||
// * adds an onscreen keyboard for operating a media center remotely,
|
||||
// relies on https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/very-bad-idea.py
|
||||
// * adds an interactive anime girl (if you can find the dependencies)
|
||||
|
||||
var hambagas = [
|
||||
"https://www.youtube.com/watch?v=pFA3KGp4GuU"
|
||||
];
|
||||
|
||||
// keybaord,
|
||||
// onscreen keyboard by @steinuil
|
||||
function initKeybaord(BASE_URL, HAMBAGA, consoleLog, consoleError) {
|
||||
document.querySelector('.keybaord-container').innerHTML = `
|
||||
<div class="keybaord-body">
|
||||
<div class="keybaord-row keybaord-row-1">
|
||||
<div class="keybaord-key" data-keybaord-key="Escape">
|
||||
esc
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F1">
|
||||
F1
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F2">
|
||||
F2
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F3">
|
||||
F3
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F4">
|
||||
F4
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F5">
|
||||
F5
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F6">
|
||||
F6
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F7">
|
||||
F7
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F8">
|
||||
F8
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F9">
|
||||
F9
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F10">
|
||||
F10
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F11">
|
||||
F11
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="F12">
|
||||
F12
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Insert">
|
||||
ins
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Delete">
|
||||
del
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-2">
|
||||
<div class="keybaord-key" data-keybaord-key="\`">
|
||||
\`
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="1">
|
||||
1
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="2">
|
||||
2
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="3">
|
||||
3
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="4">
|
||||
4
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="5">
|
||||
5
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="6">
|
||||
6
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="7">
|
||||
7
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="8">
|
||||
8
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="9">
|
||||
9
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="0">
|
||||
0
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="-">
|
||||
-
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="=">
|
||||
=
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-backspace" data-keybaord-key="BackSpace">
|
||||
backspace
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-3">
|
||||
<div class="keybaord-key keybaord-tab" data-keybaord-key="Tab">
|
||||
tab
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="q">
|
||||
q
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="w">
|
||||
w
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="e">
|
||||
e
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="r">
|
||||
r
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="t">
|
||||
t
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="y">
|
||||
y
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="u">
|
||||
u
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="i">
|
||||
i
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="o">
|
||||
o
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="p">
|
||||
p
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="[">
|
||||
[
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="]">
|
||||
]
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-enter" data-keybaord-key="Return">
|
||||
enter
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-4">
|
||||
<div class="keybaord-key keybaord-capslock" data-keybaord-key="HAMBAGA">
|
||||
🍔
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="a">
|
||||
a
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="s">
|
||||
s
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="d">
|
||||
d
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="f">
|
||||
f
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="g">
|
||||
g
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="h">
|
||||
h
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="j">
|
||||
j
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="k">
|
||||
k
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="l">
|
||||
l
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key=";">
|
||||
;
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="'">
|
||||
'
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-backslash" data-keybaord-key="\\">
|
||||
\\
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-5">
|
||||
<div class="keybaord-key keybaord-lshift" data-keybaord-key="Shift_L">
|
||||
shift
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="\\">
|
||||
\\
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="z">
|
||||
z
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="x">
|
||||
x
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="c">
|
||||
c
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="v">
|
||||
v
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="b">
|
||||
b
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="n">
|
||||
n
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="m">
|
||||
m
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key=",">
|
||||
,
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key=".">
|
||||
.
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="/">
|
||||
/
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-rshift" data-keybaord-key="Shift_R">
|
||||
shift
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row keybaord-row-6">
|
||||
<div class="keybaord-key keybaord-lctrl" data-keybaord-key="Control_L">
|
||||
ctrl
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-super" data-keybaord-key="Meta_L">
|
||||
win
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-alt" data-keybaord-key="Alt_L">
|
||||
alt
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-spacebar" data-keybaord-key="space">
|
||||
space
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-altgr" data-keybaord-key="Alt_R">
|
||||
altgr
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-what" data-keybaord-key="Menu">
|
||||
menu
|
||||
</div>
|
||||
<div class="keybaord-key keybaord-rctrl" data-keybaord-key="Control_R">
|
||||
ctrl
|
||||
</div>
|
||||
</div>
|
||||
<div class="keybaord-row">
|
||||
<div class="keybaord-key" data-keybaord-key="XF86AudioLowerVolume">
|
||||
🔉
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="XF86AudioRaiseVolume">
|
||||
🔊
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Left">
|
||||
⬅️
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Down">
|
||||
⬇️
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Up">
|
||||
⬆️
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Right">
|
||||
➡️
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Page_Up">
|
||||
PgUp
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Page_Down">
|
||||
PgDn
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="Home">
|
||||
🏠
|
||||
</div>
|
||||
<div class="keybaord-key" data-keybaord-key="End">
|
||||
End
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
`;
|
||||
|
||||
function arraySample(array) {
|
||||
return array[Math.floor(Math.random() * array.length)];
|
||||
}
|
||||
|
||||
function sendMessage(msg) {
|
||||
return fetch(BASE_URL, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded;charset=UTF-8",
|
||||
},
|
||||
body: "msg=" + encodeURIComponent(msg),
|
||||
}).then(
|
||||
(r) => r.text(), // so the response body shows up in network tab
|
||||
(err) => consoleError(err)
|
||||
);
|
||||
}
|
||||
const MODIFIER_ON_CLASS = "keybaord-modifier-on";
|
||||
const KEY_DATASET = "data-keybaord-key";
|
||||
const KEY_CLASS = "keybaord-key";
|
||||
|
||||
const modifiers = new Set()
|
||||
|
||||
function toggleModifier(button, key) {
|
||||
button.classList.toggle(MODIFIER_ON_CLASS);
|
||||
if (modifiers.has(key)) {
|
||||
modifiers.delete(key);
|
||||
} else {
|
||||
modifiers.add(key);
|
||||
}
|
||||
}
|
||||
|
||||
function popModifiers() {
|
||||
let modifierString = "";
|
||||
|
||||
modifiers.forEach((mod) => {
|
||||
document.querySelector("[" + KEY_DATASET + "='" + mod + "']")
|
||||
.classList.remove(MODIFIER_ON_CLASS);
|
||||
|
||||
modifierString += mod + "+";
|
||||
});
|
||||
|
||||
modifiers.clear();
|
||||
|
||||
return modifierString;
|
||||
}
|
||||
|
||||
Array.from(document.querySelectorAll("." + KEY_CLASS)).forEach((button) => {
|
||||
const key = button.dataset.keybaordKey;
|
||||
|
||||
button.addEventListener("click", (ev) => {
|
||||
switch (key) {
|
||||
case "HAMBAGA":
|
||||
sendMessage(arraySample(HAMBAGA));
|
||||
break;
|
||||
|
||||
case "Shift_L":
|
||||
case "Shift_R":
|
||||
|
||||
case "Control_L":
|
||||
case "Control_R":
|
||||
|
||||
case "Meta_L":
|
||||
|
||||
case "Alt_L":
|
||||
case "Alt_R":
|
||||
toggleModifier(button, key);
|
||||
break;
|
||||
|
||||
default: {
|
||||
const keyWithModifiers = popModifiers() + key;
|
||||
|
||||
consoleLog(keyWithModifiers);
|
||||
|
||||
sendMessage("key " + keyWithModifiers)
|
||||
.then(() => consoleLog(keyWithModifiers + " OK"));
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// keybaord integration
|
||||
(function () {
|
||||
var o = mknod('div');
|
||||
clmod(o, 'keybaord-container', 1);
|
||||
ebi('op_msg').appendChild(o);
|
||||
|
||||
o = mknod('style');
|
||||
o.innerHTML = `
|
||||
.keybaord-body {
|
||||
display: flex;
|
||||
flex-flow: column nowrap;
|
||||
margin: .6em 0;
|
||||
}
|
||||
|
||||
.keybaord-row {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.keybaord-key {
|
||||
border: 1px solid rgba(128,128,128,0.2);
|
||||
width: 41px;
|
||||
height: 40px;
|
||||
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.keybaord-key:active {
|
||||
background-color: lightgrey;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-modifier-on {
|
||||
background-color: lightblue;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-backspace {
|
||||
width: 82px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-tab {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-enter {
|
||||
width: 69px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-capslock {
|
||||
width: 80px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-backslash {
|
||||
width: 88px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-lshift {
|
||||
width: 65px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-rshift {
|
||||
width: 103px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-lctrl {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-super {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-alt {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-altgr {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-what {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-rctrl {
|
||||
width: 55px;
|
||||
}
|
||||
|
||||
.keybaord-key.keybaord-spacebar {
|
||||
width: 302px;
|
||||
}
|
||||
`;
|
||||
document.head.appendChild(o);
|
||||
|
||||
initKeybaord('/', hambagas,
|
||||
(msg) => { toast.inf(2, msg.toString()) },
|
||||
(msg) => { toast.err(30, msg.toString()) });
|
||||
})();
|
||||
|
||||
|
||||
// live2d (dumb pointless meme)
|
||||
// dependencies for this part are not tracked in git
|
||||
// so delete this section if you wanna use this file
|
||||
// (or supply your own l2d model and js)
|
||||
(function () {
|
||||
var o = mknod('link');
|
||||
o.setAttribute('rel', 'stylesheet');
|
||||
o.setAttribute('href', "/bad-memes/pio.css");
|
||||
document.head.appendChild(o);
|
||||
|
||||
o = mknod('style');
|
||||
o.innerHTML = '.pio-container{text-shadow:none;z-index:1}';
|
||||
document.head.appendChild(o);
|
||||
|
||||
o = mknod('div');
|
||||
clmod(o, 'pio-container', 1);
|
||||
o.innerHTML = '<div class="pio-action"></div><canvas id="pio" width="280" height="500"></canvas>';
|
||||
document.body.appendChild(o);
|
||||
|
||||
var remaining = 3;
|
||||
for (var a of ['pio', 'l2d', 'fireworks']) {
|
||||
import_js(`/bad-memes/${a}.js`, function () {
|
||||
if (remaining --> 1)
|
||||
return;
|
||||
|
||||
o = mknod('script');
|
||||
o.innerHTML = 'var pio = new Paul_Pio({"selector":[],"mode":"fixed","hidden":false,"content":{"close":"ok bye"},"model":["/bad-memes/sagiri/model.json"]});';
|
||||
document.body.appendChild(o);
|
||||
});
|
||||
}
|
||||
})();
|
||||
@@ -9,9 +9,11 @@
|
||||
|
||||
#ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */
|
||||
|
||||
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
|
||||
#u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
|
||||
|
||||
#u2cards /* and the upload progress tabs */
|
||||
#srch_dz, #srch_zd, /* the filesearch dropzone */
|
||||
|
||||
#u2cards, #u2etaw /* and the upload progress tabs */
|
||||
|
||||
{display: none !important} /* do it! */
|
||||
|
||||
@@ -19,13 +21,16 @@
|
||||
|
||||
/* add some margins because now it's weird */
|
||||
.opview {margin-top: 2.5em}
|
||||
#op_up2k {margin-top: 3em}
|
||||
#op_up2k {margin-top: 6em}
|
||||
|
||||
/* and embiggen the upload button */
|
||||
#u2conf #u2btn, #u2btn {padding:1.5em 0}
|
||||
|
||||
/* adjust the button area a bit */
|
||||
#u2conf.has_btn {width: 35em !important; margin: 5em auto}
|
||||
#u2conf.w, #u2conf.ww {width: 35em !important; margin: 5em auto}
|
||||
|
||||
/* a */
|
||||
#op_up2k {min-height: 0}
|
||||
|
||||
</style>
|
||||
|
||||
31
contrib/rc/copyparty
Normal file
@@ -0,0 +1,31 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# PROVIDE: copyparty
|
||||
# REQUIRE: networking
|
||||
# KEYWORD:
|
||||
|
||||
. /etc/rc.subr
|
||||
|
||||
name="copyparty"
|
||||
rcvar="copyparty_enable"
|
||||
copyparty_user="copyparty"
|
||||
copyparty_args="-e2dsa -v /storage:/storage:r" # change as you see fit
|
||||
copyparty_command="/usr/local/bin/python3.8 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
|
||||
pidfile="/var/run/copyparty/${name}.pid"
|
||||
command="/usr/sbin/daemon"
|
||||
command_args="-P ${pidfile} -r -f ${copyparty_command}"
|
||||
|
||||
stop_postcmd="copyparty_shutdown"
|
||||
|
||||
copyparty_shutdown()
|
||||
{
|
||||
if [ -e "${pidfile}" ]; then
|
||||
echo "Stopping supervising daemon."
|
||||
kill -s TERM `cat ${pidfile}`
|
||||
fi
|
||||
}
|
||||
|
||||
load_rc_config $name
|
||||
: ${copyparty_enable:=no}
|
||||
|
||||
run_rc_command "$1"
|
||||
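To complete the picture, a hedged sketch of the usual FreeBSD rc.d workflow for the new script (standard sysrc/service commands; the install path is an assumption):

install -m 755 contrib/rc/copyparty /usr/local/etc/rc.d/copyparty
sysrc copyparty_enable=YES
service copyparty start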
@@ -3,10 +3,15 @@
|
||||
#
|
||||
# installation:
|
||||
# cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
|
||||
# restorecon -vr /etc/systemd/system/copyparty.service
|
||||
# firewall-cmd --permanent --add-port={80,443,3923}/tcp
|
||||
# firewall-cmd --reload
|
||||
#
|
||||
# you may want to:
|
||||
# change '/usr/bin/python' to another interpreter
|
||||
# change '/usr/bin/python3' to another interpreter
|
||||
# change '/mnt::rw' to another location or permission-set
|
||||
# remove '-p 80,443,3923' to only listen on port 3923
|
||||
# add '-i 127.0.0.1' to only allow local connections
|
||||
#
|
||||
# with `Type=notify`, copyparty will signal systemd when it is ready to
|
||||
# accept connections; correctly delaying units depending on copyparty.
|
||||
@@ -14,11 +19,8 @@
|
||||
# python disabling line-buffering, so messages are out-of-order:
|
||||
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
|
||||
#
|
||||
# enable line-buffering for realtime logging (slight performance cost):
|
||||
# modify ExecStart and prefix it with `/usr/bin/stdbuf -oL` like so:
|
||||
# ExecStart=/usr/bin/stdbuf -oL /usr/bin/python3 [...]
|
||||
# but some systemd versions require this instead (higher performance cost):
|
||||
# inside the [Service] block, add the following line:
|
||||
# if you remove -q to enable logging, you may also want to remove the
|
||||
# following line to enable buffering (slightly better performance):
|
||||
# Environment=PYTHONUNBUFFERED=x
|
||||
|
||||
[Unit]
|
||||
@@ -27,8 +29,10 @@ Description=copyparty file server
|
||||
[Service]
|
||||
Type=notify
|
||||
SyslogIdentifier=copyparty
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
|
||||
Environment=PYTHONUNBUFFERED=x
|
||||
ExecReload=/bin/kill -s USR1 $MAINPID
|
||||
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -p 80,443,3923 -v /mnt::rw
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
||||
@@ -25,26 +25,34 @@ ANYWIN = WINDOWS or sys.platform in ["msys"]
|
||||
MACOS = platform.system() == "Darwin"
|
||||
|
||||
|
||||
def get_unix_home():
|
||||
try:
|
||||
v = os.environ["XDG_CONFIG_HOME"]
|
||||
if not v:
|
||||
raise Exception()
|
||||
ret = os.path.normpath(v)
|
||||
os.listdir(ret)
|
||||
return ret
|
||||
except:
|
||||
pass
|
||||
def get_unixdir():
|
||||
paths = [
|
||||
(os.environ.get, "XDG_CONFIG_HOME"),
|
||||
(os.path.expanduser, "~/.config"),
|
||||
(os.environ.get, "TMPDIR"),
|
||||
(os.environ.get, "TEMP"),
|
||||
(os.environ.get, "TMP"),
|
||||
(unicode, "/tmp"),
|
||||
]
|
||||
for chk in [os.listdir, os.mkdir]:
|
||||
for pf, pa in paths:
|
||||
try:
|
||||
p = pf(pa)
|
||||
# print(chk.__name__, p, pa)
|
||||
if not p or p.startswith("~"):
|
||||
continue
|
||||
|
||||
try:
|
||||
v = os.path.expanduser("~/.config")
|
||||
if v.startswith("~"):
|
||||
raise Exception()
|
||||
ret = os.path.normpath(v)
|
||||
os.listdir(ret)
|
||||
return ret
|
||||
except:
|
||||
return "/tmp"
|
||||
p = os.path.normpath(p)
|
||||
chk(p)
|
||||
p = os.path.join(p, "copyparty")
|
||||
if not os.path.isdir(p):
|
||||
os.mkdir(p)
|
||||
|
||||
return p
|
||||
except:
|
||||
pass
|
||||
|
||||
raise Exception("could not find a writable path for config")
|
||||
|
||||
|
||||
class EnvParams(object):
|
||||
@@ -59,7 +67,7 @@ class EnvParams(object):
|
||||
elif sys.platform == "darwin":
|
||||
self.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
|
||||
else:
|
||||
self.cfg = get_unix_home() + "/copyparty"
|
||||
self.cfg = get_unixdir()
|
||||
|
||||
self.cfg = self.cfg.replace("\\", "/")
|
||||
try:
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
@@ -20,10 +20,10 @@ import threading
|
||||
import traceback
|
||||
from textwrap import dedent
|
||||
|
||||
from .__init__ import E, WINDOWS, VT100, PY2, unicode
|
||||
from .__init__ import E, WINDOWS, ANYWIN, VT100, PY2, unicode
|
||||
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
|
||||
from .svchub import SvcHub
|
||||
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re
|
||||
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re, min_ex
|
||||
from .authsrv import re_vol
|
||||
|
||||
HAVE_SSL = True
|
||||
@@ -104,7 +104,7 @@ def ensure_cert():
|
||||
cert_insec = os.path.join(E.mod, "res/insecure.pem")
|
||||
cert_cfg = os.path.join(E.cfg, "cert.pem")
|
||||
if not os.path.exists(cert_cfg):
|
||||
shutil.copy2(cert_insec, cert_cfg)
|
||||
shutil.copy(cert_insec, cert_cfg)
|
||||
|
||||
try:
|
||||
if filecmp.cmp(cert_cfg, cert_insec):
|
||||
@@ -186,6 +186,32 @@ def configure_ssl_ciphers(al):
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def args_from_cfg(cfg_path):
|
||||
ret = []
|
||||
skip = False
|
||||
with open(cfg_path, "rb") as f:
|
||||
for ln in [x.decode("utf-8").strip() for x in f]:
|
||||
if not ln:
|
||||
skip = False
|
||||
continue
|
||||
|
||||
if ln.startswith("#"):
|
||||
continue
|
||||
|
||||
if not ln.startswith("-"):
|
||||
continue
|
||||
|
||||
if skip:
|
||||
continue
|
||||
|
||||
try:
|
||||
ret.extend(ln.split(" ", 1))
|
||||
except:
|
||||
ret.append(ln)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def sighandler(sig=None, frame=None):
|
||||
msg = [""] * 5
|
||||
for th in threading.enumerate():
|
||||
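For context on the args_from_cfg() helper added above: every non-comment line that starts with "-" becomes either a single argv entry or a flag/value pair (split on the first space). A hedged example of a config file it could parse (the filename and values are placeholders; the flags appear elsewhere in this changeset):

# copyparty.conf (hypothetical), used as:  copyparty-sfx.py -c copyparty.conf
-e2dsa
-a ed:wark
-v /mnt/nas/music:/music:r:rwmd,ed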
@@ -196,6 +222,54 @@ def sighandler(sig=None, frame=None):
|
||||
print("\n".join(msg))
|
||||
|
||||
|
||||
def disable_quickedit():
|
||||
import ctypes
|
||||
import atexit
|
||||
from ctypes import wintypes
|
||||
|
||||
def ecb(ok, fun, args):
|
||||
if not ok:
|
||||
err = ctypes.get_last_error()
|
||||
if err:
|
||||
raise ctypes.WinError(err)
|
||||
return args
|
||||
|
||||
k32 = ctypes.WinDLL("kernel32", use_last_error=True)
|
||||
if PY2:
|
||||
wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)
|
||||
|
||||
k32.GetStdHandle.errcheck = ecb
|
||||
k32.GetConsoleMode.errcheck = ecb
|
||||
k32.SetConsoleMode.errcheck = ecb
|
||||
k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD)
|
||||
k32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)
|
||||
|
||||
def cmode(out, mode=None):
|
||||
h = k32.GetStdHandle(-11 if out else -10)
|
||||
if mode:
|
||||
return k32.SetConsoleMode(h, mode)
|
||||
|
||||
mode = wintypes.DWORD()
|
||||
k32.GetConsoleMode(h, ctypes.byref(mode))
|
||||
return mode.value
|
||||
|
||||
# disable quickedit
|
||||
mode = orig_in = cmode(False)
|
||||
quickedit = 0x40
|
||||
extended = 0x80
|
||||
mask = quickedit + extended
|
||||
if mode & mask != extended:
|
||||
atexit.register(cmode, False, orig_in)
|
||||
cmode(False, mode & ~mask | extended)
|
||||
|
||||
# enable colors in case the os.system("rem") trick ever stops working
|
||||
if VT100:
|
||||
mode = orig_out = cmode(True)
|
||||
if mode & 4 != 4:
|
||||
atexit.register(cmode, True, orig_out)
|
||||
cmode(True, mode | 4)
|
||||
|
||||
|
||||
def run_argparse(argv, formatter):
|
||||
ap = argparse.ArgumentParser(
|
||||
formatter_class=formatter,
|
||||
@@ -203,6 +277,13 @@ def run_argparse(argv, formatter):
|
||||
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
|
||||
)
|
||||
|
||||
try:
|
||||
fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
|
||||
except:
|
||||
fk_salt = "hunter2"
|
||||
|
||||
cores = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
|
||||
sects = [
|
||||
[
|
||||
"accounts",
|
||||
@@ -211,14 +292,15 @@ def run_argparse(argv, formatter):
|
||||
"""
|
||||
-a takes username:password,
|
||||
-v takes src:dst:perm1:perm2:permN:volflag1:volflag2:volflagN:...
|
||||
where "perm" is "accesslevels,username1,username2,..."
|
||||
where "perm" is "permissions,username1,username2,..."
|
||||
and "volflag" is config flags to set on this volume
|
||||
|
||||
list of accesslevels:
|
||||
list of permissions:
|
||||
"r" (read): list folder contents, download files
|
||||
"w" (write): upload files; need "r" to see the uploads
|
||||
"m" (move): move files and folders; need "w" at destination
|
||||
"d" (delete): permanently delete files and folders
|
||||
"g" (get): download files, but cannot see folder contents
|
||||
|
||||
too many volflags to list here, see the other sections
|
||||
|
||||
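Tying the -a / -v syntax together, a hedged example (account name, password and paths are placeholders; the permission letters are the ones listed above):

# everyone can read /music; the account "ed" can also write, move and delete
python3 -m copyparty -a ed:wark -v /mnt/nas/music:/music:r:rwmd,ed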
@@ -268,9 +350,12 @@ def run_argparse(argv, formatter):
|
||||
|
||||
\033[0mdatabase, general:
|
||||
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
|
||||
\033[36md2ts\033[35m disables metadata collection for existing files
|
||||
\033[36md2ds\033[35m disables onboot indexing, overrides -e2ds*
|
||||
\033[36md2t\033[35m disables metadata collection, overrides -e2t*
|
||||
\033[36md2d\033[35m disables all database stuff, overrides -e2*
|
||||
\033[36mdhash\033[35m disables file hashing on initial scans, also ehash
|
||||
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
|
||||
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
|
||||
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
|
||||
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
|
||||
|
||||
@@ -279,6 +364,10 @@ def run_argparse(argv, formatter):
|
||||
\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
|
||||
generate ".bpm" tags from uploads (f = overwrite tags)
|
||||
\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
|
||||
|
||||
\033[0mothers:
|
||||
\033[36mfk=8\033[35m generates per-file accesskeys,
|
||||
which will then be required at the "g" permission
|
||||
\033[0m"""
|
||||
),
|
||||
],
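# rough guess at what a per-file accesskey such as fk=8 could look like; this is
# NOT copyparty's actual derivation, only an illustration of salting a path and
# truncating a digest to the requested length:
import hashlib

def guess_filekey(fk_salt, abspath, nchars=8):
    # hypothetical helper; the real key material and fields may differ
    h = hashlib.sha512((fk_salt + "\n" + abspath).encode("utf-8", "replace"))
    return h.hexdigest()[:nchars]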
|
||||
@@ -322,24 +411,32 @@ def run_argparse(argv, formatter):
|
||||
ap2 = ap.add_argument_group('general options')
|
||||
ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file")
|
||||
ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
|
||||
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
|
||||
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all")
|
||||
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark")
|
||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
|
||||
ap2.add_argument("-ed", action="store_true", help="enable ?dots")
|
||||
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins")
|
||||
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
|
||||
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
|
||||
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="window title, for example '$ip-10.1.2.' or '$ip-'")
|
||||
|
||||
ap2 = ap.add_argument_group('upload options')
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
|
||||
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
|
||||
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
|
||||
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload")
|
||||
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without")
|
||||
ap2.add_argument("--no-symlink", action="store_true", help="duplicate file contents instead")
|
||||
ap2.add_argument("--reg-cap", metavar="N", type=int, default=9000, help="max number of uploads to keep in memory when running without -e2d")
|
||||
|
||||
ap2 = ap.add_argument_group('network options')
|
||||
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
|
||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
|
||||
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")
|
||||
|
||||
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
||||
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="socket write delay in seconds")
|
||||
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="response delay in seconds")
|
||||
|
||||
ap2 = ap.add_argument_group('SSL/TLS options')
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
|
||||
@@ -348,8 +445,16 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets")
|
||||
|
||||
ap2 = ap.add_argument_group('FTP options')
|
||||
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example 3921")
|
||||
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example 3990")
|
||||
ap2.add_argument("--ftp-dbg", action="store_true", help="enable debug logging")
|
||||
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
|
||||
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example 12000-13000")
|
||||
|
||||
ap2 = ap.add_argument_group('opt-outs')
|
||||
ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
|
||||
ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows")
|
||||
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
|
||||
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
|
||||
ap2.add_argument("-nih", action="store_true", help="no info hostname")
|
||||
@@ -360,6 +465,16 @@ def run_argparse(argv, formatter):
|
||||
ap2 = ap.add_argument_group('safety options')
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
|
||||
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt")
|
||||
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt")
|
||||
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
||||
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
|
||||
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
|
||||
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
|
||||
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
|
||||
|
||||
ap2 = ap.add_argument_group('yolo options')
|
||||
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
|
||||
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
|
||||
|
||||
ap2 = ap.add_argument_group('logging options')
|
||||
ap2.add_argument("-q", action="store_true", help="quiet")
|
||||
@@ -371,40 +486,49 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
|
||||
|
||||
ap2 = ap.add_argument_group('admin panel options')
|
||||
ap2.add_argument("--no-reload", action="store_true", help="disable ?reload=cfg (reload users/volumes/volflags from config file)")
|
||||
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
|
||||
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
|
||||
|
||||
ap2 = ap.add_argument_group('thumbnail options')
|
||||
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
|
||||
ap2.add_argument("--no-athumb", action="store_true", help="disable audio thumbnails (spectrograms)")
|
||||
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
|
||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
|
||||
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=0, help="max num cpu cores to use, 0=all")
|
||||
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for generating thumbnails")
|
||||
ap2.add_argument("--th-convt", metavar="SEC", type=int, default=60, help="conversion timeout in seconds")
|
||||
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
|
||||
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
||||
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
||||
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs")
|
||||
ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs")
|
||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
|
||||
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
|
||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for")
|
||||
|
||||
ap2 = ap.add_argument_group('transcoding options')
|
||||
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
|
||||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete transcode output after SEC seconds")
|
||||
|
||||
ap2 = ap.add_argument_group('general db options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
|
||||
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
|
||||
ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
|
||||
ap2.add_argument("--re-int", metavar="SEC", type=int, default=30, help="disk rescan check interval")
|
||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans")
|
||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
|
||||
ap2.add_argument("--srch-hits", metavar="N", type=int, default=1000, help="max search results")
|
||||
|
||||
ap2 = ap.add_argument_group('metadata db options')
|
||||
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
|
||||
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
|
||||
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
|
||||
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead")
|
||||
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
|
||||
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader")
|
||||
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for tag scanning")
|
||||
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash")
|
||||
@@ -412,8 +536,11 @@ def run_argparse(argv, formatter):
|
||||
default=".vq,.aq,vc,ac,res,.fps")
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin")
|
||||
|
||||
ap2 = ap.add_argument_group('appearance options')
|
||||
ap2 = ap.add_argument_group('ui options')
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
||||
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
|
||||
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
|
||||
|
||||
ap2 = ap.add_argument_group('debug options')
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
|
||||
@@ -456,7 +583,12 @@ def main(argv=None):
|
||||
if HAVE_SSL:
|
||||
ensure_cert()
|
||||
|
||||
deprecated = [["-e2s", "-e2ds"]]
|
||||
for k, v in zip(argv[1:], argv[2:]):
|
||||
if k == "-c":
|
||||
supp = args_from_cfg(v)
|
||||
argv.extend(supp)
|
||||
|
||||
deprecated = []
|
||||
for dk, nk in deprecated:
|
||||
try:
|
||||
idx = argv.index(dk)
|
||||
@@ -468,11 +600,26 @@ def main(argv=None):
|
||||
argv[idx] = nk
|
||||
time.sleep(2)
|
||||
|
||||
try:
|
||||
if len(argv) == 1 and (ANYWIN or not os.geteuid()):
|
||||
argv.extend(["-p80,443,3923", "--ign-ebind"])
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
al = run_argparse(argv, RiceFormatter)
|
||||
except AssertionError:
|
||||
al = run_argparse(argv, Dodge11874)
|
||||
|
||||
if WINDOWS and not al.keep_qem:
|
||||
try:
|
||||
disable_quickedit()
|
||||
except:
|
||||
print("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
|
||||
|
||||
if not VT100:
|
||||
al.wintitle = ""
|
||||
|
||||
nstrs = []
|
||||
anymod = False
|
||||
for ostr in al.v or []:
|
||||
@@ -489,7 +636,7 @@ def main(argv=None):
|
||||
if re.match("c[^,]", opt):
|
||||
mod = True
|
||||
na.append("c," + opt[1:])
|
||||
elif re.sub("^[rwmd]*", "", opt) and "," not in opt:
|
||||
elif re.sub("^[rwmdg]*", "", opt) and "," not in opt:
|
||||
mod = True
|
||||
perm = opt[0]
|
||||
if perm == "a":
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# coding: utf-8
|
||||
|
||||
VERSION = (0, 13, 3)
|
||||
CODENAME = "future-proof"
|
||||
BUILD_DT = (2021, 8, 14)
|
||||
VERSION = (1, 2, 1)
|
||||
CODENAME = "ftp btw"
|
||||
BUILD_DT = (2022, 3, 3)
|
||||
|
||||
S_VERSION = ".".join(map(str, VERSION))
|
||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
|
||||
@@ -29,17 +29,18 @@ LEELOO_DALLAS = "leeloo_dallas"
|
||||
|
||||
|
||||
class AXS(object):
|
||||
def __init__(self, uread=None, uwrite=None, umove=None, udel=None):
|
||||
def __init__(self, uread=None, uwrite=None, umove=None, udel=None, uget=None):
|
||||
self.uread = {} if uread is None else {k: 1 for k in uread}
|
||||
self.uwrite = {} if uwrite is None else {k: 1 for k in uwrite}
|
||||
self.umove = {} if umove is None else {k: 1 for k in umove}
|
||||
self.udel = {} if udel is None else {k: 1 for k in udel}
|
||||
self.uget = {} if uget is None else {k: 1 for k in uget}
|
||||
|
||||
def __repr__(self):
|
||||
return "AXS({})".format(
|
||||
", ".join(
|
||||
"{}={!r}".format(k, self.__dict__[k])
|
||||
for k in "uread uwrite umove udel".split()
|
||||
for k in "uread uwrite umove udel uget".split()
|
||||
)
|
||||
)
|
||||
|
||||
@@ -215,6 +216,7 @@ class VFS(object):
|
||||
self.awrite = {}
|
||||
self.amove = {}
|
||||
self.adel = {}
|
||||
self.aget = {}
|
||||
else:
|
||||
self.histpath = None
|
||||
self.all_vols = None
|
||||
@@ -222,6 +224,7 @@ class VFS(object):
|
||||
self.awrite = None
|
||||
self.amove = None
|
||||
self.adel = None
|
||||
self.aget = None
|
||||
|
||||
def __repr__(self):
|
||||
return "VFS({})".format(
|
||||
@@ -308,7 +311,7 @@ class VFS(object):
|
||||
|
||||
def can_access(self, vpath, uname):
|
||||
# type: (str, str) -> tuple[bool, bool, bool, bool]
|
||||
"""can Read,Write,Move,Delete"""
|
||||
"""can Read,Write,Move,Delete,Get"""
|
||||
vn, _ = self._find(vpath)
|
||||
c = vn.axs
|
||||
return [
|
||||
@@ -316,10 +319,20 @@ class VFS(object):
|
||||
uname in c.uwrite or "*" in c.uwrite,
|
||||
uname in c.umove or "*" in c.umove,
|
||||
uname in c.udel or "*" in c.udel,
|
||||
uname in c.uget or "*" in c.uget,
|
||||
]
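# usage sketch (volume path and username are hypothetical): the five booleans
# unpack as
#   can_r, can_w, can_m, can_d, can_g = vfs.can_access("music/albums", "ed")
# which is exactly how FtpFs.chdir consumes the result later in this diff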
|
||||
|
||||
def get(self, vpath, uname, will_read, will_write, will_move=False, will_del=False):
|
||||
# type: (str, str, bool, bool, bool, bool) -> tuple[VFS, str]
|
||||
def get(
|
||||
self,
|
||||
vpath,
|
||||
uname,
|
||||
will_read,
|
||||
will_write,
|
||||
will_move=False,
|
||||
will_del=False,
|
||||
will_get=False,
|
||||
):
|
||||
# type: (str, str, bool, bool, bool, bool, bool) -> tuple[VFS, str]
|
||||
"""returns [vfsnode,fs_remainder] if user has the requested permissions"""
|
||||
vn, rem = self._find(vpath)
|
||||
c = vn.axs
|
||||
@@ -329,6 +342,7 @@ class VFS(object):
|
||||
[will_write, c.uwrite, "write"],
|
||||
[will_move, c.umove, "move"],
|
||||
[will_del, c.udel, "delete"],
|
||||
[will_get, c.uget, "get"],
|
||||
]:
|
||||
if req and (uname not in d and "*" not in d) and uname != LEELOO_DALLAS:
|
||||
m = "you don't have {}-access for this location"
|
||||
@@ -342,7 +356,7 @@ class VFS(object):
|
||||
if not dbv:
|
||||
return self, vrem
|
||||
|
||||
vrem = [self.vpath[len(dbv.vpath) + 1 :], vrem]
|
||||
vrem = [self.vpath[len(dbv.vpath) :].lstrip("/"), vrem]
|
||||
vrem = "/".join([x for x in vrem if x])
|
||||
return dbv, vrem
|
||||
|
||||
@@ -368,7 +382,7 @@ class VFS(object):
|
||||
for name, vn2 in sorted(self.nodes.items()):
|
||||
ok = False
|
||||
axs = vn2.axs
|
||||
axs = [axs.uread, axs.uwrite, axs.umove, axs.udel]
|
||||
axs = [axs.uread, axs.uwrite, axs.umove, axs.udel, axs.uget]
|
||||
for pset in permsets:
|
||||
ok = True
|
||||
for req, lst in zip(pset, axs):
|
||||
@@ -380,6 +394,13 @@ class VFS(object):
|
||||
if ok:
|
||||
virt_vis[name] = vn2
|
||||
|
||||
if ".hist" in abspath:
|
||||
p = abspath.replace("\\", "/") if WINDOWS else abspath
|
||||
if p.endswith("/.hist"):
|
||||
real = [x for x in real if not x[0].startswith("up2k.")]
|
||||
elif "/.hist/th/" in p:
|
||||
real = [x for x in real if not x[0].endswith("dir.txt")]
|
||||
|
||||
return [abspath, real, virt_vis]
|
||||
|
||||
def walk(self, rel, rem, seen, uname, permsets, dots, scandir, lstat):
|
||||
@@ -430,11 +451,11 @@ class VFS(object):
|
||||
if flt:
|
||||
flt = {k: True for k in flt}
|
||||
|
||||
f1 = "{0}.hist{0}up2k.".format(os.sep)
|
||||
f2a = os.sep + "dir.txt"
|
||||
f2b = "{0}.hist{0}".format(os.sep)
|
||||
# if multiselect: add all items to archive root
|
||||
# if single folder: the folder itself is the top-level item
|
||||
folder = "" if flt else (vrem.split("/")[-1] or "top")
|
||||
|
||||
g = self.walk("", vrem, [], uname, [[True]], dots, scandir, False)
|
||||
g = self.walk(folder, vrem, [], uname, [[True]], dots, scandir, False)
|
||||
for _, _, vpath, apath, files, rd, vd in g:
|
||||
if flt:
|
||||
files = [x for x in files if x[0] in flt]
|
||||
@@ -465,13 +486,6 @@ class VFS(object):
|
||||
for x in rm:
|
||||
del vd[x]
|
||||
|
||||
# up2k filtering based on actual abspath
|
||||
files = [
|
||||
x
|
||||
for x in files
|
||||
if f1 not in x[1] and (not x[1].endswith(f2a) or f2b not in x[1])
|
||||
]
|
||||
|
||||
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
|
||||
yield f
|
||||
|
||||
@@ -508,8 +522,27 @@ class AuthSrv(object):
|
||||
|
||||
yield prev, True
|
||||
|
||||
def _map_volume(self, src, dst, mount, daxs, mflags):
|
||||
if dst in mount:
|
||||
m = "multiple filesystem-paths mounted at [/{}]:\n [{}]\n [{}]"
|
||||
self.log(m.format(dst, mount[dst], src), c=1)
|
||||
raise Exception("invalid config")
|
||||
|
||||
if src in mount.values():
|
||||
m = "warning: filesystem-path [{}] mounted in multiple locations:"
|
||||
m = m.format(src)
|
||||
for v in [k for k, v in mount.items() if v == src] + [dst]:
|
||||
m += "\n /{}".format(v)
|
||||
|
||||
self.log(m, c=3)
|
||||
|
||||
mount[dst] = src
|
||||
daxs[dst] = AXS()
|
||||
mflags[dst] = {}
|
||||
|
||||
def _parse_config_file(self, fd, acct, daxs, mflags, mount):
|
||||
# type: (any, str, dict[str, AXS], any, str) -> None
|
||||
skip = False
|
||||
vol_src = None
|
||||
vol_dst = None
|
||||
self.line_ctr = 0
|
||||
@@ -519,6 +552,11 @@ class AuthSrv(object):
|
||||
vol_src = None
|
||||
vol_dst = None
|
||||
|
||||
if skip:
|
||||
if not ln:
|
||||
skip = False
|
||||
continue
|
||||
|
||||
if not ln or ln.startswith("#"):
|
||||
continue
|
||||
|
||||
@@ -526,6 +564,8 @@ class AuthSrv(object):
|
||||
if ln.startswith("u "):
|
||||
u, p = ln[2:].split(":", 1)
|
||||
acct[u] = p
|
||||
elif ln.startswith("-"):
|
||||
skip = True # argv
|
||||
else:
|
||||
vol_src = ln
|
||||
continue
|
||||
@@ -538,9 +578,7 @@ class AuthSrv(object):
|
||||
# cfg files override arguments and previous files
|
||||
vol_src = bos.path.abspath(vol_src)
|
||||
vol_dst = vol_dst.strip("/")
|
||||
mount[vol_dst] = vol_src
|
||||
daxs[vol_dst] = AXS()
|
||||
mflags[vol_dst] = {}
|
||||
self._map_volume(vol_src, vol_dst, mount, daxs, mflags)
|
||||
continue
|
||||
|
||||
try:
|
||||
@@ -557,13 +595,21 @@ class AuthSrv(object):
|
||||
|
||||
def _read_vol_str(self, lvl, uname, axs, flags):
|
||||
# type: (str, str, AXS, any) -> None
|
||||
if lvl.strip("crwmd"):
|
||||
if lvl.strip("crwmdg"):
|
||||
raise Exception("invalid volume flag: {},{}".format(lvl, uname))
|
||||
|
||||
if lvl == "c":
|
||||
cval = True
|
||||
if "=" in uname:
|
||||
try:
|
||||
# volume flag with arguments, possibly with a preceding list of bools
|
||||
uname, cval = uname.split("=", 1)
|
||||
except:
|
||||
# just one or more bools
|
||||
cval = True
|
||||
|
||||
while "," in uname:
|
||||
# one or more bools before the final flag; eat them
|
||||
n1, uname = uname.split(",", 1)
|
||||
self._read_volflag(flags, n1, True, False)
|
||||
|
||||
self._read_volflag(flags, uname, cval, False)
|
||||
return
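# worked example of the "c" branch above (the flag names are made up):
#   lvl="c", uname="someflag,nohash=\.iso$"
# first splits on "=" into ("someflag,nohash", "\.iso$"), then the comma loop
# stores someflag=True before the final flag lands as nohash="\.iso$"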
|
||||
@@ -571,7 +617,7 @@ class AuthSrv(object):
|
||||
if uname == "":
|
||||
uname = "*"
|
||||
|
||||
for un in uname.split(","):
|
||||
for un in uname.replace(",", " ").strip().split():
|
||||
if "r" in lvl:
|
||||
axs.uread[un] = 1
|
||||
|
||||
@@ -584,6 +630,9 @@ class AuthSrv(object):
|
||||
if "d" in lvl:
|
||||
axs.udel[un] = 1
|
||||
|
||||
if "g" in lvl:
|
||||
axs.uget[un] = 1
|
||||
|
||||
def _read_volflag(self, flags, name, value, is_list):
|
||||
if name not in ["mtp"]:
|
||||
flags[name] = value
|
||||
@@ -621,7 +670,7 @@ class AuthSrv(object):
|
||||
|
||||
if self.args.v:
|
||||
# list of src:dst:permset:permset:...
|
||||
# permset is <rwmd>[,username][,username] or <c>,<flag>[=args]
|
||||
# permset is <rwmdg>[,username][,username] or <c>,<flag>[=args]
|
||||
for v_str in self.args.v:
|
||||
m = re_vol.match(v_str)
|
||||
if not m:
|
||||
@@ -634,9 +683,7 @@ class AuthSrv(object):
|
||||
# print("\n".join([src, dst, perms]))
|
||||
src = bos.path.abspath(src)
|
||||
dst = dst.strip("/")
|
||||
mount[dst] = src
|
||||
daxs[dst] = AXS()
|
||||
mflags[dst] = {}
|
||||
self._map_volume(src, dst, mount, daxs, mflags)
|
||||
|
||||
for x in perms.split(":"):
|
||||
lvl, uname = x.split(",", 1) if "," in x else [x, ""]
|
||||
@@ -688,20 +735,22 @@ class AuthSrv(object):
|
||||
vfs.all_vols = {}
|
||||
vfs.get_all_vols(vfs.all_vols)
|
||||
|
||||
for perm in "read write move del".split():
|
||||
for perm in "read write move del get".split():
|
||||
axs_key = "u" + perm
|
||||
unames = ["*"] + list(acct.keys())
|
||||
umap = {x: [] for x in unames}
|
||||
for usr in unames:
|
||||
for mp, vol in vfs.all_vols.items():
|
||||
if usr in getattr(vol.axs, axs_key):
|
||||
axs = getattr(vol.axs, axs_key)
|
||||
if usr in axs or "*" in axs:
|
||||
umap[usr].append(mp)
|
||||
umap[usr].sort()
|
||||
setattr(vfs, "a" + perm, umap)
|
||||
|
||||
all_users = {}
|
||||
missing_users = {}
|
||||
for axs in daxs.values():
|
||||
for d in [axs.uread, axs.uwrite, axs.umove, axs.udel]:
|
||||
for d in [axs.uread, axs.uwrite, axs.umove, axs.udel, axs.uget]:
|
||||
for usr in d.keys():
|
||||
all_users[usr] = 1
|
||||
if usr != "*" and usr not in acct:
|
||||
@@ -812,6 +861,11 @@ class AuthSrv(object):
|
||||
if use:
|
||||
vol.lim = lim
|
||||
|
||||
for vol in vfs.all_vols.values():
|
||||
fk = vol.flags.get("fk")
|
||||
if fk:
|
||||
vol.flags["fk"] = int(fk) if fk is not True else 8
|
||||
|
||||
for vol in vfs.all_vols.values():
|
||||
if "pk" in vol.flags and "gz" not in vol.flags and "xz" not in vol.flags:
|
||||
vol.flags["gz"] = False # def.pk
|
||||
@@ -830,9 +884,14 @@ class AuthSrv(object):
|
||||
if self.args.e2d or "e2ds" in vol.flags:
|
||||
vol.flags["e2d"] = True
|
||||
|
||||
if self.args.no_hash:
|
||||
if "ehash" not in vol.flags:
|
||||
vol.flags["dhash"] = True
|
||||
for ga, vf in [["no_hash", "nohash"], ["no_idx", "noidx"]]:
|
||||
if vf in vol.flags:
|
||||
ptn = vol.flags.pop(vf)
|
||||
else:
|
||||
ptn = getattr(self.args, ga)
|
||||
|
||||
if ptn:
|
||||
vol.flags[vf] = re.compile(ptn)
|
||||
|
||||
for k in ["e2t", "e2ts", "e2tsr"]:
|
||||
if getattr(self.args, k):
|
||||
@@ -845,6 +904,10 @@ class AuthSrv(object):
|
||||
# default tag cfgs if unset
|
||||
if "mte" not in vol.flags:
|
||||
vol.flags["mte"] = self.args.mte
|
||||
elif vol.flags["mte"].startswith("+"):
|
||||
vol.flags["mte"] = ",".join(
|
||||
x for x in [self.args.mte, vol.flags["mte"][1:]] if x
|
||||
)
|
||||
if "mth" not in vol.flags:
|
||||
vol.flags["mth"] = self.args.mth
|
||||
|
||||
@@ -859,6 +922,14 @@ class AuthSrv(object):
|
||||
vol.flags["d2t"] = True
|
||||
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
|
||||
|
||||
# d2ds drops all onboot scans for a volume
|
||||
for grp, rm in [["d2ds", "e2ds"], ["d2ts", "e2ts"]]:
|
||||
if not vol.flags.get(grp, False):
|
||||
continue
|
||||
|
||||
vol.flags["d2ts"] = True
|
||||
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
|
||||
|
||||
# mt* needs e2t so drop those too
|
||||
for grp, rm in [["e2t", "mt"]]:
|
||||
if vol.flags.get(grp, False):
|
||||
@@ -926,6 +997,7 @@ class AuthSrv(object):
|
||||
[" write", "uwrite"],
|
||||
[" move", "umove"],
|
||||
["delete", "udel"],
|
||||
[" get", "uget"],
|
||||
]:
|
||||
u = list(sorted(getattr(v.axs, attr).keys()))
|
||||
u = ", ".join("\033[35meverybody\033[0m" if x == "*" else x for x in u)
|
||||
@@ -940,7 +1012,7 @@ class AuthSrv(object):
|
||||
v, _ = vfs.get("/", "*", False, True)
|
||||
if self.warn_anonwrite and os.getcwd() == v.realpath:
|
||||
self.warn_anonwrite = False
|
||||
msg = "anyone can read/write the current directory: {}"
|
||||
msg = "anyone can read/write the current directory: {}\n"
|
||||
self.log(msg.format(v.realpath), c=1)
|
||||
except Pebkac:
|
||||
self.warn_anonwrite = True
|
||||
@@ -993,10 +1065,10 @@ class AuthSrv(object):
|
||||
raise Exception("volume not found: " + v)
|
||||
|
||||
self.log({"users": users, "vols": vols, "flags": flags})
|
||||
m = "/{}: read({}) write({}) move({}) del({})"
|
||||
m = "/{}: read({}) write({}) move({}) del({}) get({})"
|
||||
for k, v in self.vfs.all_vols.items():
|
||||
vc = v.axs
|
||||
self.log(m.format(k, vc.uread, vc.uwrite, vc.umove, vc.udel))
|
||||
self.log(m.format(k, vc.uread, vc.uwrite, vc.umove, vc.udel, vc.uget))
|
||||
|
||||
flag_v = "v" in flags
|
||||
flag_ln = "ln" in flags
|
||||
@@ -1010,7 +1082,7 @@ class AuthSrv(object):
|
||||
for u in users:
|
||||
self.log("checking /{} as {}".format(v, u))
|
||||
try:
|
||||
vn, _ = self.vfs.get(v, u, True, False, False, False)
|
||||
vn, _ = self.vfs.get(v, u, True, False, False, False, False)
|
||||
except:
|
||||
continue
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
from ..util import fsenc, fsdec
|
||||
from ..util import fsenc, fsdec, SYMTIME
|
||||
from . import path
|
||||
|
||||
|
||||
@@ -18,21 +18,17 @@ def listdir(p="."):
|
||||
return [fsdec(x) for x in os.listdir(fsenc(p))]
|
||||
|
||||
|
||||
def lstat(p):
|
||||
return os.lstat(fsenc(p))
|
||||
|
||||
|
||||
def makedirs(name, mode=0o755, exist_ok=True):
|
||||
bname = fsenc(name)
|
||||
try:
|
||||
os.makedirs(bname, mode=mode)
|
||||
os.makedirs(bname, mode)
|
||||
except:
|
||||
if not exist_ok or not os.path.isdir(bname):
|
||||
raise
|
||||
|
||||
|
||||
def mkdir(p, mode=0o755):
|
||||
return os.mkdir(fsenc(p), mode=mode)
|
||||
return os.mkdir(fsenc(p), mode)
|
||||
|
||||
|
||||
def rename(src, dst):
|
||||
@@ -55,5 +51,17 @@ def unlink(p):
|
||||
return os.unlink(fsenc(p))
|
||||
|
||||
|
||||
def utime(p, times=None):
|
||||
return os.utime(fsenc(p), times)
|
||||
def utime(p, times=None, follow_symlinks=True):
|
||||
if SYMTIME:
|
||||
return os.utime(fsenc(p), times, follow_symlinks=follow_symlinks)
|
||||
else:
|
||||
return os.utime(fsenc(p), times)
|
||||
|
||||
|
||||
if hasattr(os, "lstat"):
|
||||
|
||||
def lstat(p):
|
||||
return os.lstat(fsenc(p))
|
||||
|
||||
else:
|
||||
lstat = stat
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
from ..util import fsenc, fsdec
|
||||
from ..util import fsenc, fsdec, SYMTIME
|
||||
|
||||
|
||||
def abspath(p):
|
||||
@@ -13,14 +13,21 @@ def exists(p):
|
||||
return os.path.exists(fsenc(p))
|
||||
|
||||
|
||||
def getmtime(p):
|
||||
return os.path.getmtime(fsenc(p))
|
||||
def getmtime(p, follow_symlinks=True):
|
||||
if not follow_symlinks and SYMTIME:
|
||||
return os.lstat(fsenc(p)).st_mtime
|
||||
else:
|
||||
return os.path.getmtime(fsenc(p))
|
||||
|
||||
|
||||
def getsize(p):
|
||||
return os.path.getsize(fsenc(p))
|
||||
|
||||
|
||||
def isfile(p):
|
||||
return os.path.isfile(fsenc(p))
|
||||
|
||||
|
||||
def isdir(p):
|
||||
return os.path.isdir(fsenc(p))
|
||||
|
||||
@@ -29,5 +36,9 @@ def islink(p):
|
||||
return os.path.islink(fsenc(p))
|
||||
|
||||
|
||||
def lexists(p):
|
||||
return os.path.lexists(fsenc(p))
|
||||
|
||||
|
||||
def realpath(p):
|
||||
return fsdec(os.path.realpath(fsenc(p)))
|
||||
|
||||
@@ -62,6 +62,11 @@ class BrokerMp(object):
|
||||
|
||||
procs.pop()
|
||||
|
||||
def reload(self):
|
||||
self.log("broker", "reloading")
|
||||
for _, proc in enumerate(self.procs):
|
||||
proc.q_pend.put([0, "reload", []])
|
||||
|
||||
def collector(self, proc):
|
||||
"""receive message from hub in other process"""
|
||||
while True:
|
||||
|
||||
@@ -29,7 +29,7 @@ class MpWorker(object):
|
||||
# we inherited signal_handler from parent,
|
||||
# replace it with something harmless
|
||||
if not FAKE_MP:
|
||||
for sig in [signal.SIGINT, signal.SIGTERM]:
|
||||
for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGUSR1]:
|
||||
signal.signal(sig, self.signal_handler)
|
||||
|
||||
# starting to look like a good idea
|
||||
@@ -69,6 +69,11 @@ class MpWorker(object):
|
||||
sys.exit(0)
|
||||
return
|
||||
|
||||
elif dest == "reload":
|
||||
self.logw("mpw.asrv reloading")
|
||||
self.asrv.reload()
|
||||
self.logw("mpw.asrv reloaded")
|
||||
|
||||
elif dest == "listen":
|
||||
self.httpsrv.listen(args[0], args[1])
|
||||
|
||||
|
||||
@@ -21,10 +21,13 @@ class BrokerThr(object):
|
||||
|
||||
# instantiate all services here (TODO: inheritance?)
|
||||
self.httpsrv = HttpSrv(self, None)
|
||||
self.reload = self.noop
|
||||
|
||||
def shutdown(self):
|
||||
# self.log("broker", "shutting down")
|
||||
self.httpsrv.shutdown()
|
||||
|
||||
def noop(self):
|
||||
pass
|
||||
|
||||
def put(self, want_retval, dest, *args):
|
||||
|
||||
copyparty/ftpd.py (new file, 374 lines)
@@ -0,0 +1,374 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
import time
|
||||
import logging
|
||||
import threading
|
||||
|
||||
from .__init__ import E, PY2
|
||||
from .util import Pebkac, fsenc, exclude_dotfiles
|
||||
from .bos import bos
|
||||
|
||||
try:
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
except ImportError:
|
||||
p = os.path.join(E.mod, "vend")
|
||||
print("loading asynchat from " + p)
|
||||
sys.path.append(p)
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
|
||||
from pyftpdlib.authorizers import DummyAuthorizer, AuthenticationFailed
|
||||
from pyftpdlib.filesystems import AbstractedFS, FilesystemError
|
||||
from pyftpdlib.handlers import FTPHandler
|
||||
from pyftpdlib.servers import FTPServer
|
||||
from pyftpdlib.log import config_logging
|
||||
|
||||
|
||||
try:
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
class FtpAuth(DummyAuthorizer):
|
||||
def __init__(self):
|
||||
super(FtpAuth, self).__init__()
|
||||
self.hub = None # type: SvcHub
|
||||
|
||||
def validate_authentication(self, username, password, handler):
|
||||
asrv = self.hub.asrv
|
||||
if username == "anonymous":
|
||||
password = ""
|
||||
|
||||
uname = "*"
|
||||
if password:
|
||||
uname = asrv.iacct.get(password, None)
|
||||
|
||||
handler.username = uname
|
||||
|
||||
if password and not uname:
|
||||
raise AuthenticationFailed("Authentication failed.")
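# note on the scheme above: the FTP password is the lookup key (asrv.iacct maps
# password -> username), so the supplied FTP username is not what selects the
# account; an empty password or an "anonymous" login falls back to the "*" user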
|
||||
|
||||
def get_home_dir(self, username):
|
||||
return "/"
|
||||
|
||||
def has_user(self, username):
|
||||
asrv = self.hub.asrv
|
||||
return username in asrv.acct
|
||||
|
||||
def has_perm(self, username, perm, path=None):
|
||||
return True # handled at filesystem layer
|
||||
|
||||
def get_perms(self, username):
|
||||
return "elradfmwMT"
|
||||
|
||||
def get_msg_login(self, username):
|
||||
return "sup {}".format(username)
|
||||
|
||||
def get_msg_quit(self, username):
|
||||
return "cya"
|
||||
|
||||
|
||||
class FtpFs(AbstractedFS):
|
||||
def __init__(self, root, cmd_channel):
|
||||
self.h = self.cmd_channel = cmd_channel # type: FTPHandler
|
||||
self.hub = cmd_channel.hub # type: SvcHub
|
||||
self.args = cmd_channel.args
|
||||
|
||||
self.uname = self.hub.asrv.iacct.get(cmd_channel.password, "*")
|
||||
|
||||
self.cwd = "/" # pyftpdlib convention of leading slash
|
||||
self.root = "/var/lib/empty"
|
||||
|
||||
self.listdirinfo = self.listdir
|
||||
self.chdir(".")
|
||||
|
||||
def v2a(self, vpath, r=False, w=False, m=False, d=False):
|
||||
try:
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
|
||||
if not vfs.realpath:
|
||||
raise FilesystemError("no filesystem mounted at this path")
|
||||
|
||||
return os.path.join(vfs.realpath, rem)
|
||||
except Pebkac as ex:
|
||||
raise FilesystemError(str(ex))
|
||||
|
||||
def rv2a(self, vpath, r=False, w=False, m=False, d=False):
|
||||
return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d)
|
||||
|
||||
def ftp2fs(self, ftppath):
|
||||
# return self.v2a(ftppath)
|
||||
return ftppath # self.cwd must be vpath
|
||||
|
||||
def fs2ftp(self, fspath):
|
||||
# raise NotImplementedError()
|
||||
return fspath
|
||||
|
||||
def validpath(self, path):
|
||||
if "/.hist/" in path:
|
||||
if "/up2k." in path or path.endswith("/dir.txt"):
|
||||
raise FilesystemError("access to this file is forbidden")
|
||||
|
||||
return True
|
||||
|
||||
def open(self, filename, mode):
|
||||
r = "r" in mode
|
||||
w = "w" in mode or "a" in mode or "+" in mode
|
||||
|
||||
ap = self.rv2a(filename, r, w)
|
||||
if w and bos.path.exists(ap):
|
||||
raise FilesystemError("cannot open existing file for writing")
|
||||
|
||||
self.validpath(ap)
|
||||
return open(fsenc(ap), mode)
|
||||
|
||||
def chdir(self, path):
|
||||
self.cwd = join(self.cwd, path)
|
||||
x = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username)
|
||||
self.can_read, self.can_write, self.can_move, self.can_delete, self.can_get = x
|
||||
|
||||
def mkdir(self, path):
|
||||
ap = self.rv2a(path, w=True)
|
||||
bos.mkdir(ap)
|
||||
|
||||
def listdir(self, path):
|
||||
vpath = join(self.cwd, path).lstrip("/")
|
||||
try:
|
||||
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, True, False)
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = vfs.ls(
|
||||
rem, self.uname, not self.args.no_scandir, [[True], [False, True]]
|
||||
)
|
||||
vfs_ls = [x[0] for x in vfs_ls]
|
||||
vfs_ls.extend(vfs_virt.keys())
|
||||
|
||||
if not self.args.ed:
|
||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||
|
||||
vfs_ls.sort()
|
||||
return vfs_ls
|
||||
except Exception as ex:
|
||||
if vpath:
|
||||
# display write-only folders as empty
|
||||
return []
|
||||
|
||||
# return list of volumes
|
||||
r = {x.split("/")[0]: 1 for x in self.hub.asrv.vfs.all_vols.keys()}
|
||||
return list(sorted(list(r.keys())))
|
||||
|
||||
def rmdir(self, path):
|
||||
ap = self.rv2a(path, d=True)
|
||||
bos.rmdir(ap)
|
||||
|
||||
def remove(self, path):
|
||||
if self.args.no_del:
|
||||
raise FilesystemError("the delete feature is disabled in server config")
|
||||
|
||||
vp = join(self.cwd, path).lstrip("/")
|
||||
x = self.hub.broker.put(
|
||||
True, "up2k.handle_rm", self.uname, self.h.remote_ip, [vp]
|
||||
)
|
||||
|
||||
try:
|
||||
x.get()
|
||||
except Exception as ex:
|
||||
raise FilesystemError(str(ex))
|
||||
|
||||
def rename(self, src, dst):
|
||||
if not self.can_move:
|
||||
raise FilesystemError("not allowed for user " + self.h.username)
|
||||
|
||||
if self.args.no_mv:
|
||||
m = "the rename/move feature is disabled in server config"
|
||||
raise FilesystemError(m)
|
||||
|
||||
svp = join(self.cwd, src).lstrip("/")
|
||||
dvp = join(self.cwd, dst).lstrip("/")
|
||||
x = self.hub.broker.put(True, "up2k.handle_mv", self.uname, svp, dvp)
|
||||
try:
|
||||
x.get()
|
||||
except Exception as ex:
|
||||
raise FilesystemError(str(ex))
|
||||
|
||||
def chmod(self, path, mode):
|
||||
pass
|
||||
|
||||
def stat(self, path):
|
||||
try:
|
||||
ap = self.rv2a(path, r=True)
|
||||
return bos.stat(ap)
|
||||
except:
|
||||
ap = self.rv2a(path)
|
||||
st = bos.stat(ap)
|
||||
if not stat.S_ISDIR(st.st_mode):
|
||||
raise
|
||||
|
||||
return st
|
||||
|
||||
def utime(self, path, timeval):
|
||||
ap = self.rv2a(path, w=True)
|
||||
return bos.utime(ap, (timeval, timeval))
|
||||
|
||||
def lstat(self, path):
|
||||
ap = self.rv2a(path)
|
||||
return bos.lstat(ap)
|
||||
|
||||
def isfile(self, path):
|
||||
st = self.stat(path)
|
||||
return stat.S_ISREG(st.st_mode)
|
||||
|
||||
def islink(self, path):
|
||||
ap = self.rv2a(path)
|
||||
return bos.path.islink(ap)
|
||||
|
||||
def isdir(self, path):
|
||||
try:
|
||||
st = self.stat(path)
|
||||
return stat.S_ISDIR(st.st_mode)
|
||||
except:
|
||||
return True
|
||||
|
||||
def getsize(self, path):
|
||||
ap = self.rv2a(path)
|
||||
return bos.path.getsize(ap)
|
||||
|
||||
def getmtime(self, path):
|
||||
ap = self.rv2a(path)
|
||||
return bos.path.getmtime(ap)
|
||||
|
||||
def realpath(self, path):
|
||||
return path
|
||||
|
||||
def lexists(self, path):
|
||||
ap = self.rv2a(path)
|
||||
return bos.path.lexists(ap)
|
||||
|
||||
def get_user_by_uid(self, uid):
|
||||
return "root"
|
||||
|
||||
def get_group_by_uid(self, gid):
|
||||
return "root"
|
||||
|
||||
|
||||
class FtpHandler(FTPHandler):
|
||||
abstracted_fs = FtpFs
|
||||
|
||||
def __init__(self, conn, server, ioloop=None):
|
||||
if PY2:
|
||||
FTPHandler.__init__(self, conn, server, ioloop)
|
||||
else:
|
||||
super(FtpHandler, self).__init__(conn, server, ioloop)
|
||||
|
||||
# abspath->vpath mapping to resolve log_transfer paths
|
||||
self.vfs_map = {}
|
||||
|
||||
def ftp_STOR(self, file, mode="w"):
|
||||
vp = join(self.fs.cwd, file).lstrip("/")
|
||||
ap = self.fs.v2a(vp)
|
||||
self.vfs_map[ap] = vp
|
||||
# print("ftp_STOR: {} {} => {}".format(vp, mode, ap))
|
||||
ret = FTPHandler.ftp_STOR(self, file, mode)
|
||||
# print("ftp_STOR: {} {} OK".format(vp, mode))
|
||||
return ret
|
||||
|
||||
def log_transfer(self, cmd, filename, receive, completed, elapsed, bytes):
|
||||
ap = filename.decode("utf-8", "replace")
|
||||
vp = self.vfs_map.pop(ap, None)
|
||||
# print("xfer_end: {} => {}".format(ap, vp))
|
||||
if vp:
|
||||
vp, fn = os.path.split(vp)
|
||||
vfs, rem = self.hub.asrv.vfs.get(vp, self.username, False, True)
|
||||
vfs, rem = vfs.get_dbv(rem)
|
||||
self.hub.broker.put(
|
||||
False,
|
||||
"up2k.hash_file",
|
||||
vfs.realpath,
|
||||
vfs.flags,
|
||||
rem,
|
||||
fn,
|
||||
self.remote_ip,
|
||||
time.time(),
|
||||
)
|
||||
|
||||
return FTPHandler.log_transfer(
|
||||
self, cmd, filename, receive, completed, elapsed, bytes
|
||||
)
|
||||
|
||||
|
||||
try:
|
||||
from pyftpdlib.handlers import TLS_FTPHandler
|
||||
|
||||
class SftpHandler(FtpHandler, TLS_FTPHandler):
|
||||
pass
|
||||
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class Ftpd(object):
|
||||
def __init__(self, hub):
|
||||
self.hub = hub
|
||||
self.args = hub.args
|
||||
|
||||
hs = []
|
||||
if self.args.ftp:
|
||||
hs.append([FtpHandler, self.args.ftp])
|
||||
if self.args.ftps:
|
||||
try:
|
||||
h = SftpHandler
|
||||
except:
|
||||
m = "\nftps requires pyopenssl;\nplease run the following:\n\n {} -m pip install --user pyopenssl\n"
|
||||
print(m.format(sys.executable))
|
||||
sys.exit(1)
|
||||
|
||||
h.certfile = os.path.join(E.cfg, "cert.pem")
|
||||
h.tls_control_required = True
|
||||
h.tls_data_required = True
|
||||
|
||||
hs.append([h, self.args.ftps])
|
||||
|
||||
for h in hs:
|
||||
h, lp = h
|
||||
h.hub = hub
|
||||
h.args = hub.args
|
||||
h.authorizer = FtpAuth()
|
||||
h.authorizer.hub = hub
|
||||
|
||||
if self.args.ftp_pr:
|
||||
p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")]
|
||||
if self.args.ftp and self.args.ftps:
|
||||
# divide port range in half
|
||||
d = int((p2 - p1) / 2)
|
||||
if lp == self.args.ftp:
|
||||
p2 = p1 + d
|
||||
else:
|
||||
p1 += d + 1
|
||||
|
||||
h.passive_ports = list(range(p1, p2 + 1))
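# worked example of the split above: --ftp-pr 12000-13000 with both --ftp and
# --ftps gives d = 500, so the plain-FTP handler gets passive ports 12000-12500
# and the FTPS handler gets 12501-13000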
|
||||
|
||||
if self.args.ftp_nat:
|
||||
h.masquerade_address = self.args.ftp_nat
|
||||
|
||||
if self.args.ftp_dbg:
|
||||
config_logging(level=logging.DEBUG)
|
||||
|
||||
ioloop = IOLoop()
|
||||
for ip in self.args.i:
|
||||
for h, lp in hs:
|
||||
FTPServer((ip, int(lp)), h, ioloop)
|
||||
|
||||
t = threading.Thread(target=ioloop.loop)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
|
||||
def join(p1, p2):
|
||||
w = os.path.join(p1, p2.replace("\\", "/"))
|
||||
return os.path.normpath(w).replace("\\", "/")
|
||||
File diff suppressed because it is too large
@@ -32,12 +32,14 @@ class HttpConn(object):
|
||||
self.addr = addr
|
||||
self.hsrv = hsrv
|
||||
|
||||
self.mutex = hsrv.mutex
|
||||
self.args = hsrv.args
|
||||
self.asrv = hsrv.asrv
|
||||
self.cert_path = hsrv.cert_path
|
||||
self.u2fh = hsrv.u2fh
|
||||
|
||||
enth = HAVE_PIL and not self.args.no_thumb
|
||||
self.thumbcli = ThumbCli(hsrv.broker) if enth else None
|
||||
self.thumbcli = ThumbCli(hsrv) if enth else None
|
||||
self.ico = Ico(self.args)
|
||||
|
||||
self.t0 = time.time()
|
||||
|
||||
@@ -27,7 +27,7 @@ except ImportError:
|
||||
sys.exit(1)
|
||||
|
||||
from .__init__ import E, PY2, MACOS
|
||||
from .util import spack, min_ex, start_stackmon, start_log_thrs
|
||||
from .util import FHC, spack, min_ex, start_stackmon, start_log_thrs
|
||||
from .bos import bos
|
||||
from .httpconn import HttpConn
|
||||
|
||||
@@ -50,7 +50,9 @@ class HttpSrv(object):
|
||||
self.log = broker.log
|
||||
self.asrv = broker.asrv
|
||||
|
||||
self.name = "httpsrv" + ("-n{}-i{:x}".format(nid, os.getpid()) if nid else "")
|
||||
nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
|
||||
|
||||
self.name = "hsrv" + nsuf
|
||||
self.mutex = threading.Lock()
|
||||
self.stopping = False
|
||||
|
||||
@@ -58,7 +60,9 @@ class HttpSrv(object):
|
||||
self.tp_ncli = 0 # fading
|
||||
self.tp_time = None # latest worker collect
|
||||
self.tp_q = None if self.args.no_htp else queue.LifoQueue()
|
||||
self.t_periodic = None
|
||||
|
||||
self.u2fh = FHC()
|
||||
self.srvs = []
|
||||
self.ncli = 0 # exact
|
||||
self.clients = {} # laggy
|
||||
@@ -72,6 +76,7 @@ class HttpSrv(object):
|
||||
x: env.get_template(x + ".html")
|
||||
for x in ["splash", "browser", "browser2", "msg", "md", "mde"]
|
||||
}
|
||||
self.prism = os.path.exists(os.path.join(E.mod, "web", "deps", "prism.js.gz"))
|
||||
|
||||
cert_path = os.path.join(E.cfg, "cert.pem")
|
||||
if bos.path.exists(cert_path):
|
||||
@@ -82,11 +87,6 @@ class HttpSrv(object):
|
||||
if self.tp_q:
|
||||
self.start_threads(4)
|
||||
|
||||
name = "httpsrv-scaler" + ("-{}".format(nid) if nid else "")
|
||||
t = threading.Thread(target=self.thr_scaler, name=name)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
if nid:
|
||||
if self.args.stackmon:
|
||||
start_stackmon(self.args.stackmon, nid)
|
||||
@@ -115,13 +115,19 @@ class HttpSrv(object):
|
||||
for _ in range(n):
|
||||
self.tp_q.put(None)
|
||||
|
||||
def thr_scaler(self):
|
||||
def periodic(self):
|
||||
while True:
|
||||
time.sleep(2 if self.tp_ncli else 30)
|
||||
time.sleep(2 if self.tp_ncli or self.ncli else 10)
|
||||
with self.mutex:
|
||||
self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
|
||||
if self.tp_nthr > self.tp_ncli + 8:
|
||||
self.stop_threads(4)
|
||||
self.u2fh.clean()
|
||||
if self.tp_q:
|
||||
self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
|
||||
if self.tp_nthr > self.tp_ncli + 8:
|
||||
self.stop_threads(4)
|
||||
|
||||
if not self.ncli and not self.u2fh.cache and self.tp_nthr <= 8:
|
||||
self.t_periodic = None
|
||||
return
|
||||
|
||||
def listen(self, sck, nlisteners):
|
||||
ip, port = sck.getsockname()
|
||||
@@ -141,7 +147,12 @@ class HttpSrv(object):
|
||||
fno = srv_sck.fileno()
|
||||
msg = "subscribed @ {}:{} f{}".format(ip, port, fno)
|
||||
self.log(self.name, msg)
|
||||
self.broker.put(False, "cb_httpsrv_up")
|
||||
|
||||
def fun():
|
||||
self.broker.put(False, "cb_httpsrv_up")
|
||||
|
||||
threading.Thread(target=fun).start()
|
||||
|
||||
while not self.stopping:
|
||||
if self.args.log_conn:
|
||||
self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30")
|
||||
@@ -181,6 +192,16 @@ class HttpSrv(object):
|
||||
|
||||
with self.mutex:
|
||||
self.ncli += 1
|
||||
if not self.t_periodic:
|
||||
name = "hsrv-pt"
|
||||
if self.nid:
|
||||
name += "-{}".format(self.nid)
|
||||
|
||||
t = threading.Thread(target=self.periodic, name=name)
|
||||
self.t_periodic = t
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
if self.tp_q:
|
||||
self.tp_time = self.tp_time or now
|
||||
self.tp_ncli = max(self.tp_ncli, self.ncli)
|
||||
|
||||
@@ -8,7 +8,7 @@ import shutil
|
||||
import subprocess as sp
|
||||
|
||||
from .__init__ import PY2, WINDOWS, unicode
|
||||
from .util import fsenc, fsdec, uncyg, REKOBO_LKEY
|
||||
from .util import fsenc, fsdec, uncyg, runcmd, REKOBO_LKEY
|
||||
from .bos import bos
|
||||
|
||||
|
||||
@@ -73,7 +73,7 @@ class MParser(object):
|
||||
raise Exception()
|
||||
|
||||
|
||||
def ffprobe(abspath):
|
||||
def ffprobe(abspath, timeout=10):
|
||||
cmd = [
|
||||
b"ffprobe",
|
||||
b"-hide_banner",
|
||||
@@ -82,10 +82,8 @@ def ffprobe(abspath):
|
||||
b"--",
|
||||
fsenc(abspath),
|
||||
]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
r = p.communicate()
|
||||
txt = r[0].decode("utf-8", "replace")
|
||||
return parse_ffprobe(txt)
|
||||
rc = runcmd(cmd, timeout=timeout)
|
||||
return parse_ffprobe(rc[1])
|
||||
|
||||
|
||||
def parse_ffprobe(txt):
|
||||
@@ -413,11 +411,15 @@ class MTag(object):
|
||||
return r1
|
||||
|
||||
def get_mutagen(self, abspath):
|
||||
if not bos.path.isfile(abspath):
|
||||
return {}
|
||||
|
||||
import mutagen
|
||||
|
||||
try:
|
||||
md = mutagen.File(fsenc(abspath), easy=True)
|
||||
x = md.info.length
|
||||
if not md.info.length and not md.info.codec:
|
||||
raise Exception()
|
||||
except Exception as ex:
|
||||
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
|
||||
|
||||
@@ -458,10 +460,16 @@ class MTag(object):
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_ffprobe(self, abspath):
|
||||
if not bos.path.isfile(abspath):
|
||||
return {}
|
||||
|
||||
ret, md = ffprobe(abspath)
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_bin(self, parsers, abspath):
|
||||
if not bos.path.isfile(abspath):
|
||||
return {}
|
||||
|
||||
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||
pypath = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||
pypath = str(os.pathsep.join(pypath))
|
||||
@@ -471,7 +479,10 @@ class MTag(object):
|
||||
ret = {}
|
||||
for tagname, mp in parsers.items():
|
||||
try:
|
||||
cmd = [sys.executable, mp.bin, abspath]
|
||||
cmd = [mp.bin, abspath]
|
||||
if mp.bin.endswith(".py"):
|
||||
cmd = [sys.executable] + cmd
|
||||
|
||||
args = {"env": env, "timeout": mp.timeout}
|
||||
|
||||
if WINDOWS:
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
This is Victor Stinner's pure-Python implementation of PEP 383: the "surrogateescape" error
|
||||
handler of Python 3.
|
||||
@@ -171,7 +173,7 @@ FS_ENCODING = sys.getfilesystemencoding()
|
||||
|
||||
if WINDOWS and not PY3:
|
||||
# py2 thinks win* is mbcs, probably a bug? anyways this works
|
||||
FS_ENCODING = 'utf-8'
|
||||
FS_ENCODING = "utf-8"
|
||||
|
||||
|
||||
# normalize the filesystem encoding name.
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
@@ -19,6 +18,7 @@ from .authsrv import AuthSrv
|
||||
from .tcpsrv import TcpSrv
|
||||
from .up2k import Up2k
|
||||
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
|
||||
|
||||
|
||||
class SvcHub(object):
|
||||
@@ -37,8 +37,11 @@ class SvcHub(object):
|
||||
self.argv = argv
|
||||
self.logf = None
|
||||
self.stop_req = False
|
||||
self.reload_req = False
|
||||
self.stopping = False
|
||||
self.reloading = False
|
||||
self.stop_cond = threading.Condition()
|
||||
self.retcode = 0
|
||||
self.httpsrv_up = 0
|
||||
|
||||
self.log_mutex = threading.Lock()
|
||||
@@ -54,6 +57,19 @@ class SvcHub(object):
|
||||
if args.log_thrs:
|
||||
start_log_thrs(self.log, args.log_thrs, 0)
|
||||
|
||||
if not args.use_fpool and args.j != 1:
|
||||
args.no_fpool = True
|
||||
m = "multithreading enabled with -j {}, so disabling fpool -- this can reduce upload performance on some filesystems"
|
||||
self.log("root", m.format(args.j))
|
||||
|
||||
if not args.no_fpool and args.j != 1:
|
||||
m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
|
||||
if ANYWIN:
|
||||
m = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
|
||||
args.no_fpool = True
|
||||
|
||||
self.log("root", m, c=3)
|
||||
|
||||
# initiate all services to manage
|
||||
self.asrv = AuthSrv(self.args, self.log)
|
||||
if args.ls:
|
||||
@@ -77,31 +93,57 @@ class SvcHub(object):
|
||||
"thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3
|
||||
)
|
||||
|
||||
if not args.no_acode and args.no_thumb:
|
||||
msg = "setting --no-acode because --no-thumb (sorry)"
|
||||
self.log("thumb", msg, c=6)
|
||||
args.no_acode = True
|
||||
|
||||
if not args.no_acode and (not HAVE_FFMPEG or not HAVE_FFPROBE):
|
||||
msg = "setting --no-acode because either FFmpeg or FFprobe is not available"
|
||||
self.log("thumb", msg, c=6)
|
||||
args.no_acode = True
|
||||
|
||||
args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)
|
||||
|
||||
if args.ftp or args.ftps:
|
||||
from .ftpd import Ftpd
|
||||
|
||||
self.ftpd = Ftpd(self)
|
||||
|
||||
# decide which worker impl to use
|
||||
if self.check_mp_enable():
|
||||
from .broker_mp import BrokerMp as Broker
|
||||
else:
|
||||
self.log("root", "cannot efficiently use multiple CPU cores")
|
||||
from .broker_thr import BrokerThr as Broker
|
||||
|
||||
self.broker = Broker(self)
|
||||
|
||||
def thr_httpsrv_up(self):
|
||||
time.sleep(5)
|
||||
failed = self.broker.num_workers - self.httpsrv_up
|
||||
expected = self.broker.num_workers * self.tcpsrv.nsrv
|
||||
failed = expected - self.httpsrv_up
|
||||
if not failed:
|
||||
return
|
||||
|
||||
if self.args.ign_ebind_all:
|
||||
return
|
||||
|
||||
if self.args.ign_ebind and self.tcpsrv.srv:
|
||||
return
|
||||
|
||||
m = "{}/{} workers failed to start"
|
||||
m = m.format(failed, self.broker.num_workers)
|
||||
m = m.format(failed, expected)
|
||||
self.log("root", m, 1)
|
||||
os._exit(1)
|
||||
|
||||
self.retcode = 1
|
||||
os.kill(os.getpid(), signal.SIGTERM)
|
||||
|
||||
def cb_httpsrv_up(self):
|
||||
self.httpsrv_up += 1
|
||||
if self.httpsrv_up != self.broker.num_workers:
|
||||
return
|
||||
|
||||
time.sleep(0.1) # purely cosmetic dw
|
||||
self.log("root", "workers OK\n")
|
||||
self.up2k.init_vols()
|
||||
|
||||
@@ -162,7 +204,11 @@ class SvcHub(object):
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
for sig in [signal.SIGINT, signal.SIGTERM]:
|
||||
sigs = [signal.SIGINT, signal.SIGTERM]
|
||||
if not ANYWIN:
|
||||
sigs.append(signal.SIGUSR1)
|
||||
|
||||
for sig in sigs:
|
||||
signal.signal(sig, self.signal_handler)
|
||||
|
||||
# macos hangs after shutdown on sigterm with while-sleep,
|
||||
@@ -186,18 +232,45 @@ class SvcHub(object):
|
||||
else:
|
||||
self.stop_thr()
|
||||
|
||||
def reload(self):
|
||||
if self.reloading:
|
||||
return "cannot reload; already in progress"
|
||||
|
||||
self.reloading = True
|
||||
t = threading.Thread(target=self._reload)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
return "reload initiated"
|
||||
|
||||
def _reload(self):
|
||||
self.log("root", "reload scheduled")
|
||||
with self.up2k.mutex:
|
||||
self.asrv.reload()
|
||||
self.up2k.reload()
|
||||
self.broker.reload()
|
||||
|
||||
self.reloading = False
|
||||
|
||||
def stop_thr(self):
|
||||
while not self.stop_req:
|
||||
with self.stop_cond:
|
||||
self.stop_cond.wait(9001)
|
||||
|
||||
if self.reload_req:
|
||||
self.reload_req = False
|
||||
self.reload()
|
||||
|
||||
self.shutdown()
|
||||
|
||||
def signal_handler(self, sig, frame):
|
||||
if self.stopping:
|
||||
return
|
||||
|
||||
self.stop_req = True
|
||||
if sig == signal.SIGUSR1:
|
||||
self.reload_req = True
|
||||
else:
|
||||
self.stop_req = True
|
||||
|
||||
with self.stop_cond:
|
||||
self.stop_cond.notify_all()
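# practical effect of the branch above: on non-windows hosts a running copyparty
# can be told to reload accounts/volumes without restarting, for example
#   kill -USR1 $(pidof copyparty)   # hypothetical way of finding the pid
# stop_thr() then notices reload_req and calls self.reload()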
|
||||
|
||||
@@ -205,6 +278,8 @@ class SvcHub(object):
|
||||
if self.stopping:
|
||||
return
|
||||
|
||||
# start_log_thrs(print, 0.1, 1)
|
||||
|
||||
self.stopping = True
|
||||
self.stop_req = True
|
||||
with self.stop_cond:
|
||||
@@ -230,8 +305,12 @@ class SvcHub(object):
|
||||
print("waiting for thumbsrv (10sec)...")
|
||||
|
||||
print("nailed it", end="")
|
||||
ret = 0
|
||||
ret = self.retcode
|
||||
finally:
|
||||
if self.args.wintitle:
|
||||
print("\033]0;\033\\", file=sys.stderr, end="")
|
||||
sys.stderr.flush()
|
||||
|
||||
print("\033[0m")
|
||||
if self.logf:
|
||||
self.logf.close()
|
||||
@@ -327,10 +406,10 @@ class SvcHub(object):
|
||||
|
||||
def check_mp_enable(self):
|
||||
if self.args.j == 1:
|
||||
self.log("root", "multiprocessing disabled by argument -j 1;")
|
||||
return False
|
||||
|
||||
if mp.cpu_count() <= 1:
|
||||
self.log("svchub", "only one CPU detected; multiprocessing disabled")
|
||||
return False
|
||||
|
||||
try:
|
||||
@@ -345,6 +424,7 @@ class SvcHub(object):
|
||||
return True
|
||||
else:
|
||||
self.log("svchub", err)
|
||||
self.log("svchub", "cannot efficiently use multiple CPU cores")
|
||||
return False
|
||||
|
||||
def sd_notify(self):
|
||||
|
||||
@@ -2,9 +2,10 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import sys
|
||||
import socket
|
||||
|
||||
from .__init__ import MACOS, ANYWIN
|
||||
from .__init__ import MACOS, ANYWIN, unicode
|
||||
from .util import chkcmd
|
||||
|
||||
|
||||
@@ -21,6 +22,29 @@ class TcpSrv(object):
|
||||
|
||||
self.stopping = False
|
||||
|
||||
self.srv = []
|
||||
self.nsrv = 0
|
||||
ok = {}
|
||||
for ip in self.args.i:
|
||||
ok[ip] = []
|
||||
for port in self.args.p:
|
||||
self.nsrv += 1
|
||||
try:
|
||||
self._listen(ip, port)
|
||||
ok[ip].append(port)
|
||||
except Exception as ex:
|
||||
if self.args.ign_ebind or self.args.ign_ebind_all:
|
||||
m = "could not listen on {}:{}: {}"
|
||||
self.log("tcpsrv", m.format(ip, port, ex), c=3)
|
||||
else:
|
||||
raise
|
||||
|
||||
if not self.srv and not self.args.ign_ebind_all:
|
||||
raise Exception("could not listen on any of the given interfaces")
|
||||
|
||||
if self.nsrv != len(self.srv):
|
||||
self.log("tcpsrv", "")
|
||||
|
||||
ip = "127.0.0.1"
|
||||
eps = {ip: "local only"}
|
||||
nonlocals = [x for x in self.args.i if x != ip]
|
||||
@@ -31,20 +55,48 @@ class TcpSrv(object):
|
||||
eps[x] = "external"
|
||||
|
||||
msgs = []
|
||||
title_tab = {}
|
||||
title_vars = [x[1:] for x in self.args.wintitle.split(" ") if x.startswith("$")]
|
||||
m = "available @ http://{}:{}/ (\033[33m{}\033[0m)"
|
||||
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
|
||||
for port in sorted(self.args.p):
|
||||
if port not in ok.get(ip, ok.get("0.0.0.0", [])):
|
||||
continue
|
||||
|
||||
msgs.append(m.format(ip, port, desc))
|
||||
|
||||
if not self.args.wintitle:
|
||||
continue
|
||||
|
||||
if port in [80, 443]:
|
||||
ep = ip
|
||||
else:
|
||||
ep = "{}:{}".format(ip, port)
|
||||
|
||||
hits = []
|
||||
if "pub" in title_vars and "external" in unicode(desc):
|
||||
hits.append(("pub", ep))
|
||||
|
||||
if "pub" in title_vars or "all" in title_vars:
|
||||
hits.append(("all", ep))
|
||||
|
||||
for var in title_vars:
|
||||
if var.startswith("ip-") and ep.startswith(var[3:]):
|
||||
hits.append((var, ep))
|
||||
|
||||
for tk, tv in hits:
|
||||
try:
|
||||
title_tab[tk][tv] = 1
|
||||
except:
|
||||
title_tab[tk] = {tv: 1}
|
||||
|
||||
if msgs:
|
||||
msgs[-1] += "\n"
|
||||
for m in msgs:
|
||||
self.log("tcpsrv", m)
|
||||
|
||||
self.srv = []
|
||||
for ip in self.args.i:
|
||||
for port in self.args.p:
|
||||
self.srv.append(self._listen(ip, port))
|
||||
if self.args.wintitle:
|
||||
self._set_wintitle(title_tab)
|
||||
|
||||
def _listen(self, ip, port):
|
||||
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
@@ -52,7 +104,7 @@ class TcpSrv(object):
|
||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
try:
|
||||
srv.bind((ip, port))
|
||||
return srv
|
||||
self.srv.append(srv)
|
||||
except (OSError, socket.error) as ex:
|
||||
if ex.errno in [98, 48]:
|
||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||
@@ -177,7 +229,7 @@ class TcpSrv(object):
|
||||
eps = self.ips_linux()
|
||||
|
||||
if "0.0.0.0" not in listen_ips:
|
||||
eps = {k: v for k, v in eps if k in listen_ips}
|
||||
eps = {k: v for k, v in eps.items() if k in listen_ips}
|
||||
|
||||
default_route = None
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
@@ -211,3 +263,26 @@ class TcpSrv(object):
|
||||
eps[default_route] = desc
|
||||
|
||||
return eps
|
||||
|
||||
    def _set_wintitle(self, vars):
        vars["all"] = vars.get("all", {"Local-Only": 1})
        vars["pub"] = vars.get("pub", vars["all"])

        vars2 = {}
        for k, eps in vars.items():
            vars2[k] = {
                ep: 1
                for ep in eps.keys()
                if ":" not in ep or ep.split(":")[0] not in eps
            }

        title = ""
        vars = vars2
        for p in self.args.wintitle.split(" "):
            if p.startswith("$"):
                p = " and ".join(sorted(vars.get(p[1:], {"(None)": 1}).keys()))

            title += "{} ".format(p)

        print("\033]0;{}\033\\".format(title), file=sys.stderr, end="")
        sys.stderr.flush()
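Worked example of how a wintitle template gets expanded by the logic above; the endpoint values are made up, and the expander below is a trimmed re-implementation for illustration only (the real one also prefers bare IPs over ip:port pairs when both are known):

```python
def expand_wintitle(template, title_tab):
    title_tab = dict(title_tab)
    title_tab["all"] = title_tab.get("all", {"Local-Only": 1})
    title_tab["pub"] = title_tab.get("pub", title_tab["all"])

    parts = []
    for p in template.split(" "):
        if p.startswith("$"):
            p = " and ".join(sorted(title_tab.get(p[1:], {"(None)": 1}).keys()))
        parts.append(p)

    return " ".join(parts)


tab = {"pub": {"192.168.1.5:3923": 1}}
print(expand_wintitle("c @ $pub", tab))  # -> "c @ 192.168.1.5:3923"
print(expand_wintitle("c @ $pub", {}))   # -> "c @ Local-Only"
```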
@@ -4,28 +4,44 @@ from __future__ import print_function, unicode_literals
|
||||
import os
|
||||
|
||||
from .util import Cooldown
|
||||
from .th_srv import thumb_path, THUMBABLE, FMT_FF
|
||||
from .th_srv import thumb_path, THUMBABLE, FMT_FFV, FMT_FFA
|
||||
from .bos import bos
|
||||
|
||||
|
||||
class ThumbCli(object):
|
||||
def __init__(self, broker):
|
||||
self.broker = broker
|
||||
self.args = broker.args
|
||||
self.asrv = broker.asrv
|
||||
def __init__(self, hsrv):
|
||||
self.broker = hsrv.broker
|
||||
self.log_func = hsrv.log
|
||||
self.args = hsrv.args
|
||||
self.asrv = hsrv.asrv
|
||||
|
||||
# cache on both sides for less broker spam
|
||||
self.cooldown = Cooldown(self.args.th_poke)
|
||||
|
||||
def log(self, msg, c=0):
|
||||
self.log_func("thumbcli", msg, c)
|
||||
|
||||
def get(self, ptop, rem, mtime, fmt):
|
||||
ext = rem.rsplit(".")[-1].lower()
|
||||
if ext not in THUMBABLE:
|
||||
return None
|
||||
|
||||
is_vid = ext in FMT_FF
|
||||
is_vid = ext in FMT_FFV
|
||||
if is_vid and self.args.no_vthumb:
|
||||
return None
|
||||
|
||||
want_opus = fmt in ("opus", "caf")
|
||||
is_au = ext in FMT_FFA
|
||||
if is_au:
|
||||
if want_opus:
|
||||
if self.args.no_acode:
|
||||
return None
|
||||
else:
|
||||
if self.args.no_athumb:
|
||||
return None
|
||||
elif want_opus:
|
||||
return None
|
||||
|
||||
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg"]:
|
||||
return os.path.join(ptop, rem)
|
||||
|
||||
@@ -33,10 +49,14 @@ class ThumbCli(object):
|
||||
fmt = "w"
|
||||
|
||||
if fmt == "w":
|
||||
if self.args.th_no_webp or (is_vid and self.args.th_ff_jpg):
|
||||
if self.args.th_no_webp or ((is_vid or is_au) and self.args.th_ff_jpg):
|
||||
fmt = "j"
|
||||
|
||||
histpath = self.asrv.vfs.histtab[ptop]
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
return None
|
||||
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt)
|
||||
ret = None
|
||||
try:
|
||||
@@ -53,6 +73,11 @@ class ThumbCli(object):
|
||||
if self.cooldown.poke(tdir):
|
||||
self.broker.put(False, "thumbsrv.poke", tdir)
|
||||
|
||||
if want_opus:
|
||||
# audio files expire individually
|
||||
if self.cooldown.poke(tpath):
|
||||
self.broker.put(False, "thumbsrv.poke", tpath)
|
||||
|
||||
return ret
|
||||
|
||||
x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt)
|
||||
|
||||
@@ -10,7 +10,7 @@ import threading
|
||||
import subprocess as sp
|
||||
|
||||
from .__init__ import PY2, unicode
|
||||
from .util import fsenc, vsplit, runcmd, Queue, Cooldown, BytesIO, min_ex
|
||||
from .util import fsenc, vsplit, statdir, runcmd, Queue, Cooldown, BytesIO, min_ex
|
||||
from .bos import bos
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
||||
|
||||
@@ -50,7 +50,8 @@ except:
|
||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||
# ffmpeg -formats
|
||||
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
|
||||
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
|
||||
FMT_FFV = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
|
||||
FMT_FFA = "aac m4a ogg opus flac alac mp3 mp2 ac3 dts wma ra wav aif aiff au alaw ulaw mulaw amr gsm ape tak tta wv"
|
||||
|
||||
if HAVE_HEIF:
|
||||
FMT_PIL += " heif heifs heic heics"
|
||||
@@ -58,7 +59,9 @@ if HAVE_HEIF:
|
||||
if HAVE_AVIF:
|
||||
FMT_PIL += " avif avifs"
|
||||
|
||||
FMT_PIL, FMT_FF = [{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FF]]
|
||||
FMT_PIL, FMT_FFV, FMT_FFA = [
|
||||
{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FFV, FMT_FFA]
|
||||
]
|
||||
|
||||
|
||||
THUMBABLE = {}
|
||||
@@ -67,7 +70,8 @@ if HAVE_PIL:
|
||||
THUMBABLE.update(FMT_PIL)
|
||||
|
||||
if HAVE_FFMPEG and HAVE_FFPROBE:
|
||||
THUMBABLE.update(FMT_FF)
|
||||
THUMBABLE.update(FMT_FFV)
|
||||
THUMBABLE.update(FMT_FFA)
|
||||
|
||||
|
||||
def thumb_path(histpath, rem, mtime, fmt):
@@ -86,9 +90,13 @@ def thumb_path(histpath, rem, mtime, fmt):
    h = hashlib.sha512(fsenc(fn)).digest()
    fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]

    return "{}/th/{}/{}.{:x}.{}".format(
        histpath, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
    )
    if fmt in ("opus", "caf"):
        cat = "ac"
    else:
        fmt = "webp" if fmt == "w" else "jpg"
        cat = "th"

    return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt)
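For illustration, deriving a cache path the way thumb_path does after this change: the filename is hashed, and audio transcodes land in ac/ while picture thumbnails stay in th/. The sample inputs are hypothetical, the real fsenc helper is replaced with a plain encode, and the real function also mangles the directory component, which is omitted here:

```python
import base64
import hashlib


def demo_thumb_path(histpath, rem, mtime, fmt):
    rd, _, fn = rem.rpartition("/")
    h = hashlib.sha512(fn.encode("utf-8", "replace")).digest()
    fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]

    if fmt in ("opus", "caf"):
        cat = "ac"  # transcoded audio
    else:
        fmt = "webp" if fmt == "w" else "jpg"
        cat = "th"  # picture thumbnails

    return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt)


print(demo_thumb_path("/srv/.hist", "music/song.flac", 1621000000, "opus"))
# -> /srv/.hist/ac/music/<24-char-hash>.<mtime-hex>.opus
```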
class ThumbSrv(object):
|
||||
@@ -105,9 +113,7 @@ class ThumbSrv(object):
|
||||
self.mutex = threading.Lock()
|
||||
self.busy = {}
|
||||
self.stopping = False
|
||||
self.nthr = self.args.th_mt
|
||||
if not self.nthr:
|
||||
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
self.nthr = max(1, self.args.th_mt)
|
||||
|
||||
self.q = Queue(self.nthr * 4)
|
||||
for n in range(self.nthr):
|
||||
@@ -117,7 +123,8 @@ class ThumbSrv(object):
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE):
|
||||
want_ff = not self.args.no_vthumb or not self.args.no_athumb
|
||||
if want_ff and (not HAVE_FFMPEG or not HAVE_FFPROBE):
|
||||
missing = []
|
||||
if not HAVE_FFMPEG:
|
||||
missing.append("FFmpeg")
|
||||
@@ -125,12 +132,12 @@ class ThumbSrv(object):
|
||||
if not HAVE_FFPROBE:
|
||||
missing.append("FFprobe")
|
||||
|
||||
msg = "cannot create video thumbnails because some of the required programs are not available: "
|
||||
msg = "cannot create audio/video thumbnails because some of the required programs are not available: "
|
||||
msg += ", ".join(missing)
|
||||
self.log(msg, c=3)
|
||||
|
||||
if self.args.th_clean:
|
||||
t = threading.Thread(target=self.cleaner, name="thumb-cleaner")
|
||||
t = threading.Thread(target=self.cleaner, name="thumb.cln")
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
@@ -147,7 +154,11 @@ class ThumbSrv(object):
|
||||
return not self.nthr
|
||||
|
||||
def get(self, ptop, rem, mtime, fmt):
|
||||
histpath = self.asrv.vfs.histtab[ptop]
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
return None
|
||||
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt)
|
||||
abspath = os.path.join(ptop, rem)
|
||||
cond = threading.Condition(self.mutex)
|
||||
@@ -183,6 +194,7 @@ class ThumbSrv(object):
|
||||
try:
|
||||
st = bos.stat(tpath)
|
||||
if st.st_size:
|
||||
self.poke(tpath)
|
||||
return tpath
|
||||
except:
|
||||
pass
|
||||
@@ -201,8 +213,13 @@ class ThumbSrv(object):
|
||||
if not bos.path.exists(tpath):
|
||||
if ext in FMT_PIL:
|
||||
fun = self.conv_pil
|
||||
elif ext in FMT_FF:
|
||||
elif ext in FMT_FFV:
|
||||
fun = self.conv_ffmpeg
|
||||
elif ext in FMT_FFA:
|
||||
if tpath.endswith(".opus") or tpath.endswith(".caf"):
|
||||
fun = self.conv_opus
|
||||
else:
|
||||
fun = self.conv_spec
|
||||
|
||||
if fun:
|
||||
try:
|
||||
@@ -270,13 +287,14 @@ class ThumbSrv(object):
|
||||
fmts += ["RGBA", "LA"]
|
||||
args["method"] = 6
|
||||
else:
|
||||
pass # default q = 75
|
||||
# default q = 75
|
||||
args["progressive"] = True
|
||||
|
||||
if im.mode not in fmts:
|
||||
# print("conv {}".format(im.mode))
|
||||
im = im.convert("RGB")
|
||||
|
||||
im.save(tpath, quality=40, method=6)
|
||||
im.save(tpath, **args)
|
||||
|
||||
def conv_ffmpeg(self, abspath, tpath):
|
||||
ret, _ = ffprobe(abspath)
|
||||
@@ -327,25 +345,116 @@ class ThumbSrv(object):
        ]

        cmd += [fsenc(tpath)]
        # self.log((b" ".join(cmd)).decode("utf-8"))
        self._run_ff(cmd)

        ret, sout, serr = runcmd(cmd)
    def _run_ff(self, cmd):
        # self.log((b" ".join(cmd)).decode("utf-8"))
        ret, sout, serr = runcmd(cmd, timeout=self.args.th_convt)
        if ret != 0:
            m = "FFmpeg failed (probably a corrupt video file):\n"
            m += "\n".join(["ff: {}".format(x) for x in serr.split("\n")])
            self.log(m, c="1;30")
            raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
def conv_spec(self, abspath, tpath):
|
||||
ret, _ = ffprobe(abspath)
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
fc = "[0:a:0]aresample=48000{},showspectrumpic=s=640x512,crop=780:544:70:50[o]"
|
||||
|
||||
if self.args.th_ff_swr:
|
||||
fco = ":filter_size=128:cutoff=0.877"
|
||||
else:
|
||||
fco = ":resampler=soxr"
|
||||
|
||||
fc = fc.format(fco)
|
||||
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath),
|
||||
b"-filter_complex", fc.encode("utf-8"),
|
||||
b"-map", b"[o]"
|
||||
]
|
||||
# fmt: on
|
||||
|
||||
if tpath.endswith(".jpg"):
|
||||
cmd += [
|
||||
b"-q:v",
|
||||
b"6", # default=??
|
||||
]
|
||||
else:
|
||||
cmd += [
|
||||
b"-q:v",
|
||||
b"50", # default=75
|
||||
b"-compression_level:v",
|
||||
b"6", # default=4, 0=fast, 6=max
|
||||
]
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
|
||||
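The spectrogram filtergraph in conv_spec is assembled from a template; depending on th_ff_swr it ends up as one of two strings. A tiny check of that formatting step (the flag name and filter strings are taken from the diff, the loop is just for demonstration):

```python
fc = "[0:a:0]aresample=48000{},showspectrumpic=s=640x512,crop=780:544:70:50[o]"

for th_ff_swr in (True, False):
    fco = ":filter_size=128:cutoff=0.877" if th_ff_swr else ":resampler=soxr"
    print(fc.format(fco))

# [0:a:0]aresample=48000:filter_size=128:cutoff=0.877,...  (swresample tuning)
# [0:a:0]aresample=48000:resampler=soxr,...                (soxr resampler)
```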
def conv_opus(self, abspath, tpath):
|
||||
if self.args.no_acode:
|
||||
raise Exception("disabled in server config")
|
||||
|
||||
ret, _ = ffprobe(abspath)
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
|
||||
want_caf = tpath.endswith(".caf")
|
||||
tmp_opus = tpath
|
||||
if want_caf:
|
||||
tmp_opus = tpath.rsplit(".", 1)[0] + ".opus"
|
||||
|
||||
if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)):
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath),
|
||||
b"-map_metadata", b"-1",
|
||||
b"-map", b"0:a:0",
|
||||
b"-c:a", b"libopus",
|
||||
b"-b:a", b"128k",
|
||||
fsenc(tmp_opus)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
|
||||
if want_caf:
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath if src_opus else tmp_opus),
|
||||
b"-map_metadata", b"-1",
|
||||
b"-map", b"0:a:0",
|
||||
b"-c:a", b"copy",
|
||||
b"-f", b"caf",
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
|
||||
def poke(self, tdir):
|
||||
if not self.poke_cd.poke(tdir):
|
||||
return
|
||||
|
||||
ts = int(time.time())
|
||||
try:
|
||||
p1 = os.path.dirname(tdir)
|
||||
p2 = os.path.dirname(p1)
|
||||
for dp in [tdir, p1, p2]:
|
||||
bos.utime(dp, (ts, ts))
|
||||
for _ in range(4):
|
||||
bos.utime(tdir, (ts, ts))
|
||||
tdir = os.path.dirname(tdir)
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -365,25 +474,36 @@ class ThumbSrv(object):
|
||||
self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
|
||||
|
||||
def clean(self, histpath):
|
||||
thumbpath = os.path.join(histpath, "th")
|
||||
ret = 0
|
||||
for cat in ["th", "ac"]:
|
||||
ret += self._clean(histpath, cat, None)
|
||||
|
||||
return ret
|
||||
|
||||
def _clean(self, histpath, cat, thumbpath):
|
||||
if not thumbpath:
|
||||
thumbpath = os.path.join(histpath, cat)
|
||||
|
||||
# self.log("cln {}".format(thumbpath))
|
||||
maxage = self.args.th_maxage
|
||||
exts = ["jpg", "webp"] if cat == "th" else ["opus", "caf"]
|
||||
maxage = getattr(self.args, cat + "_maxage")
|
||||
now = time.time()
|
||||
prev_b64 = None
|
||||
prev_fp = None
|
||||
try:
|
||||
ents = bos.listdir(thumbpath)
|
||||
ents = statdir(self.log, not self.args.no_scandir, False, thumbpath)
|
||||
ents = sorted(list(ents))
|
||||
except:
|
||||
return 0
|
||||
|
||||
ndirs = 0
|
||||
for f in sorted(ents):
|
||||
for f, inf in ents:
|
||||
fp = os.path.join(thumbpath, f)
|
||||
cmp = fp.lower().replace("\\", "/")
|
||||
|
||||
# "top" or b64 prefix/full (a folder)
|
||||
if len(f) <= 3 or len(f) == 24:
|
||||
age = now - bos.path.getmtime(fp)
|
||||
age = now - inf.st_mtime
|
||||
if age > maxage:
|
||||
with self.mutex:
|
||||
safe = True
|
||||
@@ -397,16 +517,15 @@ class ThumbSrv(object):
|
||||
self.log("rm -rf [{}]".format(fp))
|
||||
shutil.rmtree(fp, ignore_errors=True)
|
||||
else:
|
||||
ndirs += self.clean(fp)
|
||||
self._clean(histpath, cat, fp)
|
||||
|
||||
continue
|
||||
|
||||
# thumb file
|
||||
try:
|
||||
b64, ts, ext = f.split(".")
|
||||
if len(b64) != 24 or len(ts) != 8 or ext not in ["jpg", "webp"]:
|
||||
if len(b64) != 24 or len(ts) != 8 or ext not in exts:
|
||||
raise Exception()
|
||||
|
||||
ts = int(ts, 16)
|
||||
except:
|
||||
if f != "dir.txt":
|
||||
self.log("foreign file in thumbs dir: [{}]".format(fp), 1)
|
||||
@@ -417,6 +536,10 @@ class ThumbSrv(object):
|
||||
self.log("rm replaced [{}]".format(fp))
|
||||
bos.unlink(prev_fp)
|
||||
|
||||
if cat != "th" and inf.st_mtime + maxage < now:
|
||||
self.log("rm expired [{}]".format(fp))
|
||||
bos.unlink(fp)
|
||||
|
||||
prev_b64 = b64
|
||||
prev_fp = fp
|
||||
|
||||
|
||||
@@ -6,9 +6,10 @@ import os
|
||||
import time
|
||||
import threading
|
||||
from datetime import datetime
|
||||
from operator import itemgetter
|
||||
|
||||
from .__init__ import unicode
|
||||
from .util import s3dec, Pebkac, min_ex
|
||||
from .__init__ import ANYWIN, unicode
|
||||
from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey, quotep
|
||||
from .bos import bos
|
||||
from .up2k import up2k_wark_from_hashlist
|
||||
|
||||
@@ -50,11 +51,11 @@ class U2idx(object):
|
||||
fhash = body["hash"]
|
||||
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
|
||||
|
||||
uq = "where substr(w,1,16) = ? and w = ?"
|
||||
uq = "substr(w,1,16) = ? and w = ?"
|
||||
uv = [wark[:16], wark]
|
||||
|
||||
try:
|
||||
return self.run_query(vols, uq, uv)[0]
|
||||
return self.run_query(vols, uq, uv, True, False)[0]
|
||||
except:
|
||||
raise Pebkac(500, min_ex())
|
||||
|
||||
@@ -66,7 +67,11 @@ class U2idx(object):
|
||||
if cur:
|
||||
return cur
|
||||
|
||||
histpath = self.asrv.vfs.histtab[ptop]
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
return None
|
||||
|
||||
db_path = os.path.join(histpath, "up2k.db")
|
||||
if not bos.path.exists(db_path):
|
||||
return None
|
||||
@@ -82,17 +87,16 @@ class U2idx(object):
|
||||
|
||||
q = ""
|
||||
va = []
|
||||
joins = ""
|
||||
have_up = False # query has up.* operands
|
||||
have_mt = False
|
||||
is_key = True
|
||||
is_size = False
|
||||
is_date = False
|
||||
field_end = "" # closing parenthesis or whatever
|
||||
kw_key = ["(", ")", "and ", "or ", "not "]
|
||||
kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "]
|
||||
ptn_mt = re.compile(r"^\.?[a-z_-]+$")
|
||||
mt_ctr = 0
|
||||
mt_keycmp = "substr(up.w,1,16)"
|
||||
mt_keycmp2 = None
|
||||
ptn_lc = re.compile(r" (mt[0-9]+\.v) ([=<!>]+) \? $")
|
||||
ptn_lc = re.compile(r" (mt\.v) ([=<!>]+) \? \) $")
|
||||
ptn_lcv = re.compile(r"[a-zA-Z]")
|
||||
|
||||
while True:
|
||||
@@ -112,35 +116,47 @@ class U2idx(object):
|
||||
if ok:
|
||||
continue
|
||||
|
||||
v, uq = (uq + " ").split(" ", 1)
|
||||
if uq.startswith('"'):
|
||||
v, uq = uq[1:].split('"', 1)
|
||||
while v.endswith("\\"):
|
||||
v2, uq = uq.split('"', 1)
|
||||
v = v[:-1] + '"' + v2
|
||||
uq = uq.strip()
|
||||
else:
|
||||
v, uq = (uq + " ").split(" ", 1)
|
||||
v = v.replace('\\"', '"')
|
||||
|
||||
if is_key:
|
||||
is_key = False
|
||||
|
||||
if v == "size":
|
||||
v = "up.sz"
|
||||
is_size = True
|
||||
have_up = True
|
||||
|
||||
elif v == "date":
|
||||
v = "up.mt"
|
||||
is_date = True
|
||||
have_up = True
|
||||
|
||||
elif v == "path":
|
||||
v = "up.rd"
|
||||
v = "trim(?||up.rd,'/')"
|
||||
va.append("\nrd")
|
||||
have_up = True
|
||||
|
||||
elif v == "name":
|
||||
v = "up.fn"
|
||||
have_up = True
|
||||
|
||||
elif v == "tags" or ptn_mt.match(v):
|
||||
mt_ctr += 1
|
||||
mt_keycmp2 = "mt{}.w".format(mt_ctr)
|
||||
joins += "inner join mt mt{} on {} = {} ".format(
|
||||
mt_ctr, mt_keycmp, mt_keycmp2
|
||||
)
|
||||
mt_keycmp = mt_keycmp2
|
||||
have_mt = True
|
||||
field_end = ") "
|
||||
if v == "tags":
|
||||
v = "mt{0}.v".format(mt_ctr)
|
||||
vq = "mt.v"
|
||||
else:
|
||||
v = "+mt{0}.k = '{1}' and mt{0}.v".format(mt_ctr, v)
|
||||
vq = "+mt.k = '{}' and mt.v".format(v)
|
||||
|
||||
v = "exists(select 1 from mt where mt.w = mtw and " + vq
|
||||
|
||||
else:
|
||||
raise Pebkac(400, "invalid key [" + v + "]")
|
||||
@@ -186,6 +202,10 @@ class U2idx(object):
|
||||
va.append(v)
|
||||
is_key = True
|
||||
|
||||
if field_end:
|
||||
q += field_end
|
||||
field_end = ""
|
||||
|
||||
# lowercase tag searches
|
||||
m = ptn_lc.search(q)
|
||||
if not m or not ptn_lcv.search(unicode(v)):
|
||||
@@ -197,16 +217,16 @@ class U2idx(object):
|
||||
|
||||
field, oper = m.groups()
|
||||
if oper in ["=", "=="]:
|
||||
q += " {} like ? ".format(field)
|
||||
q += " {} like ? ) ".format(field)
|
||||
else:
|
||||
q += " lower({}) {} ? ".format(field, oper)
|
||||
q += " lower({}) {} ? ) ".format(field, oper)
|
||||
|
||||
try:
|
||||
return self.run_query(vols, joins + "where " + q, va)
|
||||
return self.run_query(vols, q, va, have_up, have_mt)
|
||||
except Exception as ex:
|
||||
raise Pebkac(500, repr(ex))
|
||||
|
||||
def run_query(self, vols, uq, uv):
|
||||
def run_query(self, vols, uq, uv, have_up, have_mt):
|
||||
done_flag = []
|
||||
self.active_id = "{:.6f}_{}".format(
|
||||
time.time(), threading.current_thread().ident
|
||||
@@ -223,16 +243,19 @@ class U2idx(object):
|
||||
thr.start()
|
||||
|
||||
if not uq or not uv:
|
||||
q = "select * from up"
|
||||
v = ()
|
||||
uq = "select * from up"
|
||||
uv = ()
|
||||
elif have_mt:
|
||||
uq = "select up.*, substr(up.w,1,16) mtw from up where " + uq
|
||||
uv = tuple(uv)
|
||||
else:
|
||||
q = "select up.* from up " + uq
|
||||
v = tuple(uv)
|
||||
uq = "select up.* from up where " + uq
|
||||
uv = tuple(uv)
|
||||
|
||||
self.log("qs: {!r} {!r}".format(q, v))
|
||||
self.log("qs: {!r} {!r}".format(uq, uv))
|
||||
|
||||
ret = []
|
||||
lim = 1000
|
||||
lim = int(self.args.srch_hits)
|
||||
taglist = {}
|
||||
for (vtop, ptop, flags) in vols:
|
||||
cur = self.get_cur(ptop)
|
||||
@@ -241,10 +264,19 @@ class U2idx(object):
|
||||
|
||||
self.active_cur = cur
|
||||
|
||||
vuv = []
|
||||
for v in uv:
|
||||
if v == "\nrd":
|
||||
v = vtop + "/"
|
||||
|
||||
vuv.append(v)
|
||||
vuv = tuple(vuv)
|
||||
|
||||
sret = []
|
||||
c = cur.execute(q, v)
|
||||
fk = flags.get("fk")
|
||||
c = cur.execute(uq, vuv)
|
||||
for hit in c:
|
||||
w, ts, sz, rd, fn, ip, at = hit
|
||||
w, ts, sz, rd, fn, ip, at = hit[:7]
|
||||
lim -= 1
|
||||
if lim <= 0:
|
||||
break
|
||||
@@ -252,7 +284,23 @@ class U2idx(object):
|
||||
if rd.startswith("//") or fn.startswith("//"):
|
||||
rd, fn = s3dec(rd, fn)
|
||||
|
||||
rp = "/".join([x for x in [vtop, rd, fn] if x])
|
||||
if not fk:
|
||||
suf = ""
|
||||
else:
|
||||
try:
|
||||
ap = absreal(os.path.join(ptop, rd, fn))
|
||||
inf = bos.stat(ap)
|
||||
except:
|
||||
continue
|
||||
|
||||
suf = (
|
||||
"?k="
|
||||
+ gen_filekey(
|
||||
self.args.fk_salt, ap, sz, 0 if ANYWIN else inf.st_ino
|
||||
)[:fk]
|
||||
)
|
||||
|
||||
rp = quotep("/".join([x for x in [vtop, rd, fn] if x])) + suf
|
||||
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
|
||||
|
||||
for hit in sret:
|
||||
@@ -275,9 +323,13 @@ class U2idx(object):
|
||||
# undupe hits from multiple metadata keys
|
||||
if len(ret) > 1:
|
||||
ret = [ret[0]] + [
|
||||
y for x, y in zip(ret[:-1], ret[1:]) if x["rp"] != y["rp"]
|
||||
y
|
||||
for x, y in zip(ret[:-1], ret[1:])
|
||||
if x["rp"].split("?")[0] != y["rp"].split("?")[0]
|
||||
]
|
||||
|
||||
ret.sort(key=itemgetter("rp"))
|
||||
|
||||
return ret, list(taglist.keys())
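Search hits now carry a ?k= filekey suffix when the volume has the fk flag set. A self-contained sketch of what that suffix could look like; the real gen_filekey lives in copyparty's util module and may hash different fields or use a different digest, so treat this as an approximation with made-up inputs:

```python
import base64
import hashlib


def demo_filekey(salt, abspath, size, inode, nchars):
    # hash of salt + file identity, base64'd and truncated to nchars
    msg = "{} {} {} {}".format(salt, abspath, size, inode).encode("utf-8")
    h = hashlib.sha512(msg).digest()
    return base64.urlsafe_b64encode(h).decode("ascii")[:nchars]


suf = "?k=" + demo_filekey("hunter2", "/srv/music/song.flac", 12345678, 9001, 8)
print("/music/song.flac" + suf)  # e.g. /music/song.flac?k=AbCdEfGh
```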
|
||||
|
||||
def terminator(self, identifier, done_flag):
|
||||
|
||||
@@ -21,13 +21,17 @@ from .util import (
|
||||
Pebkac,
|
||||
Queue,
|
||||
ProgressPrinter,
|
||||
SYMTIME,
|
||||
fsdec,
|
||||
fsenc,
|
||||
absreal,
|
||||
sanitize_fn,
|
||||
ren_open,
|
||||
atomic_move,
|
||||
quotep,
|
||||
vsplit,
|
||||
w8b64enc,
|
||||
w8b64dec,
|
||||
s3enc,
|
||||
s3dec,
|
||||
rmdirs,
|
||||
@@ -60,13 +64,17 @@ class Up2k(object):
|
||||
|
||||
# state
|
||||
self.mutex = threading.Lock()
|
||||
self.rescan_cond = threading.Condition()
|
||||
self.hashq = Queue()
|
||||
self.tagq = Queue()
|
||||
self.n_hashq = 0
|
||||
self.n_tagq = 0
|
||||
self.gid = 0
|
||||
self.volstate = {}
|
||||
self.need_rescan = {}
|
||||
self.dupesched = {}
|
||||
self.registry = {}
|
||||
self.droppable = {}
|
||||
self.entags = {}
|
||||
self.flags = {}
|
||||
self.cur = {}
|
||||
@@ -109,15 +117,21 @@ class Up2k(object):
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
def reload(self):
|
||||
self.gid += 1
|
||||
self.log("reload #{} initiated".format(self.gid))
|
||||
all_vols = self.asrv.vfs.all_vols
|
||||
self.rescan(all_vols, list(all_vols.keys()), True)
|
||||
|
||||
def deferred_init(self):
|
||||
all_vols = self.asrv.vfs.all_vols
|
||||
have_e2d = self.init_indexes(all_vols)
|
||||
|
||||
if have_e2d:
|
||||
thr = threading.Thread(target=self._snapshot, name="up2k-snapshot")
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
thr = threading.Thread(target=self._snapshot, name="up2k-snapshot")
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
if have_e2d:
|
||||
thr = threading.Thread(target=self._hasher, name="up2k-hasher")
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
@@ -127,9 +141,11 @@ class Up2k(object):
|
||||
thr.start()
|
||||
|
||||
if self.mtag:
|
||||
thr = threading.Thread(target=self._tagger, name="up2k-tagger")
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
for n in range(max(1, self.args.mtag_mt)):
|
||||
name = "tagger-{}".format(n)
|
||||
thr = threading.Thread(target=self._tagger, name=name)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-init")
|
||||
thr.daemon = True
|
||||
@@ -161,15 +177,15 @@ class Up2k(object):
|
||||
}
|
||||
return json.dumps(ret, indent=4)
|
||||
|
||||
def rescan(self, all_vols, scan_vols):
|
||||
if hasattr(self, "pp"):
|
||||
def rescan(self, all_vols, scan_vols, wait):
|
||||
if not wait and hasattr(self, "pp"):
|
||||
return "cannot initiate; scan is already in progress"
|
||||
|
||||
args = (all_vols, scan_vols)
|
||||
t = threading.Thread(
|
||||
target=self.init_indexes,
|
||||
args=args,
|
||||
name="up2k-rescan-{}".format(scan_vols[0]),
|
||||
name="up2k-rescan-{}".format(scan_vols[0] if scan_vols else "all"),
|
||||
)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
@@ -177,9 +193,23 @@ class Up2k(object):
|
||||
|
||||
    def _sched_rescan(self):
        volage = {}
        cooldown = 0
        timeout = time.time() + 3
        while True:
            time.sleep(self.args.re_int)
            timeout = max(timeout, cooldown)
            wait = max(0.1, timeout + 0.1 - time.time())
            with self.rescan_cond:
                self.rescan_cond.wait(wait)

            now = time.time()
            if now < cooldown:
                continue

            if hasattr(self, "pp"):
                cooldown = now + 5
                continue

            timeout = now + 9001
            with self.mutex:
                for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
                    maxage = vol.flags.get("scan")
@@ -189,14 +219,18 @@
                    if vp not in volage:
                        volage[vp] = now

                    if now - volage[vp] >= maxage:
                    deadline = volage[vp] + maxage
                    if deadline <= now:
                        self.need_rescan[vp] = 1

                    timeout = min(timeout, deadline)

            vols = list(sorted(self.need_rescan.keys()))
            self.need_rescan = {}

            if vols:
                err = self.rescan(self.asrv.vfs.all_vols, vols)
                cooldown = now + 10
                err = self.rescan(self.asrv.vfs.all_vols, vols, False)
                if err:
                    for v in vols:
                        self.need_rescan[v] = True
|
||||
@@ -218,8 +252,11 @@ class Up2k(object):
|
||||
if not cur:
|
||||
continue
|
||||
|
||||
lifetime = int(lifetime)
|
||||
timeout = min(timeout, now + lifetime)
|
||||
|
||||
nrm = 0
|
||||
deadline = time.time() - int(lifetime)
|
||||
deadline = time.time() - lifetime
|
||||
q = "select rd, fn from up where at > 0 and at < ? limit 100"
|
||||
while True:
|
||||
with self.mutex:
|
||||
@@ -242,6 +279,16 @@ class Up2k(object):
|
||||
if nrm:
|
||||
self.log("{} files graduated in {}".format(nrm, vp))
|
||||
|
||||
if timeout < 10:
|
||||
continue
|
||||
|
||||
q = "select at from up where at > 0 order by at limit 1"
|
||||
with self.mutex:
|
||||
hits = cur.execute(q).fetchone()
|
||||
|
||||
if hits:
|
||||
timeout = min(timeout, now + lifetime - (now - hits[0]))
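The scheduler above replaces fixed-interval polling with a deadline computed from every volume's scan interval and every file's retention lifetime, then sleeps on a Condition until the nearest deadline or until something notifies it early. A stripped-down sketch of that pattern, not copyparty's actual class:

```python
import threading
import time


class DeadlineLoop(object):
    def __init__(self, deadlines):
        self.deadlines = deadlines  # {volume: unix-time of next scan}
        self.cond = threading.Condition()

    def kick(self):
        # called by whoever moves a deadline (e.g. after a file move)
        with self.cond:
            self.cond.notify_all()

    def run(self, handler):
        while True:
            now = time.time()
            for vol, due in list(self.deadlines.items()):
                if due <= now:
                    # handler does the rescan and returns the next interval
                    self.deadlines[vol] = now + handler(vol)

            nxt = min(self.deadlines.values()) if self.deadlines else now + 9001
            with self.cond:
                self.cond.wait(max(0.1, nxt + 0.1 - time.time()))
```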
|
||||
|
||||
def _vis_job_progress(self, job):
|
||||
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
|
||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
@@ -250,7 +297,8 @@ class Up2k(object):
|
||||
def _vis_reg_progress(self, reg):
|
||||
ret = []
|
||||
for _, job in reg.items():
|
||||
ret.append(self._vis_job_progress(job))
|
||||
if job["need"]:
|
||||
ret.append(self._vis_job_progress(job))
|
||||
|
||||
return ret
|
||||
|
||||
@@ -265,6 +313,16 @@ class Up2k(object):
|
||||
return True, ret
|
||||
|
||||
def init_indexes(self, all_vols, scan_vols=None):
|
||||
gid = self.gid
|
||||
while hasattr(self, "pp") and gid == self.gid:
|
||||
time.sleep(0.1)
|
||||
|
||||
if gid != self.gid:
|
||||
return
|
||||
|
||||
if gid:
|
||||
self.log("reload #{} running".format(self.gid))
|
||||
|
||||
self.pp = ProgressPrinter()
|
||||
vols = all_vols.values()
|
||||
t0 = time.time()
|
||||
@@ -395,7 +453,11 @@ class Up2k(object):
|
||||
return have_e2d
|
||||
|
||||
def register_vpath(self, ptop, flags):
|
||||
histpath = self.asrv.vfs.histtab[ptop]
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
return None
|
||||
|
||||
db_path = os.path.join(histpath, "up2k.db")
|
||||
if ptop in self.registry:
|
||||
try:
|
||||
@@ -424,26 +486,41 @@ class Up2k(object):
|
||||
self.log("/{} {}".format(vpath, " ".join(sorted(a))), "35")
|
||||
|
||||
reg = {}
|
||||
drp = None
|
||||
path = os.path.join(histpath, "up2k.snap")
|
||||
if "e2d" in flags and bos.path.exists(path):
|
||||
if bos.path.exists(path):
|
||||
with gzip.GzipFile(path, "rb") as f:
|
||||
j = f.read().decode("utf-8")
|
||||
|
||||
reg2 = json.loads(j)
|
||||
try:
|
||||
drp = reg2["droppable"]
|
||||
reg2 = reg2["registry"]
|
||||
except:
|
||||
pass
|
||||
|
||||
for k, job in reg2.items():
|
||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
if bos.path.exists(path):
|
||||
reg[k] = job
|
||||
job["poke"] = time.time()
|
||||
job["busy"] = {}
|
||||
else:
|
||||
self.log("ign deleted file in snap: [{}]".format(path))
|
||||
|
||||
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
|
||||
if drp is None:
|
||||
drp = [k for k, v in reg.items() if not v.get("need", [])]
|
||||
else:
|
||||
drp = [x for x in drp if x in reg]
|
||||
|
||||
m = "loaded snap {} |{}| ({})".format(path, len(reg.keys()), len(drp or []))
|
||||
m = [m] + self._vis_reg_progress(reg)
|
||||
self.log("\n".join(m))
|
||||
|
||||
self.flags[ptop] = flags
|
||||
self.registry[ptop] = reg
|
||||
self.droppable[ptop] = drp or []
|
||||
self.regdrop(ptop, None)
|
||||
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
|
||||
return None
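The up2k.snap file written by this change is a gzipped JSON object with two top-level keys, registry (unfinished uploads) and droppable (warks that may be evicted), and the loader above falls back gracefully when it meets an older snapshot that is just the bare registry dict. Roughly:

```python
import gzip
import json


def load_snap(path):
    with gzip.GzipFile(path, "rb") as f:
        snap = json.loads(f.read().decode("utf-8"))

    try:
        drp = snap["droppable"]
        reg = snap["registry"]
    except (KeyError, TypeError):
        # pre-upgrade snapshot: the whole file is the registry
        reg, drp = snap, None

    return reg, drp
```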
|
||||
|
||||
@@ -462,7 +539,8 @@ class Up2k(object):
|
||||
def _build_file_index(self, vol, all_vols):
|
||||
do_vac = False
|
||||
top = vol.realpath
|
||||
nohash = "dhash" in vol.flags
|
||||
rei = vol.flags.get("noidx")
|
||||
reh = vol.flags.get("nohash")
|
||||
with self.mutex:
|
||||
cur, _ = self.register_vpath(top, vol.flags)
|
||||
|
||||
@@ -477,38 +555,55 @@ class Up2k(object):
|
||||
if WINDOWS:
|
||||
excl = [x.replace("/", "\\") for x in excl]
|
||||
|
||||
n_add = self._build_dir(dbw, top, set(excl), top, nohash, [])
|
||||
n_rm = self._drop_lost(dbw[0], top)
|
||||
n_add = n_rm = 0
|
||||
try:
|
||||
n_add = self._build_dir(dbw, top, set(excl), top, rei, reh, [])
|
||||
n_rm = self._drop_lost(dbw[0], top)
|
||||
except:
|
||||
m = "failed to index volume [{}]:\n{}"
|
||||
self.log(m.format(top, min_ex()), c=1)
|
||||
|
||||
if dbw[1]:
|
||||
self.log("commit {} new files".format(dbw[1]))
|
||||
dbw[0].connection.commit()
|
||||
|
||||
dbw[0].connection.commit()
|
||||
|
||||
return True, n_add or n_rm or do_vac
|
||||
|
||||
def _build_dir(self, dbw, top, excl, cdir, nohash, seen):
|
||||
def _build_dir(self, dbw, top, excl, cdir, rei, reh, seen):
|
||||
rcdir = absreal(cdir) # a bit expensive but worth
|
||||
if rcdir in seen:
|
||||
m = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}"
|
||||
self.log(m.format(seen[-1], rcdir, cdir), 3)
|
||||
return 0
|
||||
|
||||
seen = seen + [cdir]
|
||||
seen = seen + [rcdir]
|
||||
self.pp.msg = "a{} {}".format(self.pp.n, cdir)
|
||||
histpath = self.asrv.vfs.histtab[top]
|
||||
ret = 0
|
||||
seen_files = {}
|
||||
g = statdir(self.log_func, not self.args.no_scandir, False, cdir)
|
||||
for iname, inf in sorted(g):
|
||||
abspath = os.path.join(cdir, iname)
|
||||
if rei and rei.search(abspath):
|
||||
continue
|
||||
|
||||
nohash = reh.search(abspath) if reh else False
|
||||
lmod = int(inf.st_mtime)
|
||||
sz = inf.st_size
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
if abspath in excl or abspath == histpath:
|
||||
continue
|
||||
# self.log(" dir: {}".format(abspath))
|
||||
ret += self._build_dir(dbw, top, excl, abspath, nohash, seen)
|
||||
try:
|
||||
ret += self._build_dir(dbw, top, excl, abspath, rei, reh, seen)
|
||||
except:
|
||||
m = "failed to index subdir [{}]:\n{}"
|
||||
self.log(m.format(abspath, min_ex()), c=1)
|
||||
else:
|
||||
# self.log("file: {}".format(abspath))
|
||||
rp = abspath[len(top) + 1 :]
|
||||
seen_files[iname] = 1
|
||||
rp = abspath[len(top) :].lstrip("/")
|
||||
if WINDOWS:
|
||||
rp = rp.replace("\\", "/").strip("/")
|
||||
|
||||
@@ -566,34 +661,65 @@ class Up2k(object):
|
||||
dbw[0].connection.commit()
|
||||
dbw[1] = 0
|
||||
dbw[2] = time.time()
|
||||
|
||||
# drop missing files
|
||||
rd = cdir[len(top) + 1 :].strip("/")
|
||||
if WINDOWS:
|
||||
rd = rd.replace("\\", "/").strip("/")
|
||||
|
||||
q = "select fn from up where rd = ?"
|
||||
try:
|
||||
c = dbw[0].execute(q, (rd,))
|
||||
except:
|
||||
c = dbw[0].execute(q, ("//" + w8b64enc(rd),))
|
||||
|
||||
hits = [w8b64dec(x[2:]) if x.startswith("//") else x for (x,) in c]
|
||||
rm_files = [x for x in hits if x not in seen_files]
|
||||
n_rm = len(rm_files)
|
||||
for fn in rm_files:
|
||||
self.db_rm(dbw[0], rd, fn)
|
||||
|
||||
if n_rm:
|
||||
self.log("forgot {} deleted files".format(n_rm))
|
||||
|
||||
return ret
|
||||
|
||||
def _drop_lost(self, cur, top):
|
||||
rm = []
|
||||
n_rm = 0
|
||||
nchecked = 0
|
||||
nfiles = next(cur.execute("select count(w) from up"))[0]
|
||||
c = cur.execute("select rd, fn from up")
|
||||
for drd, dfn in c:
|
||||
# `_build_dir` did all the files, now do dirs
|
||||
ndirs = next(cur.execute("select count(distinct rd) from up"))[0]
|
||||
c = cur.execute("select distinct rd from up order by rd desc")
|
||||
for (drd,) in c:
|
||||
nchecked += 1
|
||||
if drd.startswith("//") or dfn.startswith("//"):
|
||||
drd, dfn = s3dec(drd, dfn)
|
||||
if drd.startswith("//"):
|
||||
rd = w8b64dec(drd[2:])
|
||||
else:
|
||||
rd = drd
|
||||
|
||||
abspath = os.path.join(top, drd, dfn)
|
||||
# almost zero overhead dw
|
||||
self.pp.msg = "b{} {}".format(nfiles - nchecked, abspath)
|
||||
abspath = os.path.join(top, rd)
|
||||
self.pp.msg = "b{} {}".format(ndirs - nchecked, abspath)
|
||||
try:
|
||||
if not bos.path.exists(abspath):
|
||||
rm.append([drd, dfn])
|
||||
except Exception as ex:
|
||||
self.log("stat-rm: {} @ [{}]".format(repr(ex), abspath))
|
||||
if os.path.isdir(abspath):
|
||||
continue
|
||||
except:
|
||||
pass
|
||||
|
||||
if rm:
|
||||
self.log("forgetting {} deleted files".format(len(rm)))
|
||||
for rd, fn in rm:
|
||||
# self.log("{} / {}".format(rd, fn))
|
||||
self.db_rm(cur, rd, fn)
|
||||
rm.append(drd)
|
||||
|
||||
return len(rm)
|
||||
if not rm:
|
||||
return 0
|
||||
|
||||
q = "select count(w) from up where rd = ?"
|
||||
for rd in rm:
|
||||
n_rm += next(cur.execute(q, (rd,)))[0]
|
||||
|
||||
self.log("forgetting {} deleted dirs, {} files".format(len(rm), n_rm))
|
||||
for rd in rm:
|
||||
cur.execute("delete from up where rd = ?", (rd,))
|
||||
|
||||
return n_rm
|
||||
|
||||
def _build_tags_index(self, vol):
|
||||
ptop = vol.realpath
|
||||
@@ -647,7 +773,7 @@ class Up2k(object):
|
||||
return n_add, n_rm, False
|
||||
|
||||
mpool = False
|
||||
if self.mtag.prefer_mt and not self.args.no_mtag_mt:
|
||||
if self.mtag.prefer_mt and self.args.mtag_mt > 1:
|
||||
mpool = self._start_mpool()
|
||||
|
||||
conn = sqlite3.connect(db_path, timeout=15)
|
||||
@@ -714,10 +840,11 @@ class Up2k(object):
|
||||
return ret
|
||||
|
||||
def _run_all_mtp(self):
|
||||
gid = self.gid
|
||||
t0 = time.time()
|
||||
for ptop, flags in self.flags.items():
|
||||
if "mtp" in flags:
|
||||
self._run_one_mtp(ptop)
|
||||
self._run_one_mtp(ptop, gid)
|
||||
|
||||
td = time.time() - t0
|
||||
msg = "mtp finished in {:.2f} sec ({})"
|
||||
@@ -728,7 +855,10 @@ class Up2k(object):
|
||||
if "OFFLINE" not in self.volstate[k]:
|
||||
self.volstate[k] = "online, idle"
|
||||
|
||||
def _run_one_mtp(self, ptop):
|
||||
def _run_one_mtp(self, ptop, gid):
|
||||
if gid != self.gid:
|
||||
return
|
||||
|
||||
entags = self.entags[ptop]
|
||||
|
||||
parsers = {}
|
||||
@@ -761,6 +891,9 @@ class Up2k(object):
|
||||
in_progress = {}
|
||||
while True:
|
||||
with self.mutex:
|
||||
if gid != self.gid:
|
||||
break
|
||||
|
||||
q = "select w from mt where k = 't:mtp' limit ?"
|
||||
warks = cur.execute(q, (batch_sz,)).fetchall()
|
||||
warks = [x[0] for x in warks]
|
||||
@@ -838,6 +971,7 @@ class Up2k(object):
|
||||
|
||||
cur.connection.commit()
|
||||
if n_done:
|
||||
self.log("mtp: scanned {} files in {}".format(n_done, ptop), c=6)
|
||||
cur.execute("vacuum")
|
||||
|
||||
wcur.close()
|
||||
@@ -879,9 +1013,7 @@ class Up2k(object):
|
||||
def _start_mpool(self):
|
||||
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
|
||||
# both do crazy runahead so lets reinvent another wheel
|
||||
nw = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
if self.args.no_mtag_mt:
|
||||
nw = 1
|
||||
nw = max(1, self.args.mtag_mt)
|
||||
|
||||
if self.pending_tags is None:
|
||||
self.log("using {}x {}".format(nw, self.mtag.backend))
|
||||
@@ -939,7 +1071,15 @@ class Up2k(object):
|
||||
|
||||
def _tag_file(self, write_cur, entags, wark, abspath, tags=None):
|
||||
if tags is None:
|
||||
tags = self.mtag.get(abspath)
|
||||
try:
|
||||
tags = self.mtag.get(abspath)
|
||||
except Exception as ex:
|
||||
msg = "failed to read tags from {}:\n{}"
|
||||
self.log(msg.format(abspath, ex), c=3)
|
||||
return 0
|
||||
|
||||
if not bos.path.isfile(abspath):
|
||||
return 0
|
||||
|
||||
if entags:
|
||||
tags = {k: v for k, v in tags.items() if k in entags}
|
||||
@@ -1111,9 +1251,18 @@ class Up2k(object):
|
||||
if dp_dir.startswith("//") or dp_fn.startswith("//"):
|
||||
dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
|
||||
|
||||
if job and (dp_dir != cj["prel"] or dp_fn != cj["name"]):
|
||||
continue
|
||||
|
||||
dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
|
||||
# relying on path.exists to return false on broken symlinks
|
||||
if bos.path.exists(dp_abs):
|
||||
# relying on this to fail on broken symlinks
|
||||
try:
|
||||
sz = bos.path.getsize(dp_abs)
|
||||
except:
|
||||
sz = 0
|
||||
|
||||
if sz:
|
||||
# self.log("--- " + wark + " " + dp_abs + " found file", 4)
|
||||
job = {
|
||||
"name": dp_fn,
|
||||
"prel": dp_dir,
|
||||
@@ -1125,10 +1274,11 @@ class Up2k(object):
|
||||
"at": at,
|
||||
"hash": [],
|
||||
"need": [],
|
||||
"busy": {},
|
||||
}
|
||||
break
|
||||
|
||||
if job and wark in reg:
|
||||
# self.log("pop " + wark + " " + job["name"] + " handle_json db", 4)
|
||||
del reg[wark]
|
||||
|
||||
if job or wark in reg:
|
||||
@@ -1156,11 +1306,23 @@ class Up2k(object):
|
||||
if job["need"]:
|
||||
self.log("unfinished:\n {0}\n {1}".format(src, dst))
|
||||
err = "partial upload exists at a different location; please resume uploading here instead:\n"
|
||||
err += "/" + vsrc + " "
|
||||
err += "/" + quotep(vsrc) + " "
|
||||
|
||||
# registry is size-constrained + can only contain one unique wark;
|
||||
# let want_recheck trigger symlink (if still in reg) or reupload
|
||||
if cur:
|
||||
dupe = [cj["prel"], cj["name"], cj["lmod"]]
|
||||
try:
|
||||
self.dupesched[src].append(dupe)
|
||||
except:
|
||||
self.dupesched[src] = [dupe]
|
||||
|
||||
raise Pebkac(400, err)
|
||||
|
||||
elif "nodupe" in self.flags[job["ptop"]]:
|
||||
self.log("dupe-reject:\n {0}\n {1}".format(src, dst))
|
||||
err = "upload rejected, file already exists:\n/" + vsrc + " "
|
||||
err = "upload rejected, file already exists:\n"
|
||||
err += "/" + quotep(vsrc) + " "
|
||||
raise Pebkac(400, err)
|
||||
else:
|
||||
# symlink to the client-provided name,
|
||||
@@ -1174,7 +1336,7 @@ class Up2k(object):
|
||||
dst = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
if not self.args.nw:
|
||||
bos.unlink(dst) # TODO ed pls
|
||||
self._symlink(src, dst)
|
||||
self._symlink(src, dst, lmod=cj["lmod"])
|
||||
|
||||
if cur:
|
||||
a = [cj[x] for x in "prel name lmod size addr".split()]
|
||||
@@ -1198,6 +1360,7 @@ class Up2k(object):
|
||||
"t0": now,
|
||||
"hash": deepcopy(cj["hash"]),
|
||||
"need": [],
|
||||
"busy": {},
|
||||
}
|
||||
# client-provided, sanitized by _get_wark: name, size, lmod
|
||||
for k in [
|
||||
@@ -1223,7 +1386,8 @@ class Up2k(object):
|
||||
|
||||
self._new_upload(job)
|
||||
|
||||
purl = "/{}/".format("{}/{}".format(job["vtop"], job["prel"]).strip("/"))
|
||||
purl = "{}/{}".format(job["vtop"], job["prel"]).strip("/")
|
||||
purl = "/{}/".format(purl) if purl else "/"
|
||||
|
||||
return {
|
||||
"name": job["name"],
|
||||
@@ -1240,18 +1404,22 @@ class Up2k(object):
|
||||
|
||||
# TODO broker which avoid this race and
|
||||
# provides a new filename if taken (same as bup)
|
||||
suffix = ".{:.6f}-{}".format(ts, ip)
|
||||
suffix = "-{:.6f}-{}".format(ts, ip.replace(":", "."))
|
||||
with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
|
||||
return f["orz"][1]
|
||||
|
||||
def _symlink(self, src, dst, verbose=True):
|
||||
def _symlink(self, src, dst, verbose=True, lmod=None):
|
||||
if verbose:
|
||||
self.log("linking dupe:\n {0}\n {1}".format(src, dst))
|
||||
|
||||
if self.args.nw:
|
||||
return
|
||||
|
||||
linked = False
|
||||
try:
|
||||
if self.args.no_symlink:
|
||||
raise Exception("disabled in config")
|
||||
|
||||
lsrc = src
|
||||
ldst = dst
|
||||
fs1 = bos.stat(os.path.dirname(src)).st_dev
|
||||
@@ -1278,10 +1446,18 @@ class Up2k(object):
|
||||
hops = len(ndst[nc:]) - 1
|
||||
lsrc = "../" * hops + "/".join(lsrc)
|
||||
os.symlink(fsenc(lsrc), fsenc(ldst))
|
||||
linked = True
|
||||
except Exception as ex:
|
||||
self.log("cannot symlink; creating copy: " + repr(ex))
|
||||
shutil.copy2(fsenc(src), fsenc(dst))
|
||||
|
||||
if lmod and (not linked or SYMTIME):
|
||||
times = (int(time.time()), int(lmod))
|
||||
if ANYWIN:
|
||||
self.lastmod_q.put([dst, 0, times])
|
||||
else:
|
||||
bos.utime(dst, times, False)
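Dedupe symlinks now inherit the original file's last-modified time; where follow_symlinks is supported, the timestamp is stamped onto the link itself. A hedged sketch of just that part, with hypothetical paths; the diff defines SYMTIME from the whole os.supports_follow_symlinks set, while this narrows it to os.utime:

```python
import os
import sys
import time

SYMTIME = sys.version_info >= (3, 6) and os.utime in os.supports_follow_symlinks


def link_with_mtime(src, dst, lmod):
    os.symlink(src, dst)
    if SYMTIME:
        # set atime/mtime on the symlink itself, not its target
        os.utime(dst, (int(time.time()), int(lmod)), follow_symlinks=False)
```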
|
||||
|
||||
    def handle_chunk(self, ptop, wark, chash):
        with self.mutex:
            job = self.registry[ptop].get(wark)
@@ -1300,6 +1476,14 @@
            if not nchunk:
                raise Pebkac(400, "unknown chunk")

            if chash in job["busy"]:
                nh = len(job["hash"])
                idx = job["hash"].index(chash)
                m = "that chunk is already being written to:\n {}\n {} {}/{}\n {}"
                raise Pebkac(400, m.format(wark, chash, idx, nh, job["name"]))

            job["busy"][chash] = 1

            job["poke"] = time.time()

            chunksize = up2k_chunksize(job["size"])
@@ -1309,6 +1493,14 @@

        return [chunksize, ofs, path, job["lmod"]]

    def release_chunk(self, ptop, wark, chash):
        with self.mutex:
            job = self.registry[ptop].get(wark)
            if job:
                job["busy"].pop(chash, None)

        return [True]
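The busy map added above guards each chunk while it is being written: handle_chunk registers the chunk hash, release_chunk (on network errors) and confirm_chunk (on success) clear it, and a second client asking for the same chunk gets a 400 instead of overlapping the write. A toy sketch of that guard, not the real broker flow:

```python
class ChunkGuard(object):
    def __init__(self):
        self.busy = {}

    def take(self, chash):
        if chash in self.busy:
            raise Exception("that chunk is already being written to")
        self.busy[chash] = 1

    def release(self, chash):
        self.busy.pop(chash, None)


g = ChunkGuard()
g.take("abc123")      # first uploader gets the byte range
try:
    g.take("abc123")  # a second POST for the same chunk is rejected
except Exception as ex:
    print(ex)
g.release("abc123")   # freed on success or on disconnect
```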
|
||||
|
||||
def confirm_chunk(self, ptop, wark, chash):
|
||||
with self.mutex:
|
||||
try:
|
||||
@@ -1319,6 +1511,8 @@ class Up2k(object):
|
||||
except Exception as ex:
|
||||
return "confirm_chunk, wark, " + repr(ex)
|
||||
|
||||
job["busy"].pop(chash, None)
|
||||
|
||||
try:
|
||||
job["need"].remove(chash)
|
||||
except Exception as ex:
|
||||
@@ -1329,23 +1523,75 @@ class Up2k(object):
|
||||
return ret, src
|
||||
|
||||
if self.args.nw:
|
||||
# del self.registry[ptop][wark]
|
||||
self.regdrop(ptop, wark)
|
||||
return ret, dst
|
||||
|
||||
atomic_move(src, dst)
|
||||
|
||||
if ANYWIN:
|
||||
a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
|
||||
self.lastmod_q.put(a)
|
||||
|
||||
a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
|
||||
a += [job.get("at") or time.time()]
|
||||
if self.idx_wark(*a):
|
||||
del self.registry[ptop][wark]
|
||||
# in-memory registry is reserved for unfinished uploads
|
||||
# windows cant rename open files
|
||||
if not ANYWIN or src == dst:
|
||||
self._finish_upload(ptop, wark)
|
||||
|
||||
return ret, dst
|
||||
|
||||
def finish_upload(self, ptop, wark):
|
||||
with self.mutex:
|
||||
self._finish_upload(ptop, wark)
|
||||
|
||||
def _finish_upload(self, ptop, wark):
|
||||
try:
|
||||
job = self.registry[ptop][wark]
|
||||
pdir = os.path.join(job["ptop"], job["prel"])
|
||||
src = os.path.join(pdir, job["tnam"])
|
||||
dst = os.path.join(pdir, job["name"])
|
||||
except Exception as ex:
|
||||
return "finish_upload, wark, " + repr(ex)
|
||||
|
||||
# self.log("--- " + wark + " " + dst + " finish_upload atomic " + dst, 4)
|
||||
atomic_move(src, dst)
|
||||
|
||||
if ANYWIN:
|
||||
a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
|
||||
self.lastmod_q.put(a)
|
||||
|
||||
a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
|
||||
a += [job.get("at") or time.time()]
|
||||
if self.idx_wark(*a):
|
||||
del self.registry[ptop][wark]
|
||||
else:
|
||||
self.regdrop(ptop, wark)
|
||||
|
||||
        dupes = self.dupesched.pop(dst, [])
        if not dupes:
            return

        cur = self.cur.get(ptop)
        for rd, fn, lmod in dupes:
            d2 = os.path.join(ptop, rd, fn)
            if os.path.exists(d2):
                continue

            self._symlink(dst, d2, lmod=lmod)
            if cur:
                self.db_rm(cur, rd, fn)
                self.db_add(cur, wark, rd, fn, *a[-4:])

        if cur:
            cur.connection.commit()

    def regdrop(self, ptop, wark):
        t = self.droppable[ptop]
        if wark:
            t.append(wark)

        if len(t) <= self.args.reg_cap:
            return

        n = len(t) - int(self.args.reg_cap / 2)
        m = "up2k-registry [{}] has {} droppables; discarding {}"
        self.log(m.format(ptop, len(t), n))
        for k in t[:n]:
            self.registry[ptop].pop(k, None)
        self.droppable[ptop] = t[n:]
|
||||
|
||||
def idx_wark(self, ptop, wark, rd, fn, lmod, sz, ip, at):
|
||||
cur = self.cur.get(ptop)
|
||||
if not cur:
|
||||
@@ -1400,15 +1646,17 @@ class Up2k(object):
|
||||
try:
|
||||
permsets = [[True, False, False, True]]
|
||||
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
|
||||
vn, rem = vn.get_dbv(rem)
|
||||
unpost = False
|
||||
except:
|
||||
# unpost with missing permissions? try read+write and verify with db
|
||||
if not self.args.unpost:
|
||||
raise Pebkac(400, "the unpost feature was disabled by server config")
|
||||
raise Pebkac(400, "the unpost feature is disabled in server config")
|
||||
|
||||
unpost = True
|
||||
permsets = [[True, True]]
|
||||
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
|
||||
vn, rem = vn.get_dbv(rem)
|
||||
_, _, _, _, dip, dat = self._find_from_vpath(vn.realpath, rem)
|
||||
|
||||
m = "you cannot delete this: "
|
||||
@@ -1427,7 +1675,11 @@ class Up2k(object):
|
||||
ptop = vn.realpath
|
||||
atop = vn.canonical(rem, False)
|
||||
adir, fn = os.path.split(atop)
|
||||
st = bos.lstat(atop)
|
||||
try:
|
||||
st = bos.lstat(atop)
|
||||
except:
|
||||
raise Pebkac(400, "file not found on disk (already deleted?)")
|
||||
|
||||
scandir = not self.args.no_scandir
|
||||
if stat.S_ISLNK(st.st_mode) or stat.S_ISREG(st.st_mode):
|
||||
dbv, vrem = self.asrv.vfs.get(vpath, uname, *permsets[0])
|
||||
@@ -1450,16 +1702,18 @@ class Up2k(object):
|
||||
self.log("rm {}\n {}".format(vpath, abspath))
|
||||
_ = dbv.get(volpath, uname, *permsets[0])
|
||||
with self.mutex:
|
||||
cur = None
|
||||
try:
|
||||
ptop = dbv.realpath
|
||||
cur, wark, _, _, _, _ = self._find_from_vpath(ptop, volpath)
|
||||
self._forget_file(ptop, volpath, cur, wark, True)
|
||||
finally:
|
||||
cur.connection.commit()
|
||||
if cur:
|
||||
cur.connection.commit()
|
||||
|
||||
bos.unlink(abspath)
|
||||
|
||||
rm = rmdirs(self.log_func, scandir, True, atop)
|
||||
rm = rmdirs(self.log_func, scandir, True, atop, 1)
|
||||
return n_files, rm[0], rm[1]
|
||||
|
||||
def handle_mv(self, uname, svp, dvp):
|
||||
@@ -1501,7 +1755,7 @@ class Up2k(object):
|
||||
with self.mutex:
|
||||
self._mv_file(uname, svpf, dvpf)
|
||||
|
||||
rmdirs(self.log_func, scandir, True, sabs)
|
||||
rmdirs(self.log_func, scandir, True, sabs, 1)
|
||||
return "k"
|
||||
|
||||
def _mv_file(self, uname, svp, dvp):
|
||||
@@ -1515,6 +1769,14 @@ class Up2k(object):
|
||||
dabs = dvn.canonical(drem)
|
||||
drd, dfn = vsplit(drem)
|
||||
|
||||
n1 = svp.split("/")[-1]
|
||||
n2 = dvp.split("/")[-1]
|
||||
if n1.startswith(".") or n2.startswith("."):
|
||||
if self.args.no_dot_mv:
|
||||
raise Pebkac(400, "moving dotfiles is disabled in server config")
|
||||
elif self.args.no_dot_ren and n1 != n2:
|
||||
raise Pebkac(400, "renaming dotfiles is disabled in server config")
|
||||
|
||||
if bos.path.exists(dabs):
|
||||
raise Pebkac(400, "mv2: target file exists")
|
||||
|
||||
@@ -1524,12 +1786,16 @@ class Up2k(object):
|
||||
dlabs = absreal(sabs)
|
||||
m = "moving symlink from [{}] to [{}], target [{}]"
|
||||
self.log(m.format(sabs, dabs, dlabs))
|
||||
os.unlink(sabs)
|
||||
self._symlink(dlabs, dabs, False)
|
||||
mt = bos.path.getmtime(sabs, False)
|
||||
bos.unlink(sabs)
|
||||
self._symlink(dlabs, dabs, False, lmod=mt)
|
||||
|
||||
# folders are too scary, schedule rescan of both vols
|
||||
self.need_rescan[svn.vpath] = 1
|
||||
self.need_rescan[dvn.vpath] = 1
|
||||
with self.rescan_cond:
|
||||
self.rescan_cond.notify_all()
|
||||
|
||||
return "k"
|
||||
|
||||
c1, w, ftime, fsize, ip, at = self._find_from_vpath(svn.realpath, srem)
|
||||
@@ -1570,7 +1836,7 @@ class Up2k(object):
|
||||
def _find_from_vpath(self, ptop, vrem):
|
||||
cur = self.cur.get(ptop)
|
||||
if not cur:
|
||||
return None, None
|
||||
return [None] * 6
|
||||
|
||||
rd, fn = vsplit(vrem)
|
||||
q = "select w, mt, sz, ip, at from up where rd=? and fn=? limit 1"
|
||||
@@ -1607,7 +1873,7 @@ class Up2k(object):
|
||||
wark = [
|
||||
x
|
||||
for x, y in reg.items()
|
||||
if fn in [y["name"], y.get("tnam")] and y["prel"] == vrem
|
||||
if sfn in [y["name"], y.get("tnam")] and y["prel"] == vrem
|
||||
]
|
||||
|
||||
if wark and wark in reg:
|
||||
@@ -1652,25 +1918,30 @@ class Up2k(object):
|
||||
slabs = list(sorted(links.keys()))[0]
|
||||
ptop, rem = links.pop(slabs)
|
||||
self.log("linkswap [{}] and [{}]".format(sabs, slabs))
|
||||
mt = bos.path.getmtime(slabs, False)
|
||||
bos.unlink(slabs)
|
||||
bos.rename(sabs, slabs)
|
||||
bos.utime(slabs, (int(time.time()), int(mt)), False)
|
||||
self._symlink(slabs, sabs, False)
|
||||
full[slabs] = [ptop, rem]
|
||||
sabs = slabs
|
||||
|
||||
if not dabs:
|
||||
dabs = list(sorted(full.keys()))[0]
|
||||
|
||||
for alink in links.keys():
|
||||
lmod = None
|
||||
try:
|
||||
if alink != sabs and absreal(alink) != sabs:
|
||||
continue
|
||||
|
||||
self.log("relinking [{}] to [{}]".format(alink, dabs))
|
||||
lmod = bos.path.getmtime(alink, False)
|
||||
bos.unlink(alink)
|
||||
except:
|
||||
pass
|
||||
|
||||
self._symlink(dabs, alink, False)
|
||||
self._symlink(dabs, alink, False, lmod=lmod)
|
||||
|
||||
return len(full) + len(links)
|
||||
|
||||
@@ -1690,7 +1961,13 @@ class Up2k(object):
|
||||
except:
|
||||
cj["lmod"] = int(time.time())
|
||||
|
||||
wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"])
|
||||
if cj["hash"]:
|
||||
wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"])
|
||||
else:
|
||||
wark = up2k_wark_from_metadata(
|
||||
self.salt, cj["size"], cj["lmod"], cj["prel"], cj["name"]
|
||||
)
|
||||
|
||||
return wark
|
||||
|
||||
def _hashlist_from_file(self, path):
|
||||
@@ -1733,9 +2010,12 @@ class Up2k(object):
|
||||
|
||||
if self.args.nw:
|
||||
job["tnam"] = tnam
|
||||
if not job["hash"]:
|
||||
del self.registry[job["ptop"]][job["wark"]]
|
||||
return
|
||||
|
||||
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
|
||||
dip = job["addr"].replace(":", ".")
|
||||
suffix = "-{:.6f}-{}".format(job["t0"], dip)
|
||||
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
|
||||
f, job["tnam"] = f["orz"]
|
||||
if (
|
||||
@@ -1749,8 +2029,12 @@ class Up2k(object):
|
||||
except:
|
||||
self.log("could not sparse [{}]".format(fp), 3)
|
||||
|
||||
f.seek(job["size"] - 1)
|
||||
f.write(b"e")
|
||||
if job["hash"]:
|
||||
f.seek(job["size"] - 1)
|
||||
f.write(b"e")
|
||||
|
||||
if not job["hash"]:
|
||||
self._finish_upload(job["ptop"], job["wark"])
|
||||
|
||||
def _lastmodder(self):
|
||||
while True:
|
||||
@@ -1763,7 +2047,7 @@ class Up2k(object):
|
||||
for path, sz, times in ready:
|
||||
self.log("lmod: setting times {} on {}".format(times, path))
|
||||
try:
|
||||
bos.utime(path, times)
|
||||
bos.utime(path, times, False)
|
||||
except:
|
||||
self.log("lmod: failed to utime ({}, {})".format(path, times))
|
||||
|
||||
@@ -1779,7 +2063,8 @@ class Up2k(object):
|
||||
self.snap_prev = {}
|
||||
while True:
|
||||
time.sleep(self.snap_persist_interval)
|
||||
self.do_snapshot()
|
||||
if not hasattr(self, "pp"):
|
||||
self.do_snapshot()
|
||||
|
||||
def do_snapshot(self):
|
||||
with self.mutex:
|
||||
@@ -1788,7 +2073,10 @@ class Up2k(object):
|
||||
|
||||
def _snap_reg(self, ptop, reg):
|
||||
now = time.time()
|
||||
histpath = self.asrv.vfs.histtab[ptop]
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
return
|
||||
|
||||
rm = [x for x in reg.values() if now - x["poke"] > self.snap_discard_interval]
|
||||
if rm:
|
||||
m = "dropping {} abandoned uploads in {}".format(len(rm), ptop)
|
||||
@@ -1825,7 +2113,8 @@ class Up2k(object):
|
||||
bos.makedirs(histpath)
|
||||
|
||||
path2 = "{}.{}".format(path, os.getpid())
|
||||
j = json.dumps(reg, indent=2, sort_keys=True).encode("utf-8")
|
||||
body = {"droppable": self.droppable[ptop], "registry": reg}
|
||||
j = json.dumps(body, indent=2, sort_keys=True).encode("utf-8")
|
||||
with gzip.GzipFile(path2, "wb") as f:
|
||||
f.write(j)
|
||||
|
||||
@@ -1848,11 +2137,16 @@ class Up2k(object):
|
||||
|
||||
# self.log("\n " + repr([ptop, rd, fn]))
|
||||
abspath = os.path.join(ptop, rd, fn)
|
||||
tags = self.mtag.get(abspath)
|
||||
ntags1 = len(tags)
|
||||
parsers = self._get_parsers(ptop, tags, abspath)
|
||||
if parsers:
|
||||
tags.update(self.mtag.get_bin(parsers, abspath))
|
||||
try:
|
||||
tags = self.mtag.get(abspath)
|
||||
ntags1 = len(tags)
|
||||
parsers = self._get_parsers(ptop, tags, abspath)
|
||||
if parsers:
|
||||
tags.update(self.mtag.get_bin(parsers, abspath))
|
||||
except Exception as ex:
|
||||
msg = "failed to read tags from {}:\n{}"
|
||||
self.log(msg.format(abspath, ex), c=3)
|
||||
continue
|
||||
|
||||
with self.mutex:
|
||||
cur = self.cur[ptop]
|
||||
|
||||
@@ -19,7 +19,7 @@ import subprocess as sp # nosec
|
||||
from datetime import datetime
|
||||
from collections import Counter
|
||||
|
||||
from .__init__ import PY2, WINDOWS, ANYWIN
|
||||
from .__init__ import PY2, WINDOWS, ANYWIN, VT100, unicode
|
||||
from .stolen import surrogateescape
|
||||
|
||||
FAKE_MP = False
|
||||
@@ -58,7 +58,7 @@ except:
|
||||
return struct.unpack(f.decode("ascii"), *a, **ka)
|
||||
|
||||
|
||||
ansi_re = re.compile("\033\\[[^m]*m")
|
||||
ansi_re = re.compile("\033\\[[^mK]*[mK]")
|
||||
|
||||
|
||||
surrogateescape.register_surrogateescape()
|
||||
@@ -67,8 +67,9 @@ if WINDOWS and PY2:
|
||||
FS_ENCODING = "utf-8"
|
||||
|
||||
|
||||
HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT"
|
||||
SYMTIME = sys.version_info >= (3, 6) and os.supports_follow_symlinks
|
||||
|
||||
HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT"
|
||||
|
||||
HTTPCODE = {
|
||||
200: "OK",
|
||||
@@ -100,10 +101,25 @@ IMPLICATIONS = [
|
||||
|
||||
|
||||
MIMES = {
    "md": "text/plain; charset=UTF-8",
    "md": "text/plain",
    "txt": "text/plain",
    "js": "text/javascript",
    "opus": "audio/ogg; codecs=opus",
    "webp": "image/webp",
    "caf": "audio/x-caf",
    "mp3": "audio/mpeg",
    "m4a": "audio/mp4",
    "jpg": "image/jpeg",
}
for ln in """text css html csv
application json wasm xml pdf rtf zip
image webp jpeg png gif bmp
audio aac ogg wav
video webm mp4 mpeg
font woff woff2 otf ttf
""".splitlines():
    k, vs = ln.split(" ", 1)
    for v in vs.strip().split():
        MIMES[v] = "{}/{}".format(k, v)
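After the loop runs, the explicit entries and the generated type/subtype pairs coexist in one table; reproducing a couple of the generated lines and looking up the results:

```python
MIMES = {"opus": "audio/ogg; codecs=opus"}
for ln in """application json wasm xml pdf rtf zip
image webp jpeg png gif bmp""".splitlines():
    k, vs = ln.split(" ", 1)
    for v in vs.strip().split():
        MIMES[v] = "{}/{}".format(k, v)

print(MIMES["json"])  # application/json
print(MIMES["webp"])  # image/webp
print(MIMES["opus"])  # audio/ogg; codecs=opus (explicit entry, not regenerated)
```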
|
||||
|
||||
|
||||
REKOBO_KEY = {
|
||||
@@ -169,7 +185,7 @@ class Cooldown(object):
|
||||
return ret
|
||||
|
||||
|
||||
class Unrecv(object):
|
||||
class _Unrecv(object):
|
||||
"""
|
||||
undo any number of socket recv ops
|
||||
"""
|
||||
@@ -189,10 +205,117 @@ class Unrecv(object):
|
||||
except:
|
||||
return b""
|
||||
|
||||
def recv_ex(self, nbytes):
|
||||
"""read an exact number of bytes"""
|
||||
ret = self.recv(nbytes)
|
||||
while ret and len(ret) < nbytes:
|
||||
buf = self.recv(nbytes - len(ret))
|
||||
if not buf:
|
||||
break
|
||||
|
||||
ret += buf
|
||||
|
||||
return ret
|
||||
|
||||
def unrecv(self, buf):
|
||||
self.buf = buf + self.buf
|
||||
|
||||
|
||||
class _LUnrecv(object):
|
||||
"""
|
||||
with expensive debug logging
|
||||
"""
|
||||
|
||||
def __init__(self, s):
|
||||
self.s = s
|
||||
self.buf = b""
|
||||
|
||||
def recv(self, nbytes):
|
||||
if self.buf:
|
||||
ret = self.buf[:nbytes]
|
||||
self.buf = self.buf[nbytes:]
|
||||
m = "\033[0;7mur:pop:\033[0;1;32m {}\n\033[0;7mur:rem:\033[0;1;35m {}\033[0m\n"
|
||||
print(m.format(ret, self.buf), end="")
|
||||
return ret
|
||||
|
||||
try:
|
||||
ret = self.s.recv(nbytes)
|
||||
m = "\033[0;7mur:recv\033[0;1;33m {}\033[0m\n"
|
||||
print(m.format(ret), end="")
|
||||
return ret
|
||||
except:
|
||||
return b""
|
||||
|
||||
def recv_ex(self, nbytes):
|
||||
"""read an exact number of bytes"""
|
||||
ret = self.recv(nbytes)
|
||||
while ret and len(ret) < nbytes:
|
||||
buf = self.recv(nbytes - len(ret))
|
||||
if not buf:
|
||||
break
|
||||
|
||||
ret += buf
|
||||
|
||||
return ret
|
||||
|
||||
def unrecv(self, buf):
|
||||
self.buf = buf + self.buf
|
||||
m = "\033[0;7mur:push\033[0;1;31m {}\n\033[0;7mur:rem:\033[0;1;35m {}\033[0m\n"
|
||||
print(m.format(buf, self.buf), end="")
|
||||
|
||||
|
||||
Unrecv = _Unrecv
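Both Unrecv flavors gain recv_ex because a bare socket recv may legally return fewer bytes than asked for, so a two-byte read can come back as one byte; recv_ex loops until it has the exact count or the stream ends, which is why the multipart parser further down switches its recv(2) calls to recv_ex(2). A self-contained illustration of that loop against a fake socket (FakeSock is invented for the demo):

class FakeSock(object):
    """pretends to be a socket that returns at most 3 bytes per recv"""
    def __init__(self, data):
        self.data = data

    def recv(self, nbytes):
        n = min(nbytes, 3)
        ret, self.data = self.data[:n], self.data[n:]
        return ret


def recv_ex(sock, nbytes):
    """read an exact number of bytes (same loop as above)"""
    ret = sock.recv(nbytes)
    while ret and len(ret) < nbytes:
        buf = sock.recv(nbytes - len(ret))
        if not buf:
            break
        ret += buf
    return ret


print(recv_ex(FakeSock(b"--\r\nrest"), 4))  # b'--\r\n' even though each recv gave at most 3 bytes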
|
||||
|
||||
|
||||
class FHC(object):
|
||||
class CE(object):
|
||||
def __init__(self, fh):
|
||||
self.ts = 0
|
||||
self.fhs = [fh]
|
||||
|
||||
def __init__(self):
|
||||
self.cache = {}
|
||||
|
||||
def close(self, path):
|
||||
try:
|
||||
ce = self.cache[path]
|
||||
except:
|
||||
return
|
||||
|
||||
for fh in ce.fhs:
|
||||
fh.close()
|
||||
|
||||
del self.cache[path]
|
||||
|
||||
def clean(self):
|
||||
if not self.cache:
|
||||
return
|
||||
|
||||
keep = {}
|
||||
now = time.time()
|
||||
for path, ce in self.cache.items():
|
||||
if now < ce.ts + 5:
|
||||
keep[path] = ce
|
||||
else:
|
||||
for fh in ce.fhs:
|
||||
fh.close()
|
||||
|
||||
self.cache = keep
|
||||
|
||||
def pop(self, path):
|
||||
return self.cache[path].fhs.pop()
|
||||
|
||||
def put(self, path, fh):
|
||||
try:
|
||||
ce = self.cache[path]
|
||||
ce.fhs.append(fh)
|
||||
except:
|
||||
ce = self.CE(fh)
|
||||
self.cache[path] = ce
|
||||
|
||||
ce.ts = time.time()
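FHC appears to be a small per-path cache of open file handles: put() parks a handle and stamps the entry, pop() hands one back for reuse, close() drops all handles for one path, and clean() closes anything untouched for roughly five seconds. A rough usage sketch, assuming the class above is in scope:

import os
import tempfile
import time

fd, path = tempfile.mkstemp()  # throwaway file just for the demo
os.close(fd)

fhc = FHC()
fhc.put(path, open(path, "rb"))  # first request opens and parks the handle
fh = fhc.pop(path)               # a later request for the same path reuses it
fhc.put(path, fh)

time.sleep(6)
fhc.clean()                      # idle for more than ~5 s, so the handle is closed and evicted
assert path not in fhc.cache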
|
||||
|
||||
|
||||
class ProgressPrinter(threading.Thread):
|
||||
"""
|
||||
periodically print progress info without linefeeds
|
||||
@@ -207,17 +330,22 @@ class ProgressPrinter(threading.Thread):
|
||||
|
||||
def run(self):
|
||||
msg = None
|
||||
fmt = " {}\033[K\r" if VT100 else " {} $\r"
|
||||
while not self.end:
|
||||
time.sleep(0.1)
|
||||
if msg == self.msg or self.end:
|
||||
continue
|
||||
|
||||
msg = self.msg
|
||||
uprint(" {}\033[K\r".format(msg))
|
||||
uprint(fmt.format(msg))
|
||||
if PY2:
|
||||
sys.stdout.flush()
|
||||
|
||||
print("\033[K", end="")
|
||||
if VT100:
|
||||
print("\033[K", end="")
|
||||
elif msg:
|
||||
print("------------------------")
|
||||
|
||||
sys.stdout.flush() # necessary on win10 even w/ stderr btw
|
||||
|
||||
|
||||
@@ -312,7 +440,7 @@ def stackmon(fp, ival, suffix):
|
||||
|
||||
|
||||
def start_log_thrs(logger, ival, nid):
|
||||
ival = int(ival)
|
||||
ival = float(ival)
|
||||
tname = lname = "log-thrs"
|
||||
if nid:
|
||||
tname = "logthr-n{}-i{:x}".format(nid, os.getpid())
|
||||
@@ -333,7 +461,7 @@ def log_thrs(log, ival, name):
|
||||
tv = [x.name for x in threading.enumerate()]
|
||||
tv = [
|
||||
x.split("-")[0]
|
||||
if x.startswith("httpconn-") or x.startswith("thumb-")
|
||||
if x.split("-")[0] in ["httpconn", "thumb", "tagger"]
|
||||
else "listen"
|
||||
if "-listen-" in x
|
||||
else x
|
||||
@@ -346,10 +474,12 @@ def log_thrs(log, ival, name):
|
||||
|
||||
def vol_san(vols, txt):
|
||||
for vol in vols:
|
||||
txt = txt.replace(vol.realpath.encode("utf-8"), vol.vpath.encode("utf-8"))
|
||||
txt = txt.replace(
|
||||
vol.realpath.encode("utf-8"), vol.vpath.encode("utf-8")
|
||||
vol.realpath.encode("utf-8").replace(b"\\", b"\\\\"),
|
||||
vol.vpath.encode("utf-8"),
|
||||
)
|
||||
|
||||
|
||||
return txt
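vol_san masks server-side paths in text that may reach clients by replacing each volume's real filesystem path with its public volume path; the extra .replace(b"\\", b"\\\\") on the needle presumably exists so Windows paths still match after a traceback or repr() has doubled their backslashes. An isolated illustration (Vol is a stand-in for the real volume objects, which expose .realpath and .vpath):

class Vol(object):
    def __init__(self, realpath, vpath):
        self.realpath = realpath
        self.vpath = vpath


def vol_san(vols, txt):
    for vol in vols:
        txt = txt.replace(
            vol.realpath.encode("utf-8").replace(b"\\", b"\\\\"),
            vol.vpath.encode("utf-8"),
        )
    return txt


err = rb"FileNotFoundError: C:\\srv\\music\\x.flac"   # backslashes already doubled by repr()
print(vol_san([Vol(r"C:\srv\music", "music")], err))  # the real path is gone from the output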
|
||||
|
||||
|
||||
@@ -364,11 +494,12 @@ def min_ex():
|
||||
|
||||
@contextlib.contextmanager
|
||||
def ren_open(fname, *args, **kwargs):
|
||||
fun = kwargs.pop("fun", open)
|
||||
fdir = kwargs.pop("fdir", None)
|
||||
suffix = kwargs.pop("suffix", None)
|
||||
|
||||
if fname == os.devnull:
|
||||
with open(fname, *args, **kwargs) as f:
|
||||
with fun(fname, *args, **kwargs) as f:
|
||||
yield {"orz": [f, fname]}
|
||||
return
|
||||
|
||||
@@ -402,7 +533,7 @@ def ren_open(fname, *args, **kwargs):
|
||||
fname += suffix
|
||||
ext += suffix
|
||||
|
||||
with open(fsenc(fpath), *args, **kwargs) as f:
|
||||
with fun(fsenc(fpath), *args, **kwargs) as f:
|
||||
if b64:
|
||||
fp2 = "fn-trunc.{}.txt".format(b64)
|
||||
fp2 = os.path.join(fdir, fp2)
|
||||
@@ -447,8 +578,8 @@ class MultipartParser(object):
|
||||
self.log = log_func
|
||||
self.headers = http_headers
|
||||
|
||||
self.re_ctype = re.compile(r"^content-type: *([^;]+)", re.IGNORECASE)
|
||||
self.re_cdisp = re.compile(r"^content-disposition: *([^;]+)", re.IGNORECASE)
|
||||
self.re_ctype = re.compile(r"^content-type: *([^; ]+)", re.IGNORECASE)
|
||||
self.re_cdisp = re.compile(r"^content-disposition: *([^; ]+)", re.IGNORECASE)
|
||||
self.re_cdisp_field = re.compile(
|
||||
r'^content-disposition:(?: *|.*; *)name="([^"]+)"', re.IGNORECASE
|
||||
)
|
||||
@@ -584,19 +715,21 @@ class MultipartParser(object):
|
||||
yields [fieldname, unsanitized_filename, fieldvalue]
|
||||
where fieldvalue yields chunks of data
|
||||
"""
|
||||
while True:
|
||||
run = True
|
||||
while run:
|
||||
fieldname, filename = self._read_header()
|
||||
yield [fieldname, filename, self._read_data()]
|
||||
|
||||
tail = self.sr.recv(2)
|
||||
tail = self.sr.recv_ex(2)
|
||||
|
||||
if tail == b"--":
|
||||
# EOF indicated by this immediately after final boundary
|
||||
self.sr.recv(2)
|
||||
return
|
||||
tail = self.sr.recv_ex(2)
|
||||
run = False
|
||||
|
||||
if tail != b"\r\n":
|
||||
raise Pebkac(400, "protocol error after field value")
|
||||
m = "protocol error after field value: want b'\\r\\n', got {!r}"
|
||||
raise Pebkac(400, m.format(tail))
|
||||
|
||||
def _read_value(self, iterator, max_len):
|
||||
ret = b""
|
||||
@@ -645,7 +778,7 @@ class MultipartParser(object):
|
||||
def get_boundary(headers):
|
||||
# boundaries contain a-z A-Z 0-9 ' ( ) + _ , - . / : = ?
|
||||
# (whitespace allowed except as the last char)
|
||||
ptn = r"^multipart/form-data; *(.*; *)?boundary=([^;]+)"
|
||||
ptn = r"^multipart/form-data *; *(.*; *)?boundary=([^;]+)"
|
||||
ct = headers["content-type"]
|
||||
m = re.match(ptn, ct, re.IGNORECASE)
|
||||
if not m:
|
||||
@@ -682,6 +815,25 @@ def read_header(sr):
|
||||
return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")
|
||||
|
||||
|
||||
def gen_filekey(salt, fspath, fsize, inode):
|
||||
return base64.urlsafe_b64encode(
|
||||
hashlib.sha512(
|
||||
"{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace")
|
||||
).digest()
|
||||
).decode("ascii")
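gen_filekey derives a per-file key by hashing a server-side salt together with the absolute path, the file size and the inode, then urlsafe-base64-encoding the sha512 digest; since it is a pure function it is easy to poke at in isolation (the argument values below are made up):

import base64
import hashlib


def gen_filekey(salt, fspath, fsize, inode):
    return base64.urlsafe_b64encode(
        hashlib.sha512(
            "{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace")
        ).digest()
    ).decode("ascii")


print(gen_filekey("hunter2", "/srv/music/x.flac", 1234, 5678)[:16])  # first 16 chars of the key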
|
||||
|
||||
|
||||
def gencookie(k, v, dur):
|
||||
v = v.replace(";", "")
|
||||
if dur:
|
||||
dt = datetime.utcfromtimestamp(time.time() + dur)
|
||||
exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
|
||||
else:
|
||||
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
|
||||
|
||||
return "{}={}; Path=/; Expires={}; SameSite=Lax".format(k, v, exp)
|
||||
|
||||
|
||||
def humansize(sz, terse=False):
|
||||
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
|
||||
if sz < 1024:
|
||||
@@ -945,7 +1097,8 @@ def read_socket(sr, total_size):
|
||||
|
||||
buf = sr.recv(bufsz)
|
||||
if not buf:
|
||||
raise Pebkac(400, "client d/c during binary post")
|
||||
m = "client d/c during binary post after {} bytes, {} bytes remaining"
|
||||
raise Pebkac(400, m.format(total_size - remains, remains))
|
||||
|
||||
remains -= len(buf)
|
||||
yield buf
|
||||
@@ -982,8 +1135,12 @@ def read_socket_chunked(sr, log=None):
|
||||
raise Pebkac(400, err)
|
||||
|
||||
if chunklen == 0:
|
||||
sr.recv(2) # \r\n after final chunk
|
||||
return
|
||||
x = sr.recv_ex(2)
|
||||
if x == b"\r\n":
|
||||
return
|
||||
|
||||
m = "protocol error after final chunk: want b'\\r\\n', got {!r}"
|
||||
raise Pebkac(400, m.format(x))
|
||||
|
||||
if log:
|
||||
log("receiving {} byte chunk".format(chunklen))
|
||||
@@ -991,7 +1148,10 @@ def read_socket_chunked(sr, log=None):
|
||||
for chunk in read_socket(sr, chunklen):
|
||||
yield chunk
|
||||
|
||||
sr.recv(2) # \r\n after each chunk too
|
||||
x = sr.recv_ex(2)
|
||||
if x != b"\r\n":
|
||||
m = "protocol error in chunk separator: want b'\\r\\n', got {!r}"
|
||||
raise Pebkac(400, m.format(x))
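For context, the chunked transfer coding being parsed here frames the body as a hex length line, CRLF, the chunk data, CRLF, repeated until a zero-length chunk; the change above replaces the blind two-byte skips with recv_ex plus an explicit check that those bytes really are CRLF. A toy decoder over an in-memory byte string (no trailer support, just the framing):

def dechunk(raw):
    out, i = b"", 0
    while True:
        j = raw.index(b"\r\n", i)
        n = int(raw[i:j], 16)               # hex chunk length
        i = j + 2
        if n == 0:
            assert raw[i:i + 2] == b"\r\n"  # final CRLF, same check the server now performs
            return out
        out += raw[i:i + n]
        assert raw[i + n:i + n + 2] == b"\r\n"  # CRLF after every chunk
        i += n + 2


print(dechunk(b"4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n"))  # b'Wikipedia'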
|
||||
|
||||
|
||||
def yieldfile(fn):
|
||||
@@ -1004,13 +1164,15 @@ def yieldfile(fn):
|
||||
yield buf
|
||||
|
||||
|
||||
def hashcopy(fin, fout):
|
||||
def hashcopy(fin, fout, slp=0):
|
||||
hashobj = hashlib.sha512()
|
||||
tlen = 0
|
||||
for buf in fin:
|
||||
tlen += len(buf)
|
||||
hashobj.update(buf)
|
||||
fout.write(buf)
|
||||
if slp:
|
||||
time.sleep(slp)
|
||||
|
||||
digest = hashobj.digest()[:33]
|
||||
digest_b64 = base64.urlsafe_b64encode(digest).decode("utf-8")
|
||||
@@ -1018,12 +1180,14 @@ def hashcopy(fin, fout):
|
||||
return tlen, hashobj.hexdigest(), digest_b64
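hashcopy streams the request body into the destination file while hashing it, and the optional slp argument is an artificial delay hook; the returned digest_b64 is the first 33 bytes of the sha512, which base64-encodes to exactly 44 characters with no padding. The digest construction on its own, with a stand-in payload:

import base64
import hashlib

data = b"stand-in for an uploaded file"
digest = hashlib.sha512(data).digest()[:33]                    # 33 bytes, divisible by 3,
digest_b64 = base64.urlsafe_b64encode(digest).decode("utf-8")  # so no '=' padding
print(len(digest_b64), digest_b64)                             # 44 <key>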
|
||||
|
||||
|
||||
def sendfile_py(lower, upper, f, s):
|
||||
def sendfile_py(log, lower, upper, f, s, bufsz, slp):
|
||||
remains = upper - lower
|
||||
f.seek(lower)
|
||||
while remains > 0:
|
||||
# time.sleep(0.01)
|
||||
buf = f.read(min(1024 * 32, remains))
|
||||
if slp:
|
||||
time.sleep(slp)
|
||||
|
||||
buf = f.read(min(bufsz, remains))
|
||||
if not buf:
|
||||
return remains
|
||||
|
||||
@@ -1036,17 +1200,24 @@ def sendfile_py(lower, upper, f, s):
|
||||
return 0
|
||||
|
||||
|
||||
def sendfile_kern(lower, upper, f, s):
|
||||
def sendfile_kern(log, lower, upper, f, s, bufsz, slp):
|
||||
out_fd = s.fileno()
|
||||
in_fd = f.fileno()
|
||||
ofs = lower
|
||||
stuck = None
|
||||
while ofs < upper:
|
||||
stuck = stuck or time.time()
|
||||
try:
|
||||
req = min(2 ** 30, upper - ofs)
|
||||
select.select([], [out_fd], [], 10)
|
||||
n = os.sendfile(out_fd, in_fd, ofs, req)
|
||||
stuck = None
|
||||
except Exception as ex:
|
||||
# print("sendfile: " + repr(ex))
|
||||
d = time.time() - stuck
|
||||
log("sendfile stuck for {:.3f} sec: {!r}".format(d, ex))
|
||||
if d < 3600 and ex.errno == 11: # eagain
|
||||
continue
|
||||
|
||||
n = 0
|
||||
|
||||
if n <= 0:
|
||||
@@ -1059,6 +1230,9 @@ def sendfile_kern(lower, upper, f, s):
|
||||
|
||||
|
||||
def statdir(logger, scandir, lstat, top):
|
||||
if lstat and ANYWIN:
|
||||
lstat = False
|
||||
|
||||
if lstat and not os.supports_follow_symlinks:
|
||||
scandir = False
|
||||
|
||||
@@ -1086,9 +1260,10 @@ def statdir(logger, scandir, lstat, top):
|
||||
logger(src, "{} @ {}".format(repr(ex), top), 1)
|
||||
|
||||
|
||||
def rmdirs(logger, scandir, lstat, top):
|
||||
def rmdirs(logger, scandir, lstat, top, depth):
|
||||
if not os.path.exists(fsenc(top)) or not os.path.isdir(fsenc(top)):
|
||||
top = os.path.dirname(top)
|
||||
depth -= 1
|
||||
|
||||
dirs = statdir(logger, scandir, lstat, top)
|
||||
dirs = [x[0] for x in dirs if stat.S_ISDIR(x[1].st_mode)]
|
||||
@@ -1096,15 +1271,16 @@ def rmdirs(logger, scandir, lstat, top):
|
||||
ok = []
|
||||
ng = []
|
||||
for d in dirs[::-1]:
|
||||
a, b = rmdirs(logger, scandir, lstat, d)
|
||||
a, b = rmdirs(logger, scandir, lstat, d, depth + 1)
|
||||
ok += a
|
||||
ng += b
|
||||
|
||||
try:
|
||||
os.rmdir(fsenc(top))
|
||||
ok.append(top)
|
||||
except:
|
||||
ng.append(top)
|
||||
if depth:
|
||||
try:
|
||||
os.rmdir(fsenc(top))
|
||||
ok.append(top)
|
||||
except:
|
||||
ng.append(top)
|
||||
|
||||
return ok, ng
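The new depth parameter lets rmdirs prune empty directories bottom-up while leaving the directory the caller started from alone: only recursive calls (depth > 0) attempt the rmdir. A simplified standalone sketch of that walk (the real function goes through statdir/fsenc and also steps up to the parent when top does not exist):

import os


def rmdirs_sketch(top, depth=0):
    """remove empty dirs below top, bottom-up; keep top itself (depth 0)"""
    ok, ng = [], []
    with os.scandir(top) as it:
        subs = [e.path for e in it if e.is_dir(follow_symlinks=False)]

    for d in subs:
        a, b = rmdirs_sketch(d, depth + 1)
        ok += a
        ng += b

    if depth:
        try:
            os.rmdir(top)  # only succeeds if the dir ended up empty
            ok.append(top)
        except OSError:
            ng.append(top)

    return ok, ng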
|
||||
|
||||
@@ -1143,18 +1319,33 @@ def guess_mime(url, fallback="application/octet-stream"):
|
||||
except:
|
||||
return fallback
|
||||
|
||||
ret = MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback
|
||||
ret = MIMES.get(ext)
|
||||
|
||||
if not ret:
|
||||
x = mimetypes.guess_type(url)
|
||||
ret = "application/{}".format(x[1]) if x[1] else x[0]
|
||||
|
||||
if not ret:
|
||||
ret = fallback
|
||||
|
||||
if ";" not in ret:
|
||||
if ret.startswith("text/") or ret.endswith("/javascript"):
|
||||
ret += "; charset=UTF-8"
|
||||
ret += "; charset=utf-8"
|
||||
|
||||
return ret
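guess_mime now prefers the local MIMES table, then falls back to the mimetypes module (mapping a detected encoding such as gzip to application/<encoding>), and finally tags text and javascript responses with a utf-8 charset. A condensed sketch of that lookup order (the extension split and the trimmed table are paraphrased, and the encoding fallback is left out):

import mimetypes

MIMES = {"md": "text/plain", "opus": "audio/ogg; codecs=opus"}  # trimmed copy of the table above


def guess_mime(url, fallback="application/octet-stream"):
    try:
        ext = url.rsplit(".", 1)[1].lower()
    except:
        return fallback

    ret = MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback
    if ";" not in ret and (ret.startswith("text/") or ret.endswith("/javascript")):
        ret += "; charset=utf-8"

    return ret


print(guess_mime("a.md"))    # text/plain; charset=utf-8
print(guess_mime("a.opus"))  # audio/ogg; codecs=opus  (already has parameters)
print(guess_mime("x"))       # application/octet-stream (no extension)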
|
||||
|
||||
|
||||
def runcmd(argv):
|
||||
def runcmd(argv, timeout=None):
|
||||
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
stdout, stderr = p.communicate()
|
||||
if not timeout or PY2:
|
||||
stdout, stderr = p.communicate()
|
||||
else:
|
||||
try:
|
||||
stdout, stderr = p.communicate(timeout=timeout)
|
||||
except sp.TimeoutExpired:
|
||||
p.kill()
|
||||
stdout, stderr = p.communicate()
|
||||
|
||||
stdout = stdout.decode("utf-8", "replace")
|
||||
stderr = stderr.decode("utf-8", "replace")
|
||||
return [p.returncode, stdout, stderr]
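runcmd keeps the old blocking behaviour on Python 2 and when no timeout is given, but on Python 3 it passes the timeout to communicate(), kills the process if it expires, and then collects whatever output was produced. A usage sketch on a POSIX box, assuming the function above is in scope:

rc, so, se = runcmd(["echo", "hi"], timeout=10)
print(rc, so.strip())  # 0 hi

rc, so, se = runcmd(["sleep", "30"], timeout=2)
print(rc)              # killed after ~2 s; typically a negative returncode on POSIX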
|
||||
|
||||
@@ -237,7 +237,7 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function keyDownHandler(e) {
|
||||
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
|
||||
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing || modal.busy)
|
||||
return;
|
||||
|
||||
var k = e.code + '', v = vid();
|
||||
@@ -331,7 +331,7 @@ window.baguetteBox = (function () {
|
||||
|
||||
function tglsel() {
|
||||
var thumb = currentGallery[currentIndex].imageElement,
|
||||
name = vsplit(thumb.href)[1],
|
||||
name = vsplit(thumb.href)[1].split('?')[0],
|
||||
files = msel.getall();
|
||||
|
||||
for (var a = 0; a < files.length; a++)
|
||||
@@ -345,7 +345,7 @@ window.baguetteBox = (function () {
|
||||
function selbg() {
|
||||
var img = vidimg(),
|
||||
thumb = currentGallery[currentIndex].imageElement,
|
||||
name = vsplit(thumb.href)[1],
|
||||
name = vsplit(thumb.href)[1].split('?')[0],
|
||||
files = msel.getsel(),
|
||||
sel = false;
|
||||
|
||||
@@ -530,9 +530,7 @@ window.baguetteBox = (function () {
|
||||
if (options.bodyClass && document.body.classList)
|
||||
document.body.classList.remove(options.bodyClass);
|
||||
|
||||
var h = ebi('bbox-halp');
|
||||
if (h)
|
||||
h.parentNode.removeChild(h);
|
||||
qsr('#bbox-halp');
|
||||
|
||||
if (options.afterHide)
|
||||
options.afterHide();
|
||||
@@ -590,8 +588,7 @@ window.baguetteBox = (function () {
|
||||
|
||||
image.addEventListener(is_vid ? 'loadedmetadata' : 'load', function () {
|
||||
// Remove loader element
|
||||
var spinner = QS('#baguette-img-' + index + ' .bbox-spinner');
|
||||
figure.removeChild(spinner);
|
||||
qsr('#baguette-img-' + index + ' .bbox-spinner');
|
||||
if (!options.async && callback)
|
||||
callback();
|
||||
});
|
||||
@@ -742,11 +739,11 @@ window.baguetteBox = (function () {
|
||||
if (rot || orot === null)
|
||||
return;
|
||||
|
||||
el.classList.add('nt');
|
||||
clmod(el, 'nt', 1);
|
||||
el.removeAttribute('rot');
|
||||
el.removeAttribute("style");
|
||||
rot = el.offsetHeight;
|
||||
el.classList.remove('nt');
|
||||
clmod(el, 'nt');
|
||||
timer.rm(rotn);
|
||||
}
|
||||
|
||||
@@ -799,15 +796,21 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function updateOffset() {
|
||||
var offset = -currentIndex * 100 + '%';
|
||||
var offset = -currentIndex * 100 + '%',
|
||||
xform = slider.style.perspective !== undefined;
|
||||
|
||||
if (options.animation === 'fadeIn') {
|
||||
slider.style.opacity = 0;
|
||||
setTimeout(function () {
|
||||
slider.style.transform = 'translate3d(' + offset + ',0,0)';
|
||||
xform ?
|
||||
slider.style.transform = 'translate3d(' + offset + ',0,0)' :
|
||||
slider.style.left = offset;
|
||||
slider.style.opacity = 1;
|
||||
}, 400);
|
||||
} else {
|
||||
slider.style.transform = 'translate3d(' + offset + ',0,0)';
|
||||
xform ?
|
||||
slider.style.transform = 'translate3d(' + offset + ',0,0)' :
|
||||
slider.style.left = offset;
|
||||
}
|
||||
playvid(false);
|
||||
var v = vid();
|
||||
@@ -828,9 +831,9 @@ window.baguetteBox = (function () {
|
||||
|
||||
var prev = QS('.full-image.vis');
|
||||
if (prev)
|
||||
prev.classList.remove('vis');
|
||||
clmod(prev, 'vis');
|
||||
|
||||
el.closest('div').classList.add('vis');
|
||||
clmod(el.closest('div'), 'vis', 1);
|
||||
}
|
||||
|
||||
function preloadNext(index) {
|
||||
|
||||
@@ -18,9 +18,9 @@
|
||||
|
||||
<div id="op_search" class="opview">
|
||||
{%- if have_tags_idx %}
|
||||
<div id="srch_form" class="tags"></div>
|
||||
<div id="srch_form" class="tags opbox"></div>
|
||||
{%- else %}
|
||||
<div id="srch_form"></div>
|
||||
<div id="srch_form" class="opbox"></div>
|
||||
{%- endif %}
|
||||
<div id="srch_q"></div>
|
||||
</div>
|
||||
@@ -31,7 +31,7 @@
|
||||
<div id="u2err"></div>
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
<input type="hidden" name="act" value="bput" />
|
||||
<input type="file" name="f" multiple><br />
|
||||
<input type="file" name="f" multiple /><br />
|
||||
<input type="submit" value="start upload">
|
||||
</form>
|
||||
</div>
|
||||
@@ -39,7 +39,7 @@
|
||||
<div id="op_mkdir" class="opview opbox act">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
<input type="hidden" name="act" value="mkdir" />
|
||||
📂<input type="text" name="name" size="30">
|
||||
📂<input type="text" name="name" class="i">
|
||||
<input type="submit" value="make directory">
|
||||
</form>
|
||||
</div>
|
||||
@@ -47,15 +47,15 @@
|
||||
<div id="op_new_md" class="opview opbox">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
<input type="hidden" name="act" value="new_md" />
|
||||
📝<input type="text" name="name" size="30">
|
||||
📝<input type="text" name="name" class="i">
|
||||
<input type="submit" value="new markdown doc">
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="op_msg" class="opview opbox act">
|
||||
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
📟<input type="text" name="msg" size="30">
|
||||
<input type="submit" value="send msg to server log">
|
||||
📟<input type="text" name="msg" class="i">
|
||||
<input type="submit" value="send msg to srv log">
|
||||
</form>
|
||||
</div>
|
||||
|
||||
@@ -76,6 +76,12 @@
|
||||
|
||||
<div id="wrap">
|
||||
|
||||
{%- if doc %}
|
||||
<div id="bdoc"><pre>{{ doc|e }}</pre></div>
|
||||
{%- else %}
|
||||
<div id="bdoc"></div>
|
||||
{%- endif %}
|
||||
|
||||
<div id="pro" class="logue">{{ logues[0] }}</div>
|
||||
|
||||
<table id="files">
|
||||
@@ -113,6 +119,8 @@
|
||||
<div id="epi" class="logue">{{ logues[1] }}</div>
|
||||
|
||||
<h2><a href="/?h">control-panel</a></h2>
|
||||
|
||||
<a href="#" id="repl">π</a>
|
||||
|
||||
</div>
|
||||
|
||||
@@ -128,14 +136,24 @@
|
||||
def_hcols = {{ def_hcols|tojson }},
|
||||
have_up2k_idx = {{ have_up2k_idx|tojson }},
|
||||
have_tags_idx = {{ have_tags_idx|tojson }},
|
||||
have_acode = {{ have_acode|tojson }},
|
||||
have_mv = {{ have_mv|tojson }},
|
||||
have_del = {{ have_del|tojson }},
|
||||
have_unpost = {{ have_unpost|tojson }},
|
||||
have_zip = {{ have_zip|tojson }};
|
||||
have_zip = {{ have_zip|tojson }},
|
||||
txt_ext = "{{ txt_ext }}",
|
||||
{% if no_prism %}no_prism = 1,{% endif %}
|
||||
readme = {{ readme|tojson }},
|
||||
ls0 = {{ ls0|tojson }};
|
||||
|
||||
document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark");
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/browser.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/up2k.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}?_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
||||
@@ -1,13 +1,17 @@
|
||||
@font-face {
|
||||
font-family: 'scp';
|
||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
|
||||
}
|
||||
html, body {
|
||||
color: #333;
|
||||
background: #eee;
|
||||
font-family: sans-serif;
|
||||
line-height: 1.5em;
|
||||
}
|
||||
#repl {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
right: .5em;
|
||||
border: none;
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
#mtw {
|
||||
display: none;
|
||||
}
|
||||
@@ -19,122 +23,8 @@ html, body {
|
||||
bottom: auto;
|
||||
top: 1.4em;
|
||||
}
|
||||
pre, code, a {
|
||||
color: #480;
|
||||
background: #f7f7f7;
|
||||
border: .07em solid #ddd;
|
||||
border-radius: .2em;
|
||||
padding: .1em .3em;
|
||||
margin: 0 .1em;
|
||||
}
|
||||
code {
|
||||
font-size: .96em;
|
||||
}
|
||||
pre, code, tt {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
pre {
|
||||
counter-reset: precode;
|
||||
}
|
||||
pre code {
|
||||
counter-increment: precode;
|
||||
display: inline-block;
|
||||
margin: 0 -.3em;
|
||||
padding: .4em .5em;
|
||||
border: none;
|
||||
border-bottom: 1px solid #cdc;
|
||||
min-width: calc(100% - .6em);
|
||||
line-height: 1.1em;
|
||||
}
|
||||
pre code:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
pre code::before {
|
||||
content: counter(precode);
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
display: inline-block;
|
||||
text-align: right;
|
||||
font-size: .75em;
|
||||
color: #48a;
|
||||
width: 4em;
|
||||
padding-right: 1.5em;
|
||||
margin-left: -5.5em;
|
||||
}
|
||||
pre code:hover {
|
||||
background: #fec;
|
||||
color: #360;
|
||||
}
|
||||
h1, h2 {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
h1 {
|
||||
font-size: 1.7em;
|
||||
text-align: center;
|
||||
border: 1em solid #777;
|
||||
border-width: .05em 0;
|
||||
margin: 3em 0;
|
||||
}
|
||||
h2 {
|
||||
font-size: 1.5em;
|
||||
font-weight: normal;
|
||||
background: #f7f7f7;
|
||||
border-top: .07em solid #fff;
|
||||
border-bottom: .07em solid #bbb;
|
||||
border-radius: .5em .5em 0 0;
|
||||
padding-left: .4em;
|
||||
margin-top: 3em;
|
||||
}
|
||||
h3 {
|
||||
border-bottom: .1em solid #999;
|
||||
}
|
||||
h1 a, h3 a, h5 a,
|
||||
h2 a, h4 a, h6 a {
|
||||
color: inherit;
|
||||
display: block;
|
||||
background: none;
|
||||
border: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
#mp ul,
|
||||
#mp ol {
|
||||
border-left: .3em solid #ddd;
|
||||
}
|
||||
#m>ul,
|
||||
#m>ol {
|
||||
border-color: #bbb;
|
||||
}
|
||||
#mp ul>li {
|
||||
list-style-type: disc;
|
||||
}
|
||||
#mp ul>li,
|
||||
#mp ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
}
|
||||
blockquote {
|
||||
font-family: serif;
|
||||
background: #f7f7f7;
|
||||
border: .07em dashed #ccc;
|
||||
padding: 0 2em;
|
||||
margin: 1em 0;
|
||||
}
|
||||
small {
|
||||
opacity: .8;
|
||||
a {
|
||||
text-decoration: none;
|
||||
}
|
||||
#toc {
|
||||
margin: 0 1em;
|
||||
@@ -182,14 +72,6 @@ small {
|
||||
color: #6b3;
|
||||
text-shadow: .02em 0 0 #6b3;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
margin: 1em 0;
|
||||
}
|
||||
th, td {
|
||||
padding: .2em .5em;
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
blink {
|
||||
animation: blinker .7s cubic-bezier(.9, 0, .1, 1) infinite;
|
||||
}
|
||||
@@ -202,6 +84,36 @@ blink {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
.mdo pre {
|
||||
counter-reset: precode;
|
||||
}
|
||||
.mdo pre code {
|
||||
counter-increment: precode;
|
||||
display: inline-block;
|
||||
border: none;
|
||||
border-bottom: 1px solid #cdc;
|
||||
min-width: calc(100% - .6em);
|
||||
}
|
||||
.mdo pre code:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
.mdo pre code::before {
|
||||
content: counter(precode);
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
display: inline-block;
|
||||
text-align: right;
|
||||
font-size: .75em;
|
||||
color: #48a;
|
||||
width: 4em;
|
||||
padding-right: 1.5em;
|
||||
margin-left: -5.5em;
|
||||
}
|
||||
|
||||
|
||||
@media screen {
|
||||
html, body {
|
||||
margin: 0;
|
||||
@@ -218,34 +130,6 @@ blink {
|
||||
#mp {
|
||||
max-width: 52em;
|
||||
margin-bottom: 6em;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
word-wrap: break-word; /*ie*/
|
||||
}
|
||||
a {
|
||||
color: #fff;
|
||||
background: #39b;
|
||||
text-decoration: none;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
border-bottom: .07em solid #079;
|
||||
}
|
||||
h2 {
|
||||
color: #fff;
|
||||
background: #555;
|
||||
margin-top: 2em;
|
||||
border-bottom: .22em solid #999;
|
||||
border-top: none;
|
||||
}
|
||||
h1 {
|
||||
color: #fff;
|
||||
background: #444;
|
||||
font-weight: normal;
|
||||
border-top: .4em solid #fb0;
|
||||
border-bottom: .4em solid #777;
|
||||
border-radius: 0 1em 0 1em;
|
||||
margin: 3em 0 1em 0;
|
||||
padding: .5em 0;
|
||||
}
|
||||
#mn {
|
||||
padding: 1.3em 0 .7em 1em;
|
||||
@@ -298,6 +182,8 @@ blink {
|
||||
color: #444;
|
||||
background: none;
|
||||
text-decoration: underline;
|
||||
margin: 0 .1em;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
}
|
||||
#mh a:hover {
|
||||
@@ -326,6 +212,10 @@ blink {
|
||||
#toolsbox a+a {
|
||||
text-decoration: none;
|
||||
}
|
||||
#lno {
|
||||
position: absolute;
|
||||
right: 0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -346,55 +236,6 @@ blink {
|
||||
html.dark #toc li {
|
||||
border-width: 0;
|
||||
}
|
||||
html.dark #mp a {
|
||||
background: #057;
|
||||
}
|
||||
html.dark #mp h1 a, html.dark #mp h4 a,
|
||||
html.dark #mp h2 a, html.dark #mp h5 a,
|
||||
html.dark #mp h3 a, html.dark #mp h6 a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
color: #8c0;
|
||||
background: #1a1a1a;
|
||||
border: .07em solid #333;
|
||||
}
|
||||
html.dark #mp ul,
|
||||
html.dark #mp ol {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark #m>ul,
|
||||
html.dark #m>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
html.dark h1 {
|
||||
background: #383838;
|
||||
border-top: .4em solid #b80;
|
||||
border-bottom: .4em solid #4c4c4c;
|
||||
}
|
||||
html.dark h2 {
|
||||
background: #444;
|
||||
border-bottom: .22em solid #555;
|
||||
}
|
||||
html.dark td,
|
||||
html.dark th {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark blockquote {
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
html.dark #mn a:not(:last-child)::after {
|
||||
border-color: rgba(255,255,255,0.3);
|
||||
}
|
||||
@@ -500,12 +341,15 @@ blink {
|
||||
mso-footer-margin: .6in;
|
||||
mso-paper-source: 0;
|
||||
}
|
||||
a {
|
||||
.mdo a {
|
||||
color: #079;
|
||||
text-decoration: none;
|
||||
border-bottom: .07em solid #4ac;
|
||||
padding: 0 .3em;
|
||||
}
|
||||
#repl {
|
||||
display: none;
|
||||
}
|
||||
#toc>ul {
|
||||
border-left: .1em solid #84c4dd;
|
||||
}
|
||||
@@ -530,18 +374,20 @@ blink {
|
||||
a[ctr]::before {
|
||||
content: attr(ctr) '. ';
|
||||
}
|
||||
h1 {
|
||||
.mdo h1 {
|
||||
margin: 2em 0;
|
||||
}
|
||||
h2 {
|
||||
.mdo h2 {
|
||||
margin: 2em 0 0 0;
|
||||
}
|
||||
h1, h2, h3 {
|
||||
.mdo h1,
|
||||
.mdo h2,
|
||||
.mdo h3 {
|
||||
page-break-inside: avoid;
|
||||
}
|
||||
h1::after,
|
||||
h2::after,
|
||||
h3::after {
|
||||
.mdo h1::after,
|
||||
.mdo h2::after,
|
||||
.mdo h3::after {
|
||||
content: 'orz';
|
||||
color: transparent;
|
||||
display: block;
|
||||
@@ -549,20 +395,20 @@ blink {
|
||||
padding: 4em 0 0 0;
|
||||
margin: 0 0 -5em 0;
|
||||
}
|
||||
p {
|
||||
.mdo p {
|
||||
page-break-inside: avoid;
|
||||
}
|
||||
table {
|
||||
.mdo table {
|
||||
page-break-inside: auto;
|
||||
}
|
||||
tr {
|
||||
.mdo tr {
|
||||
page-break-inside: avoid;
|
||||
page-break-after: auto;
|
||||
}
|
||||
thead {
|
||||
.mdo thead {
|
||||
display: table-header-group;
|
||||
}
|
||||
tfoot {
|
||||
.mdo tfoot {
|
||||
display: table-footer-group;
|
||||
}
|
||||
#mp a.vis::after {
|
||||
@@ -570,31 +416,32 @@ blink {
|
||||
border-bottom: 1px solid #bbb;
|
||||
color: #444;
|
||||
}
|
||||
blockquote {
|
||||
.mdo blockquote {
|
||||
border-color: #555;
|
||||
}
|
||||
code {
|
||||
.mdo code {
|
||||
border-color: #bbb;
|
||||
}
|
||||
pre, pre code {
|
||||
.mdo pre,
|
||||
.mdo pre code {
|
||||
border-color: #999;
|
||||
}
|
||||
pre code::before {
|
||||
.mdo pre code::before {
|
||||
color: #058;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark a {
|
||||
html.dark .mdo a {
|
||||
color: #000;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
html.dark .mdo pre,
|
||||
html.dark .mdo code {
|
||||
color: #240;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
html.dark .mdo p>em,
|
||||
html.dark .mdo li>em,
|
||||
html.dark .mdo td>em {
|
||||
color: #940;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,12 +10,12 @@
|
||||
{%- endif %}
|
||||
</head>
|
||||
<body>
|
||||
<div id="mn">navbar</div>
|
||||
<div id="mn"></div>
|
||||
<div id="mh">
|
||||
<a id="lightswitch" href="#">go dark</a>
|
||||
<a id="navtoggle" href="#">hide nav</a>
|
||||
{%- if edit %}
|
||||
<a id="save" href="?edit" tt="Hotkey: ctrl-s">save</a>
|
||||
<a id="save" href="{{ arg_base }}edit" tt="Hotkey: ctrl-s">save</a>
|
||||
<a id="sbs" href="#" tt="editor and preview side by side">sbs</a>
|
||||
<a id="nsbs" href="#" tt="switch between editor and preview$NHotkey: ctrl-e">editor</a>
|
||||
<div id="toolsbox">
|
||||
@@ -26,10 +26,11 @@
|
||||
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
|
||||
<a id="help" href="#">help</a>
|
||||
</div>
|
||||
<span id="lno">L#</span>
|
||||
{%- else %}
|
||||
<a href="?edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
|
||||
<a href="?edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
||||
<a href="?raw">view raw</a>
|
||||
<a href="{{ arg_base }}edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
|
||||
<a href="{{ arg_base }}edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
||||
<a href="{{ arg_base }}raw">view raw</a>
|
||||
{%- endif %}
|
||||
</div>
|
||||
<div id="toc"></div>
|
||||
@@ -43,8 +44,9 @@
|
||||
if you're still reading this, check that javascript is allowed
|
||||
</div>
|
||||
</div>
|
||||
<div id="mp"></div>
|
||||
<div id="mp" class="mdo"></div>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
|
||||
{%- if edit %}
|
||||
<div id="helpbox">
|
||||
@@ -133,13 +135,13 @@ var md_opt = {
|
||||
|
||||
(function () {
|
||||
var l = localStorage,
|
||||
drk = l.getItem('lightmode') != 1,
|
||||
drk = l.lightmode != 1,
|
||||
btn = document.getElementById("lightswitch"),
|
||||
f = function (e) {
|
||||
if (e) { e.preventDefault(); drk = !drk; }
|
||||
document.documentElement.setAttribute("class", drk? "dark":"light");
|
||||
btn.innerHTML = "go " + (drk ? "light":"dark");
|
||||
l.setItem('lightmode', drk? 0:1);
|
||||
l.lightmode = drk? 0:1;
|
||||
};
|
||||
|
||||
btn.onclick = f;
|
||||
|
||||
@@ -24,23 +24,6 @@ var dbg = function () { };
|
||||
var md_plug = {};
|
||||
|
||||
|
||||
function hesc(txt) {
|
||||
return txt.replace(/&/g, "&").replace(/</g, "<").replace(/>/g, ">");
|
||||
}
|
||||
|
||||
|
||||
function cls(dom, name, add) {
|
||||
var re = new RegExp('(^| )' + name + '( |$)');
|
||||
var lst = (dom.getAttribute('class') + '').replace(re, "$1$2").replace(/ /, "");
|
||||
dom.setAttribute('class', lst + (add ? ' ' + name : ''));
|
||||
}
|
||||
|
||||
|
||||
function statify(obj) {
|
||||
return JSON.parse(JSON.stringify(obj));
|
||||
}
|
||||
|
||||
|
||||
// dodge browser issues
|
||||
(function () {
|
||||
var ua = navigator.userAgent;
|
||||
@@ -56,20 +39,34 @@ function statify(obj) {
|
||||
|
||||
// add navbar
|
||||
(function () {
|
||||
var n = document.location + '';
|
||||
n = n.substr(n.indexOf('//') + 2).split('?')[0].split('/');
|
||||
n[0] = 'top';
|
||||
var loc = [];
|
||||
var nav = [];
|
||||
for (var a = 0; a < n.length; a++) {
|
||||
if (a > 0)
|
||||
loc.push(n[a]);
|
||||
|
||||
var dec = hesc(uricom_dec(n[a])[0]);
|
||||
|
||||
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
||||
var parts = get_evpath().split('/'), link = '', o;
|
||||
for (var a = 0, aa = parts.length - 2; a <= aa; a++) {
|
||||
link += parts[a] + (a < aa ? '/' : '');
|
||||
o = mknod('a');
|
||||
o.setAttribute('href', link);
|
||||
o.textContent = uricom_dec(parts[a])[0] || 'top';
|
||||
dom_nav.appendChild(o);
|
||||
}
|
||||
dom_nav.innerHTML = nav.join('');
|
||||
})();
|
||||
|
||||
|
||||
// image load handler
|
||||
var img_load = (function () {
|
||||
var r = {};
|
||||
r.callbacks = [];
|
||||
|
||||
function fire() {
|
||||
for (var a = 0; a < r.callbacks.length; a++)
|
||||
r.callbacks[a]();
|
||||
}
|
||||
|
||||
var timeout = null;
|
||||
r.done = function () {
|
||||
clearTimeout(timeout);
|
||||
timeout = setTimeout(fire, 500);
|
||||
};
|
||||
|
||||
return r;
|
||||
})();
|
||||
|
||||
|
||||
@@ -88,13 +85,13 @@ function copydom(src, dst, lv) {
|
||||
|
||||
var rpl = [];
|
||||
for (var a = sc.length - 1; a >= 0; a--) {
|
||||
var st = sc[a].tagName,
|
||||
dt = dc[a].tagName;
|
||||
var st = sc[a].tagName || sc[a].nodeType,
|
||||
dt = dc[a].tagName || dc[a].nodeType;
|
||||
|
||||
if (st !== dt) {
|
||||
dbg("replace L%d (%d/%d) type %s/%s", lv, a, sc.length, st, dt);
|
||||
rpl.push(a);
|
||||
continue;
|
||||
dst.innerHTML = src.innerHTML;
|
||||
return;
|
||||
}
|
||||
|
||||
var sa = sc[a].attributes || [],
|
||||
@@ -143,8 +140,11 @@ function copydom(src, dst, lv) {
|
||||
// repl is reversed; build top-down
|
||||
var nbytes = 0;
|
||||
for (var a = rpl.length - 1; a >= 0; a--) {
|
||||
var html = sc[rpl[a]].outerHTML;
|
||||
dc[rpl[a]].outerHTML = html;
|
||||
var i = rpl[a],
|
||||
prop = sc[i].nodeType == 1 ? 'outerHTML' : 'nodeValue';
|
||||
|
||||
var html = sc[i][prop];
|
||||
dc[i][prop] = html;
|
||||
nbytes += html.length;
|
||||
}
|
||||
if (nbytes > 0)
|
||||
@@ -161,10 +161,7 @@ function copydom(src, dst, lv) {
|
||||
|
||||
|
||||
function md_plug_err(ex, js) {
|
||||
var errbox = ebi('md_errbox');
|
||||
if (errbox)
|
||||
errbox.parentNode.removeChild(errbox);
|
||||
|
||||
qsr('#md_errbox');
|
||||
if (!ex)
|
||||
return;
|
||||
|
||||
@@ -180,12 +177,12 @@ function md_plug_err(ex, js) {
|
||||
o.textContent = lns[ln - 1];
|
||||
}
|
||||
}
|
||||
errbox = mknod('div');
|
||||
var errbox = mknod('div');
|
||||
errbox.setAttribute('id', 'md_errbox');
|
||||
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
|
||||
errbox.textContent = msg;
|
||||
errbox.onclick = function () {
|
||||
modal.alert('<pre>' + ex.stack + '</pre>');
|
||||
modal.alert('<pre>' + esc(ex.stack) + '</pre>');
|
||||
};
|
||||
if (o) {
|
||||
errbox.appendChild(o);
|
||||
@@ -256,7 +253,7 @@ function convert_markdown(md_text, dest_dom) {
|
||||
Object.assign(marked_opts, ext[0]);
|
||||
|
||||
try {
|
||||
var md_html = marked(md_text, marked_opts);
|
||||
var md_html = marked.parse(md_text, marked_opts);
|
||||
}
|
||||
catch (ex) {
|
||||
if (ext)
|
||||
@@ -264,7 +261,14 @@ function convert_markdown(md_text, dest_dom) {
|
||||
|
||||
throw ex;
|
||||
}
|
||||
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
||||
var md_dom = dest_dom;
|
||||
try {
|
||||
md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
||||
}
|
||||
catch (ex) {
|
||||
md_dom.innerHTML = md_html;
|
||||
window.copydom = noop;
|
||||
}
|
||||
|
||||
var nodes = md_dom.getElementsByTagName('a');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
@@ -356,6 +360,10 @@ function convert_markdown(md_text, dest_dom) {
|
||||
|
||||
copydom(md_dom, dest_dom, 0);
|
||||
|
||||
var imgs = dest_dom.getElementsByTagName('img');
|
||||
for (var a = 0, aa = imgs.length; a < aa; a++)
|
||||
imgs[a].onload = img_load.done;
|
||||
|
||||
if (ext && ext[0].render2)
|
||||
try {
|
||||
ext[0].render2(dest_dom);
|
||||
@@ -367,8 +375,7 @@ function convert_markdown(md_text, dest_dom) {
|
||||
|
||||
|
||||
function init_toc() {
|
||||
var loader = ebi('ml');
|
||||
loader.parentNode.removeChild(loader);
|
||||
qsr('#ml');
|
||||
|
||||
var anchors = []; // list of toc entries, complex objects
|
||||
var anchor = null; // current toc node
|
||||
@@ -490,13 +497,16 @@ function init_toc() {
|
||||
// "main" :p
|
||||
convert_markdown(dom_src.value, dom_pre);
|
||||
var toc = init_toc();
|
||||
img_load.callbacks = [toc.refresh];
|
||||
|
||||
|
||||
// scroll handler
|
||||
var redraw = (function () {
|
||||
var sbs = false;
|
||||
var sbs = true;
|
||||
function onresize() {
|
||||
sbs = window.matchMedia('(min-width: 64em)').matches;
|
||||
if (window.matchMedia)
|
||||
sbs = window.matchMedia('(min-width: 64em)').matches;
|
||||
|
||||
var y = (dom_hbar.offsetTop + dom_hbar.offsetHeight) + 'px';
|
||||
if (sbs) {
|
||||
dom_toc.style.top = y;
|
||||
|
||||
@@ -50,7 +50,7 @@
|
||||
outline: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
font-family: 'consolas', monospace, monospace;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
|
||||
@@ -98,7 +98,7 @@ var draw_md = (function () {
|
||||
var src = dom_src.value;
|
||||
convert_markdown(src, dom_pre);
|
||||
|
||||
var lines = hesc(src).replace(/\r/g, "").split('\n');
|
||||
var lines = esc(src).replace(/\r/g, "").split('\n');
|
||||
nlines = lines.length;
|
||||
var html = [];
|
||||
for (var a = 0; a < lines.length; a++)
|
||||
@@ -108,7 +108,7 @@ var draw_md = (function () {
|
||||
map_src = genmap(dom_ref, map_src);
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
|
||||
cls(ebi('save'), 'disabled', src == server_md);
|
||||
clmod(ebi('save'), 'disabled', src == server_md);
|
||||
|
||||
var t1 = Date.now();
|
||||
delay = t1 - t0 > 100 ? 25 : 1;
|
||||
@@ -127,6 +127,12 @@ var draw_md = (function () {
|
||||
})();
|
||||
|
||||
|
||||
// discard TOC callback, just regen editor scroll map
|
||||
img_load.callbacks = [function () {
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
}];
|
||||
|
||||
|
||||
// resize handler
|
||||
redraw = (function () {
|
||||
function onresize() {
|
||||
@@ -136,7 +142,6 @@ redraw = (function () {
|
||||
dom_ref.style.width = getComputedStyle(dom_src).offsetWidth + 'px';
|
||||
map_src = genmap(dom_ref, map_src);
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
dbg(document.body.clientWidth + 'x' + document.body.clientHeight);
|
||||
}
|
||||
function setsbs() {
|
||||
dom_wrap.setAttribute('class', '');
|
||||
@@ -225,44 +230,40 @@ redraw = (function () {
|
||||
|
||||
// modification checker
|
||||
function Modpoll() {
|
||||
this.skip_one = true;
|
||||
this.disabled = false;
|
||||
|
||||
this.periodic = function () {
|
||||
var that = this;
|
||||
setTimeout(function () {
|
||||
that.periodic();
|
||||
}, 1000 * md_opt.modpoll_freq);
|
||||
var r = {
|
||||
skip_one: true,
|
||||
disabled: false
|
||||
};
|
||||
|
||||
r.periodic = function () {
|
||||
var skip = null;
|
||||
|
||||
if (toast.visible)
|
||||
skip = 'toast';
|
||||
|
||||
else if (this.skip_one)
|
||||
else if (r.skip_one)
|
||||
skip = 'saved';
|
||||
|
||||
else if (this.disabled)
|
||||
else if (r.disabled)
|
||||
skip = 'disabled';
|
||||
|
||||
if (skip) {
|
||||
console.log('modpoll skip, ' + skip);
|
||||
this.skip_one = false;
|
||||
r.skip_one = false;
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('modpoll...');
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.modpoll = this;
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = this.cb;
|
||||
xhr.onreadystatechange = r.cb;
|
||||
xhr.send();
|
||||
}
|
||||
};
|
||||
|
||||
this.cb = function () {
|
||||
if (this.modpoll.disabled || this.modpoll.skip_one) {
|
||||
r.cb = function () {
|
||||
if (r.disabled || r.skip_one) {
|
||||
console.log('modpoll abort');
|
||||
return;
|
||||
}
|
||||
@@ -283,7 +284,7 @@ function Modpoll() {
|
||||
|
||||
if (server_ref != server_now) {
|
||||
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
|
||||
this.modpoll.disabled = true;
|
||||
r.disabled = true;
|
||||
var msg = [
|
||||
"The document has changed on the server.",
|
||||
"The changes will NOT be loaded into your editor automatically.",
|
||||
@@ -297,12 +298,12 @@ function Modpoll() {
|
||||
}
|
||||
|
||||
console.log('modpoll eq');
|
||||
}
|
||||
};
|
||||
|
||||
if (md_opt.modpoll_freq > 0)
|
||||
this.periodic();
|
||||
setInterval(r.periodic, 1000 * md_opt.modpoll_freq);
|
||||
|
||||
return this;
|
||||
return r;
|
||||
}
|
||||
var modpoll = new Modpoll();
|
||||
|
||||
@@ -370,8 +371,8 @@ function save_cb() {
|
||||
}
|
||||
|
||||
if (!r.ok) {
|
||||
if (!this.btn.classList.contains('force-save')) {
|
||||
this.btn.classList.add('force-save');
|
||||
if (!clgot(this.btn, 'force-save')) {
|
||||
clmod(this.btn, 'force-save', 1);
|
||||
var msg = [
|
||||
'This file has been modified since you started editing it!\n',
|
||||
'if you really want to overwrite, press save again.\n',
|
||||
@@ -387,7 +388,7 @@ function save_cb() {
|
||||
return toast.err(0, 'Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
|
||||
}
|
||||
|
||||
this.btn.classList.remove('force-save');
|
||||
clmod(this.btn, 'force-save');
|
||||
//alert('save OK -- wrote ' + r.size + ' bytes.\n\nsha512: ' + r.sha512);
|
||||
|
||||
run_savechk(r.lastmod, this.txt, this.btn, 0);
|
||||
@@ -874,6 +875,40 @@ function cfg_uni(e) {
|
||||
}
|
||||
|
||||
|
||||
var set_lno = (function () {
|
||||
var t = null,
|
||||
pi = null,
|
||||
pv = null,
|
||||
lno = ebi('lno');
|
||||
|
||||
function poke() {
|
||||
clearTimeout(t);
|
||||
t = setTimeout(fire, 20);
|
||||
}
|
||||
|
||||
function fire() {
|
||||
try {
|
||||
clearTimeout(t);
|
||||
|
||||
var i = dom_src.selectionStart;
|
||||
if (i === pi)
|
||||
return;
|
||||
|
||||
var v = 'L' + dom_src.value.slice(0, i).split('\n').length;
|
||||
if (v != pv)
|
||||
lno.innerHTML = v;
|
||||
|
||||
pi = i;
|
||||
pv = v;
|
||||
}
|
||||
catch (e) { }
|
||||
}
|
||||
|
||||
timer.add(fire);
|
||||
return poke;
|
||||
})();
|
||||
|
||||
|
||||
// hotkeys / toolbar
|
||||
(function () {
|
||||
function keydown(ev) {
|
||||
@@ -892,6 +927,8 @@ function cfg_uni(e) {
|
||||
if (document.activeElement != dom_src)
|
||||
return true;
|
||||
|
||||
set_lno();
|
||||
|
||||
if (ctrl(ev)) {
|
||||
if (ev.code == "KeyH" || kc == 72) {
|
||||
md_header(ev.shiftKey);
|
||||
@@ -1086,9 +1123,9 @@ action_stack = (function () {
|
||||
ref = newtxt;
|
||||
dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length);
|
||||
if (hist.un.length > 0)
|
||||
dbg(statify(hist.un.slice(-1)[0]));
|
||||
dbg(jcp(hist.un.slice(-1)[0]));
|
||||
if (hist.re.length > 0)
|
||||
dbg(statify(hist.re.slice(-1)[0]));
|
||||
dbg(jcp(hist.re.slice(-1)[0]));
|
||||
}
|
||||
|
||||
return {
|
||||
|
||||
@@ -7,6 +7,8 @@ html .editor-toolbar>button.active { border-color: rgba(0,0,0,0.4); background:
|
||||
html .editor-toolbar>i.separator { border-left: 1px solid #ccc; }
|
||||
html .editor-toolbar.disabled-for-preview>button:not(.no-disable) { opacity: .35 }
|
||||
|
||||
|
||||
|
||||
html {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
@@ -22,6 +24,18 @@ html, body {
|
||||
bottom: auto;
|
||||
top: 1.4em;
|
||||
}
|
||||
#repl {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
right: .5em;
|
||||
border: none;
|
||||
color: inherit;
|
||||
background: none;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
|
||||
|
||||
#mn {
|
||||
font-weight: normal;
|
||||
margin: 1.3em 0 .7em 1em;
|
||||
@@ -63,148 +77,12 @@ html .editor-toolbar>button.disabled {
|
||||
html .editor-toolbar>button.save.force-save {
|
||||
background: #f97;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* copied from md.css for now */
|
||||
.mdo pre,
|
||||
.mdo code,
|
||||
.mdo a {
|
||||
color: #480;
|
||||
background: #f7f7f7;
|
||||
border: .07em solid #ddd;
|
||||
border-radius: .2em;
|
||||
padding: .1em .3em;
|
||||
margin: 0 .1em;
|
||||
}
|
||||
.mdo code {
|
||||
font-size: .96em;
|
||||
}
|
||||
.mdo pre,
|
||||
.mdo code {
|
||||
font-family: monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
.mdo pre code {
|
||||
display: block;
|
||||
margin: 0 -.3em;
|
||||
padding: .4em .5em;
|
||||
line-height: 1.1em;
|
||||
}
|
||||
.mdo a {
|
||||
color: #fff;
|
||||
background: #39b;
|
||||
text-decoration: none;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
border-bottom: .07em solid #079;
|
||||
}
|
||||
.mdo h2 {
|
||||
color: #fff;
|
||||
background: #555;
|
||||
margin-top: 2em;
|
||||
border-bottom: .22em solid #999;
|
||||
border-top: none;
|
||||
}
|
||||
.mdo h1 {
|
||||
color: #fff;
|
||||
background: #444;
|
||||
font-weight: normal;
|
||||
border-top: .4em solid #fb0;
|
||||
border-bottom: .4em solid #777;
|
||||
border-radius: 0 1em 0 1em;
|
||||
margin: 3em 0 1em 0;
|
||||
padding: .5em 0;
|
||||
}
|
||||
h1, h2 {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
h1 {
|
||||
font-size: 1.7em;
|
||||
text-align: center;
|
||||
border: 1em solid #777;
|
||||
border-width: .05em 0;
|
||||
margin: 3em 0;
|
||||
}
|
||||
h2 {
|
||||
font-size: 1.5em;
|
||||
font-weight: normal;
|
||||
background: #f7f7f7;
|
||||
border-top: .07em solid #fff;
|
||||
border-bottom: .07em solid #bbb;
|
||||
border-radius: .5em .5em 0 0;
|
||||
padding-left: .4em;
|
||||
margin-top: 3em;
|
||||
}
|
||||
.mdo ul,
|
||||
.mdo ol {
|
||||
border-left: .3em solid #ddd;
|
||||
}
|
||||
.mdo>ul,
|
||||
.mdo>ol {
|
||||
border-color: #bbb;
|
||||
}
|
||||
.mdo ul>li {
|
||||
list-style-type: disc;
|
||||
}
|
||||
.mdo ul>li,
|
||||
.mdo ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
}
|
||||
blockquote {
|
||||
font-family: serif;
|
||||
background: #f7f7f7;
|
||||
border: .07em dashed #ccc;
|
||||
padding: 0 2em;
|
||||
margin: 1em 0;
|
||||
}
|
||||
small {
|
||||
opacity: .8;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
td {
|
||||
padding: .2em .5em;
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
th {
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* mde support */
|
||||
.mdo {
|
||||
padding: 1em;
|
||||
background: #f7f7f7;
|
||||
}
|
||||
html.dark .mdo {
|
||||
background: #1c1c1c;
|
||||
}
|
||||
.CodeMirror {
|
||||
background: #f7f7f7;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* darkmode */
|
||||
html.dark .mdo,
|
||||
html.dark .CodeMirror {
|
||||
@@ -228,55 +106,6 @@ html.dark .CodeMirror-selectedtext {
|
||||
background: #246;
|
||||
color: #fff;
|
||||
}
|
||||
html.dark .mdo a {
|
||||
background: #057;
|
||||
}
|
||||
html.dark .mdo h1 a, html.dark .mdo h4 a,
|
||||
html.dark .mdo h2 a, html.dark .mdo h5 a,
|
||||
html.dark .mdo h3 a, html.dark .mdo h6 a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
color: #8c0;
|
||||
background: #1a1a1a;
|
||||
border: .07em solid #333;
|
||||
}
|
||||
html.dark .mdo ul,
|
||||
html.dark .mdo ol {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark .mdo>ul,
|
||||
html.dark .mdo>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
html.dark h1 {
|
||||
background: #383838;
|
||||
border-top: .4em solid #b80;
|
||||
border-bottom: .4em solid #4c4c4c;
|
||||
}
|
||||
html.dark h2 {
|
||||
background: #444;
|
||||
border-bottom: .22em solid #555;
|
||||
}
|
||||
html.dark td,
|
||||
html.dark th {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark blockquote {
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -312,4 +141,15 @@ html.dark .editor-toolbar>button.active {
|
||||
html.dark .editor-toolbar::after,
|
||||
html.dark .editor-toolbar::before {
|
||||
background: none;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
/* ui.css overrides */
|
||||
.mdo {
|
||||
padding: 1em;
|
||||
background: #f7f7f7;
|
||||
}
|
||||
html.dark .mdo {
|
||||
background: #1c1c1c;
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@
|
||||
<textarea id="mt" style="display:none" autocomplete="off">{{ md }}</textarea>
|
||||
</div>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
<script>
|
||||
|
||||
var last_modified = {{ lastmod }};
|
||||
@@ -32,11 +33,11 @@ var md_opt = {
|
||||
|
||||
var lightswitch = (function () {
|
||||
var l = localStorage,
|
||||
drk = l.getItem('lightmode') != 1,
|
||||
drk = l.lightmode != 1,
|
||||
f = function (e) {
|
||||
if (e) drk = !drk;
|
||||
document.documentElement.setAttribute("class", drk? "dark":"light");
|
||||
l.setItem('lightmode', drk? 0:1);
|
||||
l.lightmode = drk? 0:1;
|
||||
};
|
||||
f();
|
||||
return f;
|
||||
@@ -44,6 +45,7 @@ l.setItem('lightmode', drk? 0:1);
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/mde.js?_={{ ts }}"></script>
|
||||
</body></html>
|
||||
|
||||
@@ -65,8 +65,7 @@ var mde = (function () {
|
||||
mde.codemirror.on("change", function () {
|
||||
md_changed(mde);
|
||||
});
|
||||
var loader = ebi('ml');
|
||||
loader.parentNode.removeChild(loader);
|
||||
qsr('#ml');
|
||||
return mde;
|
||||
})();
|
||||
|
||||
@@ -96,20 +95,16 @@ function md_changed(mde, on_srv) {
|
||||
var md_now = mde.value();
|
||||
var save_btn = QS('.editor-toolbar button.save');
|
||||
|
||||
if (md_now == window.md_saved)
|
||||
save_btn.classList.add('disabled');
|
||||
else
|
||||
save_btn.classList.remove('disabled');
|
||||
|
||||
clmod(save_btn, 'disabled', md_now == window.md_saved);
|
||||
set_jumpto();
|
||||
}
|
||||
|
||||
function save(mde) {
|
||||
var save_btn = QS('.editor-toolbar button.save');
|
||||
if (save_btn.classList.contains('disabled'))
|
||||
if (clgot(save_btn, 'disabled'))
|
||||
return toast.inf(2, 'no changes');
|
||||
|
||||
var force = save_btn.classList.contains('force-save');
|
||||
var force = clgot(save_btn, 'force-save');
|
||||
function save2() {
|
||||
var txt = mde.value();
|
||||
|
||||
@@ -153,8 +148,8 @@ function save_cb() {
|
||||
}
|
||||
|
||||
if (!r.ok) {
|
||||
if (!this.btn.classList.contains('force-save')) {
|
||||
this.btn.classList.add('force-save');
|
||||
if (!clgot(this.btn, 'force-save')) {
|
||||
clmod(this.btn, 'force-save', 1);
|
||||
var msg = [
|
||||
'This file has been modified since you started editing it!\n',
|
||||
'if you really want to overwrite, press save again.\n',
|
||||
@@ -170,7 +165,7 @@ function save_cb() {
|
||||
return toast.err(0, 'Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
|
||||
}
|
||||
|
||||
this.btn.classList.remove('force-save');
|
||||
clmod(this.btn, 'force-save');
|
||||
//alert('save OK -- wrote ' + r.size + ' bytes.\n\nsha512: ' + r.sha512);
|
||||
|
||||
// download the saved doc from the server and compare
|
||||
|
||||
@@ -11,6 +11,7 @@ html {
|
||||
background: #333;
|
||||
font-family: sans-serif;
|
||||
text-shadow: 1px 1px 0px #000;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
html, body {
|
||||
margin: 0;
|
||||
@@ -25,4 +26,4 @@ pre {
|
||||
}
|
||||
a {
|
||||
color: #fc5;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>copyparty</title>
|
||||
<title>{{ svcname }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}">
|
||||
|
||||
@@ -3,6 +3,9 @@ html, body, #wrap {
|
||||
background: #f7f7f7;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
html {
|
||||
touch-action: manipulation;
|
||||
}
|
||||
#wrap {
|
||||
max-width: 40em;
|
||||
margin: 2em auto;
|
||||
@@ -22,10 +25,30 @@ a {
|
||||
color: #047;
|
||||
background: #fff;
|
||||
text-decoration: none;
|
||||
border-bottom: 1px solid #aaa;
|
||||
border-bottom: 1px solid #8ab;
|
||||
border-radius: .2em;
|
||||
padding: .2em .8em;
|
||||
}
|
||||
a+a {
|
||||
margin-left: .5em;
|
||||
}
|
||||
.refresh,
|
||||
.logout {
|
||||
float: right;
|
||||
margin: -.2em 0 0 .5em;
|
||||
}
|
||||
.logout,
|
||||
.btns a,
|
||||
a.r {
|
||||
color: #c04;
|
||||
border-color: #c7a;
|
||||
}
|
||||
#repl {
|
||||
border: none;
|
||||
background: none;
|
||||
color: inherit;
|
||||
padding: 0;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
@@ -33,6 +56,7 @@ table {
|
||||
.vols th {
|
||||
padding: .3em .6em;
|
||||
text-align: left;
|
||||
white-space: nowrap;
|
||||
}
|
||||
.num {
|
||||
border-right: 1px solid #bbb;
|
||||
@@ -46,6 +70,22 @@ table {
|
||||
.btns {
|
||||
margin: 1em 0;
|
||||
}
|
||||
#msg {
|
||||
margin: 3em 0;
|
||||
}
|
||||
#msg h1 {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
#msg h1 + p {
|
||||
margin-top: .3em;
|
||||
text-align: right;
|
||||
}
|
||||
blockquote {
|
||||
margin: 0 0 1.6em .6em;
|
||||
padding: .7em 1em 0 1em;
|
||||
border-left: .3em solid rgba(128,128,128,0.5);
|
||||
border-radius: 0 0 0 .25em;
|
||||
}
|
||||
|
||||
|
||||
html.dark,
|
||||
@@ -62,10 +102,16 @@ html.dark a {
|
||||
background: #057;
|
||||
border-color: #37a;
|
||||
}
|
||||
html.dark .logout,
|
||||
html.dark .btns a,
|
||||
html.dark a.r {
|
||||
background: #804;
|
||||
border-color: #c28;
|
||||
}
|
||||
html.dark input {
|
||||
color: #fff;
|
||||
background: #624;
|
||||
border: 1px solid #c27;
|
||||
background: #626;
|
||||
border: 1px solid #c2c;
|
||||
border-width: 1px 0 0 0;
|
||||
border-radius: .5em;
|
||||
padding: .5em .7em;
|
||||
@@ -73,4 +119,4 @@ html.dark input {
|
||||
}
|
||||
html.dark .num {
|
||||
border-color: #777;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,15 +3,29 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>copyparty</title>
|
||||
<title>{{ svcname }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="wrap">
|
||||
<p>hello {{ this.uname }}</p>
|
||||
<a href="/?h" class="refresh">refresh</a>
|
||||
|
||||
{%- if this.uname == '*' %}
|
||||
<p>howdy stranger <small>(you're not logged in)</small></p>
|
||||
{%- else %}
|
||||
<a href="/?pw=x" class="logout">logout</a>
|
||||
<p>welcome back, <strong>{{ this.uname }}</strong></p>
|
||||
{%- endif %}
|
||||
|
||||
{%- if msg %}
|
||||
<div id="msg">
|
||||
{{ msg }}
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
{%- if avol %}
|
||||
<h1>admin panel:</h1>
|
||||
@@ -35,7 +49,8 @@
|
||||
</table>
|
||||
</td></tr></table>
|
||||
<div class="btns">
|
||||
<a href="/?stack">dump stack</a>
|
||||
<a href="/?stack" tt="shows the state of all active threads">dump stack</a>
|
||||
<a href="/?reload=cfg" tt="reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes">reload cfg</a>
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
@@ -57,20 +72,34 @@
|
||||
</ul>
|
||||
{%- endif %}
|
||||
|
||||
<h1 id="cc">client config:</h1>
|
||||
<ul>
|
||||
{% if k304 %}
|
||||
<li><a href="/?k304=n">disable k304</a> (currently enabled)
|
||||
{%- else %}
|
||||
<li><a href="/?k304=y" class="r">enable k304</a> (currently disabled)
|
||||
{% endif %}
|
||||
<blockquote>enabling this will disconnect your client on every HTTP 304, which can prevent some buggy browsers/proxies from getting stuck (suddenly not being able to load pages), <em>but</em> it will also make things slower in general</blockquote></li>
|
||||
|
||||
<li><a href="/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
|
||||
</ul>
|
||||
|
||||
<h1>login for more:</h1>
|
||||
<ul>
|
||||
<form method="post" enctype="multipart/form-data" action="/">
|
||||
<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
|
||||
<input type="hidden" name="act" value="login" />
|
||||
<input type="password" name="cppwd" />
|
||||
<input type="submit" value="Login" />
|
||||
</form>
|
||||
</ul>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
<script>
|
||||
|
||||
if (localStorage.getItem('lightmode') != 1)
|
||||
document.documentElement.setAttribute("class", "dark");
|
||||
document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark");
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script>tt.init();</script>
|
||||
</body>
|
||||
</html>
|
||||
</html>
|
||||
|
||||
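since the splash page above is just an html form plus a few query-string toggles, the same actions can be scripted; a minimal sketch with `requests`, where the host/port (127.0.0.1:3923, the port used in the notes further down) and the `ed:123` account from the example config are assumptions:

```python
# sketch only; host, port and the ed:123 account are assumptions taken from the examples below
import requests

base = "http://127.0.0.1:3923"
s = requests.Session()

# the login form posts multipart fields "act" and "cppwd";
# the session keeps whatever cookie the server hands back
s.post(base + "/", files={"act": (None, "login"), "cppwd": (None, "123")})

# the client-config and admin links are plain GETs on the same URL
s.get(base + "/?k304=y")      # enable k304 (disconnect on every HTTP 304)
s.get(base + "/?reload=cfg")  # admin-only: reload accounts/volumes/volflags

# "logout" is just submitting a bogus password
s.get(base + "/?pw=x")
```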
@@ -1,15 +1,26 @@
|
||||
@font-face {
|
||||
font-family: 'scp';
|
||||
font-display: swap;
|
||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
|
||||
}
|
||||
html {
|
||||
touch-action: manipulation;
|
||||
}
|
||||
#tt, #toast {
|
||||
position: fixed;
|
||||
max-width: 34em;
|
||||
background: #222;
|
||||
max-width: min(34em, 90%);
|
||||
max-width: min(34em, calc(100% - 7em));
|
||||
background: #333;
|
||||
border: 0 solid #777;
|
||||
box-shadow: 0 .2em .5em #222;
|
||||
box-shadow: 0 .2em .5em #111;
|
||||
border-radius: .4em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#tt {
|
||||
max-width: min(34em, calc(100% - 3.3em));
|
||||
overflow: hidden;
|
||||
margin-top: 1em;
|
||||
margin: .7em 0;
|
||||
padding: 0 1.3em;
|
||||
height: 0;
|
||||
opacity: .1;
|
||||
@@ -20,7 +31,9 @@
|
||||
right: -1em;
|
||||
line-height: 1.5em;
|
||||
padding: 1em 1.3em;
|
||||
margin-left: 3em;
|
||||
border-width: .4em 0;
|
||||
overflow-wrap: break-word;
|
||||
transform: translateX(100%);
|
||||
transition:
|
||||
transform .4s cubic-bezier(.2, 1.2, .5, 1),
|
||||
@@ -28,7 +41,14 @@
|
||||
text-shadow: 1px 1px 0 #000;
|
||||
color: #fff;
|
||||
}
|
||||
#toastc {
|
||||
#toast a {
|
||||
color: inherit;
|
||||
text-shadow: inherit;
|
||||
background: rgba(0, 0, 0, 0.4);
|
||||
border-radius: .3em;
|
||||
padding: .2em .3em;
|
||||
}
|
||||
#toast a#toastc {
|
||||
display: inline-block;
|
||||
position: absolute;
|
||||
overflow: hidden;
|
||||
@@ -37,7 +57,7 @@
|
||||
opacity: 0;
|
||||
padding: .3em 0;
|
||||
margin: -.3em 0 0 0;
|
||||
line-height: 1.5em;
|
||||
line-height: 1.3em;
|
||||
color: #000;
|
||||
border: none;
|
||||
outline: none;
|
||||
@@ -45,12 +65,22 @@
|
||||
border-radius: .5em 0 0 .5em;
|
||||
transition: left .3s, width .3s, padding .3s, opacity .3s;
|
||||
}
|
||||
#toastb {
|
||||
max-height: 70vh;
|
||||
overflow-y: auto;
|
||||
}
|
||||
#toast.scroll #toastb {
|
||||
overflow-y: scroll;
|
||||
margin-right: -1.2em;
|
||||
padding-right: .7em;
|
||||
}
|
||||
#toast pre {
|
||||
margin: 0;
|
||||
}
|
||||
#toast.vis {
|
||||
right: 1.3em;
|
||||
transform: unset;
|
||||
transform: inherit;
|
||||
transform: initial;
|
||||
}
|
||||
#toast.vis #toastc {
|
||||
left: -2em;
|
||||
@@ -73,7 +103,7 @@
|
||||
background: #8e4;
|
||||
}
|
||||
#toast.warn {
|
||||
background: #970;
|
||||
background: #960;
|
||||
border-color: #fc0;
|
||||
}
|
||||
#toast.warn #toastc {
|
||||
@@ -86,6 +116,20 @@
|
||||
#toast.err #toastc {
|
||||
background: #d06;
|
||||
}
|
||||
#tth {
|
||||
color: #fff;
|
||||
background: #111;
|
||||
font-size: .9em;
|
||||
padding: 0 .26em;
|
||||
line-height: .97em;
|
||||
border-radius: 1em;
|
||||
position: absolute;
|
||||
display: none;
|
||||
}
|
||||
#tth.act {
|
||||
display: block;
|
||||
z-index: 9001;
|
||||
}
|
||||
#tt.b {
|
||||
padding: 0 2em;
|
||||
border-radius: .5em;
|
||||
@@ -101,8 +145,10 @@
|
||||
padding: 1.5em 2em;
|
||||
border-width: .5em 0;
|
||||
}
|
||||
#modalc code,
|
||||
#tt code {
|
||||
background: #3c3c3c;
|
||||
color: #eee;
|
||||
background: #444;
|
||||
padding: .1em .3em;
|
||||
border-top: 1px solid #777;
|
||||
border-radius: .3em;
|
||||
@@ -119,6 +165,7 @@ html.light #tt,
|
||||
html.light #toast {
|
||||
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
|
||||
}
|
||||
#modalc code,
|
||||
html.light #tt code {
|
||||
background: #060;
|
||||
color: #fff;
|
||||
@@ -126,6 +173,10 @@ html.light #tt code {
|
||||
html.light #tt em {
|
||||
color: #d38;
|
||||
}
|
||||
html.light #tth {
|
||||
color: #000;
|
||||
background: #fff;
|
||||
}
|
||||
#modal {
|
||||
position: fixed;
|
||||
overflow: auto;
|
||||
@@ -165,6 +216,16 @@ html.light #tt em {
|
||||
min-width: 30em;
|
||||
}
|
||||
}
|
||||
#modalc li {
|
||||
margin: 1em 0;
|
||||
}
|
||||
#modalc h6 {
|
||||
font-size: 1.3em;
|
||||
border-bottom: 1px solid #999;
|
||||
margin: 0;
|
||||
padding: .3em;
|
||||
text-align: center;
|
||||
}
|
||||
#modalb {
|
||||
position: sticky;
|
||||
text-align: right;
|
||||
@@ -191,6 +252,8 @@ html.light #tt em {
|
||||
}
|
||||
#modali {
|
||||
display: block;
|
||||
background: #fff;
|
||||
color: #000;
|
||||
width: calc(100% - 1.25em);
|
||||
margin: 1em -.1em 0 -.1em;
|
||||
padding: .5em;
|
||||
@@ -200,4 +263,229 @@ html.light #tt em {
|
||||
}
|
||||
#modali:focus {
|
||||
border-color: #06d;
|
||||
}
|
||||
}
|
||||
#repl_pre {
|
||||
max-width: 24em;
|
||||
}
|
||||
*:focus,
|
||||
#pctl *:focus,
|
||||
.btn:focus {
|
||||
box-shadow: 0 .1em .2em #fc0 inset;
|
||||
border-radius: .2em;
|
||||
}
|
||||
html.light *:focus,
|
||||
html.light #pctl *:focus,
|
||||
html.light .btn:focus {
|
||||
box-shadow: 0 .1em .2em #037 inset;
|
||||
}
|
||||
input[type="text"]:focus,
|
||||
input:not([type]):focus,
|
||||
textarea:focus {
|
||||
box-shadow: 0 .1em .3em #fc0, 0 -.1em .3em #fc0;
|
||||
}
|
||||
html.light input[type="text"]:focus,
|
||||
html.light input:not([type]):focus,
|
||||
html.light textarea:focus {
|
||||
box-shadow: 0 .1em .3em #037, 0 -.1em .3em #037;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
.mdo pre,
|
||||
.mdo code,
|
||||
.mdo a {
|
||||
color: #480;
|
||||
background: #f7f7f7;
|
||||
border: .07em solid #ddd;
|
||||
border-radius: .2em;
|
||||
padding: .1em .3em;
|
||||
margin: 0 .1em;
|
||||
}
|
||||
.mdo pre,
|
||||
.mdo code,
|
||||
.mdo tt {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
.mdo code {
|
||||
font-size: .96em;
|
||||
}
|
||||
.mdo h1,
|
||||
.mdo h2 {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
.mdo h1 {
|
||||
font-size: 1.7em;
|
||||
text-align: center;
|
||||
border: 1em solid #777;
|
||||
border-width: .05em 0;
|
||||
margin: 3em 0;
|
||||
}
|
||||
.mdo h2 {
|
||||
font-size: 1.5em;
|
||||
font-weight: normal;
|
||||
background: #f7f7f7;
|
||||
border-top: .07em solid #fff;
|
||||
border-bottom: .07em solid #bbb;
|
||||
border-radius: .5em .5em 0 0;
|
||||
padding-left: .4em;
|
||||
margin-top: 3em;
|
||||
}
|
||||
.mdo h3 {
|
||||
border-bottom: .1em solid #999;
|
||||
}
|
||||
.mdo h1 a, .mdo h3 a, .mdo h5 a,
|
||||
.mdo h2 a, .mdo h4 a, .mdo h6 a {
|
||||
color: inherit;
|
||||
display: block;
|
||||
background: none;
|
||||
border: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
.mdo ul,
|
||||
.mdo ol {
|
||||
padding-left: 1em;
|
||||
}
|
||||
.mdo ul ul,
|
||||
.mdo ul ol,
|
||||
.mdo ol ul,
|
||||
.mdo ol ol {
|
||||
padding-left: 2em;
|
||||
border-left: .3em solid #ddd;
|
||||
}
|
||||
.mdo ul>li,
|
||||
.mdo ol>li {
|
||||
margin: .7em 0;
|
||||
list-style-type: disc;
|
||||
}
|
||||
.mdo strong {
|
||||
color: #000;
|
||||
}
|
||||
.mdo p>em,
|
||||
.mdo li>em,
|
||||
.mdo td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
}
|
||||
.mdo blockquote {
|
||||
font-family: serif;
|
||||
background: #f7f7f7;
|
||||
border: .07em dashed #ccc;
|
||||
padding: 0 2em;
|
||||
margin: 1em 0;
|
||||
}
|
||||
.mdo small {
|
||||
opacity: .8;
|
||||
}
|
||||
.mdo pre code {
|
||||
display: block;
|
||||
margin: 0 -.3em;
|
||||
padding: .4em .5em;
|
||||
line-height: 1.1em;
|
||||
}
|
||||
.mdo pre code:hover {
|
||||
background: #fec;
|
||||
color: #360;
|
||||
}
|
||||
.mdo table {
|
||||
border-collapse: collapse;
|
||||
margin: 1em 0;
|
||||
}
|
||||
.mdo th,
|
||||
.mdo td {
|
||||
padding: .2em .5em;
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
|
||||
@media screen {
|
||||
.mdo {
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
word-wrap: break-word; /*ie*/
|
||||
}
|
||||
html.light .mdo a,
|
||||
.mdo a {
|
||||
color: #fff;
|
||||
background: #39b;
|
||||
text-decoration: none;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
border-bottom: .07em solid #079;
|
||||
}
|
||||
.mdo h1 {
|
||||
color: #fff;
|
||||
background: #444;
|
||||
font-weight: normal;
|
||||
border-top: .4em solid #fb0;
|
||||
border-bottom: .4em solid #777;
|
||||
border-radius: 0 1em 0 1em;
|
||||
margin: 3em 0 1em 0;
|
||||
padding: .5em 0;
|
||||
}
|
||||
.mdo h2 {
|
||||
color: #fff;
|
||||
background: #555;
|
||||
margin-top: 2em;
|
||||
border-bottom: .22em solid #999;
|
||||
border-top: none;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark .mdo a {
|
||||
background: #057;
|
||||
}
|
||||
html.dark .mdo h1 a, html.dark .mdo h4 a,
|
||||
html.dark .mdo h2 a, html.dark .mdo h5 a,
|
||||
html.dark .mdo h3 a, html.dark .mdo h6 a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
html.dark .mdo pre,
|
||||
html.dark .mdo code {
|
||||
color: #8c0;
|
||||
background: #1a1a1a;
|
||||
border: .07em solid #333;
|
||||
}
|
||||
html.dark .mdo ul,
|
||||
html.dark .mdo ol {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark .mdo strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark .mdo p>em,
|
||||
html.dark .mdo li>em,
|
||||
html.dark .mdo td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
html.dark .mdo h1 {
|
||||
background: #383838;
|
||||
border-top: .4em solid #b80;
|
||||
border-bottom: .4em solid #4c4c4c;
|
||||
}
|
||||
html.dark .mdo h2 {
|
||||
background: #444;
|
||||
border-bottom: .22em solid #555;
|
||||
}
|
||||
html.dark .mdo td,
|
||||
html.dark .mdo th {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark .mdo blockquote {
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,15 +1,13 @@
# example `.epilogue.html`
save one of these as `.epilogue.html` inside a folder to customize it:

* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
**NOTE:** there's more stuff (sharex config, service scripts, nginx configs, ...) in [`/contrib/`](/contrib/)


# example browser-css
point `--css-browser` to one of these by URL:
# utilities

* [`browser.css`](browser.css) changes the background
* [`browser-icons.css`](browser-icons.css) adds filetype icons
## [`multisearch.html`](multisearch.html)
* takes a list of filenames of youtube rips, grabs the youtube-id of each file, and does a search on the server for those
* use it by putting it somewhere on the server and opening it as an html page
* also serves as an extendable template for other specific search behaviors

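the same flow works outside the browser too; a rough Python sketch (not part of the repo) which pulls the youtube-id with the same regex as `multisearch.html` and posts each query to the `/?srch` endpoint the page uses, assuming the server is on 127.0.0.1:3923:

```python
# sketch only; reads filenames on stdin, prints hits/misses
import json
import re
import sys
import urllib.request

BASE = "http://127.0.0.1:3923"
# same pattern as multisearch.html: "-YTID.ext" (youtube-dl) or "[YTID].ext" (yt-dlp)
PAT = re.compile(r"[[-]([0-9a-zA-Z_-]{11})\]?\.(mp4|webm|mkv)$")

for ln in sys.stdin:
    fn = ln.strip()
    m = PAT.search(fn)
    if not m:
        continue
    # the page just posts the raw JSON body, so no content-type fiddling should be needed
    body = json.dumps({"q": "name like *%s*" % (m.group(1),)}).encode()
    req = urllib.request.Request(BASE + "/?srch", data=body)
    with urllib.request.urlopen(req) as r:
        hits = json.load(r).get("hits", [])
    print(("HIT  " if hits else "MISS ") + fn)
    for h in hits:
        print("      /" + h["rp"])
```

feed it a file listing, e.g. `ls | python3 multisearch.py`
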
@@ -19,4 +17,23 @@ point `--css-browser` to one of these by URL:
* notes on using rclone as a fuse client/server

## [`example.conf`](example.conf)
* example config file for `-c` which never really happened
* example config file for `-c`



# junk

alphabetical list of the remaining files

| what | why |
| -- | -- |
| [biquad.html](biquad.html) | bruteforce calibrator for the audio equalizer since im not that good at maths |
| [design.txt](design.txt) | initial brainstorming of the copyparty design, unmaintained, incorrect, sentimental value only |
| [hls.html](hls.html) | experimenting with hls playback using `hls.js`, works p well, almost became a thing |
| [music-analysis.sh](music-analysis.sh) | testing various bpm/key detection libraries before settling on the ones used in [`/bin/mtag/`](/bin/mtag/) |
| [notes.sh](notes.sh) | notepad, just scraps really |
| [nuitka.txt](nuitka.txt) | how to build a copyparty exe using nuitka (not maintained) |
| [pretend-youre-qnap.patch](pretend-youre-qnap.patch) | simulate a NAS which keeps returning old cached data even though you just modified the file yourself |
| [tcp-debug.sh](tcp-debug.sh) | looks like this was to debug stuck tcp connections? |
| [unirange.py](unirange.py) | uhh |
| [up2k.txt](up2k.txt) | initial ideas for how up2k should work, another unmaintained sentimental-value-only thing |

@@ -3,6 +3,24 @@
|
||||
setTimeout(location.reload.bind(location), 700);
|
||||
document.documentElement.scrollLeft = 0;
|
||||
|
||||
var cali = (function() {
|
||||
var ac = new AudioContext(),
|
||||
fi = ac.createBiquadFilter(),
|
||||
freqs = new Float32Array(1),
|
||||
mag = new Float32Array(1),
|
||||
phase = new Float32Array(1);
|
||||
|
||||
freqs[0] = 14000;
|
||||
fi.type = 'peaking';
|
||||
fi.frequency.value = 18000;
|
||||
fi.Q.value = 0.8;
|
||||
fi.gain.value = 1;
|
||||
fi.getFrequencyResponse(freqs, mag, phase);
|
||||
|
||||
return mag[0]; // 1.0407 good, 1.0563 bad
|
||||
})(),
|
||||
mp = cali < 1.05;
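for reference, the probe above only works because `getFrequencyResponse()` reports the magnitude of the browser's peaking biquad at 14 kHz and implementations differ slightly; the same number can be approximated outside the browser with the Audio EQ Cookbook formulas (a sketch, with the 44100 Hz sample rate being an assumption):

```python
# standalone approximation of the Web Audio peaking-filter probe above;
# 44100 Hz is an assumption, the in-browser value depends on the AudioContext rate
import cmath
import math

def peaking_mag(f, fs=44100, f0=18000.0, q=0.8, gain_db=1.0):
    a = 10 ** (gain_db / 40.0)
    w0 = 2 * math.pi * f0 / fs
    alpha = math.sin(w0) / (2 * q)
    b = [1 + alpha * a, -2 * math.cos(w0), 1 - alpha * a]
    aa = [1 + alpha / a, -2 * math.cos(w0), 1 - alpha / a]
    z = cmath.exp(-1j * 2 * math.pi * f / fs)  # z^-1 on the unit circle
    num = b[0] + b[1] * z + b[2] * z * z
    den = aa[0] + aa[1] * z + aa[2] * z * z
    return abs(num / den)

print(peaking_mag(14000))  # compare against the 1.04 / 1.05 values noted above
```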
|
||||
|
||||
var can = document.createElement('canvas'),
|
||||
cc = can.getContext('2d'),
|
||||
w = 2048,
|
||||
@@ -28,12 +46,12 @@ var cfg = [ // hz, q, g
|
||||
[1000, 0.9, 1.1],
|
||||
[2000, 0.9, 1.105],
|
||||
[4000, 0.88, 1.05],
|
||||
[8000 * 1.006, 0.73, 1.24],
|
||||
[8000 * 1.006, 0.73, mp ? 1.24 : 1.2],
|
||||
//[16000 * 1.00, 0.5, 1.75], // peak.v1
|
||||
//[16000 * 1.19, 0, 1.8] // shelf.v1
|
||||
[16000 * 0.89, 0.7, 1.26], // peak
|
||||
[16000 * 1.13, 0.82, 1.09], // peak
|
||||
[16000 * 1.205, 0, 1.9] // shelf
|
||||
[16000 * 0.89, 0.7, mp ? 1.26 : 1.2], // peak
|
||||
[16000 * 1.13, 0.82, mp ? 1.09 : 0.75], // peak
|
||||
[16000 * 1.205, 0, mp ? 1.9 : 1.85] // shelf
|
||||
];
|
||||
|
||||
var freqs = new Float32Array(22000),
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
/* put filetype icons inline with text
|
||||
#ggrid>a>span:before,
|
||||
#ggrid>a>span.dir:before {
|
||||
display: inline;
|
||||
line-height: 0;
|
||||
font-size: 1.7em;
|
||||
margin: -.7em .1em -.5em -.6em;
|
||||
}
|
||||
*/
|
||||
|
||||
|
||||
/* move folder icons top-left */
|
||||
#ggrid>a>span.dir:before {
|
||||
content: initial;
|
||||
}
|
||||
#ggrid>a[href$="/"]:before {
|
||||
content: '📂';
|
||||
}
|
||||
|
||||
|
||||
/* put filetype icons top-left */
|
||||
#ggrid>a:before {
|
||||
display: block;
|
||||
position: absolute;
|
||||
padding: .3em 0;
|
||||
margin: -.4em;
|
||||
text-shadow: 0 0 .1em #000;
|
||||
background: linear-gradient(135deg,rgba(255,255,255,0) 50%,rgba(255,255,255,0.2));
|
||||
border-radius: .3em;
|
||||
font-size: 2em;
|
||||
}
|
||||
|
||||
|
||||
/* video */
|
||||
#ggrid>a:is(
|
||||
[href$=".mkv"i],
|
||||
[href$=".mp4"i],
|
||||
[href$=".webm"i],
|
||||
):before {
|
||||
content: '📺';
|
||||
}
|
||||
|
||||
|
||||
/* audio */
|
||||
#ggrid>a:is(
|
||||
[href$=".mp3"i],
|
||||
[href$=".ogg"i],
|
||||
[href$=".opus"i],
|
||||
[href$=".flac"i],
|
||||
[href$=".m4a"i],
|
||||
[href$=".aac"i],
|
||||
):before {
|
||||
content: '🎵';
|
||||
}
|
||||
|
||||
|
||||
/* image */
|
||||
#ggrid>a:is(
|
||||
[href$=".jpg"i],
|
||||
[href$=".jpeg"i],
|
||||
[href$=".png"i],
|
||||
[href$=".gif"i],
|
||||
[href$=".webp"i],
|
||||
):before {
|
||||
content: '🎨';
|
||||
}
|
||||
@@ -1,3 +1,10 @@
# append some arguments to the commandline;
# the first space in a line counts as a separator,
# any additional spaces are part of the value
-e2dsa
-e2ts
-i 127.0.0.1

# create users:
# u username:password
u ed:123
@@ -24,7 +31,8 @@ rw ed
r k
rw ed

# this does the same thing:
# this does the same thing,
# and will cause an error on startup since /priv is already taken:
./priv
/priv
r ed k
@@ -47,5 +55,5 @@ c e2d
c nodupe

# this entire config file can be replaced with these arguments:
# -u ed:123 -u k:k -v .::r:a,ed -v priv:priv:r,k:rw,ed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w:c,e2d:c,nodupe
# -u ed:123 -u k:k -v .::r:a,ed -v priv:priv:r,k:rw,ed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w:c,e2d,nodupe
# but note that the config file always wins in case of conflicts

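to make the "first space is the separator" rule above concrete, here is a toy reader for this format (an illustration only, not copyparty's actual config loader):

```python
# toy illustration of the config format described above; not the real parser
def read_conf(path):
    argv, users, volumes = [], {}, []
    for ln in open(path, encoding="utf-8"):
        ln = ln.rstrip("\n")
        if not ln or ln.startswith("#"):
            continue  # blank lines and comments
        if ln.startswith("-"):
            # the first space separates flag from value,
            # any additional spaces stay inside the value
            flag, _, val = ln.partition(" ")
            argv += [flag, val] if val else [flag]
        elif ln.startswith("u "):
            name, _, pw = ln[2:].partition(":")
            users[name] = pw  # "u username:password"
        else:
            volumes.append(ln)  # volume paths, r/rw access lines, c-flags
    return argv, users, volumes
```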
@@ -1,26 +0,0 @@
|
||||
|
||||
method = self.s.recv(4)
|
||||
self.s.unrecv(method)
|
||||
print("xxx unrecv'd [{}]".format(method))
|
||||
|
||||
# jython used to do this, they stopped since it's broken
|
||||
# but reimplementing sendall is out of scope for now
|
||||
if not getattr(self.s.s, "sendall", None):
|
||||
self.s.s.sendall = self.s.s.send
|
||||
|
||||
# TODO this is also pretty bad
|
||||
have = dir(self.s)
|
||||
for k in self.s.s.__dict__:
|
||||
if k not in have and not k.startswith("__"):
|
||||
if k == "recv":
|
||||
raise Exception("wait what")
|
||||
|
||||
self.s.__dict__[k] = self.s.s.__dict__[k]
|
||||
|
||||
have = dir(self.s)
|
||||
for k in dir(self.s.s):
|
||||
if k not in have and not k.startswith("__"):
|
||||
if k == "recv":
|
||||
raise Exception("wait what")
|
||||
|
||||
setattr(self.s, k, getattr(self.s.s, k))
|
||||
124  docs/multisearch.html  (new file)
@@ -0,0 +1,124 @@
|
||||
<!DOCTYPE html><html lang="en"><head>
|
||||
<meta charset="utf-8">
|
||||
<title>multisearch</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<style>
|
||||
|
||||
html, body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
color: #ddd;
|
||||
background: #222;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
body {
|
||||
padding: 1em;
|
||||
}
|
||||
a {
|
||||
color: #fc5;
|
||||
}
|
||||
ul {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
code {
|
||||
color: #fc5;
|
||||
border: 1px solid #444;
|
||||
padding: .1em .2em;
|
||||
font-family: sans-serif, sans-serif;
|
||||
}
|
||||
#src {
|
||||
display: block;
|
||||
width: calc(100% - 1em);
|
||||
padding: .5em;
|
||||
margin: 0;
|
||||
}
|
||||
td {
|
||||
padding-left: 1em;
|
||||
}
|
||||
.hit,
|
||||
.miss {
|
||||
font-weight: bold;
|
||||
padding-left: 0;
|
||||
padding-top: 1em;
|
||||
}
|
||||
.hit {color: #af0;}
|
||||
.miss {color: #f0c;}
|
||||
.hit:before {content: '✅';}
|
||||
.miss:before {content: '❌';}
|
||||
|
||||
</style></head><body>
|
||||
<ul>
|
||||
<li>paste a list of filenames (youtube rips) below and hit search</li>
|
||||
<li>it will grab the youtube-id from the filenames and search for each id</li>
|
||||
<li>filenames must be like <code>-YTID.webm</code> (youtube-dl style) or <code>[YTID].webm</code> (ytdlp style)</li>
|
||||
</ul>
|
||||
<textarea id="src"></textarea>
|
||||
<button id="go">search</button>
|
||||
<div id="res"></div>
|
||||
<script>
|
||||
|
||||
var ebi = document.getElementById.bind(document);
|
||||
function esc(txt) {
|
||||
return txt.replace(/[&"<>]/g, function (c) {
|
||||
return {
|
||||
'&': '&',
|
||||
'"': '"',
|
||||
'<': '<',
|
||||
'>': '>'
|
||||
}[c];
|
||||
});
|
||||
}
|
||||
|
||||
ebi('go').onclick = async function() {
|
||||
var queries = [];
|
||||
for (var ln of ebi('src').value.split(/\n/g)) {
|
||||
// filter the list of input files,
|
||||
// only keeping youtube videos,
|
||||
// meaning the filename ends with either
|
||||
// [YOUTUBEID].EXTENSION or
|
||||
// -YOUTUBEID.EXTENSION
|
||||
var m = /[[-]([0-9a-zA-Z_-]{11})\]?\.(mp4|webm|mkv)$/.exec(ln);
|
||||
if (!m || !(m = m[1]))
|
||||
continue;
|
||||
|
||||
// create a search query for each line: name like *youtubeid*
|
||||
queries.push([ln, `name like *${m}*`]);
|
||||
}
|
||||
|
||||
var a = 0, html = ['<table>'], hits = [], misses = [];
|
||||
for (var [fn, q] of queries) {
|
||||
var r = await fetch('/?srch', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({'q': q})
|
||||
});
|
||||
r = await r.json();
|
||||
|
||||
var cl, tab2;
|
||||
if (r.hits.length) {
|
||||
tab2 = hits;
|
||||
cl = 'hit';
|
||||
}
|
||||
else {
|
||||
tab2 = misses;
|
||||
cl = 'miss';
|
||||
}
|
||||
var h = `<tr><td class="${cl}" colspan="9">${esc(fn)}</td></tr>`;
|
||||
tab2.push(h);
|
||||
html.push(h);
|
||||
for (var h of r.hits) {
|
||||
var link = `<a href="/${h.rp}">${esc(decodeURIComponent(h.rp))}</a>`;
|
||||
html.push(`<tr><td>${h.sz}</td><td>${link}</td></tr>`);
|
||||
}
|
||||
ebi('res').innerHTML = `searching, ${++a} / ${queries.length} done, ${hits.length} hits, ${misses.length} miss`;
|
||||
}
|
||||
html.push('<tr><td><h1>hits:</h1></td></tr>');
|
||||
html = html.concat(hits);
|
||||
|
||||
html.push('<tr><td><h1>miss:</h1></td></tr>');
|
||||
html = html.concat(misses);
|
||||
|
||||
html.push('</table>');
|
||||
ebi('res').innerHTML = html.join('\n');
|
||||
};
|
||||
|
||||
</script></body></html>
|
||||
@@ -38,12 +38,19 @@ para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}
|
||||
avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} csz=$1;sum=0;nsmp=0} {sub(/\r$/,"")} /^[0-9]+$/ {pr($1);next} / MiB/ {sub(/ MiB.*/,"");sub(/.* /,"");sum+=$1;nsmp++} END {pr(0)}' "$1"; }
|
||||
|
||||
|
||||
##
|
||||
## time between first and last upload
|
||||
|
||||
python3 -um copyparty -nw -v srv::rw -i 127.0.0.1 2>&1 | tee log
|
||||
cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+$2)+$3} !a{a=t;sa=s} {b=t;sb=s} END {print b-a,sa,sb}'
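same measurement without awk; assumes what the one-liner assumes, i.e. chunk-upload lines contain "purl" and start with a (possibly color-coded) HH:MM:SS timestamp:

```python
# rough python equivalent of the awk line above: python3 updur.py < log
import re
import sys

ts = []
for ln in sys.stdin:
    if '"purl"' not in ln:
        continue
    m = re.search(r"(\d{2}):(\d{2}):(\d{2})", ln)
    if m:
        h, mi, s = map(int, m.groups())
        ts.append(h * 3600 + mi * 60 + s)

if ts:
    print("%d sec between first and last upload" % (ts[-1] - ts[0],))
```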
|
||||
|
||||
|
||||
##
|
||||
## bad filenames
|
||||
|
||||
dirs=("$HOME/vfs/ほげ" "$HOME/vfs/ほげ/ぴよ" "$HOME/vfs/$(printf \\xed\\x91)" "$HOME/vfs/$(printf \\xed\\x91/\\xed\\x92)")
|
||||
dirs=("./ほげ" "./ほげ/ぴよ" "./$(printf \\xed\\x91)" "./$(printf \\xed\\x91/\\xed\\x92)" './qw,er;ty%20as df?gh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd fgh')
|
||||
mkdir -p "${dirs[@]}"
|
||||
for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qwe,rty;asd fgh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd fgh'; do echo "$dir" > "$dir/$fn.html"; done; done
|
||||
for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qw,er;ty%20as df?gh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd fgh'; do echo "$dir" > "$dir/$fn.html"; done; done
|
||||
# qw er+ty%20ui%%20op<as>df&gh&jk#zx'cv"bn`m=qw*er^ty?ui@op,as.df-gh_jk
|
||||
|
||||
##
|
||||
@@ -73,16 +80,20 @@ shab64() { sp=$1; f="$2"; v=0; sz=$(stat -c%s "$f"); while true; do w=$((v+sp*10
|
||||
command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (time wget http://127.0.0.1:3923/?ls -qO- | jq -C '.files[]|{sz:.sz,ta:.tags.artist,tb:.tags.".bpm"}|del(.[]|select(.==null))' | awk -F\" '/"/{t[$2]++} END {for (k in t){v=t[k];p=sprintf("%" (v+1) "s",v);gsub(/ /,"#",p);printf "\033[36m%s\033[33m%s ",k,p}}') 2>&1 | awk -v ts=$t 'NR==1{t1=$0} NR==2{sub(/.*0m/,"");sub(/s$/,"");t2=$0;c=2; if(t2>0.3){c=3} if(t2>0.8){c=1} } END{sub(/[0-9]{6}$/,"",ts);printf "%s \033[3%dm%s %s\033[0m\n",ts,c,t2,t1}'; sleep 0.1 || break; done
|
||||
|
||||
|
||||
##
|
||||
## track an up2k upload and print all chunks in file-order
|
||||
|
||||
grep '"name": "2021-07-18 02-17-59.mkv"' fug.log | head -n 1 | sed -r 's/.*"hash": \[//; s/\].*//' | tr '"' '\n' | grep -E '^[a-zA-Z0-9_-]{44}$' | while IFS= read -r cid; do cat -n fug.log | grep -vF '"purl": "' | grep -- "$cid"; echo; done | stdbuf -oL tr '\t' ' ' | while IFS=' ' read -r ln _ _ _ _ _ ts ip port msg; do [ -z "$msg" ] && echo && continue; printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; read -r ln _ _ _ _ _ ts ip port msg < <(cat -n fug.log | tail -n +$((ln+1)) | grep -F "$ip $port" | head -n 1); printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; done
|
||||
|
||||
|
||||
##
|
||||
## js oneliners
|
||||
|
||||
# get all up2k search result URLs
|
||||
var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n"));
|
||||
|
||||
# rename all selected songs to <leading-track-number> + <Title> + <extension>
|
||||
var sel=msel.getsel(), ci=find_file_col('Title')[0], re=[]; for (var a=0; a<sel.length; a++) { var url=sel[a].vp, tag=ebi(sel[a].id).closest('tr').querySelectorAll('td')[ci].textContent, name=uricom_dec(vsplit(url)[1])[0], m=/^([0-9]+[\. -]+)?.*(\.[^\.]+$)/.exec(name), name2=(m[1]||'')+tag+m[2], url2=vsplit(url)[0]+uricom_enc(name2,false); if (url!=url2) re.push([url, url2]); }
|
||||
console.log(JSON.stringify(re, null, ' '));
|
||||
function f() { if (!re.length) return treectl.goto(get_evpath()); var [u1,u2] = re.shift(); fetch(u1+'?move='+u2).then((rsp) => {if (rsp.ok) f(); }); }; f();
|
||||
# debug md-editor line tracking
|
||||
var s=mknod('style');s.innerHTML='*[data-ln]:before {content:attr(data-ln)!important;color:#f0c;background:#000;position:absolute;left:-1.5em;font-size:1rem}';document.head.appendChild(s);
|
||||
|
||||
##
|
||||
## bash oneliners
|
||||
@@ -126,6 +137,13 @@ e=6; s=10; d=~/dev/copyparty/srv/aus; n=1; p=0; e=$((e*60)); rm -rf $d; mkdir $d
|
||||
-v srv/aus:aus:r:ce2dsa:ce2ts:cmtp=fgsfds=bin/mtag/sleep.py
|
||||
sqlite3 .hist/up2k.db 'select * from mt where k="fgsfds" or k="t:mtp"' | tee /dev/stderr | wc -l
|
||||
|
||||
# generate the sine meme
|
||||
for ((f=420;f<1200;f++)); do sz=$(ffmpeg -y -f lavfi -i sine=frequency=$f:duration=2 -vf volume=0.1 -ac 1 -ar 44100 -f s16le /dev/shm/a.wav 2>/dev/null; base64 -w0 </dev/shm/a.wav | gzip -c | wc -c); printf '%d %d\n' $f $sz; done | tee /dev/stderr | sort -nrk2,2
|
||||
ffmpeg -y -f lavfi -i sine=frequency=1050:duration=2 -vf volume=0.1 -ac 1 -ar 44100 /dev/shm/a.wav
|
||||
|
||||
# play icon calibration pics
|
||||
for w in 150 170 190 210 230 250; do for h in 130 150 170 190 210; do /c/Program\ Files/ImageMagick-7.0.11-Q16-HDRI/magick.exe convert -size ${w}x${h} xc:brown -fill orange -draw "circle $((w/2)),$((h/2)) $((w/2)),$((h/3))" $w-$h.png; done; done
|
||||
|
||||
|
||||
##
|
||||
## vscode
|
||||
@@ -157,7 +175,7 @@ brew install python@2
|
||||
pip install virtualenv
|
||||
|
||||
# readme toc
|
||||
cat README.md | awk '!/^#/{next} {lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab)} {printf "%" ((lv-1)*4+1) "s [%s](#%s)\n", "*",$0,bab}'
|
||||
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#|]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
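the first awk line (indent by heading level, anchor = heading with spaces swapped for dashes) in python, in case awk is not around; note that neither version lowercases or strips punctuation the way github's real anchors do:

```python
# quick-and-dirty TOC, mirroring the first awk line above
for ln in open("README.md", encoding="utf-8"):
    if not ln.startswith("#"):
        continue
    hashes, _, title = ln.rstrip("\n").partition(" ")
    indent = " " * ((len(hashes) - 1) * 4)
    print("%s* [%s](#%s)" % (indent, title, title.replace(" ", "-")))
```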
|
||||
|
||||
# fix firefox phantom breakpoints,
|
||||
# suggestions from bugtracker, doesnt work (debugger is not attachable)
|
||||
@@ -173,8 +191,13 @@ about:config >> devtools.debugger.prefs-schema-version = -1
|
||||
git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
|
||||
|
||||
# download all sfx versions
|
||||
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="copyparty $v $t.py"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
|
||||
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="$(printf '%s\n' "copyparty $v $t.py" | tr / -)"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
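stdlib-only take on the curl|jq loop above (including the `tr / -` filename fix from the second variant); endpoint and asset name are the same ones the one-liner uses:

```python
# download every copyparty-sfx.py release into the current directory
import json
import os
import urllib.request

api = "https://api.github.com/repos/9001/copyparty/releases?per_page=100"
for rel in json.load(urllib.request.urlopen(api)):
    v, t = rel["tag_name"], rel["name"] or ""
    fn = ("copyparty %s %s.py" % (v, t)).replace("/", "-")
    if os.path.exists(fn):
        continue
    url = "https://github.com/9001/copyparty/releases/download/%s/copyparty-sfx.py" % (v,)
    try:
        urllib.request.urlretrieve(url, fn)
        print("ok ", fn)
    except OSError as ex:
        print("err", fn, ex)
```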
|
||||
|
||||
# push to multiple git remotes
|
||||
git config -l | grep '^remote'
|
||||
git remote add all git@github.com:9001/copyparty.git
|
||||
git remote set-url --add --push all git@gitlab.com:9001/copyparty.git
|
||||
git remote set-url --add --push all git@github.com:9001/copyparty.git
|
||||
|
||||
##
|
||||
## http 206
|
||||
|
||||
@@ -10,14 +10,38 @@ set -e
|
||||
# (and those are usually linux so bash is good inaff)
|
||||
# (but that said this even has macos support)
|
||||
#
|
||||
# bundle will look like:
|
||||
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty
|
||||
# -rw-r--r-- 0 ed ed 491318 Nov 19 00:40 copyparty-extras/copyparty-0.5.4.tar.gz
|
||||
# -rwxr-xr-x 0 ed ed 30254 Nov 17 23:58 copyparty-extras/copyparty-fuse.py
|
||||
# -rwxr-xr-x 0 ed ed 481403 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.sh
|
||||
# -rwxr-xr-x 0 ed ed 506043 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.py
|
||||
# -rwxr-xr-x 0 ed ed 167699 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.sh
|
||||
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py
|
||||
# output summary (filesizes and contents):
|
||||
#
|
||||
# 550760 copyparty-extras/sfx-full/copyparty-sfx.py
|
||||
# `- original unmodified sfx from github
|
||||
#
|
||||
# 572923 copyparty-extras/sfx-full/copyparty-sfx-gz.py
|
||||
# `- unmodified but recompressed from bzip2 to gzip
|
||||
#
|
||||
# 353975 copyparty-extras/sfx-ent/copyparty-sfx.py
|
||||
# 376934 copyparty-extras/sfx-ent/copyparty-sfx-gz.py
|
||||
# `- removed iOS ogg/opus/vorbis audio decoder,
|
||||
# removed the audio tray mouse cursor,
|
||||
# "enterprise edition"
|
||||
#
|
||||
# 270004 copyparty-extras/sfx-lite/copyparty-sfx.py
|
||||
# 293159 copyparty-extras/sfx-lite/copyparty-sfx-gz.py
|
||||
# `- also removed the codemirror markdown editor
|
||||
# and the text-viewer syntax highlighting,
|
||||
# only essential features remaining
|
||||
#
|
||||
# 646297 copyparty-extras/copyparty-1.0.14.tar.gz
|
||||
# 4823 copyparty-extras/copyparty-repack.sh
|
||||
# `- source files from github
|
||||
#
|
||||
# 23663 copyparty-extras/up2k.py
|
||||
# `- standalone utility to upload or search for files
|
||||
#
|
||||
# 32280 copyparty-extras/copyparty-fuse.py
|
||||
# `- standalone utility to mount a URL as a local read-only filesystem
|
||||
#
|
||||
# 270004 copyparty
|
||||
# `- minimal binary, same as sfx-lite/copyparty-sfx.py
|
||||
|
||||
|
||||
command -v gnutar && tar() { gnutar "$@"; }
|
||||
@@ -54,6 +78,7 @@ cache="$od/.copyparty-repack.cache"
|
||||
# fallback to awk (sorry)
|
||||
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
|
||||
) |
|
||||
grep -E '(sfx\.py|tar\.gz)$' |
|
||||
tee /dev/stderr |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
@@ -64,7 +89,7 @@ cache="$od/.copyparty-repack.cache"
|
||||
|
||||
# move src into copyparty-extras/,
|
||||
# move sfx into copyparty-extras/sfx-full/
|
||||
mkdir -p copyparty-extras/sfx-{full,lite}
|
||||
mkdir -p copyparty-extras/sfx-{full,ent,lite}
|
||||
mv copyparty-sfx.* copyparty-extras/sfx-full/
|
||||
mv copyparty-*.tar.gz copyparty-extras/
|
||||
|
||||
@@ -111,15 +136,18 @@ repack() {
|
||||
)
|
||||
}
|
||||
|
||||
repack sfx-full "re gz no-sh"
|
||||
repack sfx-lite "re no-ogv no-cm"
|
||||
repack sfx-lite "re no-ogv no-cm gz no-sh"
|
||||
repack sfx-full "re gz"
|
||||
repack sfx-ent "re no-dd"
|
||||
repack sfx-ent "re no-dd gz"
|
||||
repack sfx-lite "re no-dd no-cm no-hl"
|
||||
repack sfx-lite "re no-dd no-cm no-hl gz"
|
||||
|
||||
|
||||
# move fuse client into copyparty-extras/,
|
||||
# move fuse and up2k clients into copyparty-extras/,
|
||||
# copy lite-sfx.py to ./copyparty,
|
||||
# delete extracted source code
|
||||
( cd copyparty-extras/
|
||||
mv copyparty-*/bin/up2k.py .
|
||||
mv copyparty-*/bin/copyparty-fuse.py .
|
||||
cp -pv sfx-lite/copyparty-sfx.py ../copyparty
|
||||
rm -rf copyparty-{0..9}*.*.*{0..9}
|
||||
|
||||
@@ -1,21 +1,19 @@
|
||||
FROM alpine:3.13
|
||||
FROM alpine:3.15
|
||||
WORKDIR /z
|
||||
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
|
||||
ver_hashwasm=4.7.0 \
|
||||
ver_marked=1.1.0 \
|
||||
ver_ogvjs=1.8.0 \
|
||||
ver_mde=2.14.0 \
|
||||
ver_codemirror=5.59.3 \
|
||||
ver_hashwasm=4.9.0 \
|
||||
ver_marked=4.0.12 \
|
||||
ver_mde=2.16.1 \
|
||||
ver_codemirror=5.65.2 \
|
||||
ver_fontawesome=5.13.0 \
|
||||
ver_zopfli=1.0.3
|
||||
|
||||
|
||||
# download;
|
||||
# the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
|
||||
# the scp url is regular latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
|
||||
RUN mkdir -p /z/dist/no-pk \
|
||||
&& wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \
|
||||
&& apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \
|
||||
&& wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \
|
||||
&& wget https://github.com/openpgpjs/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
|
||||
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
|
||||
&& wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \
|
||||
@@ -23,7 +21,6 @@ RUN mkdir -p /z/dist/no-pk \
|
||||
&& wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
|
||||
&& wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
|
||||
&& wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \
|
||||
&& unzip ogvjs.zip \
|
||||
&& (mkdir hash-wasm \
|
||||
&& cd hash-wasm \
|
||||
&& unzip ../hash-wasm.zip) \
|
||||
@@ -45,6 +42,12 @@ RUN mkdir -p /z/dist/no-pk \
|
||||
&& tar -xf zopfli.tgz
|
||||
|
||||
|
||||
# todo
|
||||
# https://cdn.jsdelivr.net/gh/highlightjs/cdn-release/build/highlight.min.js
|
||||
# https://cdn.jsdelivr.net/gh/highlightjs/cdn-release/build/styles/default.min.css
|
||||
# https://prismjs.com/download.html#themes=prism-funky&languages=markup+css+clike+javascript+autohotkey+bash+basic+batch+c+csharp+cpp+cmake+diff+docker+go+ini+java+json+kotlin+latex+less+lisp+lua+makefile+objectivec+perl+powershell+python+r+jsx+ruby+rust+sass+scss+sql+swift+systemd+toml+typescript+vbnet+verilog+vhdl+yaml&plugins=line-highlight+line-numbers+autolinker
|
||||
|
||||
|
||||
# build fonttools (which needs zopfli)
|
||||
RUN tar -xf zopfli.tgz \
|
||||
&& cd zopfli* \
|
||||
@@ -71,28 +74,6 @@ RUN cd hash-wasm \
|
||||
&& mv sha512.umd.min.js /z/dist/sha512.hw.js
|
||||
|
||||
|
||||
# build ogvjs
|
||||
RUN cd ogvjs-$ver_ogvjs \
|
||||
&& cp -pv \
|
||||
ogv.js \
|
||||
ogv-worker-audio.js \
|
||||
ogv-demuxer-ogg-wasm.js \
|
||||
ogv-demuxer-ogg-wasm.wasm \
|
||||
ogv-demuxer-webm-wasm.js \
|
||||
ogv-demuxer-webm-wasm.wasm \
|
||||
ogv-decoder-audio-opus-wasm.js \
|
||||
ogv-decoder-audio-opus-wasm.wasm \
|
||||
ogv-decoder-audio-vorbis-wasm.js \
|
||||
ogv-decoder-audio-vorbis-wasm.wasm \
|
||||
/z/dist
|
||||
|
||||
# ogv-demuxer-ogg.js \
|
||||
# ogv-demuxer-webm.js \
|
||||
# ogv-decoder-audio-opus.js \
|
||||
# ogv-decoder-audio-vorbis.js \
|
||||
# dynamicaudio.swf \
|
||||
|
||||
|
||||
# build marked
|
||||
COPY marked.patch /z/
|
||||
COPY marked-ln.patch /z/
|
||||
@@ -101,7 +82,6 @@ RUN cd marked-$ver_marked \
|
||||
&& patch -p1 < /z/marked.patch \
|
||||
&& npm run build \
|
||||
&& cp -pv marked.min.js /z/dist/marked.js \
|
||||
&& cp -pv lib/marked.js /z/dist/marked.full.js \
|
||||
&& mkdir -p /z/nodepkgs \
|
||||
&& ln -s $(pwd) /z/nodepkgs/marked
|
||||
# && npm run test \
|
||||
@@ -120,9 +100,11 @@ RUN cd CodeMirror-$ver_codemirror \
|
||||
COPY easymde.patch /z/
|
||||
RUN cd easy-markdown-editor-$ver_mde \
|
||||
&& patch -p1 < /z/easymde.patch \
|
||||
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-0.8.2.tgz`file:/z/nodepkgs/marked`' package-lock.json \
|
||||
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-[0-9\.]+.tgz`file:/z/nodepkgs/marked`' package-lock.json \
|
||||
&& sed -ri 's`https://registry.npmjs.org/codemirror/-/codemirror-[0-9\.]+.tgz`file:/z/nodepkgs/codemirror`' package-lock.json \
|
||||
&& sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \
|
||||
&& sed -ri 's`("codemirror": ")[^"]+`\1file:/z/nodepkgs/codemirror`' ./package.json \
|
||||
&& sed -ri 's`^var marked = require\(.marked.\).marked;$`var marked = window.marked;`' src/js/easymde.js \
|
||||
&& npm install
|
||||
|
||||
COPY easymde-ln.patch /z/
|
||||
@@ -136,6 +118,7 @@ RUN cd easy-markdown-editor-$ver_mde \
|
||||
# build fontawesome and scp
|
||||
COPY mini-fa.sh /z
|
||||
COPY mini-fa.css /z
|
||||
COPY shiftbase.py /z
|
||||
RUN /bin/ash /z/mini-fa.sh
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
diff -NarU2 codemirror-5.59.3-orig/mode/gfm/gfm.js codemirror-5.59.3/mode/gfm/gfm.js
|
||||
--- codemirror-5.59.3-orig/mode/gfm/gfm.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/mode/gfm/gfm.js 2021-02-21 20:42:02.166174775 +0000
|
||||
diff -wNarU2 codemirror-5.65.1-orig/mode/gfm/gfm.js codemirror-5.65.1/mode/gfm/gfm.js
|
||||
--- codemirror-5.65.1-orig/mode/gfm/gfm.js 2022-01-20 13:06:23.000000000 +0100
|
||||
+++ codemirror-5.65.1/mode/gfm/gfm.js 2022-02-09 22:50:18.145862052 +0100
|
||||
@@ -97,5 +97,5 @@
|
||||
}
|
||||
}
|
||||
@@ -15,9 +15,9 @@ diff -NarU2 codemirror-5.59.3-orig/mode/gfm/gfm.js codemirror-5.59.3/mode/gfm/gf
|
||||
+ }*/
|
||||
stream.next();
|
||||
return null;
|
||||
diff -NarU2 codemirror-5.59.3-orig/mode/meta.js codemirror-5.59.3/mode/meta.js
|
||||
--- codemirror-5.59.3-orig/mode/meta.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/mode/meta.js 2021-02-21 20:42:54.798742821 +0000
|
||||
diff -wNarU2 codemirror-5.65.1-orig/mode/meta.js codemirror-5.65.1/mode/meta.js
|
||||
--- codemirror-5.65.1-orig/mode/meta.js 2022-01-20 13:06:23.000000000 +0100
|
||||
+++ codemirror-5.65.1/mode/meta.js 2022-02-09 22:50:18.145862052 +0100
|
||||
@@ -13,4 +13,5 @@
|
||||
|
||||
CodeMirror.modeInfo = [
|
||||
@@ -62,10 +62,10 @@ diff -NarU2 codemirror-5.59.3-orig/mode/meta.js codemirror-5.59.3/mode/meta.js
|
||||
+ */
|
||||
];
|
||||
// Ensure all modes have a mime property for backwards compatibility
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/display/selection.js codemirror-5.59.3/src/display/selection.js
|
||||
--- codemirror-5.59.3-orig/src/display/selection.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/display/selection.js 2021-02-21 20:44:14.860894328 +0000
|
||||
@@ -84,29 +84,21 @@
|
||||
diff -wNarU2 codemirror-5.65.1-orig/src/display/selection.js codemirror-5.65.1/src/display/selection.js
|
||||
--- codemirror-5.65.1-orig/src/display/selection.js 2022-01-20 13:06:23.000000000 +0100
|
||||
+++ codemirror-5.65.1/src/display/selection.js 2022-02-09 22:50:18.145862052 +0100
|
||||
@@ -96,29 +96,21 @@
|
||||
let order = getOrder(lineObj, doc.direction)
|
||||
iterateBidiSections(order, fromArg || 0, toArg == null ? lineLen : toArg, (from, to, dir, i) => {
|
||||
- let ltr = dir == "ltr"
|
||||
@@ -105,24 +105,24 @@ diff -NarU2 codemirror-5.59.3-orig/src/display/selection.js codemirror-5.59.3/sr
|
||||
+ botRight = openEnd && last ? rightSide : toPos.right
|
||||
add(topLeft, fromPos.top, topRight - topLeft, fromPos.bottom)
|
||||
if (fromPos.bottom < toPos.top) add(leftSide, fromPos.bottom, null, toPos.top)
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/input/ContentEditableInput.js codemirror-5.59.3/src/input/ContentEditableInput.js
|
||||
--- codemirror-5.59.3-orig/src/input/ContentEditableInput.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/input/ContentEditableInput.js 2021-02-21 20:44:33.273953867 +0000
|
||||
@@ -399,4 +399,5 @@
|
||||
diff -wNarU2 codemirror-5.65.1-orig/src/input/ContentEditableInput.js codemirror-5.65.1/src/input/ContentEditableInput.js
|
||||
--- codemirror-5.65.1-orig/src/input/ContentEditableInput.js 2022-01-20 13:06:23.000000000 +0100
|
||||
+++ codemirror-5.65.1/src/input/ContentEditableInput.js 2022-02-09 22:50:18.145862052 +0100
|
||||
@@ -400,4 +400,5 @@
|
||||
let info = mapFromLineView(view, line, pos.line)
|
||||
|
||||
+ /*
|
||||
let order = getOrder(line, cm.doc.direction), side = "left"
|
||||
if (order) {
|
||||
@@ -404,4 +405,5 @@
|
||||
@@ -405,4 +406,5 @@
|
||||
side = partPos % 2 ? "right" : "left"
|
||||
}
|
||||
+ */
|
||||
let result = nodeAndOffsetInLineMap(info.map, pos.ch, side)
|
||||
result.offset = result.collapse == "right" ? result.end : result.start
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/input/movement.js codemirror-5.59.3/src/input/movement.js
|
||||
--- codemirror-5.59.3-orig/src/input/movement.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/input/movement.js 2021-02-21 20:45:12.763093671 +0000
|
||||
diff -wNarU2 codemirror-5.65.1-orig/src/input/movement.js codemirror-5.65.1/src/input/movement.js
|
||||
--- codemirror-5.65.1-orig/src/input/movement.js 2022-01-20 13:06:23.000000000 +0100
|
||||
+++ codemirror-5.65.1/src/input/movement.js 2022-02-09 22:50:18.145862052 +0100
|
||||
@@ -15,4 +15,5 @@
|
||||
|
||||
export function endOfLine(visually, cm, lineObj, lineNo, dir) {
|
||||
@@ -146,9 +146,16 @@ diff -NarU2 codemirror-5.59.3-orig/src/input/movement.js codemirror-5.59.3/src/i
|
||||
return null
|
||||
+ */
|
||||
}
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/line/line_data.js codemirror-5.59.3/src/line/line_data.js
|
||||
--- codemirror-5.59.3-orig/src/line/line_data.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/line/line_data.js 2021-02-21 20:45:36.472549599 +0000
|
||||
diff -wNarU2 codemirror-5.65.1-orig/src/line/line_data.js codemirror-5.65.1/src/line/line_data.js
|
||||
--- codemirror-5.65.1-orig/src/line/line_data.js 2022-01-20 13:06:23.000000000 +0100
|
||||
+++ codemirror-5.65.1/src/line/line_data.js 2022-02-09 22:54:11.542722046 +0100
|
||||
@@ -3,5 +3,5 @@
|
||||
import { elt, eltP, joinClasses } from "../util/dom.js"
|
||||
import { eventMixin, signal } from "../util/event.js"
|
||||
-import { hasBadBidiRects, zeroWidthElement } from "../util/feature_detection.js"
|
||||
+import { zeroWidthElement } from "../util/feature_detection.js"
|
||||
import { lst, spaceStr } from "../util/misc.js"
|
||||
|
||||
@@ -79,6 +79,6 @@
|
||||
// Optionally wire in some hacks into the token-rendering
|
||||
// algorithm, to deal with browser quirks.
|
||||
@@ -158,10 +165,10 @@ diff -NarU2 codemirror-5.59.3-orig/src/line/line_data.js codemirror-5.59.3/src/l
|
||||
+ // builder.addToken = buildTokenBadBidi(builder.addToken, order)
|
||||
builder.map = []
|
||||
let allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line)
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/measurement/position_measurement.js codemirror-5.59.3/src/measurement/position_measurement.js
|
||||
--- codemirror-5.59.3-orig/src/measurement/position_measurement.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/measurement/position_measurement.js 2021-02-21 20:50:52.372945293 +0000
|
||||
@@ -380,5 +380,6 @@
|
||||
diff -wNarU2 codemirror-5.65.1-orig/src/measurement/position_measurement.js codemirror-5.65.1/src/measurement/position_measurement.js
|
||||
--- codemirror-5.65.1-orig/src/measurement/position_measurement.js 2022-01-20 13:06:23.000000000 +0100
|
||||
+++ codemirror-5.65.1/src/measurement/position_measurement.js 2022-02-09 22:50:18.145862052 +0100
|
||||
@@ -382,5 +382,6 @@
|
||||
sticky = "after"
|
||||
}
|
||||
- if (!order) return get(sticky == "before" ? ch - 1 : ch, sticky == "before")
|
||||
@@ -169,39 +176,39 @@ diff -NarU2 codemirror-5.59.3-orig/src/measurement/position_measurement.js codem
|
||||
+ /*
|
||||
|
||||
function getBidi(ch, partPos, invert) {
|
||||
@@ -391,4 +392,5 @@
|
||||
@@ -393,4 +394,5 @@
|
||||
if (other != null) val.other = getBidi(ch, other, sticky != "before")
|
||||
return val
|
||||
+ */
|
||||
}
|
||||
|
||||
@@ -468,4 +470,5 @@
|
||||
@@ -470,4 +472,5 @@
|
||||
let begin = 0, end = lineObj.text.length, ltr = true
|
||||
|
||||
+ /*
|
||||
let order = getOrder(lineObj, cm.doc.direction)
|
||||
// If the line isn't plain left-to-right text, first figure out
|
||||
@@ -482,4 +485,5 @@
|
||||
@@ -484,4 +487,5 @@
|
||||
end = ltr ? part.to : part.from - 1
|
||||
}
|
||||
+ */
|
||||
|
||||
// A binary search to find the first character whose bounding box
|
||||
@@ -526,4 +530,5 @@
|
||||
@@ -528,4 +532,5 @@
|
||||
}
|
||||
|
||||
+/*
|
||||
function coordsBidiPart(cm, lineObj, lineNo, preparedMeasure, order, x, y) {
|
||||
// Bidi parts are sorted left-to-right, and in a non-line-wrapping
|
||||
@@ -580,4 +585,5 @@
|
||||
@@ -582,4 +587,5 @@
|
||||
return part
|
||||
}
|
||||
+*/
|
||||
|
||||
let measureText
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/util/bidi.js codemirror-5.59.3/src/util/bidi.js
|
||||
--- codemirror-5.59.3-orig/src/util/bidi.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/util/bidi.js 2021-02-21 20:52:18.168092225 +0000
|
||||
diff -wNarU2 codemirror-5.65.1-orig/src/util/bidi.js codemirror-5.65.1/src/util/bidi.js
|
||||
--- codemirror-5.65.1-orig/src/util/bidi.js 2022-01-20 13:06:23.000000000 +0100
|
||||
+++ codemirror-5.65.1/src/util/bidi.js 2022-02-09 22:50:18.145862052 +0100
|
||||
@@ -4,5 +4,5 @@
|
||||
|
||||
export function iterateBidiSections(order, from, to, f) {
|
||||
@@ -259,9 +266,9 @@ diff -NarU2 codemirror-5.59.3-orig/src/util/bidi.js codemirror-5.59.3/src/util/b
|
||||
- return order
|
||||
+ return false;
|
||||
}
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/util/feature_detection.js codemirror-5.59.3/src/util/feature_detection.js
|
||||
--- codemirror-5.59.3-orig/src/util/feature_detection.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/util/feature_detection.js 2021-02-21 20:49:22.191269270 +0000
|
||||
diff -wNarU2 codemirror-5.65.1-orig/src/util/feature_detection.js codemirror-5.65.1/src/util/feature_detection.js
|
||||
--- codemirror-5.65.1-orig/src/util/feature_detection.js 2022-01-20 13:06:23.000000000 +0100
|
||||
+++ codemirror-5.65.1/src/util/feature_detection.js 2022-02-09 22:50:18.145862052 +0100
|
||||
@@ -25,4 +25,5 @@
|
||||
}
|
||||
|
||||
|
||||
12  scripts/deps-docker/easymde-marked6.patch  (new file)
@@ -0,0 +1,12 @@
|
||||
diff --git a/src/js/easymde.js b/src/js/easymde.js
|
||||
--- a/src/js/easymde.js
|
||||
+++ b/src/js/easymde.js
|
||||
@@ -1962,7 +1962,7 @@ EasyMDE.prototype.markdown = function (text) {
|
||||
marked.setOptions(markedOptions);
|
||||
|
||||
// Convert the markdown to HTML
|
||||
- var htmlText = marked(text);
|
||||
+ var htmlText = marked.parse(text);
|
||||
|
||||
// Sanitize HTML
|
||||
if (this.options.renderingConfig && typeof this.options.renderingConfig.sanitizerFunction === 'function') {
|
||||
@@ -1,52 +1,52 @@
|
||||
diff -NarU2 easy-markdown-editor-2.14.0-orig/gulpfile.js easy-markdown-editor-2.14.0/gulpfile.js
|
||||
--- easy-markdown-editor-2.14.0-orig/gulpfile.js 2021-02-14 12:11:48.000000000 +0000
|
||||
+++ easy-markdown-editor-2.14.0/gulpfile.js 2021-02-21 20:55:37.134701007 +0000
|
||||
diff -wNarU2 easy-markdown-editor-2.16.1-orig/gulpfile.js easy-markdown-editor-2.16.1/gulpfile.js
|
||||
--- easy-markdown-editor-2.16.1-orig/gulpfile.js 2022-01-14 23:27:44.000000000 +0100
|
||||
+++ easy-markdown-editor-2.16.1/gulpfile.js 2022-02-09 23:06:01.694592535 +0100
|
||||
@@ -25,5 +25,4 @@
|
||||
'./node_modules/codemirror/lib/codemirror.css',
|
||||
'./src/css/*.css',
|
||||
- './node_modules/codemirror-spell-checker/src/css/spell-checker.css',
|
||||
];
|
||||
|
||||
diff -NarU2 easy-markdown-editor-2.14.0-orig/package.json easy-markdown-editor-2.14.0/package.json
|
||||
--- easy-markdown-editor-2.14.0-orig/package.json 2021-02-14 12:11:48.000000000 +0000
|
||||
+++ easy-markdown-editor-2.14.0/package.json 2021-02-21 20:55:47.761190082 +0000
|
||||
@@ -21,5 +21,4 @@
|
||||
"dependencies": {
|
||||
"codemirror": "^5.59.2",
|
||||
diff -wNarU2 easy-markdown-editor-2.16.1-orig/package.json easy-markdown-editor-2.16.1/package.json
|
||||
--- easy-markdown-editor-2.16.1-orig/package.json 2022-01-14 23:27:44.000000000 +0100
|
||||
+++ easy-markdown-editor-2.16.1/package.json 2022-02-09 23:06:24.778501888 +0100
|
||||
@@ -23,5 +23,4 @@
|
||||
"@types/marked": "^4.0.1",
|
||||
"codemirror": "^5.63.1",
|
||||
- "codemirror-spell-checker": "1.1.2",
|
||||
"marked": "^2.0.0"
|
||||
"marked": "^4.0.10"
|
||||
},
|
||||
diff -NarU2 easy-markdown-editor-2.14.0-orig/src/js/easymde.js easy-markdown-editor-2.14.0/src/js/easymde.js
|
||||
--- easy-markdown-editor-2.14.0-orig/src/js/easymde.js 2021-02-14 12:11:48.000000000 +0000
|
||||
+++ easy-markdown-editor-2.14.0/src/js/easymde.js 2021-02-21 20:57:09.143171536 +0000
|
||||
diff -wNarU2 easy-markdown-editor-2.16.1-orig/src/js/easymde.js easy-markdown-editor-2.16.1/src/js/easymde.js
|
||||
--- easy-markdown-editor-2.16.1-orig/src/js/easymde.js 2022-01-14 23:27:44.000000000 +0100
|
||||
+++ easy-markdown-editor-2.16.1/src/js/easymde.js 2022-02-09 23:07:21.203131415 +0100
|
||||
@@ -12,5 +12,4 @@
|
||||
require('codemirror/mode/gfm/gfm.js');
|
||||
require('codemirror/mode/xml/xml.js');
|
||||
-var CodeMirrorSpellChecker = require('codemirror-spell-checker');
|
||||
var marked = require('marked/lib/marked');
|
||||
var marked = require('marked').marked;
|
||||
|
||||
@@ -1762,9 +1761,4 @@
|
||||
@@ -1816,9 +1815,4 @@
|
||||
options.autosave.uniqueId = options.autosave.unique_id;
|
||||
|
||||
- // If overlay mode is specified and combine is not provided, default it to true
|
||||
- if (options.overlayMode && options.overlayMode.combine === undefined) {
|
||||
- options.overlayMode.combine = true;
|
||||
- options.overlayMode.combine = true;
|
||||
- }
|
||||
-
|
||||
// Update this options
|
||||
this.options = options;
|
||||
@@ -2003,28 +1997,7 @@
|
||||
@@ -2057,34 +2051,7 @@
|
||||
var mode, backdrop;
|
||||
|
||||
- // CodeMirror overlay mode
|
||||
- if (options.overlayMode) {
|
||||
- CodeMirror.defineMode('overlay-mode', function(config) {
|
||||
- return CodeMirror.overlayMode(CodeMirror.getMode(config, options.spellChecker !== false ? 'spell-checker' : 'gfm'), options.overlayMode.mode, options.overlayMode.combine);
|
||||
- });
|
||||
- CodeMirror.defineMode('overlay-mode', function (config) {
|
||||
- return CodeMirror.overlayMode(CodeMirror.getMode(config, options.spellChecker !== false ? 'spell-checker' : 'gfm'), options.overlayMode.mode, options.overlayMode.combine);
|
||||
- });
|
||||
-
|
||||
- mode = 'overlay-mode';
|
||||
- backdrop = options.parsingConfig;
|
||||
- backdrop.gitHubSpice = false;
|
||||
- mode = 'overlay-mode';
|
||||
- backdrop = options.parsingConfig;
|
||||
- backdrop.gitHubSpice = false;
|
||||
- } else {
|
||||
mode = options.parsingConfig;
|
||||
mode.name = 'gfm';
|
||||
@@ -58,31 +58,35 @@ diff -NarU2 easy-markdown-editor-2.14.0-orig/src/js/easymde.js easy-markdown-edi
|
||||
- backdrop.name = 'gfm';
|
||||
- backdrop.gitHubSpice = false;
|
||||
-
|
||||
- CodeMirrorSpellChecker({
|
||||
- codeMirrorInstance: CodeMirror,
|
||||
- });
|
||||
- if (typeof options.spellChecker === 'function') {
|
||||
- options.spellChecker({
|
||||
- codeMirrorInstance: CodeMirror,
|
||||
- });
|
||||
- } else {
|
||||
- CodeMirrorSpellChecker({
|
||||
- codeMirrorInstance: CodeMirror,
|
||||
- });
|
||||
- }
|
||||
- }
|
||||
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
diff -NarU2 easy-markdown-editor-2.14.0-orig/types/easymde.d.ts easy-markdown-editor-2.14.0/types/easymde.d.ts
|
||||
--- easy-markdown-editor-2.14.0-orig/types/easymde.d.ts 2021-02-14 12:11:48.000000000 +0000
|
||||
+++ easy-markdown-editor-2.14.0/types/easymde.d.ts 2021-02-21 20:57:42.492620979 +0000
|
||||
@@ -160,9 +160,4 @@
|
||||
diff -wNarU2 easy-markdown-editor-2.16.1-orig/types/easymde.d.ts easy-markdown-editor-2.16.1/types/easymde.d.ts
|
||||
--- easy-markdown-editor-2.16.1-orig/types/easymde.d.ts 2022-01-14 23:27:44.000000000 +0100
|
||||
+++ easy-markdown-editor-2.16.1/types/easymde.d.ts 2022-02-09 23:07:55.427605243 +0100
|
||||
@@ -167,9 +167,4 @@
|
||||
}
|
||||
|
||||
- interface OverlayModeOptions {
|
||||
- mode: CodeMirror.Mode<any>
|
||||
- combine?: boolean
|
||||
- mode: CodeMirror.Mode<any>;
|
||||
- combine?: boolean;
|
||||
- }
|
||||
-
|
||||
interface Options {
|
||||
autoDownloadFontAwesome?: boolean;
|
||||
@@ -214,7 +209,5 @@
|
||||
interface SpellCheckerOptions {
|
||||
codeMirrorInstance: CodeMirror.Editor;
|
||||
@@ -229,6 +224,4 @@
|
||||
syncSideBySidePreviewScroll?: boolean;
|
||||
|
||||
promptTexts?: PromptTexts;
|
||||
- syncSideBySidePreviewScroll?: boolean;
|
||||
- overlayMode?: OverlayModeOptions;
|
||||
-
|
||||
- overlayMode?: OverlayModeOptions
|
||||
+ syncSideBySidePreviewScroll?: boolean
|
||||
direction?: 'ltr' | 'rtl';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
diff --git a/src/Lexer.js b/src/Lexer.js
|
||||
adds linetracking to marked.js v1.0.0 +git;
|
||||
adds linetracking to marked.js v4.0.6;
|
||||
add data-ln="%d" to most tags, %d is the source markdown line
|
||||
--- a/src/Lexer.js
|
||||
+++ b/src/Lexer.js
|
||||
@@ -49,4 +49,5 @@ function mangle(text) {
|
||||
module.exports = class Lexer {
|
||||
@@ -50,4 +50,5 @@ function mangle(text) {
|
||||
export class Lexer {
|
||||
constructor(options) {
|
||||
+ this.ln = 1; // like most editors, start counting from 1
|
||||
this.tokens = [];
|
||||
this.tokens.links = Object.create(null);
|
||||
@@ -108,4 +109,15 @@ module.exports = class Lexer {
|
||||
@@ -127,4 +128,15 @@ export class Lexer {
|
||||
}
|
||||
|
||||
+ set_ln(token, ln = this.ln) {
|
||||
@@ -25,142 +25,142 @@ add data-ln="%d" to most tags, %d is the source markdown line
|
||||
+
|
||||
/**
|
||||
* Lexing
|
||||
@@ -113,10 +125,15 @@ module.exports = class Lexer {
|
||||
blockTokens(src, tokens = [], top = true) {
|
||||
src = src.replace(/^ +$/gm, '');
|
||||
- let token, i, l, lastToken;
|
||||
+ let token, i, l, lastToken, ln;
|
||||
@@ -134,7 +146,11 @@ export class Lexer {
|
||||
src = src.replace(/^ +$/gm, '');
|
||||
}
|
||||
- let token, lastToken, cutSrc, lastParagraphClipped;
|
||||
+ let token, lastToken, cutSrc, lastParagraphClipped, ln;
|
||||
|
||||
while (src) {
|
||||
+ // this.ln will be bumped by recursive calls into this func;
|
||||
+ // reset the count and rely on the outermost token's raw only
|
||||
+ ln = this.ln;
|
||||
+
|
||||
// newline
|
||||
if (this.options.extensions
|
||||
&& this.options.extensions.block
|
||||
@@ -142,4 +158,5 @@ export class Lexer {
|
||||
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
return true;
|
||||
@@ -153,4 +170,5 @@ export class Lexer {
|
||||
if (token = this.tokenizer.space(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token); // is \n if not type
|
||||
+ this.set_ln(token, ln); // is \n if not type
|
||||
if (token.type) {
|
||||
tokens.push(token);
|
||||
@@ -128,4 +145,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.code(src, tokens)) {
|
||||
@@ -162,4 +180,5 @@ export class Lexer {
|
||||
if (token = this.tokenizer.code(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
if (token.type) {
|
||||
tokens.push(token);
|
||||
@@ -141,4 +159,5 @@ module.exports = class Lexer {
|
||||
+ this.set_ln(token, ln);
|
||||
lastToken = tokens[tokens.length - 1];
|
||||
// An indented code block cannot interrupt a paragraph.
|
||||
@@ -177,4 +196,5 @@ export class Lexer {
|
||||
if (token = this.tokenizer.fences(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -148,4 +167,5 @@ module.exports = class Lexer {
|
||||
@@ -184,4 +204,5 @@ export class Lexer {
|
||||
if (token = this.tokenizer.heading(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -155,4 +175,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.nptable(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -162,4 +183,5 @@ module.exports = class Lexer {
|
||||
@@ -191,4 +212,5 @@ export class Lexer {
|
||||
if (token = this.tokenizer.hr(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -170,4 +192,7 @@ module.exports = class Lexer {
|
||||
@@ -198,4 +220,5 @@ export class Lexer {
|
||||
if (token = this.tokenizer.blockquote(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
token.tokens = this.blockTokens(token.text, [], top);
|
||||
+ // recursive call to blockTokens probably bumped this.ln,
|
||||
+ // token.raw is more reliable so reset this.ln and use that
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -180,5 +205,9 @@ module.exports = class Lexer {
|
||||
for (i = 0; i < l; i++) {
|
||||
token.items[i].tokens = this.blockTokens(token.items[i].text, [], false);
|
||||
+ // list entries don't bump the linecounter, so let's
|
||||
+ this.ln++;
|
||||
}
|
||||
+ // then reset like blockquote
|
||||
@@ -205,4 +228,5 @@ export class Lexer {
|
||||
if (token = this.tokenizer.list(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -188,4 +217,5 @@ module.exports = class Lexer {
|
||||
@@ -212,4 +236,5 @@ export class Lexer {
|
||||
if (token = this.tokenizer.html(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -195,4 +225,5 @@ module.exports = class Lexer {
|
||||
if (top && (token = this.tokenizer.def(src))) {
|
||||
@@ -219,4 +244,5 @@ export class Lexer {
|
||||
if (token = this.tokenizer.def(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
if (!this.tokens.links[token.tag]) {
|
||||
this.tokens.links[token.tag] = {
|
||||
@@ -207,4 +238,5 @@ module.exports = class Lexer {
|
||||
+ this.set_ln(token, ln);
|
||||
lastToken = tokens[tokens.length - 1];
|
||||
if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
|
||||
@@ -236,4 +262,5 @@ export class Lexer {
|
||||
if (token = this.tokenizer.table(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -214,4 +246,5 @@ module.exports = class Lexer {
|
||||
@@ -243,4 +270,5 @@ export class Lexer {
|
||||
if (token = this.tokenizer.lheading(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -221,4 +254,5 @@ module.exports = class Lexer {
|
||||
if (top && (token = this.tokenizer.paragraph(src))) {
|
||||
@@ -263,4 +291,5 @@ export class Lexer {
|
||||
}
|
||||
if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
|
||||
+ this.set_ln(token, ln);
|
||||
lastToken = tokens[tokens.length - 1];
|
||||
if (lastParagraphClipped && lastToken.type === 'paragraph') {
|
||||
@@ -280,4 +309,6 @@ export class Lexer {
|
||||
if (token = this.tokenizer.text(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -228,4 +262,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.text(src, tokens)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
if (token.type) {
|
||||
tokens.push(token);
|
||||
@@ -263,4 +298,7 @@ module.exports = class Lexer {
|
||||
for (i = 0; i < l; i++) {
|
||||
token = tokens[i];
|
||||
+ // this.ln is at EOF when inline() is invoked;
|
||||
+ // all this affects <br> tags only so no biggie if it breaks
|
||||
+ this.ln = token.ln || this.ln;
|
||||
switch (token.type) {
|
||||
case 'paragraph':
|
||||
@@ -386,4 +424,6 @@ module.exports = class Lexer {
|
||||
+ this.set_ln(token, ln);
|
||||
+ this.ln++;
|
||||
lastToken = tokens[tokens.length - 1];
|
||||
if (lastToken && lastToken.type === 'text') {
|
||||
@@ -355,4 +386,5 @@ export class Lexer {
|
||||
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.ln = token.ln || this.ln;
|
||||
tokens.push(token);
|
||||
return true;
|
||||
@@ -420,4 +452,6 @@ export class Lexer {
|
||||
if (token = this.tokenizer.br(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ // no need to reset (no more blockTokens anyways)
|
||||
+ token.ln = this.ln++;
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -462,4 +496,5 @@ export class Lexer {
|
||||
if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.ln = token.ln || this.ln;
|
||||
if (token.raw.slice(-1) !== '_') { // Track prevChar before string of ____ started
|
||||
prevChar = token.raw.slice(-1);
|
||||
diff --git a/src/Parser.js b/src/Parser.js
|
||||
--- a/src/Parser.js
|
||||
+++ b/src/Parser.js
|
||||
@@ -18,4 +18,5 @@ module.exports = class Parser {
|
||||
@@ -18,4 +18,5 @@ export class Parser {
|
||||
this.textRenderer = new TextRenderer();
|
||||
this.slugger = new Slugger();
|
||||
+ this.ln = 0; // error indicator; should always be set >=1 from tokens
|
||||
}
|
||||
|
||||
@@ -55,4 +56,9 @@ module.exports = class Parser {
|
||||
@@ -64,4 +65,8 @@ export class Parser {
|
||||
for (i = 0; i < l; i++) {
|
||||
token = tokens[i];
|
||||
+ // take line-numbers from tokens whenever possible
|
||||
+ // and update the renderer's html attribute with the new value
|
||||
+ this.ln = token.ln || this.ln;
|
||||
+ this.renderer.tag_ln(this.ln);
|
||||
+
|
||||
switch (token.type) {
|
||||
case 'space': {
|
||||
@@ -105,7 +111,10 @@ module.exports = class Parser {
|
||||
|
||||
// Run any renderer extensions
|
||||
@@ -124,7 +129,10 @@ export class Parser {
|
||||
}
|
||||
|
||||
- body += this.renderer.tablerow(cell);
|
||||
@@ -173,7 +173,7 @@ diff --git a/src/Parser.js b/src/Parser.js
|
||||
+ out += this.renderer.tag_ln(token.ln).table(header, body);
|
||||
continue;
|
||||
}
|
||||
@@ -148,8 +157,12 @@ module.exports = class Parser {
|
||||
@@ -167,8 +175,12 @@ export class Parser {
|
||||
|
||||
itemBody += this.parse(item.tokens, loose);
|
||||
- body += this.renderer.listitem(itemBody, task, checked);
|
||||
@@ -188,7 +188,7 @@ diff --git a/src/Parser.js b/src/Parser.js
|
||||
+ out += this.renderer.tag_ln(token.ln).list(body, ordered, start);
|
||||
continue;
|
||||
}
|
||||
@@ -160,5 +173,6 @@ module.exports = class Parser {
|
||||
@@ -179,5 +191,6 @@ export class Parser {
|
||||
}
|
||||
case 'paragraph': {
|
||||
- out += this.renderer.paragraph(this.parseInline(token.tokens));
|
||||
@@ -196,26 +196,18 @@ diff --git a/src/Parser.js b/src/Parser.js
|
||||
+ out += this.renderer.tag_ln(token.ln).paragraph(t);
|
||||
continue;
|
||||
}
|
||||
@@ -199,4 +213,6 @@ module.exports = class Parser {
|
||||
for (i = 0; i < l; i++) {
|
||||
@@ -221,4 +234,7 @@ export class Parser {
|
||||
token = tokens[i];
|
||||
|
||||
+ // another thing that only affects <br/> and other inlines
|
||||
+ this.ln = token.ln || this.ln;
|
||||
switch (token.type) {
|
||||
case 'escape': {
|
||||
@@ -229,5 +245,7 @@ module.exports = class Parser {
|
||||
}
|
||||
case 'br': {
|
||||
- out += renderer.br();
|
||||
+ // update the html attribute before writing each <br/>,
|
||||
+ // don't care about the others
|
||||
+ out += renderer.tag_ln(this.ln).br();
|
||||
break;
|
||||
}
|
||||
+
|
||||
// Run any renderer extensions
|
||||
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
|
||||
diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
--- a/src/Renderer.js
|
||||
+++ b/src/Renderer.js
|
||||
@@ -11,6 +11,12 @@ module.exports = class Renderer {
|
||||
@@ -11,6 +11,12 @@ export class Renderer {
|
||||
constructor(options) {
|
||||
this.options = options || defaults;
|
||||
+ this.ln = "";
|
||||
@@ -228,7 +220,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
+
|
||||
code(code, infostring, escaped) {
|
||||
const lang = (infostring || '').match(/\S*/)[0];
|
||||
@@ -24,10 +30,10 @@ module.exports = class Renderer {
|
||||
@@ -26,10 +32,10 @@ export class Renderer {
|
||||
|
||||
if (!lang) {
|
||||
- return '<pre><code>'
|
||||
@@ -241,58 +233,69 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
+ return '<pre' + this.ln + '><code class="'
|
||||
+ this.options.langPrefix
|
||||
+ escape(lang, true)
|
||||
@@ -38,5 +44,5 @@ module.exports = class Renderer {
|
||||
@@ -40,5 +46,5 @@ export class Renderer {
|
||||
|
||||
blockquote(quote) {
|
||||
- return '<blockquote>\n' + quote + '</blockquote>\n';
|
||||
+ return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n';
|
||||
}
|
||||
|
||||
@@ -49,4 +55,5 @@ module.exports = class Renderer {
|
||||
@@ -51,4 +57,5 @@ export class Renderer {
|
||||
return '<h'
|
||||
+ level
|
||||
+ + this.ln
|
||||
+ ' id="'
|
||||
+ this.options.headerPrefix
|
||||
@@ -59,5 +66,5 @@ module.exports = class Renderer {
|
||||
@@ -61,5 +68,5 @@ export class Renderer {
|
||||
}
|
||||
// ignore IDs
|
||||
- return '<h' + level + '>' + text + '</h' + level + '>\n';
|
||||
+ return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n';
|
||||
}
|
||||
|
||||
@@ -73,5 +80,5 @@ module.exports = class Renderer {
|
||||
@@ -75,5 +82,5 @@ export class Renderer {
|
||||
|
||||
listitem(text) {
|
||||
- return '<li>' + text + '</li>\n';
|
||||
+ return '<li' + this.ln + '>' + text + '</li>\n';
|
||||
}
|
||||
|
||||
@@ -85,5 +92,5 @@ module.exports = class Renderer {
|
||||
@@ -87,5 +94,5 @@ export class Renderer {
|
||||
|
||||
paragraph(text) {
|
||||
- return '<p>' + text + '</p>\n';
|
||||
+ return '<p' + this.ln + '>' + text + '</p>\n';
|
||||
}
|
||||
|
||||
@@ -100,5 +107,5 @@ module.exports = class Renderer {
|
||||
@@ -102,5 +109,5 @@ export class Renderer {
|
||||
|
||||
tablerow(content) {
|
||||
- return '<tr>\n' + content + '</tr>\n';
|
||||
+ return '<tr' + this.ln + '>\n' + content + '</tr>\n';
|
||||
}
|
||||
|
||||
@@ -125,5 +132,5 @@ module.exports = class Renderer {
|
||||
@@ -127,5 +134,5 @@ export class Renderer {
|
||||
|
||||
br() {
|
||||
- return this.options.xhtml ? '<br/>' : '<br>';
|
||||
+ return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>';
|
||||
}
|
||||
|
||||
@@ -151,5 +158,5 @@ module.exports = class Renderer {
|
||||
@@ -153,5 +160,5 @@ export class Renderer {
|
||||
}
|
||||
|
||||
- let out = '<img src="' + href + '" alt="' + text + '"';
|
||||
+ let out = '<img' + this.ln + ' src="' + href + '" alt="' + text + '"';
|
||||
if (title) {
|
||||
out += ' title="' + title + '"';
|
||||
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
--- a/src/Tokenizer.js
|
||||
+++ b/src/Tokenizer.js
|
||||
@@ -297,4 +297,7 @@ export class Tokenizer {
|
||||
const l = list.items.length;
|
||||
|
||||
+ // each nested list gets +1 ahead; this hack makes every listgroup -1 but at least it doesn't get infinitely bad
|
||||
+ this.lexer.ln--;
|
||||
+
|
||||
// Item child tokens handled here at end because we needed to have the final item to trim it first
|
||||
for (i = 0; i < l; i++) {
|
||||
|
||||
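(For orientation: the linetracking patch above has the lexer stamp each token with its source markdown line, the parser copies that line into the renderer via tag_ln(), and the renderer appends a data-ln attribute to the tags it emits. A minimal standalone sketch of that idea, simplified from the hunks above rather than the actual patched marked.js; the class name is made up for illustration:)

class LineTrackingRenderer {
  constructor() {
    this.ln = ''; // html attribute fragment, e.g. ' data-ln="7"'
  }
  tag_ln(n) {
    // remember the token's source line; chainable, so a parser can write
    //   out += renderer.tag_ln(token.ln).paragraph(text);
    this.ln = ' data-ln="' + n + '"';
    return this;
  }
  paragraph(text) {
    return '<p' + this.ln + '>' + text + '</p>\n';
  }
  heading(text, level) {
    return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n';
  }
}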
@@ -1,52 +1,52 @@
|
||||
diff --git a/src/Lexer.js b/src/Lexer.js
|
||||
--- a/src/Lexer.js
|
||||
+++ b/src/Lexer.js
|
||||
@@ -5,5 +5,5 @@ const { block, inline } = require('./rules.js');
|
||||
@@ -6,5 +6,5 @@ import { repeatString } from './helpers.js';
|
||||
/**
|
||||
* smartypants text replacement
|
||||
- */
|
||||
+ *
|
||||
function smartypants(text) {
|
||||
return text
|
||||
@@ -26,5 +26,5 @@ function smartypants(text) {
|
||||
@@ -27,5 +27,5 @@ function smartypants(text) {
|
||||
/**
|
||||
* mangle email addresses
|
||||
- */
|
||||
+ *
|
||||
function mangle(text) {
|
||||
let out = '',
|
||||
@@ -439,5 +439,5 @@ module.exports = class Lexer {
|
||||
@@ -466,5 +466,5 @@ export class Lexer {
|
||||
|
||||
// autolink
|
||||
- if (token = this.tokenizer.autolink(src, mangle)) {
|
||||
+ if (token = this.tokenizer.autolink(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
@@ -446,5 +446,5 @@ module.exports = class Lexer {
|
||||
@@ -473,5 +473,5 @@ export class Lexer {
|
||||
|
||||
// url (gfm)
|
||||
- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
|
||||
+ if (!inLink && (token = this.tokenizer.url(src))) {
|
||||
- if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) {
|
||||
+ if (!this.state.inLink && (token = this.tokenizer.url(src))) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
@@ -453,5 +453,5 @@ module.exports = class Lexer {
|
||||
|
||||
// text
|
||||
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
|
||||
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
|
||||
@@ -494,5 +494,5 @@ export class Lexer {
|
||||
}
|
||||
}
|
||||
- if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
|
||||
+ if (token = this.tokenizer.inlineText(cutSrc)) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
this.ln = token.ln || this.ln;
|
||||
diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
--- a/src/Renderer.js
|
||||
+++ b/src/Renderer.js
|
||||
@@ -140,5 +140,5 @@ module.exports = class Renderer {
|
||||
@@ -142,5 +142,5 @@ export class Renderer {
|
||||
|
||||
link(href, title, text) {
|
||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
+ href = cleanUrl(this.options.baseUrl, href);
|
||||
if (href === null) {
|
||||
return text;
|
||||
@@ -153,5 +153,5 @@ module.exports = class Renderer {
|
||||
@@ -155,5 +155,5 @@ export class Renderer {
|
||||
|
||||
image(href, title, text) {
|
||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
@@ -56,22 +56,23 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
--- a/src/Tokenizer.js
|
||||
+++ b/src/Tokenizer.js
|
||||
@@ -287,11 +287,8 @@ module.exports = class Tokenizer {
|
||||
if (cap) {
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
- ? 'paragraph'
|
||||
- : 'html',
|
||||
+ type: 'html',
|
||||
@@ -320,14 +320,7 @@ export class Tokenizer {
|
||||
type: 'html',
|
||||
raw: cap[0],
|
||||
- pre: !this.options.sanitizer
|
||||
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
|
||||
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
|
||||
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
|
||||
+ text: cap[0]
|
||||
+ pre: (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
|
||||
text: cap[0]
|
||||
};
|
||||
- if (this.options.sanitize) {
|
||||
- token.type = 'paragraph';
|
||||
- token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
|
||||
- token.tokens = [];
|
||||
- this.lexer.inline(token.text, token.tokens);
|
||||
- }
|
||||
return token;
|
||||
}
|
||||
@@ -421,15 +418,9 @@ module.exports = class Tokenizer {
|
||||
@@ -476,15 +469,9 @@ export class Tokenizer {
|
||||
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
@@ -79,8 +80,8 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
- : 'html',
|
||||
+ type: 'html',
|
||||
raw: cap[0],
|
||||
inLink,
|
||||
inRawBlock,
|
||||
inLink: this.lexer.state.inLink,
|
||||
inRawBlock: this.lexer.state.inRawBlock,
|
||||
- text: this.options.sanitize
|
||||
- ? (this.options.sanitizer
|
||||
- ? this.options.sanitizer(cap[0])
|
||||
@@ -89,7 +90,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
+ text: cap[0]
|
||||
};
|
||||
}
|
||||
@@ -550,10 +541,10 @@ module.exports = class Tokenizer {
|
||||
@@ -671,10 +658,10 @@ export class Tokenizer {
|
||||
}
|
||||
|
||||
- autolink(src, mangle) {
|
||||
@@ -102,7 +103,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
+ text = escape(cap[1]);
|
||||
href = 'mailto:' + text;
|
||||
} else {
|
||||
@@ -578,10 +569,10 @@ module.exports = class Tokenizer {
|
||||
@@ -699,10 +686,10 @@ export class Tokenizer {
|
||||
}
|
||||
|
||||
- url(src, mangle) {
|
||||
@@ -115,15 +116,15 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
+ text = escape(cap[0]);
|
||||
href = 'mailto:' + text;
|
||||
} else {
|
||||
@@ -615,12 +606,12 @@ module.exports = class Tokenizer {
|
||||
@@ -736,12 +723,12 @@ export class Tokenizer {
|
||||
}
|
||||
|
||||
- inlineText(src, inRawBlock, smartypants) {
|
||||
+ inlineText(src, inRawBlock) {
|
||||
- inlineText(src, smartypants) {
|
||||
+ inlineText(src) {
|
||||
const cap = this.rules.inline.text.exec(src);
|
||||
if (cap) {
|
||||
let text;
|
||||
if (inRawBlock) {
|
||||
if (this.lexer.state.inRawBlock) {
|
||||
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
|
||||
+ text = cap[0];
|
||||
} else {
|
||||
@@ -134,7 +135,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
diff --git a/src/defaults.js b/src/defaults.js
|
||||
--- a/src/defaults.js
|
||||
+++ b/src/defaults.js
|
||||
@@ -8,12 +8,8 @@ function getDefaults() {
|
||||
@@ -9,12 +9,8 @@ export function getDefaults() {
|
||||
highlight: null,
|
||||
langPrefix: 'language-',
|
||||
- mangle: true,
|
||||
@@ -150,10 +151,10 @@ diff --git a/src/defaults.js b/src/defaults.js
|
||||
diff --git a/src/helpers.js b/src/helpers.js
|
||||
--- a/src/helpers.js
|
||||
+++ b/src/helpers.js
|
||||
@@ -64,18 +64,5 @@ function edit(regex, opt) {
|
||||
@@ -64,18 +64,5 @@ export function edit(regex, opt) {
|
||||
const nonWordAndColonTest = /[^\w:]/g;
|
||||
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
|
||||
-function cleanUrl(sanitize, base, href) {
|
||||
-export function cleanUrl(sanitize, base, href) {
|
||||
- if (sanitize) {
|
||||
- let prot;
|
||||
- try {
|
||||
@@ -167,44 +168,43 @@ diff --git a/src/helpers.js b/src/helpers.js
|
||||
- return null;
|
||||
- }
|
||||
- }
|
||||
+function cleanUrl(base, href) {
|
||||
+export function cleanUrl(base, href) {
|
||||
if (base && !originIndependentUrl.test(href)) {
|
||||
href = resolveUrl(base, href);
|
||||
@@ -223,10 +210,4 @@ function findClosingBracket(str, b) {
|
||||
@@ -227,10 +214,4 @@ export function findClosingBracket(str, b) {
|
||||
}
|
||||
|
||||
-function checkSanitizeDeprecation(opt) {
|
||||
-export function checkSanitizeDeprecation(opt) {
|
||||
- if (opt && opt.sanitize && !opt.silent) {
|
||||
- console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options');
|
||||
- }
|
||||
-}
|
||||
-
|
||||
module.exports = {
|
||||
escape,
|
||||
@@ -239,5 +220,4 @@ module.exports = {
|
||||
splitCells,
|
||||
rtrim,
|
||||
- findClosingBracket,
|
||||
- checkSanitizeDeprecation
|
||||
+ findClosingBracket
|
||||
};
|
||||
// copied from https://stackoverflow.com/a/5450113/806777
|
||||
export function repeatString(pattern, count) {
|
||||
diff --git a/src/marked.js b/src/marked.js
|
||||
--- a/src/marked.js
|
||||
+++ b/src/marked.js
|
||||
@@ -7,5 +7,4 @@ const Slugger = require('./Slugger.js');
|
||||
const {
|
||||
@@ -7,5 +7,4 @@ import { Slugger } from './Slugger.js';
|
||||
import {
|
||||
merge,
|
||||
- checkSanitizeDeprecation,
|
||||
escape
|
||||
} = require('./helpers.js');
|
||||
@@ -35,5 +34,4 @@ function marked(src, opt, callback) {
|
||||
} from './helpers.js';
|
||||
@@ -35,5 +34,4 @@ export function marked(src, opt, callback) {
|
||||
|
||||
opt = merge({}, marked.defaults, opt || {});
|
||||
- checkSanitizeDeprecation(opt);
|
||||
|
||||
if (callback) {
|
||||
@@ -108,5 +106,5 @@ function marked(src, opt, callback) {
|
||||
return Parser.parse(tokens, opt);
|
||||
@@ -302,5 +300,4 @@ marked.parseInline = function(src, opt) {
|
||||
|
||||
opt = merge({}, marked.defaults, opt || {});
|
||||
- checkSanitizeDeprecation(opt);
|
||||
|
||||
try {
|
||||
@@ -311,5 +308,5 @@ marked.parseInline = function(src, opt) {
|
||||
return Parser.parseInline(tokens, opt);
|
||||
} catch (e) {
|
||||
- e.message += '\nPlease report this to https://github.com/markedjs/marked.';
|
||||
+ e.message += '\nmake issue @ https://github.com/9001/copyparty';
|
||||
@@ -213,37 +213,37 @@ diff --git a/src/marked.js b/src/marked.js
|
||||
diff --git a/test/bench.js b/test/bench.js
|
||||
--- a/test/bench.js
|
||||
+++ b/test/bench.js
|
||||
@@ -33,5 +33,4 @@ async function runBench(options) {
|
||||
@@ -37,5 +37,4 @@ export async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
});
|
||||
@@ -45,5 +44,4 @@ async function runBench(options) {
|
||||
@@ -49,5 +48,4 @@ export async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
});
|
||||
@@ -58,5 +56,4 @@ async function runBench(options) {
|
||||
@@ -62,5 +60,4 @@ export async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
});
|
||||
@@ -70,5 +67,4 @@ async function runBench(options) {
|
||||
@@ -74,5 +71,4 @@ export async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
});
|
||||
@@ -83,5 +79,4 @@ async function runBench(options) {
|
||||
@@ -87,5 +83,4 @@ export async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: true,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
});
|
||||
@@ -95,5 +90,4 @@ async function runBench(options) {
|
||||
@@ -99,5 +94,4 @@ export async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: true,
|
||||
- sanitize: false,
|
||||
@@ -252,86 +252,87 @@ diff --git a/test/bench.js b/test/bench.js
|
||||
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
|
||||
--- a/test/specs/run-spec.js
|
||||
+++ b/test/specs/run-spec.js
|
||||
@@ -22,8 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
|
||||
@@ -25,9 +25,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
|
||||
}
|
||||
|
||||
- if (spec.options.sanitizer) {
|
||||
- // eslint-disable-next-line no-eval
|
||||
- spec.options.sanitizer = eval(spec.options.sanitizer);
|
||||
- }
|
||||
|
||||
-
|
||||
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
|
||||
@@ -53,3 +49,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
|
||||
const before = process.hrtime();
|
||||
@@ -56,3 +51,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
|
||||
runSpecs('New', './new');
|
||||
runSpecs('ReDOS', './redos');
|
||||
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
|
||||
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
|
||||
--- a/test/unit/Lexer-spec.js
|
||||
+++ b/test/unit/Lexer-spec.js
|
||||
@@ -465,5 +465,5 @@ a | b
|
||||
@@ -635,5 +635,5 @@ paragraph
|
||||
});
|
||||
|
||||
- it('sanitize', () => {
|
||||
+ /*it('sanitize', () => {
|
||||
expectTokens({
|
||||
md: '<div>html</div>',
|
||||
@@ -483,5 +483,5 @@ a | b
|
||||
@@ -653,5 +653,5 @@ paragraph
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
});
|
||||
|
||||
@@ -587,5 +587,5 @@ a | b
|
||||
@@ -698,5 +698,5 @@ paragraph
|
||||
});
|
||||
|
||||
- it('html sanitize', () => {
|
||||
+ /*it('html sanitize', () => {
|
||||
expectInlineTokens({
|
||||
md: '<div>html</div>',
|
||||
@@ -597,5 +597,5 @@ a | b
|
||||
@@ -706,5 +706,5 @@ paragraph
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
|
||||
it('link', () => {
|
||||
@@ -909,5 +909,5 @@ a | b
|
||||
@@ -1017,5 +1017,5 @@ paragraph
|
||||
});
|
||||
|
||||
- it('autolink mangle email', () => {
|
||||
+ /*it('autolink mangle email', () => {
|
||||
expectInlineTokens({
|
||||
md: '<test@example.com>',
|
||||
@@ -929,5 +929,5 @@ a | b
|
||||
@@ -1037,5 +1037,5 @@ paragraph
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
|
||||
it('url', () => {
|
||||
@@ -966,5 +966,5 @@ a | b
|
||||
@@ -1074,5 +1074,5 @@ paragraph
|
||||
});
|
||||
|
||||
- it('url mangle email', () => {
|
||||
+ /*it('url mangle email', () => {
|
||||
expectInlineTokens({
|
||||
md: 'test@example.com',
|
||||
@@ -986,5 +986,5 @@ a | b
|
||||
@@ -1094,5 +1094,5 @@ paragraph
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
});
|
||||
|
||||
@@ -1002,5 +1002,5 @@ a | b
|
||||
@@ -1110,5 +1110,5 @@ paragraph
|
||||
});
|
||||
|
||||
- describe('smartypants', () => {
|
||||
+ /*describe('smartypants', () => {
|
||||
it('single quotes', () => {
|
||||
expectInlineTokens({
|
||||
@@ -1072,5 +1072,5 @@ a | b
|
||||
@@ -1180,5 +1180,5 @@ paragraph
|
||||
});
|
||||
});
|
||||
- });
|
||||
|
||||
@@ -29,3 +29,10 @@ pyftsubset "$orig_woff" --unicodes-file=/z/icon.list --no-ignore-missing-unicode
|
||||
|
||||
# scp is easier, just want basic latin
|
||||
pyftsubset /z/scp.woff2 --unicodes="20-7e,ab,b7,bb,2022" --no-ignore-missing-unicodes --flavor=woff2 --output-file=/z/dist/no-pk/scp.woff2 --verbose
|
||||
|
||||
exit 0
|
||||
|
||||
# kinda works but ruins hinting on windows, just use the old version of the font which has correct baseline
|
||||
python3 shiftbase.py /z/dist/no-pk/scp.woff2
|
||||
cd /z/dist/no-pk/
|
||||
mv scp.woff2.woff2 scp.woff2
|
||||
|
||||
27
scripts/deps-docker/shiftbase.py
Executable file
@@ -0,0 +1,27 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
from fontTools.ttLib import TTFont, newTable
|
||||
|
||||
|
||||
def main():
|
||||
woff = sys.argv[1]
|
||||
font = TTFont(woff)
|
||||
print(repr(font["hhea"].__dict__))
|
||||
print(repr(font["OS/2"].__dict__))
|
||||
# font["hhea"].ascent = round(base_asc * mul)
|
||||
# font["hhea"].descent = round(base_desc * mul)
|
||||
# font["OS/2"].usWinAscent = round(base_asc * mul)
|
||||
font["OS/2"].usWinDescent = round(font["OS/2"].usWinDescent * 1.1)
|
||||
font["OS/2"].sTypoDescender = round(font["OS/2"].sTypoDescender * 1.1)
|
||||
|
||||
try:
|
||||
del font["post"].mapping["Delta#1"]
|
||||
except:
|
||||
pass
|
||||
|
||||
font.save(woff + ".woff2")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -2,7 +2,7 @@ all: $(addsuffix .gz, $(wildcard *.*))
|
||||
|
||||
%.gz: %
|
||||
#brotli -q 11 $<
|
||||
pigz -11 -J 34 -I 573 $<
|
||||
pigz -11 -I 573 $<
|
||||
|
||||
# pigz -11 -J 34 -I 100 -F < $< > $@.first
|
||||
|
||||
|
||||
@@ -86,8 +86,6 @@ function have() {
|
||||
python -c "import $1; $1; $1.__version__"
|
||||
}
|
||||
|
||||
mv copyparty/web/deps/marked.full.js.gz srv/ || true
|
||||
|
||||
. buildenv/bin/activate
|
||||
have setuptools
|
||||
have wheel
|
||||
|
||||
@@ -14,16 +14,13 @@ help() { exec cat <<'EOF'
|
||||
#
|
||||
# `gz` creates a gzip-compressed python sfx instead of bzip2
|
||||
#
|
||||
# `no-sh` makes just the python sfx, skips the sh/unix sfx
|
||||
#
|
||||
# `no-ogv` saves ~500k by removing the opus/vorbis audio codecs
|
||||
# (only affects apple devices; everything else has native support)
|
||||
#
|
||||
# `no-cm` saves ~90k by removing easymde/codemirror
|
||||
# `no-cm` saves ~82k by removing easymde/codemirror
|
||||
# (the fancy markdown editor)
|
||||
#
|
||||
# `no-hl` saves ~41k by removing syntax hilighting in the text viewer
|
||||
#
|
||||
# `no-fnt` saves ~9k by removing the source-code-pro font
|
||||
# (mainly used by the markdown viewer/editor)
|
||||
# (browsers will try to use 'Consolas' instead)
|
||||
#
|
||||
# `no-dd` saves ~2k by removing the mouse cursor
|
||||
|
||||
@@ -37,6 +34,8 @@ gtar=$(command -v gtar || command -v gnutar) || true
|
||||
sed() { gsed "$@"; }
|
||||
find() { gfind "$@"; }
|
||||
sort() { gsort "$@"; }
|
||||
shuf() { gshuf "$@"; }
|
||||
nproc() { gnproc; }
|
||||
sha1sum() { shasum "$@"; }
|
||||
unexpand() { gunexpand "$@"; }
|
||||
command -v grealpath >/dev/null &&
|
||||
@@ -63,19 +62,17 @@ pybin=$(command -v python3 || command -v python) || {
|
||||
}
|
||||
|
||||
use_gz=
|
||||
do_sh=1
|
||||
do_py=1
|
||||
zopf=2560
|
||||
while [ ! -z "$1" ]; do
|
||||
case $1 in
|
||||
clean) clean=1 ; ;;
|
||||
re) repack=1 ; ;;
|
||||
gz) use_gz=1 ; ;;
|
||||
no-ogv) no_ogv=1 ; ;;
|
||||
no-fnt) no_fnt=1 ; ;;
|
||||
no-hl) no_hl=1 ; ;;
|
||||
no-dd) no_dd=1 ; ;;
|
||||
no-cm) no_cm=1 ; ;;
|
||||
no-sh) do_sh= ; ;;
|
||||
no-py) do_py= ; ;;
|
||||
fast) zopf=100 ; ;;
|
||||
*) help ; ;;
|
||||
esac
|
||||
shift
|
||||
@@ -104,7 +101,7 @@ tmpdir="$(
|
||||
[ $repack ] && {
|
||||
old="$tmpdir/pe-copyparty"
|
||||
echo "repack of files in $old"
|
||||
cp -pR "$old/"*{dep-j2,copyparty} .
|
||||
cp -pR "$old/"*{dep-j2,dep-ftp,copyparty} .
|
||||
}
|
||||
|
||||
[ $repack ] || {
|
||||
@@ -131,19 +128,47 @@ tmpdir="$(
|
||||
mkdir dep-j2/
|
||||
mv {markupsafe,jinja2} dep-j2/
|
||||
|
||||
echo collecting pyftpdlib
|
||||
f="../build/pyftpdlib-1.5.6.tar.gz"
|
||||
[ -e "$f" ] ||
|
||||
(url=https://github.com/giampaolo/pyftpdlib/archive/refs/tags/release-1.5.6.tar.gz;
|
||||
wget -O$f "$url" || curl -L "$url" >$f)
|
||||
|
||||
tar -zxf $f
|
||||
mv pyftpdlib-release-*/pyftpdlib .
|
||||
rm -rf pyftpdlib-release-* pyftpdlib/test
|
||||
|
||||
mkdir dep-ftp/
|
||||
mv pyftpdlib dep-ftp/
|
||||
|
||||
echo collecting asyncore, asynchat
|
||||
for n in asyncore.py asynchat.py; do
|
||||
f=../build/$n
|
||||
[ -e "$f" ] ||
|
||||
(url=https://raw.githubusercontent.com/python/cpython/c4d45ee670c09d4f6da709df072ec80cb7dfad22/Lib/$n;
|
||||
wget -O$f "$url" || curl -L "$url" >$f)
|
||||
done
|
||||
|
||||
# msys2 tar is bad, make the best of it
|
||||
echo collecting source
|
||||
[ $clean ] && {
|
||||
(cd .. && git archive master >tar) && tar -xf ../tar copyparty
|
||||
(cd .. && git archive hovudstraum >tar) && tar -xf ../tar copyparty
|
||||
(cd .. && tar -cf tar copyparty/web/deps) && tar -xf ../tar
|
||||
}
|
||||
[ $clean ] || {
|
||||
(cd .. && tar -cf tar copyparty) && tar -xf ../tar
|
||||
}
|
||||
rm -f ../tar
|
||||
|
||||
# insert asynchat
|
||||
mkdir copyparty/vend
|
||||
for n in asyncore.py asynchat.py; do
|
||||
awk 'NR<4||NR>27;NR==4{print"# license: https://opensource.org/licenses/ISC\n"}' ../build/$n >copyparty/vend/$n
|
||||
done
|
||||
}
|
||||
|
||||
ver=
|
||||
[ -z "$repack" ] &&
|
||||
git describe --tags >/dev/null 2>/dev/null && {
|
||||
git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
|
||||
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//')";
|
||||
@@ -175,7 +200,7 @@ git describe --tags >/dev/null 2>/dev/null && {
|
||||
|
||||
[ -z "$ver" ] &&
|
||||
ver="$(awk '/^VERSION *= \(/ {
|
||||
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"
|
||||
gsub(/[^0-9,a-g-]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"
|
||||
|
||||
ts=$(date -u +%s)
|
||||
hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)
|
||||
@@ -201,8 +226,14 @@ while IFS= read -r x; do
|
||||
tmv "$x"
|
||||
done
|
||||
|
||||
[ $no_ogv ] &&
|
||||
rm -rf copyparty/web/deps/{dynamicaudio,ogv}*
|
||||
find copyparty | LC_ALL=C sort | sed 's/\.gz$//;s/$/,/' > have
|
||||
cat have | while IFS= read -r x; do
|
||||
grep -qF -- "$x" ../scripts/sfx.ls || {
|
||||
echo "unexpected file: $x"
|
||||
exit 1
|
||||
}
|
||||
done
|
||||
rm have
|
||||
|
||||
[ $no_cm ] && {
|
||||
rm -rf copyparty/web/mde.* copyparty/web/deps/easymde*
|
||||
@@ -212,19 +243,22 @@ done
|
||||
tmv "$f"
|
||||
}
|
||||
|
||||
[ $no_hl ] &&
|
||||
rm -rf copyparty/web/deps/prism*
|
||||
|
||||
[ $no_fnt ] && {
|
||||
rm -f copyparty/web/deps/scp.woff2
|
||||
f=copyparty/web/md.css
|
||||
gzip -d "$f"
|
||||
sed -r '/scp\.woff2/d' <$f >t
|
||||
f=copyparty/web/ui.css
|
||||
gzip -d "$f.gz" || true
|
||||
sed -r "s/src:.*scp.*\)/src:local('Consolas')/" <$f >t
|
||||
tmv "$f"
|
||||
}
|
||||
|
||||
[ $no_dd ] && {
|
||||
rm -rf copyparty/web/dd
|
||||
f=copyparty/web/browser.css
|
||||
gzip -d "$f"
|
||||
sed -r 's/(cursor: )url\([^)]+\), (pointer)/\1\2/; /[0-9]+% \{cursor:/d; /animation: cursor/d' <$f >t
|
||||
gzip -d "$f.gz" || true
|
||||
sed -r 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; s/[0-9]+% \{cursor:[^}]+\}//; s/animation: ?cursor[^};]+//' <$f >t
|
||||
tmv "$f"
|
||||
}
|
||||
|
||||
@@ -238,6 +272,12 @@ f=dep-j2/jinja2/constants.py
|
||||
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
|
||||
tmv "$f"
|
||||
|
||||
grep -rLE '^#[^a-z]*coding: utf-8' dep-j2 |
|
||||
while IFS= read -r f; do
|
||||
(echo "# coding: utf-8"; cat "$f") >t
|
||||
tmv "$f"
|
||||
done
|
||||
|
||||
# up2k goes from 28k to 22k laff
|
||||
awk 'BEGIN{gensub(//,"",1)}' </dev/null &&
|
||||
echo entabbening &&
|
||||
@@ -251,7 +291,7 @@ find | grep -E '\.css$' | while IFS= read -r f; do
|
||||
}
|
||||
!/\}$/ {printf "%s",$0;next}
|
||||
1
|
||||
' <$f | sed 's/;\}$/}/' >t
|
||||
' <$f | sed -r 's/;\}$/}/; /\{\}$/d' >t
|
||||
tmv "$f"
|
||||
done
|
||||
unexpand -h 2>/dev/null &&
|
||||
@@ -262,14 +302,27 @@ done
|
||||
|
||||
gzres() {
|
||||
command -v pigz &&
|
||||
pk='pigz -11 -I 256' ||
|
||||
pk="pigz -11 -I $zopf" ||
|
||||
pk='gzip'
|
||||
|
||||
echo "$pk"
|
||||
find | grep -E '\.(js|css)$' | grep -vF /deps/ | while IFS= read -r f; do
|
||||
np=$(nproc)
|
||||
echo "$pk #$np"
|
||||
|
||||
while IFS=' ' read -r _ f; do
|
||||
while true; do
|
||||
na=$(ps auxwww | grep -F "$pk" | wc -l)
|
||||
[ $na -le $np ] && break
|
||||
sleep 0.2
|
||||
done
|
||||
echo -n .
|
||||
$pk "$f"
|
||||
done
|
||||
$pk "$f" &
|
||||
done < <(
|
||||
find -printf '%s %p\n' |
|
||||
grep -E '\.(js|css)$' |
|
||||
grep -vF /deps/ |
|
||||
sort -nr
|
||||
)
|
||||
wait
|
||||
echo
|
||||
}
|
||||
|
||||
@@ -299,11 +352,11 @@ nf=$(ls -1 "$zdir"/arc.* | wc -l)
|
||||
|
||||
|
||||
echo gen tarlist
|
||||
for d in copyparty dep-j2; do find $d -type f; done |
|
||||
for d in copyparty dep-j2 dep-ftp; do find $d -type f; done |
|
||||
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
|
||||
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
|
||||
|
||||
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1) >list || true
|
||||
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | shuf) >list || true
|
||||
|
||||
echo creating tar
|
||||
args=(--owner=1000 --group=1000)
|
||||
@@ -318,41 +371,27 @@ pe=bz2
|
||||
|
||||
echo compressing tar
|
||||
# detect best level; bzip2 -7 is usually better than -9
|
||||
[ $do_py ] && { for n in {2..9}; do cp tar t.$n; $pc -$n t.$n & done; wait; mv -v $(ls -1S t.*.$pe | tail -n 1) tar.bz2; }
|
||||
[ $do_sh ] && { for n in {2..9}; do cp tar t.$n; xz -ze$n t.$n & done; wait; mv -v $(ls -1S t.*.xz | tail -n 1) tar.xz; }
|
||||
for n in {2..9}; do cp tar t.$n; $pc -$n t.$n & done; wait; mv -v $(ls -1S t.*.$pe | tail -n 1) tar.bz2
|
||||
rm t.* || true
|
||||
exts=()
|
||||
|
||||
|
||||
[ $do_sh ] && {
|
||||
exts+=(.sh)
|
||||
echo creating unix sfx
|
||||
(
|
||||
sed "s/PACK_TS/$ts/; s/PACK_HTS/$hts/; s/CPP_VER/$ver/" <../scripts/sfx.sh |
|
||||
grep -E '^sfx_eof$' -B 9001;
|
||||
cat tar.xz
|
||||
) >$sfx_out.sh
|
||||
echo creating sfx
|
||||
|
||||
py=../scripts/sfx.py
|
||||
suf=
|
||||
[ $use_gz ] && {
|
||||
sed -r 's/"r:bz2"/"r:gz"/' <$py >$py.t
|
||||
py=$py.t
|
||||
suf=-gz
|
||||
}
|
||||
|
||||
$pybin $py --sfx-make tar.bz2 $ver $ts
|
||||
mv sfx.out $sfx_out$suf.py
|
||||
|
||||
[ $do_py ] && {
|
||||
echo creating generic sfx
|
||||
|
||||
py=../scripts/sfx.py
|
||||
suf=
|
||||
[ $use_gz ] && {
|
||||
sed -r 's/"r:bz2"/"r:gz"/' <$py >$py.t
|
||||
py=$py.t
|
||||
suf=-gz
|
||||
}
|
||||
|
||||
$pybin $py --sfx-make tar.bz2 $ver $ts
|
||||
mv sfx.out $sfx_out$suf.py
|
||||
|
||||
exts+=($suf.py)
|
||||
[ $use_gz ] &&
|
||||
rm $py
|
||||
}
|
||||
exts+=($suf.py)
|
||||
[ $use_gz ] &&
|
||||
rm $py
|
||||
|
||||
|
||||
chmod 755 $sfx_out*
|
||||
@@ -363,4 +402,4 @@ for ext in ${exts[@]}; do
|
||||
done
|
||||
|
||||
# apk add bash python3 tar xz bzip2
|
||||
# while true; do ./make-sfx.sh; for f in ..//dist/copyparty-sfx.{sh,py}; do mv $f $f.$(wc -c <$f | awk '{print$1}'); done; done
|
||||
# while true; do ./make-sfx.sh; f=../dist/copyparty-sfx.py; mv $f $f.$(wc -c <$f | awk '{print$1}'); done
|
||||
|
||||
@@ -35,8 +35,6 @@ ver="$1"
|
||||
exit 1
|
||||
}
|
||||
|
||||
mv copyparty/web/deps/marked.full.js.gz srv/ || true
|
||||
|
||||
mkdir -p dist
|
||||
zip_path="$(pwd)/dist/copyparty-$ver.zip"
|
||||
tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz"
|
||||
@@ -61,7 +59,7 @@ rls_dir="$tmp/copyparty-$ver"
|
||||
mkdir "$rls_dir"
|
||||
|
||||
echo ">>> export from git"
|
||||
git archive master | tar -xC "$rls_dir"
|
||||
git archive hovudstraum | tar -xC "$rls_dir"
|
||||
|
||||
echo ">>> export untracked deps"
|
||||
tar -c copyparty/web/deps | tar -xC "$rls_dir"
|
||||
@@ -122,5 +120,5 @@ echo " $zip_path"
|
||||
echo " $tgz_path"
|
||||
echo
|
||||
|
||||
# function alr() { ls -alR copyparty-$1 | sed -r "s/copyparty-$1/copyparty/" | sed -r 's/[A-Z][a-z]{2} [0-9 ]{2} [0-9]{2}:[0-9]{2}//' > $1; }; for x in master rls src ; do alr $x; done
|
||||
# function alr() { ls -alR copyparty-$1 | sed -r "s/copyparty-$1/copyparty/" | sed -r 's/[A-Z][a-z]{2} [0-9 ]{2} [0-9]{2}:[0-9]{2}//' > $1; }; for x in hovudstraum rls src ; do alr $x; done
|
||||
|
||||
|
||||
31
scripts/rls.sh
Executable file
@@ -0,0 +1,31 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
cd ~/dev/copyparty/scripts
|
||||
|
||||
v=$1
|
||||
printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1
|
||||
grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1
|
||||
|
||||
git push all
|
||||
git tag v$v
|
||||
git push all --tags
|
||||
|
||||
rm -rf ../dist
|
||||
|
||||
./make-pypi-release.sh u
|
||||
(cd .. && python3 ./setup.py clean2)
|
||||
|
||||
./make-tgz-release.sh $v
|
||||
|
||||
rm -f ../dist/copyparty-sfx.*
|
||||
f=../dist/copyparty-sfx.py
|
||||
./make-sfx.sh
|
||||
$f -h
|
||||
|
||||
while true; do
|
||||
mv $f $f.$(wc -c <$f | awk '{print$1}')
|
||||
./make-sfx.sh re $ar
|
||||
done
|
||||
|
||||
# git tag -d v$v; git push --delete origin v$v
|
||||
Some files were not shown because too many files have changed in this diff.