Compare commits

...

511 Commits

Author SHA1 Message Date
ed
0061d29534 v0.11.8 2021-06-06 19:09:55 +02:00
ed
a891f34a93 update sharex example 2021-06-06 19:06:33 +02:00
ed
d6a1e62a95 append file-ext when avoiding name collisions 2021-06-06 18:53:32 +02:00
ed
cda36ea8b4 support json replies from bput 2021-06-06 18:47:21 +02:00
ed
909a76434a a 2021-06-06 03:07:11 +02:00
ed
39348ef659 add sharex example 2021-06-06 02:53:01 +02:00
ed
99d30edef3 v0.11.7 2021-06-05 03:33:29 +02:00
ed
b63ab15bf9 gallery links in new tab if a selection is atcive 2021-06-05 03:27:44 +02:00
ed
485cb4495c minify asmcrypto a bit 2021-06-05 03:25:54 +02:00
ed
df018eb1f2 add colors 2021-06-05 01:34:39 +02:00
ed
49aa47a9b8 way faster sha512 wasm fallback 2021-06-05 01:14:16 +02:00
ed
7d20eb202a optimize 2021-06-04 19:35:08 +02:00
ed
c533da9129 fix single-threaded mtag 2021-06-04 19:00:24 +02:00
ed
5cba31a814 spin on thumbnails too 2021-06-04 17:38:57 +02:00
ed
1d824cb26c add volume lister / containment checker 2021-06-04 02:23:46 +02:00
ed
83b903d60e readme: update todos 2021-06-02 09:42:33 +02:00
ed
9c8ccabe8e v0.11.6 2021-06-01 08:25:35 +02:00
ed
b1f2c4e70d gain 1000x performance with one weird trick 2021-06-01 06:17:46 +00:00
ed
273ca0c8da run tests on commit 2021-06-01 05:49:41 +02:00
ed
d6f516b34f pypi exclusive 2021-06-01 04:14:23 +02:00
ed
83127858ca v0.11.4 2021-06-01 03:55:51 +02:00
ed
d89329757e fix permission check in tar/zip generator (gdi) 2021-06-01 03:55:31 +02:00
ed
49ffec5320 v0.11.3 2021-06-01 03:11:02 +02:00
ed
2eaae2b66a fix youtube query example 2021-06-01 02:53:54 +02:00
ed
ea4441e25c v0.11.2 2021-06-01 02:47:37 +02:00
ed
e5f34042f9 more precise volume state in admin panel 2021-06-01 02:32:53 +02:00
ed
271096874a fix adv and date handling in query lang 2021-06-01 02:10:17 +02:00
ed
8efd780a72 thumbnail cleaner too noisy 2021-06-01 01:51:03 +02:00
ed
41bcf7308d fix search results as thumbnails 2021-06-01 01:41:36 +02:00
ed
d102bb3199 fix on-upload hasher (0.11.1 regression) 2021-06-01 01:20:34 +02:00
ed
d0bed95415 search: add a query language 2021-06-01 01:16:40 +02:00
ed
2528729971 add dbtool 2021-05-30 16:49:08 +00:00
ed
292c18b3d0 v0.11.1 2021-05-29 23:39:39 +02:00
ed
0be7c5e2d8 live db/tags rescan 2021-05-29 23:35:07 +02:00
ed
eb5aaddba4 v0.11.0 2021-05-29 15:03:32 +02:00
ed
d8fd82bcb5 ffthumb only gets one shot 2021-05-29 12:32:51 +02:00
ed
97be495861 another chrome bug:
navigating somewhere and back can return a REALLY OLD copy of the page
2021-05-29 12:31:06 +02:00
ed
8b53c159fc dodge chrome bug 2021-05-29 10:58:21 +02:00
ed
81e281f703 add opus mimetype 2021-05-29 10:17:24 +02:00
ed
3948214050 drop deleted files from snap 2021-05-29 09:03:18 +02:00
ed
c5e9a643e7 more accurate url escaping 2021-05-29 09:02:42 +02:00
ed
d25881d5c3 mojibake fixes 2021-05-29 09:01:59 +02:00
ed
38d8d9733f fix bugs 2021-05-29 05:50:41 +02:00
ed
118ebf668d fix bugs 2021-05-29 05:43:09 +02:00
ed
a86f09fa46 mtp: file extension filtering 2021-05-29 04:18:57 +02:00
ed
dd4fb35c8f nit 2021-05-29 03:45:02 +02:00
ed
621eb4cf95 add multitag example 2021-05-29 03:43:30 +02:00
ed
deea66ad0b support multiple tags from mtp helpers 2021-05-29 03:43:14 +02:00
ed
bf99445377 groking the ffprobe tarot cards 2021-05-28 06:25:44 +02:00
ed
7b54a63396 icon fix 2021-05-28 06:25:00 +02:00
ed
0fcb015f9a minor fixes 2021-05-28 05:16:28 +02:00
ed
0a22b1ffb6 dont log thumbnail GETs by default 2021-05-28 05:16:01 +02:00
ed
68cecc52ab dont grow thumbs 2021-05-28 05:01:25 +02:00
ed
53657ccfff add avif read support 2021-05-28 05:01:12 +02:00
ed
96223fda01 detect missing webp support 2021-05-28 05:00:08 +02:00
ed
374ff3433e gj 2021-05-28 02:52:03 +02:00
ed
5d63949e98 create webp thumbnails by default 2021-05-28 02:44:13 +02:00
ed
6b065d507d crop thumbs for AESTHETICS 2021-05-28 01:46:27 +02:00
ed
e79997498a a 2021-05-27 01:42:22 +02:00
ed
f7ee02ec35 ux fixes 2021-05-27 01:41:50 +02:00
ed
69dc433e1c ffprobe parser less bad 2021-05-27 01:41:12 +02:00
ed
c880cd848c gridview lightmode 2021-05-26 22:53:40 +02:00
ed
5752b6db48 hook up the multiselect ui 2021-05-26 00:47:43 +02:00
ed
b36f905eab sort folders first + tweak thumbs ui 2021-05-25 21:15:54 +02:00
ed
483dd527c6 add cache eviction 2021-05-25 19:46:35 +02:00
ed
e55678e28f fix thumb/ico bugs 2021-05-25 17:36:31 +02:00
ed
3f4a8b9d6f fixes 2021-05-25 06:35:12 +02:00
ed
02a856ecb4 create video thumbnails 2021-05-25 06:14:25 +02:00
ed
4dff726310 initial thumbnail and icon stuff 2021-05-25 03:37:01 +02:00
ed
cbc449036f readme: todo 2021-05-23 02:43:40 +02:00
ed
8f53152220 todays mistake 2021-05-21 02:30:45 +02:00
ed
bbb1e165d6 v0.10.22 2021-05-18 04:10:37 +02:00
ed
fed8d94885 handle unsupported codecs better 2021-05-18 03:44:30 +02:00
ed
58040cc0ed fix the treesize off-by-one (*finally*) 2021-05-18 03:21:53 +02:00
ed
03d692db66 add now-playing clipboard meme 2021-05-18 02:54:52 +02:00
ed
903f8e8453 logging 2021-05-17 18:45:15 +02:00
ed
405ae1308e v0.10.21 2021-05-16 20:22:33 +02:00
ed
8a0f583d71 oh no 2021-05-16 11:01:32 +02:00
ed
b6d7017491 readme 2021-05-16 09:05:40 +02:00
ed
0f0217d203 readme 2021-05-16 08:52:22 +02:00
ed
a203e33347 v0.10.20 2021-05-16 07:51:39 +02:00
ed
3b8f697dd4 include links in bup summary 2021-05-16 07:51:22 +02:00
ed
78ba16f722 log filtering by url regex 2021-05-16 07:29:34 +02:00
ed
0fcfe79994 general-purpose file parsing 2021-05-16 07:04:18 +02:00
ed
c0e6df4b63 let it gooo 2021-05-16 05:27:04 +02:00
ed
322abdcb43 more dino support 2021-05-16 05:04:44 +02:00
ed
31100787ce ahh whatever 2021-05-16 03:21:49 +02:00
ed
c57d721be4 ie11 doesnt support sha512 2021-05-16 03:11:37 +02:00
ed
3b5a03e977 this too 2021-05-16 02:34:36 +02:00
ed
ed807ee43e native sha512 on old iphones 2021-05-16 02:25:00 +02:00
ed
073c130ae6 respect tooltip pref in up2k 2021-05-16 02:18:54 +02:00
ed
8810e0be13 add option to log headers 2021-05-16 02:11:09 +02:00
ed
f93016ab85 dont suggest bup if no write-access 2021-05-16 00:30:32 +02:00
ed
b19cf260c2 drop the control-panel link too 2021-05-14 20:07:48 +02:00
ed
db03e1e7eb readme 2021-05-14 16:38:07 +02:00
ed
e0d975e36a v0.10.19 2021-05-14 00:00:15 +02:00
ed
cfeb15259f not careful enough 2021-05-13 23:29:15 +02:00
ed
3b3f8fc8fb careful rice 2021-05-13 23:00:51 +02:00
ed
88bd2c084c misc 2021-05-13 22:58:36 +02:00
ed
bd367389b0 broke windows 2021-05-13 22:58:23 +02:00
ed
58ba71a76f option to hide incomplete uploads 2021-05-13 22:56:52 +02:00
ed
d03e34d55d v0.10.18 2021-05-13 17:42:06 +02:00
ed
24f239a46c ui tweaks 2021-05-13 17:41:14 +02:00
ed
2c0826f85a conditional sections in volume listing 2021-05-13 17:24:37 +02:00
ed
c061461d01 fix md perm reqs + dyn up2k modeset 2021-05-13 17:22:31 +02:00
ed
e7982a04fe explicit redirect to single non-roots 2021-05-13 16:54:31 +02:00
ed
33b91a7513 set password cookie expiration 2021-05-13 16:23:28 +02:00
ed
9bb1323e44 rclone faster + query params correctness 2021-05-13 16:02:30 +02:00
ed
e62bb807a5 better 2021-05-13 01:36:14 +02:00
ed
3fc0d2cc4a better 2021-05-13 00:43:25 +02:00
ed
0c786b0766 v0.10.17 2021-05-12 23:39:54 +02:00
ed
68c7528911 yes good 2021-05-12 23:26:30 +02:00
ed
26e18ae800 disallow uploading logues 2021-05-12 23:22:43 +02:00
ed
c30dc0b546 write-only QoL mostly 2021-05-12 23:06:13 +02:00
ed
f94aa46a11 open write-only folders from tree 2021-05-12 21:50:32 +02:00
ed
403261a293 support pyinstaller 2021-05-12 21:21:07 +02:00
ed
c7d9cbb11f show logues in write-only folders 2021-05-12 21:20:59 +02:00
ed
57e1c53cbb mention volume flags in the cfg-file example 2021-05-02 09:48:19 +02:00
ed
0754b553dd v0.10.16 2021-05-02 09:18:19 +02:00
ed
50661d941b cfg-parser: fix wildcard permissions 2021-05-02 09:16:14 +02:00
ed
c5db7c1a0c pickle needs this ;_; 2021-04-29 22:41:57 +02:00
ed
2cef5365f7 readme again 2021-04-27 09:26:14 +02:00
ed
fbc4e94007 readme (realized this was confusing) 2021-04-27 09:24:50 +02:00
ed
037ed5a2ad readme 2021-04-26 04:02:22 +02:00
ed
69dfa55705 readme 2021-04-26 04:01:47 +02:00
ed
a79a5c4e3e readme + ui tweaks 2021-04-25 22:44:50 +02:00
ed
7e80eabfe6 readme 2021-04-25 21:42:45 +02:00
ed
375b72770d readme 2021-04-25 04:34:06 +02:00
ed
e2dd683def does this look better 2021-04-25 03:04:24 +02:00
ed
9eba50c6e4 readme 2021-04-25 03:00:47 +02:00
ed
5a579dba52 sfx: help bzip2 make smaller archives 2021-04-24 22:07:09 +02:00
ed
e86c719575 sfx: cooperate better with other instances 2021-04-24 22:06:50 +02:00
ed
0e87f35547 ui tweaks 2021-04-24 22:06:21 +02:00
ed
b6d3d791a5 shave 2021-04-24 20:08:07 +02:00
ed
c9c3302664 a 2021-04-24 19:22:15 +02:00
ed
c3e4d65b80 v0.10.15 2021-04-24 04:05:57 +02:00
ed
27a03510c5 quick upload test too 2021-04-24 03:35:58 +02:00
ed
ed7727f7cb fix write-only volumes + add regression test 2021-04-24 02:48:41 +02:00
ed
127ec10c0d js cleanup + minor tweaks 2021-04-23 20:04:17 +02:00
ed
5a9c0ad225 ui tweaks 2021-04-22 09:10:32 +02:00
ed
7e8daf650e v0.10.14 2021-04-21 22:04:21 +02:00
ed
0cf737b4ce 404 rather than redirect home if 404 or 403 2021-04-21 21:51:27 +02:00
ed
74635e0113 phew 2021-04-21 21:42:37 +02:00
ed
e5c4f49901 ok ok 2021-04-21 21:26:55 +02:00
ed
e4654ee7f1 uhh 2021-04-21 21:13:16 +02:00
ed
e5d05c05ed up2k ui tweaks 2021-04-21 20:50:10 +02:00
ed
73c4f99687 add markdown streaming 2021-04-21 20:28:50 +02:00
ed
28c12ef3bf cleanup 2021-04-21 18:48:23 +02:00
ed
eed82dbb54 remove dead code 2021-04-21 18:44:47 +02:00
ed
2c4b4ab928 up2k-cli: cond. readahead 2021-04-21 18:39:55 +02:00
ed
505a8fc6f6 up2k: sparse alloc on windows 2021-04-21 18:32:21 +02:00
ed
e4801d9b06 support msys2-python 2021-04-21 18:28:44 +02:00
ed
04f1b2cf3a v0.10.13 2021-04-21 01:19:22 +02:00
ed
c06d928bb5 sorry android 2021-04-21 01:10:18 +02:00
ed
ab09927e7b v0.10.12 2021-04-19 21:58:49 +02:00
ed
779437db67 up2k: more runahead 2021-04-19 21:58:30 +02:00
ed
28cbdb652e v0.10.11 2021-04-19 21:43:08 +02:00
ed
2b2415a7d8 up2k: gotta go faster 2021-04-19 21:29:43 +02:00
ed
746a8208aa v0.10.10 2021-04-19 17:17:07 +02:00
ed
a2a041a98a optimize 2021-04-19 16:54:38 +02:00
ed
10b436e449 browser: add media fragment uris 2021-04-19 16:41:06 +02:00
ed
4d62b34786 browser: add light mode 2021-04-19 15:40:32 +02:00
ed
0546210687 fix up2k progressbars 2021-04-19 13:18:29 +02:00
ed
f8c11faada don't start 2t stuff if there's no backend avail 2021-04-19 13:17:34 +02:00
ed
16d6e9be1f tweaks 2021-04-17 09:24:25 +02:00
ed
aff8185f2e v0.10.9 2021-04-17 01:29:27 +02:00
ed
217d15fe81 up2k: cheap progress bars 2021-04-17 00:57:35 +02:00
ed
171e93c201 up2k: show realtime speeds 2021-04-17 00:01:03 +02:00
ed
acc1d2e9e3 up2k: show some context in the busy-tab 2021-04-16 23:49:57 +02:00
ed
49c2f37154 up2k: replace progressbars with text 2021-04-16 21:23:53 +02:00
ed
69e54497aa yes good 2021-04-14 16:03:15 +02:00
ed
9aa1885669 hide search tab when d2d 2021-04-14 15:23:25 +02:00
ed
4418508513 dodge cpython bug 2021-04-14 14:37:44 +02:00
ed
e897df3b34 v0.10.8 2021-04-11 21:26:39 +02:00
ed
8cd97ab0e7 much better 2021-04-11 21:07:41 +02:00
ed
bf4949353d support url-pwd on mounts page 2021-04-11 20:43:35 +02:00
ed
98a944f7cc no bopping 2021-04-11 20:23:38 +02:00
ed
7c10f81c92 stop eating browser hotkeys 2021-04-11 20:01:03 +02:00
ed
126ecc55c3 listen to the linter 2021-04-11 19:51:51 +02:00
ed
1034a51bd2 support ~ paths 2021-04-11 17:36:38 +02:00
ed
a2657887cc vscode: get no-dbg args from launch.json 2021-04-11 17:22:42 +02:00
ed
c14b17bfaf whoops 2021-04-10 20:22:33 +02:00
ed
59ebc795e7 tree scroll snapping 2021-04-10 19:30:30 +02:00
ed
8e128d917e sfx: support non-bz2 py 2021-04-10 18:30:58 +02:00
ed
ea762b05e0 guess they stole it from win10, sausage 2021-04-10 18:16:57 +02:00
ed
db374b19f1 mention the new cflags in -h 2021-04-07 21:13:45 +02:00
ed
ab3839ef36 w/a argparser bug fixed 2018-06-08 2021-04-07 20:31:29 +02:00
ed
9886c442f2 add missing uridecode 2021-04-03 23:58:51 +02:00
ed
c8d1926d52 h 2021-04-03 08:26:42 +02:00
ed
a6bd699e52 safari funny 2021-04-03 08:08:43 +02:00
ed
12143f2702 http/1.0, minimal dir listing, pw in url 2021-04-03 07:56:35 +02:00
ed
480705dee9 more todo 2021-04-03 04:41:10 +02:00
ed
781d5094f4 update todo 2021-04-03 04:13:51 +02:00
ed
5615cb94cd adj browser support table 2021-04-03 02:58:50 +02:00
ed
302302a2ac fix zip touch events on iOS 2021-04-03 02:52:19 +02:00
ed
9761b4e3e9 v0.10.7 2021-04-03 00:35:46 +02:00
ed
0cf6924dca v0.10.6 2021-04-02 03:11:40 +02:00
ed
5fd81e9f90 fix unreadable links when playing search results 2021-04-02 03:05:23 +02:00
ed
52bf6f892b more 2021-04-02 02:55:41 +02:00
ed
f3cce232a4 restore minimal support for old browsers 2021-04-02 02:43:07 +02:00
ed
53d3c8b28e decode urlform messages 2021-04-01 23:36:14 +02:00
ed
83fec3cca7 v0.10.5 2021-03-31 01:28:58 +02:00
ed
3cefc99b7d search fixes 2021-03-31 01:20:09 +02:00
ed
3a38dcbc05 v0.10.4 2021-03-29 20:53:20 +02:00
ed
7ff08bce57 browser: stable sort 2021-03-29 20:08:32 +02:00
ed
fd490af434 explain the jank 2021-03-29 06:11:33 +02:00
ed
1195b8f17e v0.10.3 2021-03-29 04:47:59 +02:00
ed
28dce13776 no load-balancer spam when -q 2021-03-28 03:06:52 +02:00
ed
431f20177a make tar 6x faster (1.8 GiB/s) 2021-03-28 01:50:16 +01:00
ed
87aff54d9d v0.10.2 2021-03-27 18:03:33 +01:00
ed
f50462de82 persist lead-column sort 2021-03-27 17:56:21 +01:00
ed
9bda8c7eb6 better errlog name 2021-03-27 17:38:59 +01:00
ed
e83c63d239 fix unix permissions in zip files 2021-03-27 17:28:25 +01:00
ed
b38533b0cc recover from file access errors when zipping 2021-03-27 17:16:59 +01:00
ed
5ccca3fbd5 more 2021-03-27 16:12:47 +01:00
ed
9e850fc3ab zip selection 2021-03-27 15:48:52 +01:00
ed
ffbfcd7e00 h 2021-03-27 03:35:57 +01:00
ed
5ea7590748 readme: mention zip configs 2021-03-27 03:34:03 +01:00
ed
290c3bc2bb reclining 2021-03-27 03:07:44 +01:00
ed
b12131e91c v0.10.1 2021-03-27 02:44:40 +01:00
ed
3b354447b0 v0.10.0 2021-03-27 02:08:07 +01:00
ed
d09ec6feaa tehe 2021-03-27 01:49:58 +01:00
ed
21405c3fda be nice to windows 2021-03-27 01:43:02 +01:00
ed
13e5c96cab finish adding zip-crc (semi-streaming) 2021-03-27 01:27:12 +01:00
ed
426687b75e archive format selection in browser 2021-03-27 01:10:05 +01:00
ed
c8f59fb978 up2k: add folder upload 2021-03-27 00:20:42 +01:00
ed
871dde79a9 download as tar + utf8 zip + optimize walk 2021-03-26 20:43:25 +01:00
ed
e14d81bc6f fix utf8 content-disposition 2021-03-26 02:54:19 +01:00
ed
514d046d1f download folders as zip 2021-03-26 01:51:38 +01:00
ed
4ed9528d36 5x faster reply on 1st req on new conns 2021-03-25 19:29:16 +01:00
ed
625560e642 steal from diodes 2021-03-25 02:59:04 +01:00
ed
73ebd917d1 i know too much about zip now 2021-03-25 02:31:25 +01:00
ed
cd3e0afad2 v0.9.13 2021-03-23 02:13:28 +01:00
ed
d8d1f94a86 v0.9.12 2021-03-23 01:24:37 +01:00
ed
00dfd8cfd1 v0.9.11 2021-03-23 00:36:48 +01:00
ed
273de6db31 propagate d2d/d2t properly 2021-03-23 00:33:18 +01:00
ed
c6c0eeb0ff better volflags presentation 2021-03-23 00:28:11 +01:00
ed
e70c74a3b5 support nullmapping subfolders with -v :/foo/bar:cd2d 2021-03-23 00:08:23 +01:00
ed
f7d939eeab more sfx tweaks 2021-03-21 22:31:07 +01:00
ed
e815c091b9 v0.9.10 2021-03-21 22:05:46 +01:00
ed
963529b7cf readme 2021-03-21 20:38:29 +01:00
ed
638a52374d readme 2021-03-21 20:20:11 +01:00
ed
d9d42b7aa2 aaa 2021-03-21 20:11:03 +01:00
ed
ec7e5f36a2 make-sfx tweaks 2021-03-21 18:06:31 +01:00
ed
56110883ea readme 2021-03-21 17:56:05 +01:00
ed
7f8d7d6006 v0.9.9 2021-03-21 17:15:47 +01:00
ed
49e4fb7e12 finally time to undefault this 2021-03-21 16:19:45 +01:00
ed
8dbbea473f mtp incoming files too 2021-03-21 15:21:07 +01:00
ed
3d375d5114 assert mtm/mtp is used by mte 2021-03-21 14:15:55 +01:00
ed
f3eae67d97 readme 2021-03-21 09:41:05 +01:00
ed
40c1b19235 detrimental to search results 2021-03-21 08:16:17 +01:00
ed
ccaf0ab159 gj 2021-03-21 07:49:05 +01:00
ed
d07f147423 fixes 2021-03-21 06:00:21 +01:00
ed
f5cb9f92b9 better task recovery on restart 2021-03-21 05:57:24 +01:00
ed
f991f74983 hotkeys for directory traversal 2021-03-21 04:04:30 +01:00
ed
6b3295059e add time markers in player 2021-03-21 03:21:05 +01:00
ed
b18a07ae6b fix file srch 2021-03-21 02:46:40 +01:00
ed
8ab03dabda ok 2021-03-21 02:41:15 +01:00
ed
5e760e35dc togglebutton for tooltips to save iphone users 2021-03-21 02:33:53 +01:00
ed
afbfa04514 fixes 2021-03-21 01:55:12 +01:00
ed
7aace470c5 preserve file refs on sort (crc32 instead of idx) 2021-03-21 01:41:18 +01:00
ed
b4acb24f6a remember sort order 2021-03-20 10:56:35 +01:00
ed
bcee8a4934 Merge branch 'master' of gh:9001/copyparty 2021-03-20 09:20:12 +01:00
ed
36b0718542 media plauer hoetkeys 2021-03-20 09:20:08 +01:00
ed
9a92bca45d Merge branch 'master' of github:9001/copyparty
idk forgot to pull
2021-03-20 07:32:28 +00:00
ed
b07445a363 search ratecontrol and timeouts cause it can get bad 2021-03-20 07:32:01 +00:00
ed
a62ec0c27e fixes 2021-03-20 05:58:34 +01:00
ed
57e3a2d382 normalize keys to rekobo on index 2021-03-20 05:45:34 +01:00
ed
b61022b374 fixes 2021-03-20 03:08:16 +00:00
ed
a3e2b2ec87 mm:ss durations on initial html too 2021-03-20 01:27:51 +01:00
ed
a83d3f8801 prevent dupe tags from mtp (replace id3 tags) 2021-03-20 01:00:57 +01:00
ed
90c5f2b9d2 spread search interface horizontally 2021-03-20 01:00:44 +01:00
ed
4885653c07 advanced search (key/bpm/...)
man i hope sqlite is good at opimizing
2021-03-20 00:06:11 +01:00
ed
21e1cd87ca nice on windows 2021-03-19 23:43:34 +01:00
ed
81f82e8e9f nice them too 2021-03-19 21:28:10 +01:00
ed
c0e31851da add timeouts for mtp calls 2021-03-19 21:22:56 +01:00
ed
6599c3eced no racing pls 2021-03-19 20:42:33 +01:00
ed
5d6c61a861 add mtp eta 2021-03-19 01:20:01 +01:00
ed
1a5c66edd3 build beatroot from source if need be 2021-03-19 00:43:23 +01:00
ed
deae9fe95a vscode is entirely too helpful 2021-03-19 00:14:42 +01:00
ed
abd65c6334 support metadata plugins 2021-03-19 00:08:31 +01:00
ed
8137a99904 mtag-bin: support alpine + misc health checks 2021-03-18 01:01:57 +01:00
ed
6f6f9c1f74 mtag-bin: support macos (macports) 2021-03-17 23:57:18 +01:00
ed
7b575f716f mtag-bin: support windows (mingw64) 2021-03-17 23:17:02 +01:00
ed
6ba6ea3572 linkify 2021-03-17 01:42:59 +01:00
ed
9a22ad5ea3 this makes more sense 2021-03-17 01:37:59 +01:00
ed
beaab9778e make mistakes 2021-03-17 00:55:27 +01:00
ed
f327bdb6b4 never trust tags かしら 2021-03-15 23:12:13 +01:00
ed
ae180e0f5f save bpm/tempo notes from the bitbucket 2021-03-15 03:10:14 +01:00
ed
e3f1d19756 v0.9.8 2021-03-15 01:13:46 +01:00
ed
93c2bd6ef6 fix tree trying to make surprise appearances 2021-03-13 02:29:13 +01:00
ed
4d0e5ff6db turns out whitespace compresses better than tabs 2021-03-13 00:16:07 +01:00
ed
0893f06919 browser: reload music player on column-sort
so tracks play in the right order
2021-03-13 00:15:53 +01:00
ed
46b6abde3f fuse-client: password from file 2021-03-13 00:14:22 +01:00
ed
0696610dee give up, just try both and see what sticks 2021-03-13 00:14:07 +01:00
ed
edf0d3684c sfx: improvements from r0c 2021-03-13 00:13:10 +01:00
ed
7af159f5f6 heh 2021-03-09 21:36:14 +01:00
ed
7f2cb6764a v0.9.7 2021-03-08 03:51:26 +01:00
ed
96495a9bf1 v0.9.6 2021-03-07 21:44:25 +01:00
ed
b2fafec5fc handle key-normalization errors 2021-03-07 21:41:36 +01:00
ed
0850b8ae2b v0.9.5 2021-03-07 19:25:24 +01:00
ed
8a68a96c57 css tweaks 2021-03-07 19:15:19 +01:00
ed
d3aae8ed6a more mojibake fixes 2021-03-07 18:58:26 +01:00
ed
c62ebadda8 separate tree scrollbar 2021-03-07 18:26:57 +01:00
ed
ffcee6d390 add tooltips and more mojibake compat 2021-03-07 04:14:55 +01:00
ed
de32838346 key notation normalization (why tho) 2021-03-07 02:46:17 +01:00
ed
b9a4e47ea2 mojibake support for the spa stuff 2021-03-06 22:48:49 +01:00
ed
57d994422d logging cleanup 2021-03-06 17:38:56 +01:00
ed
6ecd745323 so much for sessionStorage 2021-03-06 16:34:55 +01:00
ed
bd769f5bdb fix py2 + encourage py3 2021-03-06 02:42:17 +01:00
ed
2381692aba js cfg 2021-03-06 02:30:36 +01:00
ed
24fdada0a0 did you know rhel 7 has an sqlite3 from 2015 2021-03-06 02:28:49 +01:00
ed
bb5169710a warn people when they're gonna have a bad time 2021-03-06 00:30:05 +01:00
ed
9cde2352f3 v0.9.4 2021-03-05 02:06:18 +01:00
ed
482dd7a938 v0.9.3 2021-03-05 00:00:22 +01:00
ed
bddcc69438 v0.9.2 2021-03-04 22:58:22 +01:00
ed
19d4540630 good 2021-03-04 22:38:12 +01:00
ed
4f5f6c81f5 add buttons to adjust tree width 2021-03-04 22:34:09 +01:00
ed
7e4c1238ba oh 2021-03-04 21:12:54 +01:00
ed
f7196ac773 dodge pushstate size limit 2021-03-04 21:06:59 +01:00
ed
7a7c832000 sfx-builder: support ancient git versions 2021-03-04 20:30:28 +01:00
ed
2b4ccdbebb multithread the slow mtag backends 2021-03-04 20:28:03 +01:00
ed
0d16b49489 broke this too 2021-03-04 01:35:09 +01:00
ed
768405b691 tree broke 2021-03-04 01:32:44 +01:00
ed
da01413b7b remove speedbumps 2021-03-04 01:21:04 +01:00
ed
914e22c53e async tagging of incoming files 2021-03-03 18:36:05 +01:00
ed
43a23bf733 v0.9.1 2021-03-03 01:28:32 +01:00
ed
92bb00c6d2 faster sorting 2021-03-03 01:27:41 +01:00
ed
b0b97a2648 fix bugs 2021-03-03 00:46:15 +01:00
ed
2c452fe323 readme nitpicks 2021-03-02 01:02:13 +01:00
ed
ad73d0c77d update feature list in readme 2021-03-02 00:31:08 +01:00
ed
7f9bf1c78c v0.9.0 2021-03-02 00:12:15 +01:00
ed
61a6bc3a65 make browser columns compactable 2021-03-02 00:07:04 +01:00
ed
46e10b0e9f yab 2021-03-01 03:15:41 +01:00
ed
8441206e26 read media-tags from files (for display/searching) 2021-03-01 02:50:10 +01:00
ed
9fdc5ee748 use one sqlite3 cursor, closes #1 2021-02-25 22:30:40 +01:00
ed
00ff133387 support receiving chunked PUT 2021-02-25 22:26:03 +01:00
ed
96164cb934 v0.8.3 2021-02-22 21:58:37 +01:00
ed
82fb21ae69 v0.8.2 2021-02-22 21:40:55 +01:00
ed
89d4a2b4c4 hide up2k mode-toggle in read-only folders 2021-02-22 21:27:44 +01:00
ed
fc0c7ff374 correct up2k mode in mixed-r/w 2021-02-22 21:11:30 +01:00
ed
5148c4f2e9 include pro/epilogues in ?ls 2021-02-22 21:09:57 +01:00
ed
c3b59f7bcf restore win8/7/xp support 2021-02-22 20:59:44 +01:00
ed
61e148202b too much 2021-02-22 20:56:19 +01:00
ed
8a4e0739bc v0.8.1 2021-02-22 03:54:34 +01:00
ed
f75c5f2fe5 v0.8.0 2021-02-22 03:46:02 +01:00
ed
81d5859588 h 2021-02-22 03:33:24 +01:00
ed
721886bb7a this isnt really helping is it 2021-02-22 03:01:32 +01:00
ed
b23c272820 mention the search syntax 2021-02-22 02:33:30 +01:00
ed
cd02bfea7a better path/name search syntax 2021-02-22 02:16:47 +01:00
ed
6774bd88f9 make search/upload toggling more visible 2021-02-22 01:25:13 +01:00
ed
1046a4f376 update web deps 2021-02-22 00:47:53 +01:00
ed
8081f9ddfd add up2k cleanup button 2021-02-22 00:47:21 +01:00
ed
fa656577d1 prevent non-spa navigation while uploading 2021-02-21 21:08:53 +01:00
ed
b14b86990f toggle upload widgets in spa 2021-02-21 20:50:12 +01:00
ed
2a6dd7b512 add close button to search results 2021-02-21 05:33:57 +00:00
ed
feebdee88b correctness 2021-02-21 05:15:08 +00:00
ed
99d9277f5d look at him go 2021-02-21 05:36:26 +01:00
ed
9af64d6156 debug pypy3/7.3.3/gcc9.2.0/gentoo 2021-02-21 02:48:25 +00:00
ed
5e3775c1af fuse.py prefers ?ls if available 2021-02-21 02:07:34 +00:00
ed
2d2e8a3da7 less jank ?ls 2021-02-21 01:31:49 +00:00
ed
b2a560b76f update readme with new features 2021-02-21 00:29:10 +00:00
ed
39397a489d rearrange readme status list 2021-02-21 00:26:29 +00:00
ed
ff593a0904 fix folder tree presentation in mixed-r/w volumes 2021-02-20 19:10:16 +00:00
ed
f12789cf44 reversible mojibake marshaling for sqlite 2021-02-20 18:12:36 +00:00
ed
4f8cf2fc87 qol 2021-02-20 17:39:08 +01:00
ed
fda98730ac 77.6KiB changeset nice 2021-02-20 04:59:43 +00:00
ed
06c6ddffb6 v0.7.7 2021-02-14 02:13:52 +01:00
ed
d29f0c066c logging 2021-02-14 01:32:16 +01:00
ed
c9e4de3346 up2k: fix rejected files not counting as progress 2021-02-13 04:30:46 +01:00
ed
ca0b97f72d oh cool 2021-02-13 03:59:38 +01:00
ed
b38f20b408 up2k: make tabsync optional 2021-02-13 03:45:40 +01:00
ed
05b1dbaf56 up2k: upload semaphore across tabs/windows 2021-02-13 02:57:51 +01:00
ed
b8481e32ba lovely priority inversions 2021-02-12 23:53:13 +01:00
ed
9c03c65e07 v0.7.6 2021-02-12 20:53:29 +01:00
ed
d8ed006b9b up2k: 128 MiB runahead 2021-02-12 20:41:42 +01:00
ed
63c0623a5e vscode: windows support 2021-02-12 19:47:18 +01:00
ed
fd84506db0 don't list up2k db in browser 2021-02-12 19:25:57 +01:00
ed
d8bcb44e44 vscode: no-debug launcher 2021-02-12 19:25:01 +01:00
ed
56a26b0916 up2k: print final commit too 2021-02-12 17:10:08 +01:00
ed
efcf1d6b90 add cfssl.sh 2021-02-12 07:30:20 +00:00
ed
9f578bfec6 v0.7.5 2021-02-12 07:06:38 +00:00
ed
1f170d7d28 up2k scanner messages less useless 2021-02-12 07:04:35 +00:00
ed
5ae14cf9be up2k scanner more better 2021-02-12 01:07:55 +00:00
ed
aaf9d53be9 more ssl options 2021-02-12 00:31:28 +00:00
ed
75c73f7ba7 add --http-only (might as well) 2021-02-11 22:54:40 +00:00
ed
b6dba8beee imagine going plaintext in the middle of a tls reply 2021-02-11 22:50:59 +00:00
ed
94521cdc1a add --https-only 2021-02-11 22:48:10 +00:00
ed
3365b1c355 add --ssl-ver (ssl/tls versions to allow) 2021-02-11 21:24:17 +00:00
ed
6c957c4923 v0.7.4 2021-02-04 01:01:42 +01:00
ed
833997f04c shrink sfx.py from 515k to 472k 2021-02-04 01:01:11 +01:00
ed
68d51e4037 rem 2021-02-04 01:00:41 +01:00
ed
ce274d2011 handle url-encoded posts 2021-02-03 23:18:11 +01:00
ed
280778ed43 catch macos socket errors 2021-02-03 22:32:16 +01:00
ed
0f558ecbbf upgrade bundled jinja2 2021-02-03 22:32:01 +01:00
ed
58f9e05d93 v0.7.3 2021-02-03 00:50:51 +01:00
ed
1ec981aea7 bind multiple ip/ports 2021-02-03 00:49:51 +01:00
ed
2a90286a7c dim the socket debug msgs 2021-02-03 00:25:13 +01:00
ed
12d25d09b2 limit gz/br unpacker to embedded resources 2021-02-03 00:19:14 +01:00
ed
a039fae1a4 remove extra anon-rw warning 2021-02-03 00:17:12 +01:00
ed
322b9abadc v0.7.2 2021-01-29 00:52:41 +01:00
ed
0aaf954cea up2k: increase purge timeout 2021-01-29 00:52:22 +01:00
ed
c2d22aa3d1 up2k: make confirmation optional 2021-01-29 00:49:35 +01:00
ed
6934c75bba nice 2021-01-29 00:43:57 +01:00
ed
c58cf78f86 yabe 2021-01-24 16:14:01 +01:00
ed
7f0de790ab more macports compat 2021-01-23 21:19:29 +01:00
ed
d4bb4e3a73 v0.7.1 2021-01-23 19:55:35 +01:00
ed
d25612d038 make-sfx: support macports 2021-01-23 19:55:24 +01:00
ed
116b2351b0 mention howto purge partial uploads 2021-01-23 19:25:25 +01:00
ed
69b83dfdc4 up2k: limit runahead in client 2021-01-23 19:05:45 +01:00
ed
3b1839c2ce up2k: ask before starting the upload 2021-01-23 18:51:08 +01:00
ed
13742ebdf8 verify that PARTIALs exist after a restart 2021-01-23 18:49:43 +01:00
ed
634657bea1 up2k: discard empty PARTIALs 2021-01-23 18:10:11 +01:00
ed
46e70d50b7 v0.7.0 2021-01-10 17:49:56 +01:00
ed
d64e9b85a7 prefer sqlite over registry snaps 2021-01-10 17:47:27 +01:00
ed
fb853edbe3 prevent index loss on mid-write crash 2021-01-10 17:16:55 +01:00
ed
cc076c1be1 persist/timeout incomplete uploads too 2021-01-10 16:47:35 +01:00
ed
98cc9a6755 mojibake support + exception handling 2021-01-10 09:48:26 +01:00
ed
7bd2b9c23a sqlite3 as up2k db + build index on boot + rproxy ip fix 2021-01-10 09:27:11 +01:00
ed
de724a1ff3 up2k: add volume flag to reject existing files 2021-01-09 15:20:02 +01:00
ed
2163055dae media-player: play links don't scroll on click 2021-01-09 14:40:56 +01:00
ed
93ed0fc10b v0.6.3 2021-01-07 01:09:32 +01:00
ed
0d98cefd40 fix dumb 2021-01-07 01:06:31 +01:00
ed
d58988a033 use sendfile when possible 2021-01-07 00:50:42 +01:00
ed
2acfab1e3f cleanup 2021-01-06 22:54:54 +01:00
ed
b915dfe9a6 nagle adds ~.2sec delay on last packet 2021-01-06 21:08:52 +00:00
ed
25bd5a823e fuse-client: add timestamps to logger 2021-01-06 17:40:42 +01:00
ed
1c35de4716 fuse-client: cache tweaks 2021-01-06 17:22:07 +01:00
ed
4c00435a0a fuse: add windows-explorer settings 2021-01-06 17:18:37 +01:00
ed
844e3079a8 saved for posterity 2021-01-06 17:13:24 +01:00
ed
4778cb5b2c readme: add quickstart 2021-01-02 22:57:48 +01:00
ed
ec5d60b919 fuse-client: fix directory parser 2021-01-01 21:54:56 +01:00
ed
e1f4b960e8 oh no 2020-12-20 02:33:37 +01:00
ed
669e46da54 update TODOs 2020-12-14 09:19:43 +01:00
ed
ba94cc5df7 v0.6.2 2020-12-14 04:28:21 +01:00
ed
d08245c3df v0.6.1 2020-12-14 03:51:24 +01:00
ed
5c18d12cbf self-upgrading upgrader... getting too meta 2020-12-14 03:45:59 +01:00
ed
580a42dec7 sfx-repack: support wget 2020-12-14 02:59:15 +01:00
ed
29286e159b up2k-client: ignore rejected dupes 2020-12-12 00:55:42 +01:00
ed
19bcf90e9f support uploads with huge filenames 2020-12-12 00:35:54 +01:00
ed
dae9c00742 always display world-readable subvolumes 2020-12-04 23:28:18 +01:00
ed
35324ceb7c tests: support windows 2020-12-04 23:26:46 +01:00
ed
5aadd47199 dodge python-bug #7980 2020-12-01 23:20:44 +01:00
ed
7d9057cc62 v0.6.0 2020-12-01 02:58:11 +01:00
ed
c4b322b883 this commit sponsored by eslint 2020-12-01 02:25:46 +01:00
ed
19b09c898a fix sfx repack whoops 2020-11-30 03:27:27 +01:00
ed
eafe2098b6 v0.5.7 2020-11-30 03:01:14 +01:00
ed
2bc6a20d71 md: poll server for changes 2020-11-30 03:00:44 +01:00
ed
8b502a7235 v0.5.6 2020-11-29 19:49:16 +01:00
ed
37567844af md: add render2 plugin func 2020-11-29 19:34:08 +01:00
ed
2f6c4e0e34 refactoring 2020-11-29 19:32:22 +01:00
ed
1c7cc4cb2b ignore border when sizing table 2020-11-29 18:48:55 +01:00
ed
f83db3648e git tag as sfx version 2020-11-28 20:02:20 +01:00
ed
b164aa00d4 md: fix eof scroll glitch 2020-11-27 21:25:52 +01:00
ed
a2d866d0c2 show plugin errors 2020-11-27 21:10:47 +01:00
ed
2dfe4ac4c6 v0.5.5 2020-11-27 03:25:14 +01:00
ed
db65d05cb5 fix unittest for recent macos versions 2020-11-27 03:24:55 +01:00
ed
300c0194c7 add inline markdown plugins 2020-11-27 03:22:41 +01:00
ed
37a0d2b087 good idea 2020-11-19 02:24:26 +01:00
ed
a4959300ea add sfx downloader/repacker 2020-11-19 01:23:24 +01:00
ed
223657e5f8 v0.5.4 2020-11-17 23:58:08 +01:00
ed
0c53de6767 more lenient md table formatter 2020-11-17 23:55:14 +01:00
ed
9c309b1498 add filetype column 2020-11-17 23:43:55 +01:00
ed
1aa1b34c80 add reverse-proxy support 2020-11-17 23:42:33 +01:00
ed
755a2ee023 v0.5.3 2020-11-13 03:31:07 +01:00
ed
69d3359e47 lots of stuff:
* show per-connection and per-transfer speeds
* support multiple cookies in parser
* set SameSite=Lax
* restore macos support in sfx.sh
* md-editor: add mojibake/unicode hunter
* md-editor: add table formatter
* md-editor: make bold bolder
* md-editor: more hotkeys
* md-editor: fix saving in fancy
* md-editor: fix eof-scrolling in chrome
* md-editor: fix text erasure with newline
* md-editor: fix backspace behavior in gutter
2020-11-13 02:58:38 +01:00
ed
a90c49b8fb fuse.py: support mojibake on windows 2020-10-25 08:07:17 +01:00
ed
b1222edb27 mention rclone in docs 2020-10-25 08:05:11 +01:00
ed
b967a92f69 support rclone as fuse client 2020-10-25 08:04:41 +01:00
ed
90a5cb5e59 fuse: support https + passwords, use argparse,
better handle windows trying to listdir(file)
2020-08-31 03:44:46 +02:00
ed
7aba9cb76b add contrib 2020-08-23 22:40:25 +00:00
ed
f550a8171d sfx: support ubuntu and openrc:
-- ubuntu does not let root follow symlinks created by other users
-- openrc expects copyparty to die if you kill the sfx parent
2020-08-23 22:32:44 +00:00
ed
82e568d4c9 sfx: support py27 on win10 when %TEMP% contains Skatteoppgjør.pdf 2020-08-18 19:23:17 +00:00
ed
7b2a4a3d59 v0.5.2 2020-08-18 18:22:23 +00:00
ed
0265455cd1 v0.5.1 2020-08-17 21:55:16 +00:00
ed
afafc886a4 support windows 2020-08-17 21:53:24 +00:00
ed
8a959f6ac4 add server info banner thing 2020-08-17 21:33:06 +00:00
ed
1c3aa0d2c5 deal with a soho nas (and FF60esr) 2020-08-17 20:39:46 +00:00
ed
79b7d3316a v0.5.0 2020-08-16 23:04:10 +00:00
ed
fa7768583a md-editor: tolerate inaccurate mtimes 2020-08-17 00:44:22 +00:00
ed
faf49f6c15 md-editor: add paragraph jumping 2020-08-17 00:42:05 +00:00
ed
765af31b83 improve fuse-fuzzer 2020-08-13 04:43:13 +00:00
ed
b6a3c52d67 fuse: be nicer to software which fails on truncated reads, such as Wimgapi.dll 2020-08-11 18:16:37 +00:00
ed
b025c2f660 fuse: windows optimizations 2020-08-09 04:09:42 +00:00
ed
e559a7c878 another fuse cache fix 2020-08-09 00:51:48 +00:00
ed
5c8855aafd trailing whitespace best syntax fug 2020-08-08 00:51:37 +00:00
ed
b5fc537b89 support PUT and ACAO 2020-08-08 00:47:54 +00:00
ed
14899d3a7c fix fuse cache bugs 2020-08-07 23:55:48 +00:00
ed
0ea7881652 fuse: cache options 2020-08-07 21:55:40 +00:00
ed
ec29b59d1e black 2020-08-07 20:00:30 +00:00
ed
9405597c15 workaround python-issue2494 on windows 2020-08-06 19:31:52 +00:00
ed
82441978c6 fuse: windows howto 2020-08-06 18:22:25 +00:00
ed
e0e6291bdb cleanup + readme 2020-08-04 23:46:57 +00:00
ed
b2b083fd0a fuse: support windows/msys2 2020-08-04 22:50:45 +00:00
ed
f8a51b68e7 fuse: add fork based on fuse-python 2020-08-04 22:42:40 +00:00
ed
e0a19108e5 ensure firefox shows the latest md 2020-06-25 00:07:50 +00:00
ed
770ea68ca8 workaround systemd being a joke 2020-06-24 23:53:23 +00:00
ed
ce36c52baf 1234 too popular 2020-06-24 23:52:42 +00:00
ed
a7da1dd233 v0.4.3 2020-05-17 16:46:47 +02:00
ed
678ef296b4 fully hide the navbar when asked 2020-05-17 16:44:58 +02:00
ed
9e5627d805 drop opus audio support on old iOS versions 2020-05-17 16:44:17 +02:00
ed
5958ee4439 autoindent oversight 2020-05-17 08:20:54 +02:00
ed
7127e57f0e happens on macs too 2020-05-17 02:58:22 +02:00
ed
ee9c6dc8aa use marked.js v1.1.0 2020-05-17 02:28:03 +02:00
ed
92779b3f48 2x chrome editor perf 2020-05-17 00:49:49 +02:00
ed
2f1baf17d4 numbered headers for paper-prints 2020-05-17 00:33:34 +02:00
ed
583da3d4a9 actually consider paper-printing 2020-05-16 02:24:27 +02:00
ed
bf9ff78bcc autofill blank link descriptions 2020-05-16 02:19:45 +02:00
ed
2cb07792cc add monospace font 2020-05-16 02:13:34 +02:00
ed
47bc8bb466 multiprocessing adds latency; default to off 2020-05-16 02:05:18 +02:00
ed
94ad1f5732 option to list dotfiles 2020-05-16 01:40:29 +02:00
107 changed files with 17498 additions and 2846 deletions

12
.eslintrc.json Normal file

@@ -0,0 +1,12 @@
{
"env": {
"browser": true,
"es2021": true
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaVersion": 12
},
"rules": {
}
}

2
.gitattributes vendored

@@ -1,4 +1,6 @@
* text eol=lf
*.reg text eol=crlf
*.png binary
*.gif binary

19
.vscode/launch.json vendored

@@ -9,15 +9,28 @@
"console": "integratedTerminal",
"cwd": "${workspaceFolder}",
"args": [
"-j",
"0",
//"-nw",
"-ed",
"-emp",
"-e2dsa",
"-e2ts",
"-mtp",
".bpm=f,bin/mtag/audio-bpm.py",
"-a",
"ed:wark",
"-v",
"srv::r:aed"
"srv::r:aed:cnodupe",
"-v",
"dist:dist:r"
]
},
{
"name": "No debug",
"preLaunchTask": "no_dbg",
"type": "python",
//"request": "attach", "port": 42069
// fork: nc -l 42069 </dev/null
},
{
"name": "Run active unit test",
"type": "python",

35
.vscode/launch.py vendored Normal file

@@ -0,0 +1,35 @@
# takes arguments from launch.json
# is used by no_dbg in tasks.json
# launches 10x faster than mspython debugpy
# and is stoppable with ^C
import os
import sys
import shlex
sys.path.insert(0, os.getcwd())
import jstyleson
from copyparty.__main__ import main as copyparty
with open(".vscode/launch.json", "r", encoding="utf-8") as f:
tj = f.read()
oj = jstyleson.loads(tj)
argv = oj["configurations"][0]["args"]
try:
sargv = " ".join([shlex.quote(x) for x in argv])
print(sys.executable + " -m copyparty " + sargv + "\n")
except:
pass
argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv]
try:
copyparty(["a"] + argv)
except SystemExit as ex:
if ex.code:
raise
print("\n\033[32mokke\033[0m")
sys.exit(1)

14
.vscode/settings.json vendored

@@ -37,7 +37,7 @@
"python.linting.banditEnabled": true,
"python.linting.flake8Args": [
"--max-line-length=120",
"--ignore=E722,F405,E203,W503,W293",
"--ignore=E722,F405,E203,W503,W293,E402",
],
"python.linting.banditArgs": [
"--ignore=B104"
@@ -50,11 +50,9 @@
"files.associations": {
"*.makefile": "makefile"
},
"editor.codeActionsOnSaveTimeout": 9001,
"editor.formatOnSaveTimeout": 9001,
//
// things you may wanna edit:
//
"python.pythonPath": ".venv/bin/python",
//"python.linting.enabled": true,
"python.formatting.blackArgs": [
"-t",
"py27"
],
"python.linting.enabled": true,
}

15
.vscode/tasks.json vendored Normal file

@@ -0,0 +1,15 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "pre",
"command": "true;rm -rf inc/* inc/.hist/;mkdir -p inc;",
"type": "shell"
},
{
"label": "no_dbg",
"type": "shell",
"command": "${config:python.pythonPath} .vscode/launch.py"
}
]
}

499
README.md

@@ -8,57 +8,469 @@
turn your phone or raspi into a portable file server with resumable uploads/downloads using IE6 or any other browser
* server runs on anything with `py2.7` or `py3.2+`
* *resumable* uploads need `firefox 12+` / `chrome 6+` / `safari 6+` / `IE 10+`
* server runs on anything with `py2.7` or `py3.3+`
* browse/upload with IE4 / netscape4.0 on win3.11 (heh)
* *resumable* uploads need `firefox 34+` / `chrome 41+` / `safari 7+` for full speed
* code standard: `black`
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [thumbnails](#thumbnails) // [md-viewer](#markdown-viewer) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [ie4](#browser-support)
## readme toc
* top
* [quickstart](#quickstart)
* [notes](#notes)
* [status](#status)
* [bugs](#bugs)
* [general bugs](#general-bugs)
* [not my bugs](#not-my-bugs)
* [the browser](#the-browser)
* [tabs](#tabs)
* [hotkeys](#hotkeys)
* [tree-mode](#tree-mode)
* [thumbnails](#thumbnails)
* [zip downloads](#zip-downloads)
* [uploading](#uploading)
* [file-search](#file-search)
* [markdown viewer](#markdown-viewer)
* [other tricks](#other-tricks)
* [searching](#searching)
* [search configuration](#search-configuration)
* [metadata from audio files](#metadata-from-audio-files)
* [file parser plugins](#file-parser-plugins)
* [complete examples](#complete-examples)
* [browser support](#browser-support)
* [client examples](#client-examples)
* [up2k](#up2k)
* [dependencies](#dependencies)
* [optional dependencies](#optional-dependencies)
* [install recommended deps](#install-recommended-deps)
* [optional gpl stuff](#optional-gpl-stuff)
* [sfx](#sfx)
* [sfx repack](#sfx-repack)
* [install on android](#install-on-android)
* [dev env setup](#dev-env-setup)
* [how to release](#how-to-release)
* [todo](#todo)
## quickstart
download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone full access to the current folder; see `-h` for help if you want accounts and volumes etc
you may also want these, especially on servers:
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)
## notes
* iPhone/iPad: use Firefox to download files
* Android-Chrome: set max "parallel uploads" for 200% upload speed (android bug)
* Android-Firefox: takes a while to select files (in order to avoid the above android-chrome issue)
* Desktop-Firefox: may use gigabytes of RAM if your connection is great and your files are massive
* Android-Chrome: increase "parallel uploads" for higher speed (android bug)
* Android-Firefox: takes a while to select files (their fix for ☝️)
* Desktop-Firefox: ~~may use gigabytes of RAM if your files are massive~~ *seems to be OK now*
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
* because no browsers currently implement the media-query to do this properly orz
## status
* [x] sanic multipart parser
* [x] load balancer (multiprocessing)
* [x] upload (plain multipart, ie6 support)
* [x] upload (js, resumable, multithreaded)
* [x] download
* [x] browser
* [x] media player
* [ ] thumbnails
* [ ] download as zip
* [x] volumes
* [x] accounts
* [x] markdown viewer
* [x] markdown editor
summary: all planned features work! now please enjoy the bloatening
summary: it works! you can use it! (but technically not even close to beta)
* backend stuff
* ☑ sanic multipart parser
* ☑ load balancer (multiprocessing)
* ☑ volumes (mountpoints)
* ☑ accounts
* upload
* ☑ basic: plain multipart, ie6 support
* ☑ up2k: js, resumable, multithreaded
* ☑ stash: simple PUT filedropper
* ☑ symlink/discard existing files (content-matching)
* download
* ☑ single files in browser
* ☑ folders as zip / tar files
* ☑ FUSE client (read-only)
* browser
* ☑ tree-view
* ☑ media player
* ☑ thumbnails
* ☑ images using Pillow
* ☑ videos using FFmpeg
* ☑ cache eviction (max-age; maybe max-size eventually)
* ☑ SPA (browse while uploading)
* if you use the file-tree on the left only, not folders in the file list
* server indexing
* ☑ locate files by contents
* ☑ search by name/path/date/size
* ☑ search by ID3-tags etc.
* markdown
* ☑ viewer
* ☑ editor (sure why not)
# bugs
* Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* Windows: python 2.7 cannot handle filenames with mojibake
## general bugs
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
* cannot mount something at `/d1/d2/d3` unless `d2` exists inside `d1`
* hiding the contents at url `/d1/d2/d3` using `-v :d1/d2/d3:cd2d` has the side-effect of creating databases (for files/tags) inside folders d1 and d2, and those databases take precedence over the main db at the top of the vfs - this means all files in d2 and below will be reindexed unless you already had a vfs entry at or below d2
* probably more, pls let me know
## not my bugs
* Windows: msys2-python 3.8.6 occasionally throws "RuntimeError: release unlocked lock" when leaving a scoped mutex in up2k
* this is an msys2 bug, the regular windows edition of python is fine
# the browser
![copyparty-browser-fs8](https://user-images.githubusercontent.com/241032/115978054-65106380-a57d-11eb-98f8-59e3dee73557.png)
## tabs
* `[🔎]` search by size, date, path/name, mp3-tags ... see [searching](#searching)
* `[🚀]` and `[🎈]` are the uploaders, see [uploading](#uploading)
* `[📂]` mkdir, create directories
* `[📝]` new-md, create a new markdown document
* `[📟]` send-msg, either to server-log or into textfiles if `--urlform save`
* `[⚙️]` client configuration options
## hotkeys
the browser has the following hotkeys
* `I/K` prev/next folder
* `P` parent folder
* `G` toggle list / grid view
* `T` toggle thumbnails / icons
* when playing audio:
* `0..9` jump to 10%..90%
* `U/O` skip 10sec back/forward
* `J/L` prev/next song
* `J` also starts playing the folder
* in the grid view:
* `S` toggle multiselect
* `A/D` zoom
## tree-mode
by default there's a breadcrumbs path; you can replace this with a tree-browser sidebar thing by clicking the 🌲
click `[-]` and `[+]` to adjust the size, and the `[a]` toggles if the tree should widen dynamically as you go deeper or stay fixed-size
## thumbnails
![copyparty-thumbs-fs8](https://user-images.githubusercontent.com/241032/120070302-10836b00-c08a-11eb-8eb4-82004a34c342.png)
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how destructive your users are
## zip downloads
the `zip` link next to folders can produce various types of zip/tar files using these alternatives in the browser settings tab:
| name | url-suffix | description |
|--|--|--|
| `tar` | `?tar` | plain gnutar, works great with `curl \| tar -xv` |
| `zip` | `?zip=utf8` | works everywhere, glitchy filenames on win7 and older |
| `zip_dos` | `?zip` | traditional cp437 (no unicode) to fix glitchy filenames |
| `zip_crc` | `?zip=crc` | cp437 with crc32 computed early for truly ancient software |
* hidden files (dotfiles) are excluded unless `-ed`
* the up2k.db is always excluded
* `zip_crc` will take longer to download since the server has to read each file twice
* please let me know if you find a program old enough to actually need this
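for scripted downloads, here's a rough python sketch using the `?tar` suffix from the table above (the host/port and the `/music/` folder are just placeholders, not part of any real setup):
```py
# stream a folder as plain gnutar and unpack it locally
import tarfile
import urllib.request

with urllib.request.urlopen("http://127.0.0.1:3923/music/?tar") as resp:
    with tarfile.open(fileobj=resp, mode="r|") as tf:  # "r|" = non-seekable stream
        tf.extractall("music")
```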
you can also zip a selection of files or folders by clicking them in the browser, that brings up a selection editor and zip button in the bottom right
![copyparty-zipsel-fs8](https://user-images.githubusercontent.com/241032/116008321-372a2e00-a614-11eb-9a4a-4a1fd9074224.png)
## uploading
two upload methods are available in the html client:
* `🎈 bup`, the basic uploader, supports almost every browser since netscape 4.0
* `🚀 up2k`, the fancy one
up2k has several advantages:
* you can drop folders into the browser (files are added recursively)
* files are processed in chunks, and each chunk is checksummed
* uploads resume if they are interrupted (for example by a reboot)
* server detects any corruption; the client reuploads affected chunks
* the client doesn't upload anything that already exists on the server
* the last-modified timestamp of the file is preserved
see [up2k](#up2k) for details on how it works
![copyparty-upload-fs8](https://user-images.githubusercontent.com/241032/115978061-680b5400-a57d-11eb-9ef6-cbb5f60aeccc.png)
**protip:** you can avoid scaring away users with [docs/minimal-up2k.html](docs/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
the up2k UI is the epitome of polished intuitive experiences:
* "parallel uploads" specifies how many chunks to upload at the same time
* `[🏃]` analysis of other files should continue while one is uploading
* `[💭]` ask for confirmation before files are added to the list
* `[💤]` sync uploading between other copyparty tabs so only one is active
* `[🔎]` switch between upload and file-search mode
and then there's the tabs below it,
* `[ok]` is uploads which completed successfully
* `[ng]` is the uploads which failed / got rejected (already exists, ...)
* `[done]` shows a combined list of `[ok]` and `[ng]`, chronological order
* `[busy]` files which are currently hashing, pending-upload, or uploading
* plus up to 3 entries each from `[done]` and `[que]` for context
* `[que]` is all the files that are still queued
### file-search
![copyparty-fsearch-fs8](https://user-images.githubusercontent.com/241032/116008320-36919780-a614-11eb-803f-04162326a700.png)
in the `[🚀 up2k]` tab, after toggling the `[🔎]` switch green, any files/folders you drop onto the dropzone will be hashed on the client-side. Each hash is sent to the server which checks if that file exists somewhere already
files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much
adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files
note that since up2k has to read the file twice, `[🎈 bup]` can be up to 2x faster in extreme cases (if your internet connection is faster than the read-speed of your HDD)
up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well thanks to tls also functioning as an integrity check
## markdown viewer
![copyparty-md-read-fs8](https://user-images.githubusercontent.com/241032/115978057-66419080-a57d-11eb-8539-d2be843991aa.png)
* the document preview has a max-width which is the same as an A4 paper when printed
## other tricks
* you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`
# searching
![copyparty-search-fs8](https://user-images.githubusercontent.com/241032/115978060-6772bd80-a57d-11eb-81d3-174e869b72c3.png)
when started with `-e2dsa` copyparty will scan/index all your files. This avoids duplicates on upload, and also makes the volumes searchable through the web-ui:
* make search queries by `size`/`date`/`directory-path`/`filename`, or...
* drag/drop a local file to see if the same contents exist somewhere on the server, see [file-search](#file-search)
path/name queries are space-separated, AND'ed together, and words are negated with a `-` prefix, so for example:
* path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
* name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9)
add `-e2ts` to also scan/index tags from music files:
## search configuration
searching relies on two databases, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`). Configuration can be done through arguments, volume flags, or a mix of both.
through arguments:
* `-e2d` enables file indexing on upload
* `-e2ds` scans writable folders on startup
* `-e2dsa` scans all mounted volumes (including readonly ones)
* `-e2t` enables metadata indexing on upload
* `-e2ts` scans for tags in all files that don't have tags yet
* `-e2tsr` deletes all existing tags, so a full reindex
the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
* `-v ~/music::r:ce2dsa:ce2tsr` does a full reindex of everything on startup
* `-v ~/music::r:cd2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::r:cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`
`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those
the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
## metadata from audio files
`-mte` decides which tags to index and display in the browser (and also the display order), this can be changed per-volume:
* `-v ~/music::r:cmte=title,artist` indexes and displays *title* followed by *artist*
if you add/remove a tag from `mte` you will need to run with `-e2tsr` once to rebuild the database, otherwise only new files will be affected
`-mtm` can be used to add or redefine a metadata mapping, say you have media files with `foo` and `bar` tags and you want them to display as `qux` in the browser (preferring `foo` if both are present), then do `-mtm qux=foo,bar` and now you can `-mte artist,title,qux`
tags that start with a `.` such as `.bpm` and `.dur`(ation) indicate numeric value
see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)
`--no-mutagen` disables mutagen and uses ffprobe instead, which...
* is about 20x slower than mutagen
* catches a few tags that mutagen doesn't
* melodic key, video resolution, framerate, pixfmt
* avoids pulling any GPL code into copyparty
* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve
## file parser plugins
copyparty can invoke external programs to collect additional metadata for files using `mtp` (as argument or volume flag), there is a default timeout of 30sec
* `-mtp .bpm=~/bin/audio-bpm.py` will execute `~/bin/audio-bpm.py` with the audio file as argument 1 to provide the `.bpm` tag, if that does not exist in the audio metadata
* `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
* `-v ~/music::r:cmtp=.bpm=~/bin/audio-bpm.py:cmtp=key=f,t5,~/bin/audio-key.py` both as a per-volume config wow this is getting ugly
*but wait, there's more!* `-mtp` can be used for non-audio files as well using the `a` flag: `ay` only do audio files, `an` only do non-audio files, or `ad` do all files (d as in dontcare)
* `-mtp ext=an,~/bin/file-ext.py` runs `~/bin/file-ext.py` to get the `ext` tag only if file is not audio (`an`)
* `-mtp arch,built,ver,orig=an,eexe,edll,~/bin/exe.py` runs `~/bin/exe.py` to get properties about windows-binaries only if file is not audio (`an`) and file extension is exe or dll
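as a rough sketch of what such a helper could look like -- going by the examples above it gets the file path as argument 1 and prints the tag value to stdout; the exact contract isn't spelled out here, so treat this as a guess:
```py
#!/usr/bin/env python3
# hypothetical -mtp helper producing an "ext" tag (like the file-ext.py example)
import os
import sys

fpath = sys.argv[1]
ext = os.path.splitext(fpath)[1].lstrip(".").lower()
print(ext or "none")
```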
## complete examples
* read-only music server with bpm and key scanning
`python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts -mtp .bpm=f,audio-bpm.py -mtp key=f,audio-key.py`
# browser support
![copyparty-ie4-fs8](https://user-images.githubusercontent.com/241032/118192791-fb31fe00-b446-11eb-9647-898ea8efc1f7.png)
`ie` = internet-explorer, `ff` = firefox, `c` = chrome, `iOS` = iPhone/iPad, `Andr` = Android
| feature | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
| --------------- | --- | --- | ---- | ---- | ----- | ---- | --- | ---- |
| browse files | yep | yep | yep | yep | yep | yep | yep | yep |
| basic uploader | yep | yep | yep | yep | yep | yep | yep | yep |
| make directory | yep | yep | yep | yep | yep | yep | yep | yep |
| send message | yep | yep | yep | yep | yep | yep | yep | yep |
| set sort order | - | yep | yep | yep | yep | yep | yep | yep |
| zip selection | - | yep | yep | yep | yep | yep | yep | yep |
| directory tree | - | - | `*1` | yep | yep | yep | yep | yep |
| up2k | - | - | yep | yep | yep | yep | yep | yep |
| icons work | - | - | yep | yep | yep | yep | yep | yep |
| markdown editor | - | - | yep | yep | yep | yep | yep | yep |
| markdown viewer | - | - | yep | yep | yep | yep | yep | yep |
| play mp3/m4a | - | yep | yep | yep | yep | yep | yep | yep |
| play ogg/opus | - | - | - | - | yep | yep | `*2` | yep |
* internet explorer 6 to 8 behave the same
* firefox 52 and chrome 49 are the last winxp versions
* `*1` only public folders (login session is dropped) and no history / back-button
* `*2` using a wasm decoder which can sometimes get stuck and consumes a bit more power
quick summary of more eccentric web-browsers trying to view a directory index:
| browser | will it blend |
| ------- | ------------- |
| **safari** (14.0.3/macos) | is chrome with janky wasm, so playing opus can deadlock the javascript engine |
| **safari** (14.0.1/iOS) | same as macos, except it recovers from the deadlocks if you poke it a bit |
| **links** (2.21/macports) | can browse, login, upload/mkdir/msg |
| **lynx** (2.8.9/macports) | can browse, login, upload/mkdir/msg |
| **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
| **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
| **ie4** and **netscape** 4.0 | can browse (text is yellow on white), upload with `?b=u` |
| **SerenityOS** (22d13d8) | hits a page fault, works with `?b=u`, file input not-impl, url params are multiplying |
# client examples
* javascript: dump some state into a file (two separate examples)
* `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
* `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
* curl/wget: upload some files (post=file, chunk=stdin)
* `post(){ curl -b cppwd=wark http://127.0.0.1:3923/ -F act=bput -F f=@"$1";}`
`post movie.mkv`
* `post(){ wget --header='Cookie: cppwd=wark' http://127.0.0.1:3923/?raw --post-file="$1" -O-;}`
`post movie.mkv`
* `chunk(){ curl -b cppwd=wark http://127.0.0.1:3923/ -T-;}`
`chunk <movie.mkv`
* FUSE: mount a copyparty server as a local filesystem
* cross-platform python client available in [./bin/](bin/)
* [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
* sharex (screenshot utility): see [./contrib/sharex.sxcu](contrib/#sharexsxcu)
copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads:
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
b512 <movie.mkv
# up2k
quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
* the up2k client splits a file into an "optimal" number of chunks
* 1 MiB each, unless that becomes more than 256 chunks
* tries 1.5M, 2M, 3, 4, 6, ... until <= 256 chunks or size >= 32M
* client posts the list of hashes, filename, size, last-modified
* server creates the `wark`, an identifier for this upload
* `sha512( salt + filesize + chunk_hashes )`
* and a sparse file is created for the chunks to drop into
* client uploads each chunk
* header entries for the chunk-hash and wark
* server writes chunks into place based on the hash
* client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload
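a rough python sketch of the outline above; the chunk-size progression and hash/encoding details are simplified guesses, not the actual up2k client:
```py
import hashlib

def chunk_size(filesize):
    # 1 MiB unless that gives more than 256 chunks, then 1.5M, 2M, 3M, 4M, 6M, ...
    # until <= 256 chunks or the chunk size reaches 32 MiB
    for mib in [1, 1.5, 2, 3, 4, 6, 8, 12, 16, 24, 32]:
        chunk = int(mib * 1024 * 1024)
        if filesize <= chunk * 256 or chunk >= 32 * 1024 * 1024:
            return chunk

def chunk_hashes(path, chunk):
    # hash each chunk separately so corrupt/missing ones can be re-requested
    with open(path, "rb") as f:
        while True:
            buf = f.read(chunk)
            if not buf:
                return
            yield hashlib.sha512(buf).hexdigest()

def wark(salt, filesize, hashes):
    # "sha512( salt + filesize + chunk_hashes )" per the outline above
    h = hashlib.sha512()
    h.update(salt.encode())
    h.update(str(filesize).encode())
    for ch in hashes:
        h.update(ch.encode())
    return h.hexdigest()
```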
# dependencies
* `jinja2`
* pulls in `markupsafe` as of v2.7; use jinja 2.6 on py3.2
* `jinja2` (is built into the SFX)
optional, enables thumbnails:
## optional dependencies
enable music tags:
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
* or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
enable image thumbnails:
* `Pillow` (requires py2.7 or py3.5+)
enable video thumbnails:
* `ffmpeg` and `ffprobe` somewhere in `$PATH`
enable reading HEIF pictures:
* `pyheif-pillow-opener` (requires Linux or a C compiler)
enable reading AVIF pictures:
* `pillow-avif-plugin`
## install recommended deps
```
python -m pip install --user -U jinja2 mutagen Pillow
```
## optional gpl stuff
some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag)
these are standalone programs and will never be imported / evaluated by copyparty
# sfx
currently there are two self-contained binaries:
* `copyparty-sfx.sh` for unix (linux and osx) -- smaller, more robust
* `copyparty-sfx.py` for windows (unix too) -- crossplatform, beta
currently there are two self-contained "binaries":
* [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere, **recommended**
* [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos, kinda deprecated
launch either of them and it'll unpack and run copyparty, assuming you have python installed of course
launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course
pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `copyparty.py` and keep it in the same folder because `sys.path` is funky
## sfx repack
if you don't need all the features you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except for either msys2 or WSL if you're on windows)
* `724K` original size as of v0.4.0
* `256K` after `./scripts/make-sfx.sh re no-ogv`
@@ -70,16 +482,18 @@ the features you can opt to drop are
for the `re`pack to work, first run one of the sfx'es once to unpack it
**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a `no-ogv no-cm` repack; works on linux/macos (and windows with msys2 or WSL)
# install on android
install [Termux](https://termux.com/) (see [ocv.me/termux](https://ocv.me/termux/)) and then copy-paste this into Termux (long-tap) all at once:
```sh
apt update && apt -y full-upgrade && termux-setup-storage && apt -y install curl && cd && curl -L https://github.com/9001/copyparty/raw/master/scripts/copyparty-android.sh > copyparty-android.sh && chmod 755 copyparty-android.sh && ./copyparty-android.sh -h
apt update && apt -y full-upgrade && termux-setup-storage && apt -y install python && python -m ensurepip && python -m pip install -U copyparty
echo $?
```
after the initial setup (and restarting bash), you can launch copyparty at any time by running "copyparty" in Termux
after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux
# dev env setup
@@ -98,6 +512,7 @@ pip install black bandit pylint flake8 # vscode tooling
in the `scripts` folder:
* run `make -C deps-docker` to build all dependencies
* `git tag v1.2.3 && git push origin --tags`
* create github release with `make-tgz-release.sh`
* upload to pypi with `make-pypi-release.(sh|bat)`
* create sfx with `make-sfx.sh`
@@ -107,13 +522,25 @@ in the `scripts` folder:
roughly sorted by priority
* up2k handle filename too long
* up2k fails on empty files? alert then stuck
* drop onto folders
* look into android thumbnail cache file format
* support pillow-simd
* readme.md as epilogue
* single sha512 across all up2k chunks? maybe
* reduce up2k roundtrips
* start from a chunk index and just go
* terminate client on bad data
discarded ideas
* separate sqlite table per tag
* performance fixed by skipping some indexes (`+mt.k`)
* audio fingerprinting
* only makes sense if there can be a wasm client and that doesn't exist yet (except for olaf which is agpl hence counts as not existing)
* `os.copy_file_range` for up2k cloning
* almost never hit this path anyways
* up2k partials ui
* feels like there isn't much point
* cache sha512 chunks on client
* symlink existing files on upload
* too dangerous
* comment field
* figure out the deal with pixel3a not being connectable as hotspot
* pixel3a having unpredictable 3sec latency in general :||||
* nah
* look into android thumbnail cache file format
* absolutely not

62
bin/README.md Normal file

@@ -0,0 +1,62 @@
# [`copyparty-fuse.py`](copyparty-fuse.py)
* mount a copyparty server as a local filesystem (read-only)
* **supports Windows!** -- expect `194 MiB/s` sequential read
* **supports Linux** -- expect `117 MiB/s` sequential read
* **supports macos** -- expect `85 MiB/s` sequential read
filecache is default-on for windows and macos;
* macos readsize is 64kB, so speed ~32 MiB/s without the cache
* windows readsize varies by software; explorer=1M, pv=32k
note that copyparty should run with `-ed` to enable dotfiles (hidden otherwise)
also consider using [../docs/rclone.md](../docs/rclone.md) instead for 5x performance
## to run this on windows:
* install [winfsp](https://github.com/billziss-gh/winfsp/releases/latest) and [python 3](https://www.python.org/downloads/)
* [x] add python 3.x to PATH (it asks during install)
* `python -m pip install --user fusepy`
* `python ./copyparty-fuse.py n: http://192.168.1.69:3923/`
10% faster in [msys2](https://www.msys2.org/), 700% faster if debug prints are enabled:
* `pacman -S mingw64/mingw-w64-x86_64-python{,-pip}`
* `/mingw64/bin/python3 -m pip install --user fusepy`
* `/mingw64/bin/python3 ./copyparty-fuse.py [...]`
you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releases/latest), let me know if you [figure out how](https://github.com/dokan-dev/dokany/wiki/FUSE)
(winfsp's sshfs leaks, doesn't look like winfsp itself does, should be fine)
# [`copyparty-fuse🅱.py`](copyparty-fuseb.py)
* mount a copyparty server as a local filesystem (read-only)
* does the same thing except more correct, `samba` approves
* **supports Linux** -- expect `18 MiB/s` (wait what)
* **supports Macos** -- probably
# [`copyparty-fuse-streaming.py`](copyparty-fuse-streaming.py)
* pretend this doesn't exist
# [`mtag/`](mtag/)
* standalone programs which perform misc. file analysis
* copyparty can Popen programs like these during file indexing to collect additional metadata
# [`dbtool.py`](dbtool.py)
upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty recommends to wipe the DB and reindex because it now collects additional metadata during analysis, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
for that example (upgrading to v0.11.0), first move the old db aside, launch copyparty, let it rebuild the db until the point where it starts running mtp (colored messages as it adds the mtp tags), then CTRL-C and patch in the old mtp tags from the old db instead
so assuming you have `-mtp` parsers to provide the tags `key` and `.bpm`:
```
~/bin/dbtool.py -ls up2k.db
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -cmp
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy key
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy .bpm -vac
```

1100
bin/copyparty-fuse-streaming.py Executable file

File diff suppressed because it is too large

748
bin/copyparty-fuse.py Normal file → Executable file

File diff suppressed because it is too large

592
bin/copyparty-fuseb.py Executable file

@@ -0,0 +1,592 @@
#!/usr/bin/env python3
from __future__ import print_function, unicode_literals
"""copyparty-fuseb: remote copyparty as a local filesystem"""
__author__ = "ed <copyparty@ocv.me>"
__copyright__ = 2020
__license__ = "MIT"
__url__ = "https://github.com/9001/copyparty/"
import re
import os
import sys
import time
import stat
import errno
import struct
import threading
import http.client # py2: httplib
import urllib.parse
from datetime import datetime
from urllib.parse import quote_from_bytes as quote
try:
import fuse
from fuse import Fuse
fuse.fuse_python_api = (0, 2)
if not hasattr(fuse, "__version__"):
raise Exception("your fuse-python is way old")
except:
print(
"\n could not import fuse; these may help:\n python3 -m pip install --user fuse-python\n apt install libfuse\n modprobe fuse\n"
)
raise
"""
mount a copyparty server (local or remote) as a filesystem
usage:
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas
dependencies:
sudo apk add fuse-dev python3-dev
python3 -m pip install --user fuse-python
fork of copyparty-fuse.py based on fuse-python which
appears to be more compliant than fusepy? since this works with samba
(probably just my garbage code tbh)
"""
def threadless_log(msg):
print(msg + "\n", end="")
def boring_log(msg):
msg = "\033[36m{:012x}\033[0m {}\n".format(threading.current_thread().ident, msg)
print(msg[4:], end="")
def rice_tid():
tid = threading.current_thread().ident
c = struct.unpack(b"B" * 5, struct.pack(b">Q", tid)[-5:])
return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m"
def fancy_log(msg):
print("{} {}\n".format(rice_tid(), msg), end="")
def null_log(msg):
pass
info = fancy_log
log = fancy_log
dbg = fancy_log
log = null_log
dbg = null_log
def get_tid():
return threading.current_thread().ident
def html_dec(txt):
return (
txt.replace("&lt;", "<")
.replace("&gt;", ">")
.replace("&quot;", '"')
.replace("&amp;", "&")
)
class CacheNode(object):
def __init__(self, tag, data):
self.tag = tag
self.data = data
self.ts = time.time()
class Stat(fuse.Stat):
def __init__(self):
self.st_mode = 0
self.st_ino = 0
self.st_dev = 0
self.st_nlink = 1
self.st_uid = 1000
self.st_gid = 1000
self.st_size = 0
self.st_atime = 0
self.st_mtime = 0
self.st_ctime = 0
class Gateway(object):
def __init__(self, base_url):
self.base_url = base_url
ui = urllib.parse.urlparse(base_url)
self.web_root = ui.path.strip("/")
try:
self.web_host, self.web_port = ui.netloc.split(":")
self.web_port = int(self.web_port)
except:
self.web_host = ui.netloc
if ui.scheme == "http":
self.web_port = 80
elif ui.scheme == "https":
raise Exception("todo")
else:
raise Exception("bad url?")
self.conns = {}
def quotep(self, path):
# TODO: mojibake support
path = path.encode("utf-8", "ignore")
return quote(path, safe="/")
def getconn(self, tid=None):
tid = tid or get_tid()
try:
return self.conns[tid]
except:
info("new conn [{}] [{}]".format(self.web_host, self.web_port))
conn = http.client.HTTPConnection(self.web_host, self.web_port, timeout=260)
self.conns[tid] = conn
return conn
def closeconn(self, tid=None):
tid = tid or get_tid()
try:
self.conns[tid].close()
del self.conns[tid]
except:
pass
def sendreq(self, *args, **kwargs):
tid = get_tid()
try:
c = self.getconn(tid)
c.request(*list(args), **kwargs)
return c.getresponse()
except:
self.closeconn(tid)
c = self.getconn(tid)
c.request(*list(args), **kwargs)
return c.getresponse()
def listdir(self, path):
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
r = self.sendreq("GET", web_path)
if r.status != 200:
self.closeconn()
raise Exception(
"http error {} reading dir {} in {}".format(
r.status, web_path, rice_tid()
)
)
return self.parse_html(r)
def download_file_range(self, path, ofs1, ofs2):
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
log("downloading {}".format(hdr_range))
r = self.sendreq("GET", web_path, headers={"Range": hdr_range})
if r.status != http.client.PARTIAL_CONTENT:
self.closeconn()
raise Exception(
"http error {} reading file {} range {} in {}".format(
r.status, web_path, hdr_range, rice_tid()
)
)
return r.read()
def parse_html(self, datasrc):
ret = []
remainder = b""
ptn = re.compile(
r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
)
while True:
buf = remainder + datasrc.read(4096)
# print('[{}]'.format(buf.decode('utf-8')))
if not buf:
break
remainder = b""
endpos = buf.rfind(b"\n")
if endpos >= 0:
remainder = buf[endpos + 1 :]
buf = buf[:endpos]
lines = buf.decode("utf-8").split("\n")
for line in lines:
m = ptn.match(line)
if not m:
# print(line)
continue
ftype, fname, fsize, fdate = m.groups()
fname = html_dec(fname)
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
sz = int(fsize)
if ftype == "-":
ret.append([fname, self.stat_file(ts, sz), 0])
else:
ret.append([fname, self.stat_dir(ts, sz), 0])
return ret
def stat_dir(self, ts, sz=4096):
ret = Stat()
ret.st_mode = stat.S_IFDIR | 0o555
ret.st_nlink = 2
ret.st_size = sz
ret.st_atime = ts
ret.st_mtime = ts
ret.st_ctime = ts
return ret
def stat_file(self, ts, sz):
ret = Stat()
ret.st_mode = stat.S_IFREG | 0o444
ret.st_size = sz
ret.st_atime = ts
ret.st_mtime = ts
ret.st_ctime = ts
return ret
class CPPF(Fuse):
def __init__(self, *args, **kwargs):
Fuse.__init__(self, *args, **kwargs)
self.url = None
self.dircache = []
self.dircache_mtx = threading.Lock()
self.filecache = []
self.filecache_mtx = threading.Lock()
def init2(self):
# TODO figure out how python-fuse wanted this to go
self.gw = Gateway(self.url) # .decode('utf-8'))
info("up")
def clean_dircache(self):
"""not threadsafe"""
now = time.time()
cutoff = 0
for cn in self.dircache:
if now - cn.ts > 1:
cutoff += 1
else:
break
if cutoff > 0:
self.dircache = self.dircache[cutoff:]
def get_cached_dir(self, dirpath):
# with self.dircache_mtx:
if True:
self.clean_dircache()
for cn in self.dircache:
if cn.tag == dirpath:
return cn
return None
"""
,-------------------------------, g1>=c1, g2<=c2
|cache1 cache2| buf[g1-c1:(g1-c1)+(g2-g1)]
`-------------------------------'
,---------------,
|get1 get2|
`---------------'
__________________________________________________________________________
,-------------------------------, g2<=c2, (g2>=c1)
|cache1 cache2| cdr=buf[:g2-c1]
`-------------------------------' dl car; g1-512K:c1
,---------------,
|get1 get2|
`---------------'
__________________________________________________________________________
,-------------------------------, g1>=c1, (g1<=c2)
|cache1 cache2| car=buf[c2-g1:]
`-------------------------------' dl cdr; c2:c2+1M
,---------------,
|get1 get2|
`---------------'
"""
def get_cached_file(self, path, get1, get2, file_sz):
car = None
cdr = None
ncn = -1
# with self.filecache_mtx:
if True:
dbg("cache request from {} to {}, size {}".format(get1, get2, file_sz))
for cn in self.filecache:
ncn += 1
cache_path, cache1 = cn.tag
if cache_path != path:
continue
cache2 = cache1 + len(cn.data)
if get2 <= cache1 or get1 >= cache2:
continue
if get1 >= cache1 and get2 <= cache2:
# keep cache entry alive by moving it to the end
self.filecache = (
self.filecache[:ncn] + self.filecache[ncn + 1 :] + [cn]
)
buf_ofs = get1 - cache1
buf_end = buf_ofs + (get2 - get1)
dbg(
"found all ({}, {} to {}, len {}) [{}:{}] = {}".format(
ncn,
cache1,
cache2,
len(cn.data),
buf_ofs,
buf_end,
buf_end - buf_ofs,
)
)
return cn.data[buf_ofs:buf_end]
if get2 < cache2:
x = cn.data[: get2 - cache1]
if not cdr or len(cdr) < len(x):
dbg(
"found car ({}, {} to {}, len {}) [:{}-{}] = [:{}] = {}".format(
ncn,
cache1,
cache2,
len(cn.data),
get2,
cache1,
get2 - cache1,
len(x),
)
)
cdr = x
continue
if get1 > cache1:
x = cn.data[-(cache2 - get1) :]
if not car or len(car) < len(x):
dbg(
"found cdr ({}, {} to {}, len {}) [-({}-{}):] = [-{}:] = {}".format(
ncn,
cache1,
cache2,
len(cn.data),
cache2,
get1,
cache2 - get1,
len(x),
)
)
car = x
continue
raise Exception("what")
if car and cdr:
dbg("<cache> have both")
ret = car + cdr
if len(ret) == get2 - get1:
return ret
raise Exception("{} + {} != {} - {}".format(len(car), len(cdr), get2, get1))
elif cdr:
h_end = get1 + (get2 - get1) - len(cdr)
h_ofs = h_end - 512 * 1024
if h_ofs < 0:
h_ofs = 0
buf_ofs = (get2 - get1) - len(cdr)
dbg(
"<cache> cdr {}, car {}-{}={} [-{}:]".format(
len(cdr), h_ofs, h_end, h_end - h_ofs, buf_ofs
)
)
buf = self.gw.download_file_range(path, h_ofs, h_end)
ret = buf[-buf_ofs:] + cdr
elif car:
h_ofs = get1 + len(car)
h_end = h_ofs + 1024 * 1024
if h_end > file_sz:
h_end = file_sz
buf_ofs = (get2 - get1) - len(car)
dbg(
"<cache> car {}, cdr {}-{}={} [:{}]".format(
len(car), h_ofs, h_end, h_end - h_ofs, buf_ofs
)
)
buf = self.gw.download_file_range(path, h_ofs, h_end)
ret = car + buf[:buf_ofs]
else:
h_ofs = get1 - 256 * 1024
h_end = get2 + 1024 * 1024
if h_ofs < 0:
h_ofs = 0
if h_end > file_sz:
h_end = file_sz
buf_ofs = get1 - h_ofs
buf_end = buf_ofs + get2 - get1
dbg(
"<cache> {}-{}={} [{}:{}]".format(
h_ofs, h_end, h_end - h_ofs, buf_ofs, buf_end
)
)
buf = self.gw.download_file_range(path, h_ofs, h_end)
ret = buf[buf_ofs:buf_end]
cn = CacheNode([path, h_ofs], buf)
# with self.filecache_mtx:
if True:
if len(self.filecache) > 6:
self.filecache = self.filecache[1:] + [cn]
else:
self.filecache.append(cn)
return ret
def _readdir(self, path):
path = path.strip("/")
log("readdir {}".format(path))
ret = self.gw.listdir(path)
# with self.dircache_mtx:
if True:
cn = CacheNode(path, ret)
self.dircache.append(cn)
self.clean_dircache()
return ret
def readdir(self, path, offset):
for e in self._readdir(path)[offset:]:
# log("yield [{}]".format(e[0]))
yield fuse.Direntry(e[0])
def open(self, path, flags):
if (flags & (os.O_RDONLY | os.O_WRONLY | os.O_RDWR)) != os.O_RDONLY:
return -errno.EACCES
st = self.getattr(path)
try:
if st.st_nlink > 0:
return st
except:
return st # -int(os.errcode)
def read(self, path, length, offset, fh=None, *args):
if args:
log("unexpected args [" + "] [".join(repr(x) for x in args) + "]")
raise Exception()
path = path.strip("/")
ofs2 = offset + length
log("read {} @ {} len {} end {}".format(path, offset, length, ofs2))
st = self.getattr(path)
try:
file_sz = st.st_size
except:
return st # -int(os.errcode)
if ofs2 > file_sz:
ofs2 = file_sz
log("truncate to len {} end {}".format(ofs2 - offset, ofs2))
if file_sz == 0 or offset >= ofs2:
return b""
# toggle cache here i suppose
# return self.get_cached_file(path, offset, ofs2, file_sz)
return self.gw.download_file_range(path, offset, ofs2)
def getattr(self, path):
log("getattr [{}]".format(path))
path = path.strip("/")
try:
dirpath, fname = path.rsplit("/", 1)
except:
dirpath = ""
fname = path
if not path:
ret = self.gw.stat_dir(time.time())
dbg("=root")
return ret
cn = self.get_cached_dir(dirpath)
if cn:
log("cache ok")
dents = cn.data
else:
log("cache miss")
dents = self._readdir(dirpath)
for cache_name, cache_stat, _ in dents:
if cache_name == fname:
dbg("=file")
return cache_stat
log("=404")
return -errno.ENOENT
def main():
time.strptime("19970815", "%Y%m%d") # python#7980
server = CPPF()
server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
server.parse(values=server, errex=1)
if not server.url or not str(server.url).startswith("http"):
print("\nerror:")
print(" need argument: -o url=<...>")
print(" need argument: mount-path")
print("example:")
print(
" ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas"
)
sys.exit(1)
server.init2()
threading.Thread(target=server.main, daemon=True).start()
while True:
time.sleep(9001)
if __name__ == "__main__":
main()

198
bin/dbtool.py Executable file

@@ -0,0 +1,198 @@
#!/usr/bin/env python3
import os
import sys
import sqlite3
import argparse
DB_VER = 3
def die(msg):
print("\033[31m\n" + msg + "\n\033[0m")
sys.exit(1)
def read_ver(db):
for tab in ["ki", "kv"]:
try:
c = db.execute(r"select v from {} where k = 'sver'".format(tab))
except:
continue
rows = c.fetchall()
if rows:
return int(rows[0][0])
return "corrupt"
def ls(db):
nfiles = next(db.execute("select count(w) from up"))[0]
ntags = next(db.execute("select count(w) from mt"))[0]
print(f"{nfiles} files")
print(f"{ntags} tags\n")
print("number of occurences for each tag,")
print(" 'x' = file has no tags")
print(" 't:mtp' = the mtp flag (file not mtp processed yet)")
print()
for k, nk in db.execute("select k, count(k) from mt group by k order by k"):
print(f"{nk:9} {k}")
def compare(n1, d1, n2, d2, verbose):
nt = next(d1.execute("select count(w) from up"))[0]
n = 0
miss = 0
for w, rd, fn in d1.execute("select w, rd, fn from up"):
n += 1
if n % 25_000 == 0:
m = f"\033[36mchecked {n:,} of {nt:,} files in {n1} against {n2}\033[0m"
print(m)
q = "select w from up where substr(w,1,16) = ?"
hit = d2.execute(q, (w[:16],)).fetchone()
if not hit:
miss += 1
if verbose:
print(f"file in {n1} missing in {n2}: [{w}] {rd}/{fn}")
print(f" {miss} files in {n1} missing in {n2}\n")
nt = next(d1.execute("select count(w) from mt"))[0]
n = 0
miss = {}
nmiss = 0
for w, k, v in d1.execute("select * from mt"):
n += 1
if n % 100_000 == 0:
m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m"
print(m)
v2 = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
if v2:
v2 = v2[0]
# if v != v2 and v2 and k in [".bpm", "key"] and n2 == "src":
# print(f"{w} [{rd}/{fn}] {k} = [{v}] / [{v2}]")
if v2 is not None:
if k.startswith("."):
try:
diff = abs(float(v) - float(v2))
if diff > float(v) / 0.9:
v2 = None
else:
v2 = v
except:
pass
if v != v2:
v2 = None
if v2 is None:
nmiss += 1
try:
miss[k] += 1
except:
miss[k] = 1
if verbose:
q = "select rd, fn from up where substr(w,1,16) = ?"
rd, fn = d1.execute(q, (w,)).fetchone()
print(f"missing in {n2}: [{w}] [{rd}/{fn}] {k} = {v}")
for k, v in sorted(miss.items()):
if v:
print(f"{n1} has {v:6} more {k:<6} tags than {n2}")
print(f"in total, {nmiss} missing tags in {n2}\n")
def copy_mtp(d1, d2, tag, rm):
nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0]
n = 0
ndone = 0
for w, k, v in d1.execute("select * from mt where k = ?", (tag,)):
n += 1
if n % 25_000 == 0:
m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ndone} copied\033[0m"
print(m)
hit = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
if hit:
hit = hit[0]
if hit != v:
ndone += 1
if hit is not None:
d2.execute("delete from mt where w = ? and +k = ?", (w, k))
d2.execute("insert into mt values (?,?,?)", (w, k, v))
if rm:
d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w,))
d2.commit()
print(f"copied {ndone} {tag} tags over")
def main():
os.system("")
print()
ap = argparse.ArgumentParser()
ap.add_argument("db", help="database to work on")
ap.add_argument("-src", metavar="DB", type=str, help="database to copy from")
ap2 = ap.add_argument_group("informational / read-only stuff")
ap2.add_argument("-v", action="store_true", help="verbose")
ap2.add_argument("-ls", action="store_true", help="list summary for db")
ap2.add_argument("-cmp", action="store_true", help="compare databases")
ap2 = ap.add_argument_group("options which modify target db")
ap2.add_argument("-copy", metavar="TAG", type=str, help="mtp tag to copy over")
ap2.add_argument(
"-rm-mtp-flag",
action="store_true",
help="when an mtp tag is copied over, also mark that as done, so copyparty won't run mtp on it",
)
ap2.add_argument("-vac", action="store_true", help="optimize DB")
ar = ap.parse_args()
for v in [ar.db, ar.src]:
if v and not os.path.exists(v):
die("database must exist")
db = sqlite3.connect(ar.db)
ds = sqlite3.connect(ar.src) if ar.src else None
for d, n in [[ds, "src"], [db, "dst"]]:
if not d:
continue
ver = read_ver(d)
if ver == "corrupt":
die("{} database appears to be corrupt, sorry")
if ver != DB_VER:
m = f"{n} db is version {ver}, this tool only supports version {DB_VER}, please upgrade it with copyparty first"
die(m)
if ar.ls:
ls(db)
if ar.cmp:
if not ds:
die("need src db to compare against")
compare("src", ds, "dst", db, ar.v)
compare("dst", db, "src", ds, ar.v)
if ar.copy:
copy_mtp(ds, db, ar.copy, ar.rm_mtp_flag)
if __name__ == "__main__":
main()

34
bin/mtag/README.md Normal file

@@ -0,0 +1,34 @@
standalone programs which take an audio file as argument
some of these rely on libraries which are not MIT-compatible
* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3
# dependencies
run [`install-deps.sh`](install-deps.sh) to build/install most dependencies required by these programs (supports windows/linux/macos)
*alternatively* (or preferably) use packages from your distro instead, then you'll need at least these:
* from distro: `numpy vamp-plugin-sdk beatroot-vamp mixxx-keyfinder ffmpeg`
* from pypi: `keyfinder vamp`
# usage from copyparty
`copyparty -e2dsa -e2ts -mtp key=f,audio-key.py -mtp .bpm=f,audio-bpm.py`
* `f,` makes the detected value replace any existing values
* the `.` in `.bpm` indicates numeric value
* assumes the python files are in the folder you're launching copyparty from, replace the filename with a relative/absolute path if that's not the case
* `mtp` modules will not run if a file has existing tags in the db, so clear out the tags with `-e2tsr` the first time you launch with new `mtp` options
## usage with volume-flags
instead of affecting all volumes, you can set the options for just one volume like so:
```
copyparty -v /mnt/nas/music:/music:r:cmtp=key=f,audio-key.py:cmtp=.bpm=f,audio-bpm.py:ce2dsa:ce2ts
```

69
bin/mtag/audio-bpm.py Executable file

@@ -0,0 +1,69 @@
#!/usr/bin/env python
import os
import sys
import vamp
import tempfile
import numpy as np
import subprocess as sp
from copyparty.util import fsenc
"""
dep: vamp
dep: beatroot-vamp
dep: ffmpeg
"""
def det(tf):
# fmt: off
sp.check_call([
"ffmpeg",
"-nostdin",
"-hide_banner",
"-v", "fatal",
"-ss", "13",
"-y", "-i", fsenc(sys.argv[1]),
"-ac", "1",
"-ar", "22050",
"-t", "300",
"-f", "f32le",
tf
])
# fmt: on
with open(tf, "rb") as f:
d = np.fromfile(f, dtype=np.float32)
try:
# 98% accuracy on jcore
c = vamp.collect(d, 22050, "beatroot-vamp:beatroot")
cl = c["list"]
except:
# fallback; 73% accuracy
plug = "vamp-example-plugins:fixedtempo"
c = vamp.collect(d, 22050, plug, parameters={"maxdflen": 40})
print(c["list"][0]["label"].split(" ")[0])
return
# throws if detection failed:
bpm = float(cl[-1]["timestamp"] - cl[1]["timestamp"])
bpm = round(60 * ((len(cl) - 1) / bpm), 2)
print(f"{bpm:.2f}")
def main():
with tempfile.NamedTemporaryFile(suffix=".pcm", delete=False) as f:
f.write(b"h")
tf = f.name
try:
det(tf)
except:
pass
finally:
os.unlink(tf)
if __name__ == "__main__":
main()

18
bin/mtag/audio-key.py Executable file

@@ -0,0 +1,18 @@
#!/usr/bin/env python
import sys
import keyfinder
"""
dep: github/mixxxdj/libkeyfinder
dep: pypi/keyfinder
dep: ffmpeg
note: cannot fsenc
"""
try:
print(keyfinder.key(sys.argv[1]).camelot())
except:
pass

96
bin/mtag/exe.py Normal file

@@ -0,0 +1,96 @@
#!/usr/bin/env python
import sys
import time
import json
import pefile
"""
retrieve exe info,
example for multivalue providers
"""
def unk(v):
return "unk({:04x})".format(v)
class PE2(pefile.PE):
def __init__(self, *a, **ka):
for k in [
# -- parse_data_directories:
"parse_import_directory",
"parse_export_directory",
# "parse_resources_directory",
"parse_debug_directory",
"parse_relocations_directory",
"parse_directory_tls",
"parse_directory_load_config",
"parse_delay_import_directory",
"parse_directory_bound_imports",
# -- full_load:
"parse_rich_header",
]:
setattr(self, k, self.noop)
super(PE2, self).__init__(*a, **ka)
def noop(*a, **ka):
pass
try:
pe = PE2(sys.argv[1], fast_load=False)
except:
sys.exit(0)
arch = pe.FILE_HEADER.Machine
if arch == 0x14C:
arch = "x86"
elif arch == 0x8664:
arch = "x64"
else:
arch = unk(arch)
try:
buildtime = time.gmtime(pe.FILE_HEADER.TimeDateStamp)
buildtime = time.strftime("%Y-%m-%d_%H:%M:%S", buildtime)
except:
buildtime = "invalid"
ui = pe.OPTIONAL_HEADER.Subsystem
if ui == 2:
ui = "GUI"
elif ui == 3:
ui = "cmdline"
else:
ui = unk(ui)
extra = {}
if hasattr(pe, "FileInfo"):
for v1 in pe.FileInfo:
for v2 in v1:
if v2.name != "StringFileInfo":
continue
for v3 in v2.StringTable:
for k, v in v3.entries.items():
v = v.decode("utf-8", "replace").strip()
if not v:
continue
if k in [b"FileVersion", b"ProductVersion"]:
extra["ver"] = v
if k in [b"OriginalFilename", b"InternalName"]:
extra["orig"] = v
r = {
"arch": arch,
"built": buildtime,
"ui": ui,
"cksum": "{:08x}".format(pe.OPTIONAL_HEADER.CheckSum),
}
r.update(extra)
print(json.dumps(r, indent=4))

9
bin/mtag/file-ext.py Normal file

@@ -0,0 +1,9 @@
#!/usr/bin/env python
import sys
"""
example that just prints the file extension
"""
print(sys.argv[1].split(".")[-1])

265
bin/mtag/install-deps.sh Executable file

@@ -0,0 +1,265 @@
#!/bin/bash
set -e
# install dependencies for audio-*.py
#
# linux: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf
# win64: requires msys2-mingw64 environment
# macos: requires macports
#
# has the following manual dependencies, especially on mac:
# https://www.vamp-plugins.org/pack.html
#
# installs stuff to the following locations:
# ~/pe/
# whatever your python uses for --user packages
#
# does the following terrible things:
# modifies the keyfinder python lib to load the .so in ~/pe
linux=1
win=
[ ! -z "$MSYSTEM" ] || [ -e /msys2.exe ] && {
[ "$MSYSTEM" = MINGW64 ] || {
echo windows detected, msys2-mingw64 required
exit 1
}
pacman -S --needed mingw-w64-x86_64-{ffmpeg,python,python-pip,vamp-plugin-sdk}
win=1
linux=
}
mac=
[ $(uname -s) = Darwin ] && {
#pybin="$(printf '%s\n' /opt/local/bin/python* | (sed -E 's/(.*\/[^/0-9]+)([0-9]?[^/]*)$/\2 \1/' || cat) | (sort -nr || cat) | (sed -E 's/([^ ]*) (.*)/\2\1/' || cat) | grep -E '/(python|pypy)[0-9\.-]*$' | head -n 1)"
pybin=/opt/local/bin/python3.9
[ -e "$pybin" ] || {
echo mac detected, python3 from macports required
exit 1
}
pkgs='ffmpeg python39 py39-wheel'
ninst=$(port installed | awk '/^ /{print$1}' | sort | uniq | grep -E '^('"$(echo "$pkgs" | tr ' ' '|')"')$' | wc -l)
[ $ninst -eq 3 ] || {
sudo port install $pkgs
}
mac=1
linux=
}
hash -r
[ $mac ] || {
command -v python3 && pybin=python3 || pybin=python
}
$pybin -m pip install --user numpy
command -v gnutar && tar() { gnutar "$@"; }
command -v gtar && tar() { gtar "$@"; }
command -v gsed && sed() { gsed "$@"; }
need() {
command -v $1 >/dev/null || {
echo need $1
exit 1
}
}
need cmake
need ffmpeg
need $pybin
#need patchelf
td="$(mktemp -d)"
cln() {
rm -rf "$td"
}
trap cln EXIT
cd "$td"
pwd
dl_text() {
command -v curl >/dev/null && exec curl "$@"
exec wget -O- "$@"
}
dl_files() {
local yolo= ex=
[ $1 = "yolo" ] && yolo=1 && ex=k && shift
command -v curl >/dev/null && exec curl -${ex}JOL "$@"
[ $yolo ] && ex=--no-check-certificate
exec wget --trust-server-names $ex "$@"
}
export -f dl_files
github_tarball() {
dl_text "$1" |
tee json |
(
# prefer jq if available
jq -r '.tarball_url' ||
# fallback to awk (sorry)
awk -F\" '/"tarball_url": "/ {print$4}'
) |
tee /dev/stderr |
tr -d '\r' | tr '\n' '\0' |
xargs -0 bash -c 'dl_files "$@"' _
}
gitlab_tarball() {
dl_text "$1" |
tee json |
(
# prefer jq if available
jq -r '.[0].assets.sources[]|select(.format|test("tar.gz")).url' ||
# fallback to abomination
tr \" '\n' | grep -E '\.tar\.gz$' | head -n 1
) |
tee /dev/stderr |
tr -d '\r' | tr '\n' '\0' |
tee links |
xargs -0 bash -c 'dl_files "$@"' _
}
install_keyfinder() {
# windows support:
# use msys2 in mingw-w64 mode
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python}
github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest
tar -xf mixxxdj-libkeyfinder-*
rm -- *.tar.gz
cd mixxxdj-libkeyfinder*
h="$HOME"
so="lib/libkeyfinder.so"
memes=()
[ $win ] &&
so="bin/libkeyfinder.dll" &&
h="$(printf '%s\n' "$USERPROFILE" | tr '\\' '/')" &&
memes+=(-G "MinGW Makefiles" -DBUILD_TESTING=OFF)
[ $mac ] &&
so="lib/libkeyfinder.dylib"
cmake -DCMAKE_INSTALL_PREFIX="$h/pe/keyfinder" "${memes[@]}" -S . -B build
cmake --build build --parallel $(nproc || echo 4)
cmake --install build
libpath="$h/pe/keyfinder/$so"
[ $linux ] && [ ! -e "$libpath" ] &&
so=lib64/libkeyfinder.so
libpath="$h/pe/keyfinder/$so"
[ -e "$libpath" ] || {
echo "so not found at $sop"
exit 1
}
# rm -rf /Users/ed/Library/Python/3.9/lib/python/site-packages/*keyfinder*
CFLAGS="-I$h/pe/keyfinder/include -I/opt/local/include" \
LDFLAGS="-L$h/pe/keyfinder/lib -L$h/pe/keyfinder/lib64 -L/opt/local/lib" \
PKG_CONFIG_PATH=/c/msys64/mingw64/lib/pkgconfig \
$pybin -m pip install --user keyfinder
pypath="$($pybin -c 'import keyfinder; print(keyfinder.__file__)')"
for pyso in "${pypath%/*}"/*.so; do
[ -e "$pyso" ] || break
patchelf --set-rpath "${libpath%/*}" "$pyso" ||
echo "WARNING: patchelf failed (only fatal on musl-based distros)"
done
mv "$pypath"{,.bak}
(
printf 'import ctypes\nctypes.cdll.LoadLibrary("%s")\n' "$libpath"
cat "$pypath.bak"
) >"$pypath"
echo
echo libkeyfinder successfully installed to the following locations:
echo " $libpath"
echo " $pypath"
}
have_beatroot() {
$pybin -c 'import vampyhost, sys; plugs = vampyhost.list_plugins(); sys.exit(0 if "beatroot-vamp:beatroot" in plugs else 1)'
}
install_vamp() {
# windows support:
# use msys2 in mingw-w64 mode
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python,python-pip,vamp-plugin-sdk}
$pybin -m pip install --user vamp
have_beatroot || {
printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n'
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/885/beatroot-vamp-v1.0.tar.gz)
sha512sum -c <(
echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -"
) <beatroot-vamp-v1.0.tar.gz
tar -xf beatroot-vamp-v1.0.tar.gz
cd beatroot-vamp-v1.0
make -f Makefile.linux -j4
# /home/ed/vamp /home/ed/.vamp /usr/local/lib/vamp
mkdir ~/vamp
cp -pv beatroot-vamp.* ~/vamp/
}
have_beatroot &&
printf '\033[32mfound the vamp beatroot plugin, nice\033[0m\n' ||
printf '\033[31mWARNING: could not find the vamp beatroot plugin, please install it for optimal results\033[0m\n'
}
# not in use because it kinda segfaults, also no windows support
install_soundtouch() {
gitlab_tarball https://gitlab.com/api/v4/projects/soundtouch%2Fsoundtouch/releases
tar -xvf soundtouch-*
rm -- *.tar.gz
cd soundtouch-*
# https://github.com/jrising/pysoundtouch
./bootstrap
./configure --enable-integer-samples CXXFLAGS="-fPIC" --prefix="$HOME/pe/soundtouch"
make -j$(nproc || echo 4)
make install
CFLAGS=-I$HOME/pe/soundtouch/include/ \
LDFLAGS=-L$HOME/pe/soundtouch/lib \
$pybin -m pip install --user git+https://github.com/snowxmas/pysoundtouch.git
pypath="$($pybin -c 'import importlib; print(importlib.util.find_spec("soundtouch").origin)')"
libpath="$(echo "$HOME/pe/soundtouch/lib/")"
patchelf --set-rpath "$libpath" "$pypath"
echo
echo soundtouch successfully installed to the following locations:
echo " $libpath"
echo " $pypath"
}
[ "$1" = keyfinder ] && { install_keyfinder; exit $?; }
[ "$1" = soundtouch ] && { install_soundtouch; exit $?; }
[ "$1" = vamp ] && { install_vamp; exit $?; }
echo no args provided, installing keyfinder and vamp
install_keyfinder
install_vamp

8
bin/mtag/sleep.py Normal file

@@ -0,0 +1,8 @@
#!/usr/bin/env python
import time
import random
v = random.random() * 6
time.sleep(v)
print(f"{v:.2f}")


@@ -118,7 +118,7 @@ printf ']}' >> /dev/shm/$salt.hs
printf '\033[36m'
#curl "http://$target:1234$posturl/handshake.php" -H "Content-Type: text/plain;charset=UTF-8" -H "Cookie: cppwd=$passwd" --data "$(cat "/dev/shm/$salt.hs")" | tee /dev/shm/$salt.res
#curl "http://$target:3923$posturl/handshake.php" -H "Content-Type: text/plain;charset=UTF-8" -H "Cookie: cppwd=$passwd" --data "$(cat "/dev/shm/$salt.hs")" | tee /dev/shm/$salt.res
{
{
@@ -135,7 +135,7 @@ EOF
cat /dev/shm/$salt.hs
} |
tee /dev/shm/$salt.hsb |
ncat $target 1234 |
ncat $target 3923 |
tee /dev/shm/$salt.hs1r
wark="$(cat /dev/shm/$salt.hs1r | getwark)"
@@ -190,7 +190,7 @@ EOF
nchunk=$((nchunk+1))
done |
ncat $target 1234 |
ncat $target 3923 |
tee /dev/shm/$salt.pr
t=$(date +%s.%N)
@@ -201,7 +201,7 @@ t=$(date +%s.%N)
printf '\033[36m'
ncat $target 1234 < /dev/shm/$salt.hsb |
ncat $target 3923 < /dev/shm/$salt.hsb |
tee /dev/shm/$salt.hs2r |
grep -E '"hash": ?\[ *\]'

37
contrib/README.md Normal file

@@ -0,0 +1,37 @@
### [`copyparty.bat`](copyparty.bat)
* launches copyparty with no arguments (anon read+write within same folder)
* intended for windows machines with no python.exe in PATH
* works on windows, linux and macos
* assumes `copyparty-sfx.py` was renamed to `copyparty.py` in the same folder as `copyparty.bat`
### [`index.html`](index.html)
* drop-in redirect from an httpd to copyparty
* assumes the webserver and copyparty are running on the same server/IP
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript
### [`sharex.sxcu`](sharex.sxcu)
* sharex config file to upload screenshots and grab the URL
* `RequestURL`: full URL to the target folder
* `pw`: password (remove the `pw` line if anon-write)
however if your copyparty is behind a reverse-proxy, you may want to use [`sharex-html.sxcu`](sharex-html.sxcu) instead:
* `RequestURL`: full URL to the target folder
* `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$`
* `pw`: password (remove `Parameters` if anon-write)
### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
* disables thumbnails and folder-type detection in windows explorer
* makes it way faster (especially for slow/networked locations (such as copyparty-fuse))
### [`cfssl.sh`](cfssl.sh)
* creates CA and server certificates using cfssl
* give a 3rd argument to install it to your copyparty config
# OS integration
init-scripts to start copyparty as a service
* [`systemd/copyparty.service`](systemd/copyparty.service)
* [`openrc/copyparty`](openrc/copyparty)
# Reverse-proxy
copyparty has basic support for running behind another webserver
* [`nginx/copyparty.conf`](nginx/copyparty.conf)

72
contrib/cfssl.sh Executable file

@@ -0,0 +1,72 @@
#!/bin/bash
set -e
# ca-name and server-name
ca_name="$1"
srv_name="$2"
[ -z "$srv_name" ] && {
echo "need arg 1: ca name"
echo "need arg 2: server name"
exit 1
}
gen_ca() {
(tee /dev/stderr <<EOF
{"CN": "$ca_name ca",
"CA": {"expiry":"87600h", "pathlen":0},
"key": {"algo":"rsa", "size":4096},
"names": [{"O":"$ca_name ca"}]}
EOF
)|
cfssl gencert -initca - |
cfssljson -bare ca
mv ca-key.pem ca.key
rm ca.csr
}
gen_srv() {
(tee /dev/stderr <<EOF
{"key": {"algo":"rsa", "size":4096},
"names": [{"O":"$ca_name - $srv_name"}]}
EOF
)|
cfssl gencert -ca ca.pem -ca-key ca.key \
-profile=www -hostname="$srv_name.$ca_name" - |
cfssljson -bare "$srv_name"
mv "$srv_name-key.pem" "$srv_name.key"
rm "$srv_name.csr"
}
# create ca if not exist
[ -e ca.key ] ||
gen_ca
# always create server cert
gen_srv
# dump cert info
show() {
openssl x509 -text -noout -in $1 |
awk '!o; {o=0} /[0-9a-f:]{16}/{o=1}'
}
show ca.pem
show "$srv_name.pem"
# write cert into copyparty config
[ -z "$3" ] || {
mkdir -p ~/.config/copyparty
cat "$srv_name".{key,pem} ca.pem >~/.config/copyparty/cert.pem
}
# rm *.key *.pem
# cfssl print-defaults config
# cfssl print-defaults csr

33
contrib/copyparty.bat Normal file

@@ -0,0 +1,33 @@
exec python "$(dirname "$0")"/copyparty.py
@rem on linux, the above will execute and the script will terminate
@rem on windows, the rest of this script will run
@echo off
cls
set py=
for /f %%i in ('where python 2^>nul') do (
set "py=%%i"
goto c1
)
:c1
if [%py%] == [] (
for /f %%i in ('where /r "%localappdata%\programs\python" python 2^>nul') do (
set "py=%%i"
goto c2
)
)
:c2
if [%py%] == [] set "py=c:\python27\python.exe"
if not exist "%py%" (
echo could not find python
echo(
pause
exit /b
)
start cmd /c %py% "%~dp0\copyparty.py"

contrib/explorer-nothumbs-nofoldertypes.reg Normal file

@@ -0,0 +1,31 @@
Windows Registry Editor Version 5.00
; this will do 3 things, all optional:
; 1) disable thumbnails
; 2) delete all existing folder type settings/detections
; 3) disable folder type detection (force default columns)
;
; this makes the file explorer way faster,
; especially on slow/networked locations
; =====================================================================
; 1) disable thumbnails
[HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Explorer\Advanced]
"IconsOnly"=dword:00000001
; =====================================================================
; 2) delete all existing folder type settings/detections
[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags]
[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\BagMRU]
; =====================================================================
; 3) disable folder type detection
[HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags\AllFolders\Shell]
"FolderType"="NotSpecified"

43
contrib/index.html Normal file

@@ -0,0 +1,43 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>⇆🎉 redirect</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<style>
html, body {
font-family: sans-serif;
}
body {
padding: 1em 2em;
font-size: 1.5em;
}
a {
font-size: 1.2em;
padding: .1em;
}
</style>
</head>
<body>
<span id="desc">you probably want</span> <a id="redir" href="//10.13.1.1:3923/">copyparty</a>
<script>
var a = document.getElementById('redir'),
proto = window.location.protocol.indexOf('https') === 0 ? 'https' : 'http',
loc = window.location.hostname || '127.0.0.1',
port = a.getAttribute('href').split(':').pop().split('/')[0],
url = proto + '://' + loc + ':' + port + '/';
a.setAttribute('href', url);
document.getElementById('desc').innerHTML = 'redirecting to';
setTimeout(function() {
window.location.href = url;
}, 500);
</script>
</body>
</html>

contrib/nginx/copyparty.conf Normal file

@@ -0,0 +1,26 @@
upstream cpp {
server 127.0.0.1:3923;
keepalive 120;
}
server {
listen 443 ssl;
listen [::]:443 ssl;
server_name fs.example.com;
location / {
proxy_pass http://cpp;
proxy_redirect off;
# disable buffering (next 4 lines)
proxy_http_version 1.1;
client_max_body_size 0;
proxy_buffering off;
proxy_request_buffering off;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Connection "Keep-Alive";
}
}

18
contrib/openrc/copyparty Normal file

@@ -0,0 +1,18 @@
#!/sbin/openrc-run
# this will start `/usr/local/bin/copyparty-sfx.py`
# and share '/mnt' with anonymous read+write
#
# installation:
# cp -pv copyparty /etc/init.d && rc-update add copyparty
#
# you may want to:
# change '/usr/bin/python' to another interpreter
# change '/mnt::a' to another location or permission-set
name="$SVCNAME"
command_background=true
pidfile="/var/run/$SVCNAME.pid"
command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
command_args="-q -v /mnt::a"

19
contrib/sharex-html.sxcu Normal file

@@ -0,0 +1,19 @@
{
"Version": "13.5.0",
"Name": "copyparty-html",
"DestinationType": "ImageUploader",
"RequestMethod": "POST",
"RequestURL": "http://127.0.0.1:3923/sharex",
"Parameters": {
"pw": "wark"
},
"Body": "MultipartFormData",
"Arguments": {
"act": "bput"
},
"FileFormName": "f",
"RegexList": [
"bytes // <a href=\"/([^\"]+)\""
],
"URL": "http://127.0.0.1:3923/$regex:1|1$"
}

17
contrib/sharex.sxcu Normal file

@@ -0,0 +1,17 @@
{
"Version": "13.5.0",
"Name": "copyparty",
"DestinationType": "ImageUploader",
"RequestMethod": "POST",
"RequestURL": "http://127.0.0.1:3923/sharex",
"Parameters": {
"pw": "wark",
"j": null
},
"Body": "MultipartFormData",
"Arguments": {
"act": "bput"
},
"FileFormName": "f",
"URL": "$json:files[0].url$"
}

contrib/systemd/copyparty.service Normal file

@@ -0,0 +1,19 @@
# this will start `/usr/local/bin/copyparty-sfx.py`
# and share '/mnt' with anonymous read+write
#
# installation:
# cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
#
# you may want to:
# change '/usr/bin/python' to another interpreter
# change '/mnt::a' to another location or permission-set
[Unit]
Description=copyparty file server
[Service]
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
[Install]
WantedBy=multi-user.target

copyparty/__init__.py

@@ -2,6 +2,7 @@
from __future__ import print_function, unicode_literals
import platform
import time
import sys
import os
@@ -16,12 +17,18 @@ if platform.system() == "Windows":
VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393]
# introduced in anniversary update
ANYWIN = WINDOWS or sys.platform in ["msys"]
MACOS = platform.system() == "Darwin"
class EnvParams(object):
def __init__(self):
self.t0 = time.time()
self.mod = os.path.dirname(os.path.realpath(__file__))
if self.mod.endswith("__init__"):
self.mod = os.path.dirname(self.mod)
if sys.platform == "win32":
self.cfg = os.path.normpath(os.environ["APPDATA"] + "/copyparty")
elif sys.platform == "darwin":

copyparty/__main__.py

@@ -8,17 +8,28 @@ __copyright__ = 2019
__license__ = "MIT"
__url__ = "https://github.com/9001/copyparty/"
import re
import os
import sys
import time
import shutil
import filecmp
import locale
import argparse
import threading
import traceback
from textwrap import dedent
from .__init__ import E, WINDOWS, VT100
from .__init__ import E, WINDOWS, VT100, PY2
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
from .svchub import SvcHub
from .util import py_desc
from .util import py_desc, align_tab, IMPLICATIONS
HAVE_SSL = True
try:
import ssl
except:
HAVE_SSL = False
class RiceFormatter(argparse.HelpFormatter):
@@ -44,6 +55,16 @@ class RiceFormatter(argparse.HelpFormatter):
return "".join(indent + line + "\n" for line in text.splitlines())
class Dodge11874(RiceFormatter):
def __init__(self, *args, **kwargs):
kwargs["width"] = 9003
super(Dodge11874, self).__init__(*args, **kwargs)
def warn(msg):
print("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
def ensure_locale():
for x in [
"en_US.UTF-8",
@@ -84,9 +105,221 @@ def ensure_cert():
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
def main():
def configure_ssl_ver(al):
def terse_sslver(txt):
txt = txt.lower()
for c in ["_", "v", "."]:
txt = txt.replace(c, "")
return txt.replace("tls10", "tls1")
# oh man i love openssl
# check this out
# hold my beer
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
sslver = terse_sslver(al.ssl_ver).split(",")
flags = [k for k in ssl.__dict__ if ptn.match(k)]
# SSLv2 SSLv3 TLSv1 TLSv1_1 TLSv1_2 TLSv1_3
if "help" in sslver:
avail = [terse_sslver(x[6:]) for x in flags]
avail = " ".join(sorted(avail) + ["all"])
print("\navailable ssl/tls versions:\n " + avail)
sys.exit(0)
al.ssl_flags_en = 0
al.ssl_flags_de = 0
for flag in sorted(flags):
ver = terse_sslver(flag[6:])
num = getattr(ssl, flag)
if ver in sslver:
al.ssl_flags_en |= num
else:
al.ssl_flags_de |= num
if sslver == ["all"]:
x = al.ssl_flags_en
al.ssl_flags_en = al.ssl_flags_de
al.ssl_flags_de = x
for k in ["ssl_flags_en", "ssl_flags_de"]:
num = getattr(al, k)
print("{}: {:8x} ({})".format(k, num, num))
# think i need that beer now
def configure_ssl_ciphers(al):
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
if al.ssl_ver:
ctx.options &= ~al.ssl_flags_en
ctx.options |= al.ssl_flags_de
is_help = al.ciphers == "help"
if al.ciphers and not is_help:
try:
ctx.set_ciphers(al.ciphers)
except:
print("\n\033[1;31mfailed to set ciphers\033[0m\n")
if not hasattr(ctx, "get_ciphers"):
print("cannot read cipher list: openssl or python too old")
else:
ciphers = [x["description"] for x in ctx.get_ciphers()]
print("\n ".join(["\nenabled ciphers:"] + align_tab(ciphers) + [""]))
if is_help:
sys.exit(0)
def sighandler(sig=None, frame=None):
msg = [""] * 5
for th in threading.enumerate():
msg.append(str(th))
msg.extend(traceback.format_stack(sys._current_frames()[th.ident]))
msg.append("\n")
print("\n".join(msg))
def run_argparse(argv, formatter):
ap = argparse.ArgumentParser(
formatter_class=formatter,
prog="copyparty",
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
epilog=dedent(
"""
-a takes username:password,
-v takes src:dst:permset:permset:cflag:cflag:...
where "permset" is accesslevel followed by username (no separator)
and "cflag" is config flags to set on this volume
list of cflags:
"cnodupe" rejects existing files (instead of symlinking them)
"ce2d" sets -e2d (all -e2* args can be set using ce2* cflags)
"cd2t" disables metadata collection, overrides -e2t*
"cd2d" disables all database stuff, overrides -e2*
example:\033[35m
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
mount current directory at "/" with
* r (read-only) for everyone
* a (read+write) for ed
mount ../inc at "/dump" with
* w (write-only) for everyone
* a (read+write) for ed
* reject duplicate files \033[0m
if no accounts or volumes are configured,
current folder will be read/write for everyone
consider the config file for more flexible account/volume management,
including dynamic reload at runtime (and being more readable w)
values for --urlform:
"stash" dumps the data to file and returns length + checksum
"save,get" dumps to file and returns the page like a GET
"print,get" prints the data in the log and returns GET
(leave out the ",get" to return an error instead)
--ciphers help = available ssl/tls ciphers,
--ssl-ver help = available ssl/tls versions,
default is what python considers safe, usually >= TLS1
values for --ls:
"USR" is a user to browse as; * is anonymous, ** is all users
"VOL" is a single volume to scan, default is * (all vols)
"FLAG" is flags;
"v" in addition to realpaths, print usernames and vpaths
"ln" only prints symlinks leaving the volume mountpoint
"p" exits 1 if any such symlinks are found
"r" resumes startup after the listing
examples:
--ls '**' # list all files which are possible to read
--ls '**,*,ln' # check for dangerous symlinks
--ls '**,*,ln,p,r' # check, then start normally if safe
"""
),
)
# fmt: off
ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
ap.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
ap.add_argument("-q", action="store_true", help="quiet")
ap.add_argument("-ed", action="store_true", help="enable ?dots")
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap.add_argument("-nih", action="store_true", help="no info hostname")
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
ap.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
ap.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
ap.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms")
ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
ap2 = ap.add_argument_group('admin panel options')
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
ap2 = ap.add_argument_group('thumbnail options')
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
ap2 = ap.add_argument_group('database options')
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead")
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,ac,vc,res,.fps")
ap2.add_argument("-mtp", metavar="M=[f,]bin", action="append", type=str, help="read tag M using bin")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
ap2.add_argument("--ssl-ver", metavar="LIST", type=str, help="ssl/tls versions to allow")
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
ap2 = ap.add_argument_group('debug options')
ap2.add_argument("--ls", metavar="U[,V[,F]]", help="scan all volumes")
ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing")
ap2.add_argument("--ihead", metavar="HEADER", action='append', help="dump incoming header")
ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
return ap.parse_args(args=argv[1:])
# fmt: on
def main(argv=None):
time.strptime("19970815", "%Y%m%d") # python#7980
if WINDOWS:
os.system("") # enables colors
os.system("rem") # enables colors
if argv is None:
argv = sys.argv
desc = py_desc().replace("[", "\033[1;30m[")
@@ -94,47 +327,57 @@ def main():
print(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc))
ensure_locale()
ensure_cert()
if HAVE_SSL:
ensure_cert()
ap = argparse.ArgumentParser(
formatter_class=RiceFormatter,
prog="copyparty",
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
epilog=dedent(
"""
-a takes username:password,
-v takes src:dst:permset:permset:... where "permset" is
accesslevel followed by username (no separator)
example:\033[35m
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed \033[36m
mount current directory at "/" with
* r (read-only) for everyone
* a (read+write) for ed
mount ../inc at "/dump" with
* w (write-only) for everyone
* a (read+write) for ed \033[0m
if no accounts or volumes are configured,
current folder will be read/write for everyone
deprecated = [["-e2s", "-e2ds"]]
for dk, nk in deprecated:
try:
idx = argv.index(dk)
except:
continue
consider the config file for more flexible account/volume management,
including dynamic reload at runtime (and being more readable w)
"""
),
)
ap.add_argument(
"-c", metavar="PATH", type=str, action="append", help="add config file"
)
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind")
ap.add_argument("-p", metavar="PORT", type=int, default=1234, help="port to bind")
ap.add_argument("-nc", metavar="NUM", type=int, default=16, help="max num clients")
ap.add_argument("-j", metavar="CORES", type=int, help="max num cpu cores")
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
ap.add_argument("-q", action="store_true", help="quiet")
ap.add_argument("-nw", action="store_true", help="benchmark: disable writing")
al = ap.parse_args()
msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
print(msg.format(dk, nk))
argv[idx] = nk
time.sleep(2)
try:
al = run_argparse(argv, RiceFormatter)
except AssertionError:
al = run_argparse(argv, Dodge11874)
# propagate implications
for k1, k2 in IMPLICATIONS:
if getattr(al, k1):
setattr(al, k2, True)
al.i = al.i.split(",")
try:
if "-" in al.p:
lo, hi = [int(x) for x in al.p.split("-")]
al.p = list(range(lo, hi + 1))
else:
al.p = [int(x) for x in al.p.split(",")]
except:
raise Exception("invalid value for -p")
if HAVE_SSL:
if al.ssl_ver:
configure_ssl_ver(al)
if al.ciphers:
configure_ssl_ciphers(al)
else:
warn("ssl module does not exist; cannot enable https")
if PY2 and WINDOWS and al.e2d:
warn(
"windows py2 cannot do unicode filenames with -e2d\n"
+ " (if you crash with codec errors then that is why)"
)
# signal.signal(signal.SIGINT, sighandler)
SvcHub(al).run()

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (0, 4, 2)
CODENAME = "NIH"
BUILD_DT = (2020, 5, 15)
VERSION = (0, 11, 8)
CODENAME = "the grid"
BUILD_DT = (2021, 6, 6)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -1,23 +1,40 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import re
import os
import sys
import stat
import threading
from .__init__ import PY2, WINDOWS
from .util import undot, Pebkac, fsdec, fsenc
from .util import IMPLICATIONS, undot, Pebkac, fsdec, fsenc, statdir, nuprint
class VFS(object):
"""single level in the virtual fs"""
def __init__(self, realpath, vpath, uread=[], uwrite=[]):
def __init__(self, realpath, vpath, uread=[], uwrite=[], uadm=[], flags={}):
self.realpath = realpath # absolute path on host filesystem
self.vpath = vpath # absolute path in the virtual filesystem
self.uread = uread # users who can read this
self.uwrite = uwrite # users who can write this
self.uadm = uadm # users who are regular admins
self.flags = flags # config switches
self.nodes = {} # child nodes
self.all_vols = {vpath: self} # flattened recursive
def __repr__(self):
return "VFS({})".format(
", ".join(
"{}={!r}".format(k, self.__dict__[k])
for k in "realpath vpath uread uwrite uadm flags".split()
)
)
def _trk(self, vol):
self.all_vols[vol.vpath] = vol
return vol
def add(self, src, dst):
"""get existing, or add new path to the vfs"""
@@ -29,16 +46,19 @@ class VFS(object):
name, dst = dst.split("/", 1)
if name in self.nodes:
# exists; do not manipulate permissions
return self.nodes[name].add(src, dst)
return self._trk(self.nodes[name].add(src, dst))
vn = VFS(
"{}/{}".format(self.realpath, name),
"{}/{}".format(self.vpath, name).lstrip("/"),
self.uread,
self.uwrite,
self.uadm,
self.flags,
)
self._trk(vn)
self.nodes[name] = vn
return vn.add(src, dst)
return self._trk(vn.add(src, dst))
if dst in self.nodes:
# leaf exists; return as-is
@@ -48,7 +68,7 @@ class VFS(object):
vp = "{}/{}".format(self.vpath, dst).lstrip("/")
vn = VFS(src, vp)
self.nodes[dst] = vn
return vn
return self._trk(vn)
def _find(self, vpath):
"""return [vfs,remainder]"""
@@ -93,63 +113,155 @@ class VFS(object):
if rem:
rp += "/" + rem
return fsdec(os.path.realpath(fsenc(rp)))
try:
return fsdec(os.path.realpath(fsenc(rp)))
except:
if not WINDOWS:
raise
def ls(self, rem, uname):
# cpython bug introduced in 3.8, still exists in 3.9.1;
# some win7sp1 and win10:20H2 boxes cannot realpath a
# networked drive letter such as b"n:" or b"n:\\"
#
# requirements to trigger:
# * bytestring (not unicode str)
# * just the drive letter (subfolders are ok)
# * networked drive (regular disks and vmhgfs are ok)
# * on an enterprise network (idk, cannot repro with samba)
#
# hits the following exceptions in succession:
# * access denied at L601: "path = _getfinalpathname(path)"
# * "cant concat str to bytes" at L621: "return path + tail"
#
return os.path.realpath(rp)
def ls(self, rem, uname, scandir, incl_wo=False, lstat=False):
"""return user-readable [fsdir,real,virt] items at vpath"""
virt_vis = {} # nodes readable by user
abspath = self.canonical(rem)
items = os.listdir(fsenc(abspath))
real = [fsdec(x) for x in items]
real = list(statdir(nuprint, scandir, lstat, abspath))
real.sort()
if not rem:
for name, vn2 in sorted(self.nodes.items()):
if uname in vn2.uread:
ok = uname in vn2.uread or "*" in vn2.uread
if not ok and incl_wo:
ok = uname in vn2.uwrite or "*" in vn2.uwrite
if ok:
virt_vis[name] = vn2
# no vfs nodes in the list of real inodes
real = [x for x in real if x not in self.nodes]
real = [x for x in real if x[0] not in self.nodes]
return [abspath, real, virt_vis]
def user_tree(self, uname, readable=False, writable=False):
ret = []
opt1 = readable and (uname in self.uread or "*" in self.uread)
opt2 = writable and (uname in self.uwrite or "*" in self.uwrite)
if opt1 or opt2:
ret.append(self.vpath)
def walk(self, rel, rem, uname, dots, scandir, lstat=False):
"""
recursively yields from ./rem;
rel is a unix-style user-defined vpath (not vfs-related)
"""
fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, False, lstat)
rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
rfiles.sort()
rdirs.sort()
yield rel, fsroot, rfiles, rdirs, vfs_virt
for rdir, _ in rdirs:
if not dots and rdir.startswith("."):
continue
wrel = (rel + "/" + rdir).lstrip("/")
wrem = (rem + "/" + rdir).lstrip("/")
for x in self.walk(wrel, wrem, uname, dots, scandir, lstat):
yield x
for n, vfs in sorted(vfs_virt.items()):
if not dots and n.startswith("."):
continue
wrel = (rel + "/" + n).lstrip("/")
for x in vfs.walk(wrel, "", uname, dots, scandir, lstat):
yield x
def zipgen(self, vrem, flt, uname, dots, scandir):
if flt:
flt = {k: True for k in flt}
for vpath, apath, files, rd, vd in self.walk("", vrem, uname, dots, scandir):
if flt:
files = [x for x in files if x[0] in flt]
rm = [x for x in rd if x[0] not in flt]
[rd.remove(x) for x in rm]
rm = [x for x in vd.keys() if x not in flt]
[vd.pop(x) for x in rm]
flt = None
# print(repr([vpath, apath, [x[0] for x in files]]))
fnames = [n[0] for n in files]
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
apaths = [os.path.join(apath, n) for n in fnames]
files = list(zip(vpaths, apaths, files))
if not dots:
# dotfile filtering based on vpath (intended visibility)
files = [x for x in files if "/." not in "/" + x[0]]
rm = [x for x in rd if x[0].startswith(".")]
for x in rm:
rd.remove(x)
rm = [k for k in vd.keys() if k.startswith(".")]
for x in rm:
del vd[x]
# up2k filtering based on actual abspath
files = [x for x in files if "{0}.hist{0}up2k.".format(os.sep) not in x[1]]
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
yield f
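As a hedged illustration (not part of the diff), the dicts yielded by zipgen above could be consumed like this; the volume object, username and paths are assumptions:

def archive_size(vfs, vrem, uname):
    # zipgen(vrem, flt, uname, dots, scandir) yields {"vp": vpath, "ap": abspath, "st": os.stat result}
    total = 0
    for f in vfs.zipgen(vrem, None, uname, False, True):
        total += f["st"].st_size
    return total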
def user_tree(self, uname, readable, writable, admin):
is_readable = False
if uname in self.uread or "*" in self.uread:
readable.append(self.vpath)
is_readable = True
if uname in self.uwrite or "*" in self.uwrite:
writable.append(self.vpath)
if is_readable:
admin.append(self.vpath)
for _, vn in sorted(self.nodes.items()):
ret.extend(vn.user_tree(uname, readable, writable))
return ret
vn.user_tree(uname, readable, writable, admin)
class AuthSrv(object):
"""verifies users against given paths"""
def __init__(self, args, log_func):
self.log_func = log_func
def __init__(self, args, log_func, warn_anonwrite=True):
self.args = args
self.warn_anonwrite = True
self.log_func = log_func
self.warn_anonwrite = warn_anonwrite
self.line_ctr = 0
if WINDOWS:
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)")
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
else:
self.re_vol = re.compile(r"^([^:]*):([^:]*):(.*)")
self.re_vol = re.compile(r"^([^:]*):([^:]*):(.*)$")
self.mutex = threading.Lock()
self.reload()
def log(self, msg):
self.log_func("auth", msg)
def invert(self, orig):
if PY2:
return {v: k for k, v in orig.iteritems()}
else:
return {v: k for k, v in orig.items()}
def log(self, msg, c=0):
self.log_func("auth", msg, c)
def laggy_iter(self, iterable):
"""returns [value,isFinalValue]"""
@@ -161,10 +273,12 @@ class AuthSrv(object):
yield prev, True
def _parse_config_file(self, fd, user, mread, mwrite, mount):
def _parse_config_file(self, fd, user, mread, mwrite, madm, mflags, mount):
vol_src = None
vol_dst = None
self.line_ctr = 0
for ln in [x.decode("utf-8").strip() for x in fd]:
self.line_ctr += 1
if not ln and vol_src is not None:
vol_src = None
vol_dst = None
@@ -191,13 +305,57 @@ class AuthSrv(object):
mount[vol_dst] = vol_src
mread[vol_dst] = []
mwrite[vol_dst] = []
madm[vol_dst] = []
mflags[vol_dst] = {}
continue
lvl, uname = ln.split(" ")
if lvl in "ra":
mread[vol_dst].append(uname)
if lvl in "wa":
mwrite[vol_dst].append(uname)
if len(ln) > 1:
lvl, uname = ln.split(" ")
else:
lvl = ln
uname = "*"
self._read_vol_str(
lvl,
uname,
mread[vol_dst],
mwrite[vol_dst],
madm[vol_dst],
mflags[vol_dst],
)
def _read_vol_str(self, lvl, uname, mr, mw, ma, mf):
if lvl == "c":
cval = True
if "=" in uname:
uname, cval = uname.split("=", 1)
self._read_volflag(mf, uname, cval, False)
return
if uname == "":
uname = "*"
if lvl in "ra":
mr.append(uname)
if lvl in "wa":
mw.append(uname)
if lvl == "a":
ma.append(uname)
def _read_volflag(self, flags, name, value, is_list):
if name not in ["mtp"]:
flags[name] = value
return
if not is_list:
value = [value]
elif not value:
return
flags[name] = flags.get(name, []) + value
def reload(self):
"""
@@ -210,6 +368,8 @@ class AuthSrv(object):
user = {} # username:password
mread = {} # mountpoint:[username]
mwrite = {} # mountpoint:[username]
madm = {} # mountpoint:[username]
mflags = {} # mountpoint:[flag]
mount = {} # dst:src (mountpoint:realpath)
if self.args.a:
@@ -219,39 +379,47 @@ class AuthSrv(object):
if self.args.v:
# list of src:dst:permset:permset:...
# permset is [rwa]username
for vol_match in [self.re_vol.match(x) for x in self.args.v]:
try:
src, dst, perms = vol_match.groups()
except:
raise Exception("invalid -v argument")
# permset is [rwa]username or [c]flag
for v_str in self.args.v:
m = self.re_vol.match(v_str)
if not m:
raise Exception("invalid -v argument: [{}]".format(v_str))
src, dst, perms = m.groups()
# print("\n".join([src, dst, perms]))
src = fsdec(os.path.abspath(fsenc(src)))
dst = dst.strip("/")
mount[dst] = src
mread[dst] = []
mwrite[dst] = []
madm[dst] = []
mflags[dst] = {}
perms = perms.split(":")
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
if uname == "":
uname = "*"
if lvl in "ra":
mread[dst].append(uname)
if lvl in "wa":
mwrite[dst].append(uname)
self._read_vol_str(
lvl, uname, mread[dst], mwrite[dst], madm[dst], mflags[dst]
)
if self.args.c:
for cfg_fn in self.args.c:
with open(cfg_fn, "rb") as f:
self._parse_config_file(f, user, mread, mwrite, mount)
try:
self._parse_config_file(
f, user, mread, mwrite, madm, mflags, mount
)
except:
m = "\n\033[1;31m\nerror in config file {} on line {}:\n\033[0m"
print(m.format(cfg_fn, self.line_ctr))
raise
if not mount:
# -h says our defaults are CWD at root and read/write for everyone
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
elif "" not in mount:
# there's volumes but no root; make root inaccessible
vfs = VFS(os.path.abspath("."), "", [], [])
vfs = VFS(os.path.abspath("."), "")
vfs.flags["d2d"] = True
maxdepth = 0
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
@@ -261,12 +429,16 @@ class AuthSrv(object):
if dst == "":
# rootfs was mapped; fully replaces the default CWD vfs
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst])
vfs = VFS(
mount[dst], dst, mread[dst], mwrite[dst], madm[dst], mflags[dst]
)
continue
v = vfs.add(mount[dst], dst)
v.uread = mread[dst]
v.uwrite = mwrite[dst]
v.uadm = madm[dst]
v.flags = mflags[dst]
missing_users = {}
for d in [mread, mwrite]:
@@ -277,28 +449,198 @@ class AuthSrv(object):
if missing_users:
self.log(
"\033[31myou must -a the following users: "
+ ", ".join(k for k in sorted(missing_users))
+ "\033[0m"
"you must -a the following users: "
+ ", ".join(k for k in sorted(missing_users)),
c=1,
)
raise Exception("invalid config")
all_mte = {}
errors = False
for vol in vfs.all_vols.values():
if (self.args.e2ds and vol.uwrite) or self.args.e2dsa:
vol.flags["e2ds"] = True
if self.args.e2d or "e2ds" in vol.flags:
vol.flags["e2d"] = True
for k in ["e2t", "e2ts", "e2tsr"]:
if getattr(self.args, k):
vol.flags[k] = True
for k1, k2 in IMPLICATIONS:
if k1 in vol.flags:
vol.flags[k2] = True
# default tag-list if unset
if "mte" not in vol.flags:
vol.flags["mte"] = self.args.mte
# append parsers from argv to volume-flags
self._read_volflag(vol.flags, "mtp", self.args.mtp, True)
# d2d drops all database features for a volume
for grp, rm in [["d2d", "e2d"], ["d2t", "e2t"]]:
if not vol.flags.get(grp, False):
continue
vol.flags["d2t"] = True
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
# mt* needs e2t so drop those too
for grp, rm in [["e2t", "mt"]]:
if vol.flags.get(grp, False):
continue
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
# verify tags mentioned by -mt[mp] are used by -mte
local_mtp = {}
local_only_mtp = {}
tags = vol.flags.get("mtp", []) + vol.flags.get("mtm", [])
tags = [x.split("=")[0] for x in tags]
tags = [y for x in tags for y in x.split(",")]
for a in tags:
local_mtp[a] = True
local = True
for b in self.args.mtp or []:
b = b.split("=")[0]
if a == b:
local = False
if local:
local_only_mtp[a] = True
local_mte = {}
for a in vol.flags.get("mte", "").split(","):
local = True
all_mte[a] = True
local_mte[a] = True
for b in self.args.mte.split(","):
if not a or not b:
continue
if a == b:
local = False
for mtp in local_only_mtp.keys():
if mtp not in local_mte:
m = 'volume "/{}" defines metadata tag "{}", but doesnt use it in "-mte" (or with "cmte" in its volume-flags)'
self.log(m.format(vol.vpath, mtp), 1)
errors = True
tags = self.args.mtp or []
tags = [x.split("=")[0] for x in tags]
tags = [y for x in tags for y in x.split(",")]
for mtp in tags:
if mtp not in all_mte:
m = 'metadata tag "{}" is defined by "-mtm" or "-mtp", but is not used by "-mte" (or by any "cmte" volume-flag)'
self.log(m.format(mtp), 1)
errors = True
if errors:
sys.exit(1)
try:
v, _ = vfs.get("/", "*", False, True)
if self.warn_anonwrite and os.getcwd() == v.realpath:
self.warn_anonwrite = False
self.log(
"\033[31manyone can read/write the current directory: {}\033[0m".format(
v.realpath
)
)
msg = "anyone can read/write the current directory: {}"
self.log(msg.format(v.realpath), c=1)
except Pebkac:
self.warn_anonwrite = True
with self.mutex:
self.vfs = vfs
self.user = user
self.iuser = self.invert(user)
self.iuser = {v: k for k, v in user.items()}
# import pprint
# pprint.pprint({"usr": user, "rd": mread, "wr": mwrite, "mnt": mount})
def dbg_ls(self):
users = self.args.ls
vols = "*"
flags = []
try:
users, vols = users.split(",", 1)
except:
pass
try:
vols, flags = vols.split(",", 1)
flags = flags.split(",")
except:
pass
if users == "**":
users = list(self.user.keys()) + ["*"]
else:
users = [users]
for u in users:
if u not in self.user and u != "*":
raise Exception("user not found: " + u)
if vols == "*":
vols = ["/" + x for x in self.vfs.all_vols.keys()]
else:
vols = [vols]
for v in vols:
if not v.startswith("/"):
raise Exception("volumes must start with /")
if v[1:] not in self.vfs.all_vols:
raise Exception("volume not found: " + v)
self.log({"users": users, "vols": vols, "flags": flags})
for k, v in self.vfs.all_vols.items():
self.log("/{}: read({}) write({})".format(k, v.uread, v.uwrite))
flag_v = "v" in flags
flag_ln = "ln" in flags
flag_p = "p" in flags
flag_r = "r" in flags
n_bads = 0
for v in vols:
v = v[1:]
vtop = "/{}/".format(v) if v else "/"
for u in users:
self.log("checking /{} as {}".format(v, u))
try:
vn, _ = self.vfs.get(v, u, True, False)
except:
continue
atop = vn.realpath
g = vn.walk("", "", u, True, not self.args.no_scandir, lstat=False)
for vpath, apath, files, _, _ in g:
fnames = [n[0] for n in files]
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
vpaths = [vtop + x for x in vpaths]
apaths = [os.path.join(apath, n) for n in fnames]
files = list(zip(vpaths, apaths))
if flag_ln:
files = [x for x in files if not x[1].startswith(atop + os.sep)]
n_bads += len(files)
if flag_v:
msg = [
'# user "{}", vpath "{}"\n{}'.format(u, vp, ap)
for vp, ap in files
]
else:
msg = [x[1] for x in files]
if msg:
nuprint("\n".join(msg))
if n_bads and flag_p:
raise Exception("found symlink leaving volume, and strict is set")
if not flag_r:
sys.exit(0)

View File

@@ -29,7 +29,7 @@ class BrokerMp(object):
self.mutex = threading.Lock()
cores = self.args.j
if cores is None:
if not cores:
cores = mp.cpu_count()
self.log("broker", "booting {} subprocesses".format(cores))
@@ -51,7 +51,7 @@ class BrokerMp(object):
self.procs.append(proc)
proc.start()
if True:
if not self.args.q:
thr = threading.Thread(target=self.debug_load_balancer)
thr.daemon = True
thr.start()

View File

@@ -49,11 +49,11 @@ class MpWorker(object):
# print('k')
pass
def log(self, src, msg):
self.q_yield.put([0, "log", [src, msg]])
def log(self, src, msg, c=0):
self.q_yield.put([0, "log", [src, msg, c]])
def logw(self, msg):
self.log("mp{}".format(self.n), msg)
def logw(self, msg, c=0):
self.log("mp{}".format(self.n), msg, c)
def httpdrop(self, addr):
self.q_yield.put([0, "httpdrop", [addr]])
@@ -73,7 +73,9 @@ class MpWorker(object):
if PY2:
sck = pickle.loads(sck) # nosec
self.log("%s %s" % addr, "-" * 4 + "C-qpop")
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
self.httpsrv.accept(sck, addr)
with self.mutex:

View File

@@ -28,7 +28,9 @@ class BrokerThr(object):
def put(self, want_retval, dest, *args):
if dest == "httpconn":
sck, addr = args
self.log("%s %s" % addr, "-" * 4 + "C-qpop")
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
self.httpsrv.accept(sck, addr)
else:

File diff suppressed because it is too large

View File

@@ -1,29 +1,25 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import re
import os
import sys
import ssl
import time
import socket
HAVE_SSL = True
try:
import jinja2
except ImportError:
print(
"""\033[1;31m
you do not have jinja2 installed,\033[33m
choose one of these:\033[0m
* apt install python-jinja2
* python3 -m pip install --user jinja2
* (try another python version, if you have one)
* (try copyparty.sfx instead)
"""
)
sys.exit(1)
import ssl
except:
HAVE_SSL = False
from .__init__ import E
from .util import Unrecv
from .httpcli import HttpCli
from .u2idx import U2idx
from .th_cli import ThumbCli
from .th_srv import HAVE_PIL
from .ico import Ico
class HttpConn(object):
@@ -41,27 +37,45 @@ class HttpConn(object):
self.auth = hsrv.auth
self.cert_path = hsrv.cert_path
self.workload = 0
self.log_func = hsrv.log
self.log_src = "{} \033[36m{}".format(addr[0], addr[1]).ljust(26)
enth = HAVE_PIL and not self.args.no_thumb
self.thumbcli = ThumbCli(hsrv.broker) if enth else None
self.ico = Ico(self.args)
env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
self.tpl_mounts = env.get_template("splash.html")
self.tpl_browser = env.get_template("browser.html")
self.tpl_msg = env.get_template("msg.html")
self.tpl_md = env.get_template("md.html")
self.tpl_mde = env.get_template("mde.html")
self.t0 = time.time()
self.nbyte = 0
self.workload = 0
self.u2idx = None
self.log_func = hsrv.log
self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None
self.set_rproxy()
def set_rproxy(self, ip=None):
if ip is None:
color = 36
ip = self.addr[0]
self.rproxy = None
else:
color = 34
self.rproxy = ip
self.ip = ip
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
return self.log_src
def respath(self, res_name):
return os.path.join(E.mod, "web", res_name)
def log(self, msg):
self.log_func(self.log_src, msg)
def log(self, msg, c=0):
self.log_func(self.log_src, msg, c)
def run(self):
def get_u2idx(self):
if not self.u2idx:
self.u2idx = U2idx(self.args, self.log_func)
return self.u2idx
def _detect_https(self):
method = None
self.sr = None
if self.cert_path:
try:
method = self.s.recv(4, socket.MSG_PEEK)
@@ -82,20 +96,64 @@ class HttpConn(object):
err = "need at least 4 bytes in the first packet; got {}".format(
len(method)
)
self.log(err)
if method:
self.log(err)
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
return
if method not in [None, b"GET ", b"HEAD", b"POST"]:
return method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]
def run(self):
self.sr = None
if self.args.https_only:
is_https = True
elif self.args.http_only or not HAVE_SSL:
is_https = False
else:
is_https = self._detect_https()
if is_https:
if self.sr:
self.log("\033[1;31mTODO: cannot do https in jython\033[0m")
self.log("TODO: cannot do https in jython", c="1;31")
return
self.log_src = self.log_src.replace("[36m", "[35m")
try:
self.s = ssl.wrap_socket(
self.s, server_side=True, certfile=self.cert_path
)
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
ctx.load_cert_chain(self.cert_path)
if self.args.ssl_ver:
ctx.options &= ~self.args.ssl_flags_en
ctx.options |= self.args.ssl_flags_de
# print(repr(ctx.options))
if self.args.ssl_log:
try:
ctx.keylog_filename = self.args.ssl_log
except:
self.log("keylog failed; openssl or python too old")
if self.args.ciphers:
ctx.set_ciphers(self.args.ciphers)
self.s = ctx.wrap_socket(self.s, server_side=True)
msg = [
"\033[1;3{:d}m{}".format(c, s)
for c, s in zip([0, 5, 0], self.s.cipher())
]
self.log(" ".join(msg) + "\033[0m")
if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
overlap = [y[::-1] for y in self.s.shared_ciphers()]
lines = [str(x) for x in (["TLS cipher overlap:"] + overlap)]
self.log("\n".join(lines))
for k, v in [
["compression", self.s.compression()],
["ALPN proto", self.s.selected_alpn_protocol()],
["NPN proto", self.s.selected_npn_protocol()],
]:
self.log("TLS {}: {}".format(k, v or "nah"))
except Exception as ex:
em = str(ex)
@@ -108,7 +166,7 @@ class HttpConn(object):
pass
else:
self.log("\033[35mhandshake\033[0m " + em)
self.log("handshake\033[0m " + em, c=5)
return

View File

@@ -2,10 +2,28 @@
from __future__ import print_function, unicode_literals
import os
import sys
import time
import socket
import threading
try:
import jinja2
except ImportError:
print(
"""\033[1;31m
you do not have jinja2 installed,\033[33m
choose one of these:\033[0m
* apt install python-jinja2
* {} -m pip install --user jinja2
* (try another python version, if you have one)
* (try copyparty.sfx instead)
""".format(
os.path.basename(sys.executable)
)
)
sys.exit(1)
from .__init__ import E, MACOS
from .httpconn import HttpConn
from .authsrv import AuthSrv
@@ -30,6 +48,13 @@ class HttpSrv(object):
self.workload_thr_alive = False
self.auth = AuthSrv(self.args, self.log)
env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
self.j2 = {
x: env.get_template(x + ".html")
for x in ["splash", "browser", "browser2", "msg", "md", "mde"]
}
cert_path = os.path.join(E.cfg, "cert.pem")
if os.path.exists(cert_path):
self.cert_path = cert_path
@@ -38,7 +63,9 @@ class HttpSrv(object):
def accept(self, sck, addr):
"""takes an incoming tcp connection and creates a thread to handle it"""
self.log("%s %s" % addr, "-" * 5 + "C-cthr")
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30")
thr = threading.Thread(target=self.thr_client, args=(sck, addr))
thr.daemon = True
thr.start()
@@ -66,11 +93,15 @@ class HttpSrv(object):
thr.start()
try:
self.log("%s %s" % addr, "-" * 6 + "C-crun")
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-crun" % ("-" * 6,), c="1;30")
cli.run()
finally:
self.log("%s %s" % addr, "-" * 7 + "C-done")
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30")
try:
sck.shutdown(socket.SHUT_RDWR)
sck.close()
@@ -78,7 +109,8 @@ class HttpSrv(object):
if not MACOS:
self.log(
"%s %s" % addr,
"shut_rdwr err:\n {}\n {}".format(repr(sck), ex),
"shut({}): {}".format(sck.fileno(), ex),
c="1;30",
)
if ex.errno not in [10038, 10054, 107, 57, 9]:
# 10038 No longer considered a socket

39
copyparty/ico.py Normal file
View File

@@ -0,0 +1,39 @@
import hashlib
import colorsys
from .__init__ import PY2
class Ico(object):
def __init__(self, args):
self.args = args
def get(self, ext, as_thumb):
"""placeholder to make thumbnails not break"""
h = hashlib.md5(ext.encode("utf-8")).digest()[:2]
if PY2:
h = [ord(x) for x in h]
c1 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 0.3)
c2 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 1)
c = list(c1) + list(c2)
c = [int(x * 255) for x in c]
c = "".join(["{:02x}".format(x) for x in c])
h = 30
if not self.args.th_no_crop and as_thumb:
w, h = self.args.th_size.split("x")
h = int(100 / (float(w) / float(h)))
svg = """\
<?xml version="1.0" encoding="UTF-8"?>
<svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g>
<rect width="100%" height="100%" fill="#{}" />
<text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" xml:space="preserve"
fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
</g></svg>
"""
svg = svg.format(h, c[:6], c[6:], ext).encode("utf-8")
return ["image/svg+xml", svg]

454
copyparty/mtag.py Normal file
View File

@@ -0,0 +1,454 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import re
import os
import sys
import json
import shutil
import subprocess as sp
from .__init__ import PY2, WINDOWS
from .util import fsenc, fsdec, REKOBO_LKEY
if not PY2:
unicode = str
def have_ff(cmd):
if PY2:
cmd = (cmd + " -version").encode("ascii").split(b" ")
try:
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE).communicate()
return True
except:
return False
else:
return bool(shutil.which(cmd))
HAVE_FFMPEG = have_ff("ffmpeg")
HAVE_FFPROBE = have_ff("ffprobe")
class MParser(object):
def __init__(self, cmdline):
self.tag, args = cmdline.split("=", 1)
self.tags = self.tag.split(",")
self.timeout = 30
self.force = False
self.audio = "y"
self.ext = []
while True:
try:
bp = os.path.expanduser(args)
if os.path.exists(bp):
self.bin = bp
return
except:
pass
arg, args = args.split(",", 1)
arg = arg.lower()
if arg.startswith("a"):
self.audio = arg[1:] # [r]equire [n]ot [d]ontcare
continue
if arg == "f":
self.force = True
continue
if arg.startswith("t"):
self.timeout = int(arg[1:])
continue
if arg.startswith("e"):
self.ext.append(arg[1:])
continue
raise Exception()
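For illustration only (the tag name, option letters and script path are made up), this is roughly how a "-mtp" value is consumed by MParser above, which keeps splitting option tokens off the front until the remainder is an existing path:

cmdline = "key=f,t10,an,~/bin/guess-key.py"
tag, args = cmdline.split("=", 1)
opts, binpath = args.rsplit(",", 1)  # simplification; MParser checks the filesystem instead
print(tag)      # "key"
print(opts)     # "f,t10,an" -> force, 10 second timeout, only for non-audio files
print(binpath)  # "~/bin/guess-key.py" (expanded with expanduser; must exist on disk)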
def ffprobe(abspath):
cmd = [
b"ffprobe",
b"-hide_banner",
b"-show_streams",
b"-show_format",
b"--",
fsenc(abspath),
]
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
r = p.communicate()
txt = r[0].decode("utf-8", "replace")
return parse_ffprobe(txt)
def parse_ffprobe(txt):
"""ffprobe -show_format -show_streams"""
streams = []
fmt = {}
g = None
for ln in [x.rstrip("\r") for x in txt.split("\n")]:
try:
k, v = ln.split("=", 1)
g[k] = v
continue
except:
pass
if ln == "[STREAM]":
g = {}
streams.append(g)
if ln == "[FORMAT]":
g = {"codec_type": "format"} # heh
fmt = g
streams = [fmt] + streams
ret = {} # processed
md = {} # raw tags
have = {}
for strm in streams:
typ = strm.get("codec_type")
if typ in have:
continue
have[typ] = True
kvm = []
if typ == "audio":
kvm = [
["codec_name", "ac"],
["channel_layout", "chs"],
["sample_rate", ".hz"],
["bit_rate", ".aq"],
["duration", ".dur"],
]
if typ == "video":
if strm.get("DISPOSITION:attached_pic") == "1" or fmt.get(
"format_name"
) in ["mp3", "ogg", "flac"]:
continue
kvm = [
["codec_name", "vc"],
["pix_fmt", "pixfmt"],
["r_frame_rate", ".fps"],
["bit_rate", ".vq"],
["width", ".resw"],
["height", ".resh"],
["duration", ".dur"],
]
if typ == "format":
kvm = [["duration", ".dur"], ["bit_rate", ".q"]]
for sk, rk in kvm:
v = strm.get(sk)
if v is None:
continue
if rk.startswith("."):
try:
v = float(v)
v2 = ret.get(rk)
if v2 is None or v > v2:
ret[rk] = v
except:
# sqlite doesnt care but the code below does
if v not in ["N/A"]:
ret[rk] = v
else:
ret[rk] = v
if ret.get("vc") == "ansi": # shellscript
return {}, {}
for strm in streams:
for k, v in strm.items():
if not k.startswith("TAG:"):
continue
k = k[4:].strip()
v = v.strip()
if k and v:
md[k] = [v]
for k in [".q", ".vq", ".aq"]:
if k in ret:
ret[k] /= 1000 # bit_rate=320000
for k in [".q", ".vq", ".aq", ".resw", ".resh"]:
if k in ret:
ret[k] = int(ret[k])
if ".fps" in ret:
fps = ret[".fps"]
if "/" in fps:
fa, fb = fps.split("/")
fps = int(fa) * 1.0 / int(fb)
if fps < 1000 and fmt.get("format_name") not in ["image2", "png_pipe"]:
ret[".fps"] = round(fps, 3)
else:
del ret[".fps"]
if ".dur" in ret:
if ret[".dur"] < 0.1:
del ret[".dur"]
if ".q" in ret:
del ret[".q"]
if ".resw" in ret and ".resh" in ret:
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])
ret = {k: [0, v] for k, v in ret.items()}
return ret, md
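A hedged, minimal example of the key=value blocks parse_ffprobe expects; the sample below is fabricated and far shorter than real ffprobe output:

sample = """[STREAM]
codec_type=audio
codec_name=opus
sample_rate=48000
duration=192.4
[/STREAM]
[FORMAT]
bit_rate=128000
duration=192.4
TAG:title=some song
[/FORMAT]
"""
ret, md = parse_ffprobe(sample)
# ret maps copyparty keys to [priority, value] pairs, e.g.
#   {"ac": [0, "opus"], ".hz": [0, 48000.0], ".dur": [0, 192.4], ...}
# md collects the raw TAG: entries, e.g. {"title": ["some song"]}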
class MTag(object):
def __init__(self, log_func, args):
self.log_func = log_func
self.usable = True
self.prefer_mt = False
mappings = args.mtm
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
or_ffprobe = " or ffprobe"
if self.backend == "mutagen":
self.get = self.get_mutagen
try:
import mutagen
except:
self.log("could not load mutagen, trying ffprobe instead", c=3)
self.backend = "ffprobe"
if self.backend == "ffprobe":
self.get = self.get_ffprobe
self.prefer_mt = True
# about 20x slower
self.usable = HAVE_FFPROBE
if self.usable and WINDOWS and sys.version_info < (3, 8):
self.usable = False
or_ffprobe = " or python >= 3.8"
msg = "found ffprobe but your python is too old; need 3.8 or newer"
self.log(msg, c=1)
if not self.usable:
msg = "need mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
self.log(
msg.format(or_ffprobe, " " * 37, os.path.basename(sys.executable)), c=1
)
return
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
tagmap = {
"album": ["album", "talb", "\u00a9alb", "original-album", "toal"],
"artist": [
"artist",
"tpe1",
"\u00a9art",
"composer",
"performer",
"arranger",
"\u00a9wrt",
"tcom",
"tpe3",
"original-artist",
"tope",
],
"title": ["title", "tit2", "\u00a9nam"],
"circle": [
"album-artist",
"tpe2",
"aart",
"conductor",
"organization",
"band",
],
".tn": ["tracknumber", "trck", "trkn", "track"],
"genre": ["genre", "tcon", "\u00a9gen"],
"date": [
"original-release-date",
"release-date",
"date",
"tdrc",
"\u00a9day",
"original-date",
"original-year",
"tyer",
"tdor",
"tory",
"year",
"creation-time",
],
".bpm": ["bpm", "tbpm", "tmpo", "tbp"],
"key": ["initial-key", "tkey", "key"],
"comment": ["comment", "comm", "\u00a9cmt", "comments", "description"],
}
if mappings:
for k, v in [x.split("=") for x in mappings]:
tagmap[k] = v.split(",")
self.tagmap = {}
for k, vs in tagmap.items():
vs2 = []
for v in vs:
if "-" not in v:
vs2.append(v)
continue
vs2.append(v.replace("-", " "))
vs2.append(v.replace("-", "_"))
vs2.append(v.replace("-", ""))
self.tagmap[k] = vs2
self.rmap = {
v: [n, k] for k, vs in self.tagmap.items() for n, v in enumerate(vs)
}
# self.get = self.compare
def log(self, msg, c=0):
self.log_func("mtag", msg, c)
def normalize_tags(self, ret, md):
for k, v in dict(md).items():
if not v:
continue
k = k.lower().split("::")[0].strip()
mk = self.rmap.get(k)
if not mk:
continue
pref, mk = mk
if mk not in ret or ret[mk][0] > pref:
ret[mk] = [pref, v[0]]
# take first value
ret = {k: unicode(v[1]).strip() for k, v in ret.items()}
# track 3/7 => track 3
for k, v in ret.items():
if k[0] == ".":
v = v.split("/")[0].strip().lstrip("0")
ret[k] = v or 0
# normalize key notation to rekobo
okey = ret.get("key")
if okey:
key = okey.replace(" ", "").replace("maj", "").replace("min", "m")
ret["key"] = REKOBO_LKEY.get(key.lower(), okey)
return ret
def compare(self, abspath):
if abspath.endswith(".au"):
return {}
print("\n" + abspath)
r1 = self.get_mutagen(abspath)
r2 = self.get_ffprobe(abspath)
keys = {}
for d in [r1, r2]:
for k in d.keys():
keys[k] = True
diffs = []
l1 = []
l2 = []
for k in sorted(keys.keys()):
if k in [".q", ".dur"]:
continue # lenient
v1 = r1.get(k)
v2 = r2.get(k)
if v1 == v2:
print(" ", k, v1)
elif v1 != "0000": # ffprobe date=0
diffs.append(k)
print(" 1", k, v1)
print(" 2", k, v2)
if v1:
l1.append(k)
if v2:
l2.append(k)
if diffs:
raise Exception()
return r1
def get_mutagen(self, abspath):
import mutagen
try:
md = mutagen.File(fsenc(abspath), easy=True)
x = md.info.length
except Exception as ex:
return {}
ret = {}
try:
dur = int(md.info.length)
try:
q = int(md.info.bitrate / 1024)
except:
q = int((os.path.getsize(fsenc(abspath)) / dur) / 128)
ret[".dur"] = [0, dur]
ret[".q"] = [0, q]
except:
pass
return self.normalize_tags(ret, md)
def get_ffprobe(self, abspath):
ret, md = ffprobe(abspath)
return self.normalize_tags(ret, md)
def get_bin(self, parsers, abspath):
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
pypath = [str(pypath)] + [str(x) for x in sys.path if x]
pypath = str(os.pathsep.join(pypath))
env = os.environ.copy()
env["PYTHONPATH"] = pypath
ret = {}
for tagname, mp in parsers.items():
try:
cmd = [sys.executable, mp.bin, abspath]
args = {"env": env, "timeout": mp.timeout}
if WINDOWS:
args["creationflags"] = 0x4000
else:
cmd = ["nice"] + cmd
cmd = [fsenc(x) for x in cmd]
v = sp.check_output(cmd, **args).strip()
if not v:
continue
if "," not in tagname:
ret[tagname] = v.decode("utf-8")
else:
v = json.loads(v)
for tag in tagname.split(","):
if tag and tag in v:
ret[tag] = v[tag]
except:
pass
return ret

95
copyparty/star.py Normal file
View File

@@ -0,0 +1,95 @@
import os
import tarfile
import threading
from .sutil import errdesc
from .util import Queue, fsenc
class QFile(object):
"""file-like object which buffers writes into a queue"""
def __init__(self):
self.q = Queue(64)
self.bq = []
self.nq = 0
def write(self, buf):
if buf is None or self.nq >= 240 * 1024:
self.q.put(b"".join(self.bq))
self.bq = []
self.nq = 0
if buf is None:
self.q.put(None)
else:
self.bq.append(buf)
self.nq += len(buf)
class StreamTar(object):
"""construct in-memory tar file from the given path"""
def __init__(self, fgen, **kwargs):
self.ci = 0
self.co = 0
self.qfile = QFile()
self.fgen = fgen
self.errf = None
# python 3.8 changed to PAX_FORMAT as default,
# waste of space and don't care about the new features
fmt = tarfile.GNU_FORMAT
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt)
w = threading.Thread(target=self._gen)
w.daemon = True
w.start()
def gen(self):
while True:
buf = self.qfile.q.get()
if not buf:
break
self.co += len(buf)
yield buf
yield None
if self.errf:
os.unlink(self.errf["ap"])
def ser(self, f):
name = f["vp"]
src = f["ap"]
fsi = f["st"]
inf = tarfile.TarInfo(name=name)
inf.mode = fsi.st_mode
inf.size = fsi.st_size
inf.mtime = fsi.st_mtime
inf.uid = 0
inf.gid = 0
self.ci += inf.size
with open(fsenc(src), "rb", 512 * 1024) as f:
self.tar.addfile(inf, f)
def _gen(self):
errors = []
for f in self.fgen:
if "err" in f:
errors.append([f["vp"], f["err"]])
continue
try:
self.ser(f)
except Exception as ex:
errors.append([f["vp"], repr(ex)])
if errors:
self.errf = errdesc(errors)
self.ser(self.errf)
self.tar.close()
self.qfile.write(None)
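A hedged usage sketch (paths are hypothetical, not part of the diff): StreamTar is fed the same {"vp","ap","st"} dicts that VFS.zipgen produces, and gen() ends with a None sentinel, so a consumer stops on it:

import os

def fgen():
    for ap in ["/tmp/a.txt", "/tmp/b.txt"]:
        yield {"vp": os.path.basename(ap), "ap": ap, "st": os.stat(ap)}

t = StreamTar(fgen())
with open("/tmp/out.tar", "wb") as f:
    for buf in t.gen():
        if buf is None:
            break
        f.write(buf)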

25
copyparty/sutil.py Normal file
View File

@@ -0,0 +1,25 @@
import os
import time
import tempfile
from datetime import datetime
def errdesc(errors):
report = ["copyparty failed to add the following files to the archive:", ""]
for fn, err in errors:
report.extend([" file: {}".format(fn), "error: {}".format(err), ""])
with tempfile.NamedTemporaryFile(prefix="copyparty-", delete=False) as tf:
tf_path = tf.name
tf.write("\r\n".join(report).encode("utf-8", "replace"))
dt = datetime.utcfromtimestamp(time.time())
dt = dt.strftime("%Y-%m%d-%H%M%S")
os.chmod(tf_path, 0o444)
return {
"vp": "archive-errors-{}.txt".format(dt),
"ap": tf_path,
"st": os.stat(tf_path),
}

View File

@@ -2,6 +2,7 @@
from __future__ import print_function, unicode_literals
import re
import os
import sys
import time
import threading
@@ -9,9 +10,11 @@ from datetime import datetime, timedelta
import calendar
from .__init__ import PY2, WINDOWS, MACOS, VT100
from .util import mp
from .authsrv import AuthSrv
from .tcpsrv import TcpSrv
from .up2k import Up2k
from .util import mp
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP
class SvcHub(object):
@@ -34,9 +37,29 @@ class SvcHub(object):
self.log = self._log_disabled if args.q else self._log_enabled
# jank goes here
auth = AuthSrv(self.args, self.log, False)
if args.ls:
auth.dbg_ls()
# initiate all services to manage
self.tcpsrv = TcpSrv(self)
self.up2k = Up2k(self)
self.up2k = Up2k(self, auth.vfs.all_vols)
self.thumbsrv = None
if not args.no_thumb:
if HAVE_PIL:
if not HAVE_WEBP:
args.th_no_webp = True
msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old"
self.log("thumb", msg, c=3)
self.thumbsrv = ThumbSrv(self, auth.vfs.all_vols)
else:
msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n"
self.log(
"thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3
)
# decide which worker impl to use
if self.check_mp_enable():
@@ -63,18 +86,29 @@ class SvcHub(object):
self.tcpsrv.shutdown()
self.broker.shutdown()
if self.thumbsrv:
self.thumbsrv.shutdown()
for n in range(200): # 10s
time.sleep(0.05)
if self.thumbsrv.stopped():
break
if n == 3:
print("waiting for thumbsrv...")
print("nailed it")
def _log_disabled(self, src, msg):
def _log_disabled(self, src, msg, c=0):
pass
def _log_enabled(self, src, msg):
def _log_enabled(self, src, msg, c=0):
"""handles logging from all components"""
with self.log_mutex:
now = time.time()
if now >= self.next_day:
dt = datetime.utcfromtimestamp(now)
print("\033[36m{}\033[0m".format(dt.strftime("%Y-%m-%d")))
print("\033[36m{}\033[0m\n".format(dt.strftime("%Y-%m-%d")), end="")
# unix timestamp of next 00:00:00 (leap-seconds safe)
day_now = dt.day
@@ -84,23 +118,30 @@ class SvcHub(object):
dt = dt.replace(hour=0, minute=0, second=0)
self.next_day = calendar.timegm(dt.utctimetuple())
fmt = "\033[36m{} \033[33m{:21} \033[0m{}"
fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n"
if not VT100:
fmt = "{} {:21} {}"
fmt = "{} {:21} {}\n"
if "\033" in msg:
msg = self.ansi_re.sub("", msg)
if "\033" in src:
src = self.ansi_re.sub("", src)
elif c:
if isinstance(c, int):
msg = "\033[3{}m{}".format(c, msg)
elif "\033" not in c:
msg = "\033[{}m{}\033[0m".format(c, msg)
else:
msg = "{}{}\033[0m".format(c, msg)
ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3]
msg = fmt.format(ts, src, msg)
try:
print(msg)
print(msg, end="")
except UnicodeEncodeError:
try:
print(msg.encode("utf-8", "replace").decode())
print(msg.encode("utf-8", "replace").decode(), end="")
except:
print(msg.encode("ascii", "replace").decode())
print(msg.encode("ascii", "replace").decode(), end="")
def check_mp_support(self):
vmin = sys.version_info[1]
@@ -129,8 +170,8 @@ class SvcHub(object):
return None
def check_mp_enable(self):
if self.args.j == 0:
self.log("root", "multiprocessing disabled by argument -j 0;")
if self.args.j == 1:
self.log("root", "multiprocessing disabled by argument -j 1;")
return False
if mp.cpu_count() <= 1:

271
copyparty/szip.py Normal file
View File

@@ -0,0 +1,271 @@
import os
import time
import zlib
import struct
from datetime import datetime
from .sutil import errdesc
from .util import yieldfile, sanitize_fn
def dostime2unix(buf):
t, d = struct.unpack("<HH", buf)
ts = (t & 0x1F) * 2
tm = (t >> 5) & 0x3F
th = t >> 11
dd = d & 0x1F
dm = (d >> 5) & 0xF
dy = (d >> 9) + 1980
tt = (dy, dm, dd, th, tm, ts)
tf = "{:04d}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}"
iso = tf.format(*tt)
dt = datetime.strptime(iso, "%Y-%m-%d %H:%M:%S")
return int(dt.timestamp())
def unixtime2dos(ts):
tt = time.gmtime(ts)
dy, dm, dd, th, tm, ts = list(tt)[:6]
bd = ((dy - 1980) << 9) + (dm << 5) + dd
bt = (th << 11) + (tm << 5) + ts // 2
return struct.pack("<HH", bt, bd)
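A quick sketch of round-tripping the two helpers above; note that DOS timestamps have 2-second resolution, and dostime2unix parses the result as local time, so the value can shift by the UTC offset:

import struct

ts = 1622973600  # 2021-06-06 10:00:00 UTC
buf = unixtime2dos(ts)
print(struct.unpack("<HH", buf))  # (time word, date word) as packed above
print(dostime2unix(buf))          # close to ts, modulo the 2s rounding and the local-time caveat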
def gen_fdesc(sz, crc32, z64):
ret = b"\x50\x4b\x07\x08"
fmt = "<LQQ" if z64 else "<LLL"
ret += struct.pack(fmt, crc32, sz, sz)
return ret
def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
"""
does regular file headers
and the central directory meme if h_pos is set
(h_pos = absolute position of the regular header)
"""
# appnote 4.5 / zip 3.0 (2008) / unzip 6.0 (2009) says to add z64
# extinfo for values which exceed H, but that becomes an off-by-one
# (can't tell if it was clamped or exactly maxval), make it obvious
z64 = sz >= 0xFFFFFFFF
z64v = [sz, sz] if z64 else []
if h_pos and h_pos >= 0xFFFFFFFF:
# central, also consider ptr to original header
z64v.append(h_pos)
# confusingly this doesn't bump if h_pos
req_ver = b"\x2d\x00" if z64 else b"\x0a\x00"
if crc32:
crc32 = struct.pack("<L", crc32)
else:
crc32 = b"\x00" * 4
if h_pos is None:
# 4b magic, 2b min-ver
ret = b"\x50\x4b\x03\x04" + req_ver
else:
# 4b magic, 2b spec-ver, 2b min-ver
ret = b"\x50\x4b\x01\x02\x1e\x03" + req_ver
ret += b"\x00" if pre_crc else b"\x08" # streaming
ret += b"\x08" if utf8 else b"\x00" # appnote 6.3.2 (2007)
# 2b compression, 4b time, 4b crc
ret += b"\x00\x00" + unixtime2dos(lastmod) + crc32
# spec says to put zeros when !crc if bit3 (streaming)
# however infozip does actual sz and it even works on winxp
# (same reasoning for z64 extradata later)
vsz = 0xFFFFFFFF if z64 else sz
ret += struct.pack("<LL", vsz, vsz)
# windows support (the "?" replace below too)
fn = sanitize_fn(fn, ok="/")
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
z64_len = len(z64v) * 8 + 4 if z64v else 0
ret += struct.pack("<HH", len(bfn), z64_len)
if h_pos is not None:
# 2b comment, 2b diskno
ret += b"\x00" * 4
# 2b internal.attr, 4b external.attr
# infozip-macos: 0100 0000 a481 file:644
# infozip-macos: 0100 0100 0080 file:000
ret += b"\x01\x00\x00\x00\xa4\x81"
# 4b local-header-ofs
ret += struct.pack("<L", min(h_pos, 0xFFFFFFFF))
ret += bfn
if z64v:
ret += struct.pack("<HH" + "Q" * len(z64v), 1, len(z64v) * 8, *z64v)
return ret
def gen_ecdr(items, cdir_pos, cdir_end):
"""
summary of all file headers,
usually the zipfile footer unless something clamps
"""
ret = b"\x50\x4b\x05\x06"
# 2b ndisk, 2b disk0
ret += b"\x00" * 4
cdir_sz = cdir_end - cdir_pos
nitems = min(0xFFFF, len(items))
csz = min(0xFFFFFFFF, cdir_sz)
cpos = min(0xFFFFFFFF, cdir_pos)
need_64 = nitems == 0xFFFF or 0xFFFFFFFF in [csz, cpos]
# 2b tnfiles, 2b dnfiles, 4b dir sz, 4b dir pos
ret += struct.pack("<HHLL", nitems, nitems, csz, cpos)
# 2b comment length
ret += b"\x00\x00"
return [ret, need_64]
def gen_ecdr64(items, cdir_pos, cdir_end):
"""
z64 end of central directory
added when numfiles or a headerptr clamps
"""
ret = b"\x50\x4b\x06\x06"
# 8b own length from hereon
ret += b"\x2c" + b"\x00" * 7
# 2b spec-ver, 2b min-ver
ret += b"\x1e\x03\x2d\x00"
# 4b ndisk, 4b disk0
ret += b"\x00" * 8
# 8b tnfiles, 8b dnfiles, 8b dir sz, 8b dir pos
cdir_sz = cdir_end - cdir_pos
ret += struct.pack("<QQQQ", len(items), len(items), cdir_sz, cdir_pos)
return ret
def gen_ecdr64_loc(ecdr64_pos):
"""
z64 end of central directory locator
points to ecdr64
why
"""
ret = b"\x50\x4b\x06\x07"
# 4b cdisk, 8b start of ecdr64, 4b ndisks
ret += struct.pack("<LQL", 0, ecdr64_pos, 1)
return ret
class StreamZip(object):
def __init__(self, fgen, utf8=False, pre_crc=False):
self.fgen = fgen
self.utf8 = utf8
self.pre_crc = pre_crc
self.pos = 0
self.items = []
def _ct(self, buf):
self.pos += len(buf)
return buf
def ser(self, f):
name = f["vp"]
src = f["ap"]
st = f["st"]
sz = st.st_size
ts = st.st_mtime + 1
crc = None
if self.pre_crc:
crc = 0
for buf in yieldfile(src):
crc = zlib.crc32(buf, crc)
crc &= 0xFFFFFFFF
h_pos = self.pos
buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc)
yield self._ct(buf)
crc = crc or 0
for buf in yieldfile(src):
if not self.pre_crc:
crc = zlib.crc32(buf, crc)
yield self._ct(buf)
crc &= 0xFFFFFFFF
self.items.append([name, sz, ts, crc, h_pos])
z64 = sz >= 4 * 1024 * 1024 * 1024
if z64 or not self.pre_crc:
buf = gen_fdesc(sz, crc, z64)
yield self._ct(buf)
def gen(self):
errors = []
for f in self.fgen:
if "err" in f:
errors.append([f["vp"], f["err"]])
continue
try:
for x in self.ser(f):
yield x
except Exception as ex:
errors.append([f["vp"], repr(ex)])
if errors:
errf = errdesc(errors)
print(repr(errf))
for x in self.ser(errf):
yield x
cdir_pos = self.pos
for name, sz, ts, crc, h_pos in self.items:
buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
yield self._ct(buf)
cdir_end = self.pos
_, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end)
if need_64:
ecdir64_pos = self.pos
buf = gen_ecdr64(self.items, cdir_pos, cdir_end)
yield self._ct(buf)
buf = gen_ecdr64_loc(ecdir64_pos)
yield self._ct(buf)
ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
yield self._ct(ecdr)
if errors:
os.unlink(errf["ap"])
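Mirroring the tar sketch earlier, a hedged example of driving StreamZip (filenames hypothetical); each yielded chunk goes straight to the client, or here, to a file:

import os

def fgen():
    for ap in ["/tmp/a.txt", "/tmp/b.txt"]:
        yield {"vp": os.path.basename(ap), "ap": ap, "st": os.stat(ap)}

z = StreamZip(fgen(), utf8=True)
with open("/tmp/out.zip", "wb") as f:
    for buf in z.gen():
        f.write(buf)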

View File

@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import re
import time
import socket
import select
from .util import chkcmd, Counter
@@ -23,55 +24,80 @@ class TcpSrv(object):
ip = "127.0.0.1"
eps = {ip: "local only"}
if self.args.i != ip:
eps = self.detect_interfaces(self.args.i) or {self.args.i: "external"}
nonlocals = [x for x in self.args.i if x != ip]
if nonlocals:
eps = self.detect_interfaces(self.args.i)
if not eps:
for x in nonlocals:
eps[x] = "external"
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
self.log(
"tcpsrv",
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
ip, self.args.p, desc
),
)
for port in sorted(self.args.p):
self.log(
"tcpsrv",
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
ip, port, desc
),
)
self.srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.srv = []
for ip in self.args.i:
for port in self.args.p:
self.srv.append(self._listen(ip, port))
def _listen(self, ip, port):
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
try:
self.srv.bind((self.args.i, self.args.p))
srv.bind((ip, port))
return srv
except (OSError, socket.error) as ex:
if ex.errno == 98:
raise Exception(
"\033[1;31mport {} is busy on interface {}\033[0m".format(
self.args.p, self.args.i
)
)
if ex.errno == 99:
raise Exception(
"\033[1;31minterface {} does not exist\033[0m".format(self.args.i)
)
if ex.errno in [98, 48]:
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
elif ex.errno in [99, 49]:
e = "\033[1;31minterface {} does not exist\033[0m".format(ip)
else:
raise
raise Exception(e)
def run(self):
self.srv.listen(self.args.nc)
self.log("tcpsrv", "listening @ {0}:{1}".format(self.args.i, self.args.p))
for srv in self.srv:
srv.listen(self.args.nc)
ip, port = srv.getsockname()
self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port))
while True:
self.log("tcpsrv", "-" * 1 + "C-ncli")
if self.args.log_conn:
self.log("tcpsrv", "|%sC-ncli" % ("-" * 1,), c="1;30")
if self.num_clients.v >= self.args.nc:
time.sleep(0.1)
continue
self.log("tcpsrv", "-" * 2 + "C-acc1")
sck, addr = self.srv.accept()
self.log("%s %s" % addr, "-" * 3 + "C-acc2")
self.num_clients.add()
self.hub.broker.put(False, "httpconn", sck, addr)
if self.args.log_conn:
self.log("tcpsrv", "|%sC-acc1" % ("-" * 2,), c="1;30")
ready, _, _ = select.select(self.srv, [], [])
for srv in ready:
sck, addr = srv.accept()
sip, sport = srv.getsockname()
if self.args.log_conn:
self.log(
"%s %s" % addr,
"|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
"-" * 3, sip, sport % 8, sport
),
c="1;30",
)
self.num_clients.add()
self.hub.broker.put(False, "httpconn", sck, addr)
def shutdown(self):
self.log("tcpsrv", "ok bye")
def detect_interfaces(self, listen_ip):
def detect_interfaces(self, listen_ips):
eps = {}
# get all ips and their interfaces
@@ -85,8 +111,9 @@ class TcpSrv(object):
for ln in ip_addr.split("\n"):
try:
ip, dev = r.match(ln.rstrip()).groups()
if listen_ip in ["0.0.0.0", ip]:
eps[ip] = dev
for lip in listen_ips:
if lip in ["0.0.0.0", ip]:
eps[ip] = dev
except:
pass
@@ -113,11 +140,12 @@ class TcpSrv(object):
s.close()
if default_route and listen_ip in ["0.0.0.0", default_route]:
desc = "\033[32mexternal"
try:
eps[default_route] += ", " + desc
except:
eps[default_route] = desc
for lip in listen_ips:
if default_route and lip in ["0.0.0.0", default_route]:
desc = "\033[32mexternal"
try:
eps[default_route] += ", " + desc
except:
eps[default_route] = desc
return eps

49
copyparty/th_cli.py Normal file
View File

@@ -0,0 +1,49 @@
import os
import time
from .util import Cooldown
from .th_srv import thumb_path, THUMBABLE, FMT_FF
class ThumbCli(object):
def __init__(self, broker):
self.broker = broker
self.args = broker.args
# cache on both sides for less broker spam
self.cooldown = Cooldown(self.args.th_poke)
def get(self, ptop, rem, mtime, fmt):
ext = rem.rsplit(".")[-1].lower()
if ext not in THUMBABLE:
return None
if self.args.no_vthumb and ext in FMT_FF:
return None
if fmt == "j" and self.args.th_no_jpg:
fmt = "w"
if fmt == "w" and self.args.th_no_webp:
fmt = "j"
tpath = thumb_path(ptop, rem, mtime, fmt)
ret = None
try:
st = os.stat(tpath)
if st.st_size:
ret = tpath
else:
return None
except:
pass
if ret:
tdir = os.path.dirname(tpath)
if self.cooldown.poke(tdir):
self.broker.put(False, "thumbsrv.poke", tdir)
return ret
x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt)
return x.get()

375
copyparty/th_srv.py Normal file
View File

@@ -0,0 +1,375 @@
import os
import sys
import time
import shutil
import base64
import hashlib
import threading
import subprocess as sp
from .__init__ import PY2
from .util import fsenc, mchkcmd, Queue, Cooldown, BytesIO
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
if not PY2:
unicode = str
HAVE_PIL = False
HAVE_HEIF = False
HAVE_AVIF = False
HAVE_WEBP = False
try:
from PIL import Image, ImageOps
HAVE_PIL = True
try:
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
HAVE_WEBP = True
except:
pass
try:
from pyheif_pillow_opener import register_heif_opener
register_heif_opener()
HAVE_HEIF = True
except:
pass
try:
import pillow_avif
HAVE_AVIF = True
except:
pass
except:
pass
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# ffmpeg -formats
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
if HAVE_HEIF:
FMT_PIL += " heif heifs heic heics"
if HAVE_AVIF:
FMT_PIL += " avif avifs"
FMT_PIL, FMT_FF = [{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FF]]
THUMBABLE = {}
if HAVE_PIL:
THUMBABLE.update(FMT_PIL)
if HAVE_FFMPEG and HAVE_FFPROBE:
THUMBABLE.update(FMT_FF)
def thumb_path(ptop, rem, mtime, fmt):
# base16 = 16 = 256
# b64-lc = 38 = 1444
# base64 = 64 = 4096
try:
rd, fn = rem.rsplit("/", 1)
except:
rd = ""
fn = rem
if rd:
h = hashlib.sha512(fsenc(rd)).digest()[:24]
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
else:
rd = "top"
# could keep original filenames but this is safer re pathlen
h = hashlib.sha512(fsenc(fn)).digest()[:24]
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
return "{}/.hist/th/{}/{}.{:x}.{}".format(
ptop, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
)
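As a hedged illustration of the cache layout produced by thumb_path (volume root and filename are made up):

p = thumb_path("/srv/music", "album/cover.png", 1622973600, "w")
print(p)
# roughly /srv/music/.hist/th/xx/yy/<24-char hash of "album">/<24-char hash of "cover.png">.60bc9ca0.webp
# i.e. hashed folder, hashed filename, the mtime in hex, and the requested format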
class ThumbSrv(object):
def __init__(self, hub, vols):
self.hub = hub
self.vols = [v.realpath for v in vols.values()]
self.args = hub.args
self.log_func = hub.log
res = hub.args.th_size.split("x")
self.res = tuple([int(x) for x in res])
self.poke_cd = Cooldown(self.args.th_poke)
self.mutex = threading.Lock()
self.busy = {}
self.stopping = False
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
self.q = Queue(self.nthr * 4)
for _ in range(self.nthr):
t = threading.Thread(target=self.worker)
t.daemon = True
t.start()
if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE):
missing = []
if not HAVE_FFMPEG:
missing.append("ffmpeg")
if not HAVE_FFPROBE:
missing.append("ffprobe")
msg = "cannot create video thumbnails because some of the required programs are not available: "
msg += ", ".join(missing)
self.log(msg, c=3)
t = threading.Thread(target=self.cleaner)
t.daemon = True
t.start()
def log(self, msg, c=0):
self.log_func("thumb", msg, c)
def shutdown(self):
self.stopping = True
for _ in range(self.nthr):
self.q.put(None)
def stopped(self):
with self.mutex:
return not self.nthr
def get(self, ptop, rem, mtime, fmt):
tpath = thumb_path(ptop, rem, mtime, fmt)
abspath = os.path.join(ptop, rem)
cond = threading.Condition()
with self.mutex:
try:
self.busy[tpath].append(cond)
self.log("wait {}".format(tpath))
except:
thdir = os.path.dirname(tpath)
try:
os.makedirs(thdir)
except:
pass
inf_path = os.path.join(thdir, "dir.txt")
if not os.path.exists(inf_path):
with open(inf_path, "wb") as f:
f.write(fsenc(os.path.dirname(abspath)))
self.busy[tpath] = [cond]
self.q.put([abspath, tpath])
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
while not self.stopping:
with self.mutex:
if tpath not in self.busy:
break
with cond:
cond.wait()
try:
st = os.stat(tpath)
if st.st_size:
return tpath
except:
pass
return None
def worker(self):
while not self.stopping:
task = self.q.get()
if not task:
break
abspath, tpath = task
ext = abspath.split(".")[-1].lower()
fun = None
if not os.path.exists(tpath):
if ext in FMT_PIL:
fun = self.conv_pil
elif ext in FMT_FF:
fun = self.conv_ffmpeg
if fun:
try:
fun(abspath, tpath)
except Exception as ex:
msg = "{} failed on {}\n {!r}"
self.log(msg.format(fun.__name__, abspath, ex), 3)
with open(tpath, "wb") as _:
pass
with self.mutex:
subs = self.busy[tpath]
del self.busy[tpath]
for x in subs:
with x:
x.notify_all()
with self.mutex:
self.nthr -= 1
def conv_pil(self, abspath, tpath):
with Image.open(fsenc(abspath)) as im:
crop = not self.args.th_no_crop
res2 = self.res
if crop:
res2 = (res2[0] * 2, res2[1] * 2)
try:
im.thumbnail(res2, resample=Image.LANCZOS)
if crop:
iw, ih = im.size
dw, dh = self.res
res = (min(iw, dw), min(ih, dh))
im = ImageOps.fit(im, res, method=Image.LANCZOS)
except:
im.thumbnail(self.res)
if im.mode not in ("RGB", "L"):
im = im.convert("RGB")
if tpath.endswith(".webp"):
# quality 80 = pillow-default
# quality 75 = ffmpeg-default
# method 0 = pillow-default, fast
# method 4 = ffmpeg-default
# method 6 = max, slow
im.save(tpath, quality=40, method=6)
else:
im.save(tpath, quality=40) # default=75
def conv_ffmpeg(self, abspath, tpath):
ret, _ = ffprobe(abspath)
dur = ret[".dur"][1] if ".dur" in ret else 4
seek = "{:.0f}".format(dur / 3)
scale = "scale={0}:{1}:force_original_aspect_ratio="
if self.args.th_no_crop:
scale += "decrease,setsar=1:1"
else:
scale += "increase,crop={0}:{1},setsar=1:1"
scale = scale.format(*list(self.res)).encode("utf-8")
cmd = [
b"ffmpeg",
b"-nostdin",
b"-hide_banner",
b"-ss",
seek,
b"-i",
fsenc(abspath),
b"-vf",
scale,
b"-vframes",
b"1",
]
if tpath.endswith(".jpg"):
cmd += [
b"-q:v",
b"6", # default=??
]
else:
cmd += [
b"-q:v",
b"50", # default=75
b"-compression_level:v",
b"6", # default=4, 0=fast, 6=max
]
cmd += [fsenc(tpath)]
mchkcmd(cmd)
def poke(self, tdir):
if not self.poke_cd.poke(tdir):
return
ts = int(time.time())
try:
p1 = os.path.dirname(tdir)
p2 = os.path.dirname(p1)
for dp in [tdir, p1, p2]:
os.utime(fsenc(dp), (ts, ts))
except:
pass
def cleaner(self):
interval = self.args.th_clean
while True:
time.sleep(interval)
for vol in self.vols:
vol += "/.hist/th"
self.log("\033[Jcln {}/\033[A".format(vol))
self.clean(vol)
self.log("\033[Jcln ok")
def clean(self, vol):
# self.log("cln {}".format(vol))
maxage = self.args.th_maxage
now = time.time()
prev_b64 = None
prev_fp = None
try:
ents = os.listdir(vol)
except:
return
for f in sorted(ents):
fp = os.path.join(vol, f)
cmp = fp.lower().replace("\\", "/")
# "top" or b64 prefix/full (a folder)
if len(f) <= 3 or len(f) == 24:
age = now - os.path.getmtime(fp)
if age > maxage:
with self.mutex:
safe = True
for k in self.busy.keys():
if k.lower().replace("\\", "/").startswith(cmp):
safe = False
break
if safe:
self.log("rm -rf [{}]".format(fp))
shutil.rmtree(fp, ignore_errors=True)
else:
self.clean(fp)
continue
# thumb file
try:
b64, ts, ext = f.split(".")
if len(b64) != 24 or len(ts) != 8 or ext not in ["jpg", "webp"]:
raise Exception()
ts = int(ts, 16)
except:
if f != "dir.txt":
self.log("foreign file in thumbs dir: [{}]".format(fp), 1)
continue
if b64 == prev_b64:
self.log("rm replaced [{}]".format(fp))
os.unlink(prev_fp)
prev_b64 = b64
prev_fp = fp

269
copyparty/u2idx.py Normal file
View File

@@ -0,0 +1,269 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import re
import os
import time
import threading
from datetime import datetime
from .util import u8safe, s3dec, html_escape, Pebkac
from .up2k import up2k_wark_from_hashlist
try:
HAVE_SQLITE3 = True
import sqlite3
except:
HAVE_SQLITE3 = False
class U2idx(object):
def __init__(self, args, log_func):
self.args = args
self.log_func = log_func
self.timeout = args.srch_time
if not HAVE_SQLITE3:
self.log("could not load sqlite3; searchign wqill be disabled")
return
self.cur = {}
self.mem_cur = sqlite3.connect(":memory:")
self.mem_cur.execute(r"create table a (b text)")
self.p_end = None
self.p_dur = 0
def log(self, msg, c=0):
self.log_func("u2idx", msg, c)
def fsearch(self, vols, body):
"""search by up2k hashlist"""
if not HAVE_SQLITE3:
return []
fsize = body["size"]
fhash = body["hash"]
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
uq = "where substr(w,1,16) = ? and w = ?"
uv = [wark[:16], wark]
try:
return self.run_query(vols, uq, uv)[0]
except Exception as ex:
raise Pebkac(500, repr(ex))
def get_cur(self, ptop):
cur = self.cur.get(ptop)
if cur:
return cur
cur = _open(ptop)
if not cur:
return None
self.cur[ptop] = cur
return cur
def search(self, vols, uq):
"""search by query params"""
if not HAVE_SQLITE3:
return []
q = ""
va = []
joins = ""
is_key = True
is_size = False
is_date = False
kw_key = ["(", ")", "and ", "or ", "not "]
kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "]
ptn_mt = re.compile(r"^\.?[a-z]+$")
mt_ctr = 0
mt_keycmp = "substr(up.w,1,16)"
mt_keycmp2 = None
while True:
uq = uq.strip()
if not uq:
break
ok = False
for kw in kw_key + kw_val:
if uq.startswith(kw):
is_key = kw in kw_key
uq = uq[len(kw) :]
ok = True
q += kw
break
if ok:
continue
v, uq = (uq + " ").split(" ", 1)
if is_key:
is_key = False
if v == "size":
v = "up.sz"
is_size = True
elif v == "date":
v = "up.mt"
is_date = True
elif v == "path":
v = "up.rd"
elif v == "name":
v = "up.fn"
elif v == "tags" or ptn_mt.match(v):
mt_ctr += 1
mt_keycmp2 = "mt{}.w".format(mt_ctr)
joins += "inner join mt mt{} on {} = {} ".format(
mt_ctr, mt_keycmp, mt_keycmp2
)
mt_keycmp = mt_keycmp2
if v == "tags":
v = "mt{0}.v".format(mt_ctr)
else:
v = "+mt{0}.k = '{1}' and mt{0}.v".format(mt_ctr, v)
else:
raise Pebkac(400, "invalid key [" + v + "]")
q += v + " "
continue
head = ""
tail = ""
if is_date:
is_date = False
v = v.upper().rstrip("Z").replace(",", " ").replace("T", " ")
while " " in v:
v = v.replace(" ", " ")
for fmt in [
"%Y-%m-%d %H:%M:%S",
"%Y-%m-%d %H:%M",
"%Y-%m-%d %H",
"%Y-%m-%d",
]:
try:
v = datetime.strptime(v, fmt).timestamp()
break
except:
pass
elif is_size:
is_size = False
v = int(float(v) * 1024 * 1024)
else:
if v.startswith("*"):
head = "'%'||"
v = v[1:]
if v.endswith("*"):
tail = "||'%'"
v = v[:-1]
q += " {}?{} ".format(head, tail)
va.append(v)
is_key = True
try:
return self.run_query(vols, joins + "where " + q, va)
except Exception as ex:
raise Pebkac(500, repr(ex))
def run_query(self, vols, uq, uv):
done_flag = []
self.active_id = "{:.6f}_{}".format(
time.time(), threading.current_thread().ident
)
thr = threading.Thread(
target=self.terminator,
args=(
self.active_id,
done_flag,
),
)
thr.daemon = True
thr.start()
if not uq or not uv:
q = "select * from up"
v = ()
else:
q = "select up.* from up " + uq
v = tuple(uv)
self.log("qs: {!r} {!r}".format(q, v))
ret = []
lim = 1000
taglist = {}
for (vtop, ptop, flags) in vols:
cur = self.get_cur(ptop)
if not cur:
continue
self.active_cur = cur
sret = []
c = cur.execute(q, v)
for hit in c:
w, ts, sz, rd, fn = hit
lim -= 1
if lim <= 0:
break
if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn)
rp = "/".join([x for x in [vtop, rd, fn] if x])
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
for hit in sret:
w = hit["w"]
del hit["w"]
tags = {}
q2 = "select k, v from mt where w = ? and k != 'x'"
for k, v2 in cur.execute(q2, (w,)):
taglist[k] = True
tags[k] = v2
hit["tags"] = tags
ret.extend(sret)
done_flag.append(True)
self.active_id = None
# undupe hits from multiple metadata keys
if len(ret) > 1:
ret = [ret[0]] + [
y for x, y in zip(ret[:-1], ret[1:]) if x["rp"] != y["rp"]
]
return ret, list(taglist.keys())
def terminator(self, identifier, done_flag):
for _ in range(self.timeout):
time.sleep(1)
if done_flag:
return
if identifier == self.active_id:
self.active_cur.connection.interrupt()
def _open(ptop):
db_path = os.path.join(ptop, ".hist", "up2k.db")
if os.path.exists(db_path):
return sqlite3.connect(db_path).cursor()
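# to illustrate the query language handled by search() above, a filter such as
#   size > 10 and name like *yana*
# compiles to roughly the following (size values are read as MiB):
#   select up.* from up where up.sz > ? and up.fn like '%'||?||'%'
#   params: (10 * 1024 * 1024, "yana")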

File diff suppressed because it is too large

copyparty/util.py

@@ -2,16 +2,22 @@
from __future__ import print_function, unicode_literals
import re
import os
import sys
import time
import base64
import select
import struct
import hashlib
import platform
import traceback
import threading
import mimetypes
import contextlib
import subprocess as sp # nosec
from datetime import datetime
from .__init__ import PY2, WINDOWS
from .__init__ import PY2, WINDOWS, ANYWIN
from .stolen import surrogateescape
FAKE_MP = False
@@ -29,10 +35,12 @@ if not PY2:
from urllib.parse import unquote_to_bytes as unquote
from urllib.parse import quote_from_bytes as quote
from queue import Queue
from io import BytesIO
else:
from urllib import unquote # pylint: disable=no-name-in-module
from urllib import quote # pylint: disable=no-name-in-module
from Queue import Queue # pylint: disable=import-error,no-name-in-module
from StringIO import StringIO as BytesIO
surrogateescape.register_surrogateescape()
FS_ENCODING = sys.getfilesystemencoding()
@@ -40,9 +48,14 @@ if WINDOWS and PY2:
FS_ENCODING = "utf-8"
HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT"
HTTPCODE = {
200: "OK",
204: "No Content",
206: "Partial Content",
302: "Found",
304: "Not Modified",
400: "Bad Request",
403: "Forbidden",
@@ -51,11 +64,65 @@ HTTPCODE = {
413: "Payload Too Large",
416: "Requested Range Not Satisfiable",
422: "Unprocessable Entity",
429: "Too Many Requests",
500: "Internal Server Error",
501: "Not Implemented",
}
IMPLICATIONS = [
["e2dsa", "e2ds"],
["e2ds", "e2d"],
["e2tsr", "e2ts"],
["e2ts", "e2t"],
["e2t", "e2d"],
]
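# i.e. each pair means the first option implies the second, so enabling e2dsa
# also enables e2ds and e2d, and e2tsr cascades down through e2ts and e2t to e2d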
MIMES = {
"md": "text/plain; charset=UTF-8",
"opus": "audio/ogg; codecs=opus",
"webp": "image/webp",
}
REKOBO_KEY = {
v: ln.split(" ", 1)[0]
for ln in """
1B 6d B
2B 7d Gb F#
3B 8d Db C#
4B 9d Ab G#
5B 10d Eb D#
6B 11d Bb A#
7B 12d F
8B 1d C
9B 2d G
10B 3d D
11B 4d A
12B 5d E
1A 6m Abm G#m
2A 7m Ebm D#m
3A 8m Bbm A#m
4A 9m Fm
5A 10m Cm
6A 11m Gm
7A 12m Dm
8A 1m Am
9A 2m Em
10A 3m Bm
11A 4m Gbm F#m
12A 5m Dbm C#m
""".strip().split(
"\n"
)
for v in ln.strip().split(" ")[1:]
if v
}
REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}
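# e.g. both the open-key code and the traditional key name map to the camelot code
# at the start of their line: REKOBO_LKEY["1m"] == "8A", REKOBO_LKEY["gbm"] == "11A"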
class Counter(object):
def __init__(self, v=0):
self.v = v
@@ -70,6 +137,32 @@ class Counter(object):
self.v = absval
class Cooldown(object):
def __init__(self, maxage):
self.maxage = maxage
self.mutex = threading.Lock()
self.hist = {}
self.oldest = 0
def poke(self, key):
with self.mutex:
now = time.time()
ret = False
v = self.hist.get(key, 0)
if now - v > self.maxage:
self.hist[key] = now
ret = True
if self.oldest - now > self.maxage * 2:
self.hist = {
k: v for k, v in self.hist.items() if now - v < self.maxage
}
self.oldest = sorted(self.hist.values())[0]
return ret
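# e.g. Cooldown(2).poke("some-key") returns True at most once per 2 seconds per key;
# ThumbSrv.poke uses this to rate-limit the utime() calls that keep active thumb
# folders from being expired by the cleaner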
class Unrecv(object):
"""
undo any number of socket recv ops
@@ -94,6 +187,150 @@ class Unrecv(object):
self.buf = buf + self.buf
class ProgressPrinter(threading.Thread):
"""
periodically print progress info without linefeeds
"""
def __init__(self):
threading.Thread.__init__(self)
self.daemon = True
self.msg = None
self.end = False
self.start()
def run(self):
msg = None
while not self.end:
time.sleep(0.1)
if msg == self.msg or self.end:
continue
msg = self.msg
uprint(" {}\033[K\r".format(msg))
print("\033[K", end="")
sys.stdout.flush() # necessary on win10 even w/ stderr btw
def uprint(msg):
try:
print(msg, end="")
except UnicodeEncodeError:
try:
print(msg.encode("utf-8", "replace").decode(), end="")
except:
print(msg.encode("ascii", "replace").decode(), end="")
def nuprint(msg):
uprint("{}\n".format(msg))
def rice_tid():
tid = threading.current_thread().ident
c = struct.unpack(b"B" * 5, struct.pack(b">Q", tid)[-5:])
return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m"
def trace(*args, **kwargs):
t = time.time()
stack = "".join(
"\033[36m{}\033[33m{}".format(x[0].split(os.sep)[-1][:-3], x[1])
for x in traceback.extract_stack()[3:-1]
)
parts = ["{:.6f}".format(t), rice_tid(), stack]
if args:
parts.append(repr(args))
if kwargs:
parts.append(repr(kwargs))
msg = "\033[0m ".join(parts)
# _tracebuf.append(msg)
nuprint(msg)
@contextlib.contextmanager
def ren_open(fname, *args, **kwargs):
fdir = kwargs.pop("fdir", None)
suffix = kwargs.pop("suffix", None)
if fname == os.devnull:
with open(fname, *args, **kwargs) as f:
yield {"orz": [f, fname]}
return
if suffix:
ext = fname.split(".")[-1]
if len(ext) < 7:
suffix += "." + ext
orig_name = fname
bname = fname
ext = ""
while True:
ofs = bname.rfind(".")
if ofs < 0 or ofs < len(bname) - 7:
# doesn't look like an extension anymore
break
ext = bname[ofs:] + ext
bname = bname[:ofs]
b64 = ""
while True:
try:
if fdir:
fpath = os.path.join(fdir, fname)
else:
fpath = fname
if suffix and os.path.exists(fsenc(fpath)):
fpath += suffix
fname += suffix
ext += suffix
with open(fsenc(fpath), *args, **kwargs) as f:
if b64:
fp2 = "fn-trunc.{}.txt".format(b64)
fp2 = os.path.join(fdir, fp2)
with open(fsenc(fp2), "wb") as f2:
f2.write(orig_name.encode("utf-8"))
yield {"orz": [f, fname]}
return
except OSError as ex_:
ex = ex_
if ex.errno not in [36, 63] and (not WINDOWS or ex.errno != 22):
raise
if not b64:
b64 = (bname + ext).encode("utf-8", "replace")
b64 = hashlib.sha512(b64).digest()[:12]
b64 = base64.urlsafe_b64encode(b64).decode("utf-8").rstrip("=")
badlen = len(fname)
while len(fname) >= badlen:
if len(bname) < 8:
raise ex
if len(bname) > len(ext):
# drop the last letter of the filename
bname = bname[:-1]
else:
try:
# drop the leftmost sub-extension
_, ext = ext.split(".", 1)
except:
# okay do the first letter then
ext = "." + ext[2:]
fname = "{}~{}{}".format(bname, b64, ext)
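# e.g. a name too long for the filesystem is retried as "shortened-prefix~<b64 of
# 12 sha512 bytes><ext>", and the original name is stored next to it as "fn-trunc.<b64>.txt"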
class MultipartParser(object):
def __init__(self, log_func, sr, http_headers):
self.sr = sr
@@ -333,6 +570,41 @@ def read_header(sr):
return ret[:ofs].decode("utf-8", "surrogateescape").split("\r\n")
def humansize(sz, terse=False):
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
if sz < 1024:
break
sz /= 1024.0
ret = " ".join([str(sz)[:4].rstrip("."), unit])
if not terse:
return ret
return ret.replace("iB", "").replace(" ", "")
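# e.g. humansize(1536) == "1.5 KiB" and humansize(1536, terse=True) == "1.5K"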
def get_spd(nbyte, t0, t=None):
if t is None:
t = time.time()
bps = nbyte / ((t - t0) + 0.001)
s1 = humansize(nbyte).replace(" ", "\033[33m").replace("iB", "")
s2 = humansize(bps).replace(" ", "\033[35m").replace("iB", "")
return "{} \033[0m{}/s\033[0m".format(s1, s2)
def s2hms(s, optional_h=False):
s = int(s)
h, s = divmod(s, 3600)
m, s = divmod(s, 60)
if not h and optional_h:
return "{}:{:02}".format(m, s)
return "{}:{:02}:{:02}".format(h, m, s)
def undot(path):
ret = []
for node in path.split("/"):
@@ -349,11 +621,12 @@ def undot(path):
return "/".join(ret)
def sanitize_fn(fn):
fn = fn.replace("\\", "/").split("/")[-1]
def sanitize_fn(fn, ok="", bad=[]):
if "/" not in ok:
fn = fn.replace("\\", "/").split("/")[-1]
if WINDOWS:
for bad, good in [
if ANYWIN:
remap = [
["<", ""],
[">", ""],
[":", ""],
@@ -363,23 +636,56 @@ def sanitize_fn(fn):
["|", ""],
["?", ""],
["*", ""],
]:
fn = fn.replace(bad, good)
]
for a, b in [x for x in remap if x[0] not in ok]:
fn = fn.replace(a, b)
bad = ["con", "prn", "aux", "nul"]
bad.extend(["con", "prn", "aux", "nul"])
for n in range(1, 10):
bad += "com{0} lpt{0}".format(n).split(" ")
if fn.lower() in bad:
fn = "_" + fn
if fn.lower() in bad:
fn = "_" + fn
return fn.strip()
def u8safe(txt):
try:
return txt.encode("utf-8", "xmlcharrefreplace").decode("utf-8", "replace")
except:
return txt.encode("utf-8", "replace").decode("utf-8", "replace")
def exclude_dotfiles(filepaths):
for fpath in filepaths:
if not fpath.split("/")[-1].startswith("."):
yield fpath
return [x for x in filepaths if not x.split("/")[-1].startswith(".")]
def http_ts(ts):
file_dt = datetime.utcfromtimestamp(ts)
return file_dt.strftime(HTTP_TS_FMT)
def html_escape(s, quote=False, crlf=False):
"""html.escape but also newlines"""
s = s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
if quote:
s = s.replace('"', "&quot;").replace("'", "&#x27;")
if crlf:
s = s.replace("\r", "&#13;").replace("\n", "&#10;")
return s
def html_bescape(s, quote=False, crlf=False):
"""html.escape but bytestrings"""
s = s.replace(b"&", b"&amp;").replace(b"<", b"&lt;").replace(b">", b"&gt;")
if quote:
s = s.replace(b'"', b"&quot;").replace(b"'", b"&#x27;")
if crlf:
s = s.replace(b"\r", b"&#13;").replace(b"\n", b"&#10;")
return s
def quotep(txt):
@@ -396,8 +702,8 @@ def quotep(txt):
def unquotep(txt):
"""url unquoter which deals with bytes correctly"""
btxt = w8enc(txt)
unq1 = btxt.replace(b"+", b" ")
unq2 = unquote(unq1)
# btxt = btxt.replace(b"+", b" ")
unq2 = unquote(btxt)
return w8dec(unq2)
@@ -417,6 +723,16 @@ def w8enc(txt):
return txt.encode(FS_ENCODING, "surrogateescape")
def w8b64dec(txt):
"""decodes base64(filesystem-bytes) to wtf8"""
return w8dec(base64.urlsafe_b64decode(txt.encode("ascii")))
def w8b64enc(txt):
"""encodes wtf8 to base64(filesystem-bytes)"""
return base64.urlsafe_b64encode(w8enc(txt)).decode("ascii")
if PY2 and WINDOWS:
# moonrunes become \x3f with bytestrings,
# losing mojibake support is worth it
@@ -430,6 +746,43 @@ else:
fsdec = w8dec
def s3enc(mem_cur, rd, fn):
ret = []
for v in [rd, fn]:
try:
mem_cur.execute("select * from a where b = ?", (v,))
ret.append(v)
except:
ret.append("//" + w8b64enc(v))
# self.log("mojien/{} [{}] {}".format(k, v, ret[-1][2:]))
return tuple(ret)
def s3dec(rd, fn):
ret = []
for k, v in [["d", rd], ["f", fn]]:
if v.startswith("//"):
ret.append(w8b64dec(v[2:]))
# self.log("mojide/{} [{}] {}".format(k, ret[-1], v[2:]))
else:
ret.append(v)
return tuple(ret)
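# e.g. a path component that sqlite cannot store as text is kept as "//" plus the
# urlsafe-b64 of its filesystem bytes; s3dec undoes this, which is why run_query in
# u2idx checks for the "//" prefix before assembling the result path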
def atomic_move(src, dst):
src = fsenc(src)
dst = fsenc(dst)
if not PY2:
os.replace(src, dst)
else:
if os.path.exists(dst):
os.unlink(dst)
os.rename(src, dst)
def read_socket(sr, total_size):
remains = total_size
while remains > 0:
@@ -445,6 +798,59 @@ def read_socket(sr, total_size):
yield buf
def read_socket_unbounded(sr):
while True:
buf = sr.recv(32 * 1024)
if not buf:
return
yield buf
def read_socket_chunked(sr, log=None):
err = "expected chunk length, got [{}] |{}| instead"
while True:
buf = b""
while b"\r" not in buf:
rbuf = sr.recv(2)
if not rbuf or len(buf) > 16:
err = err.format(buf.decode("utf-8", "replace"), len(buf))
raise Pebkac(400, err)
buf += rbuf
if not buf.endswith(b"\n"):
sr.recv(1)
try:
chunklen = int(buf.rstrip(b"\r\n"), 16)
except:
err = err.format(buf.decode("utf-8", "replace"), len(buf))
raise Pebkac(400, err)
if chunklen == 0:
sr.recv(2) # \r\n after final chunk
return
if log:
log("receiving {} byte chunk".format(chunklen))
for chunk in read_socket(sr, chunklen):
yield chunk
sr.recv(2) # \r\n after each chunk too
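# e.g. the chunked body "5\r\nhello\r\n0\r\n\r\n" yields b"hello" once and then
# returns after consuming the terminating zero-length chunk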
def yieldfile(fn):
with open(fsenc(fn), "rb", 512 * 1024) as f:
while True:
buf = f.read(64 * 1024)
if not buf:
break
yield buf
def hashcopy(actor, fin, fout):
u32_lim = int((2 ** 31) * 0.9)
hashobj = hashlib.sha512()
@@ -464,6 +870,73 @@ def hashcopy(actor, fin, fout):
return tlen, hashobj.hexdigest(), digest_b64
def sendfile_py(lower, upper, f, s):
remains = upper - lower
f.seek(lower)
while remains > 0:
# time.sleep(0.01)
buf = f.read(min(4096, remains))
if not buf:
return remains
try:
s.sendall(buf)
remains -= len(buf)
except:
return remains
return 0
def sendfile_kern(lower, upper, f, s):
out_fd = s.fileno()
in_fd = f.fileno()
ofs = lower
while ofs < upper:
try:
req = min(2 ** 30, upper - ofs)
select.select([], [out_fd], [], 10)
n = os.sendfile(out_fd, in_fd, ofs, req)
except Exception as ex:
# print("sendfile: " + repr(ex))
n = 0
if n <= 0:
return upper - ofs
ofs += n
# print("sendfile: ok, sent {} now, {} total, {} remains".format(n, ofs - lower, upper - ofs))
return 0
def statdir(logger, scandir, lstat, top):
try:
btop = fsenc(top)
if scandir and hasattr(os, "scandir"):
src = "scandir"
with os.scandir(btop) as dh:
for fh in dh:
try:
yield [fsdec(fh.name), fh.stat(follow_symlinks=not lstat)]
except Exception as ex:
msg = "scan-stat: \033[36m{} @ {}"
logger(msg.format(repr(ex), fsdec(fh.path)))
else:
src = "listdir"
fun = os.lstat if lstat else os.stat
for name in os.listdir(btop):
abspath = os.path.join(btop, name)
try:
yield [fsdec(name), fun(abspath)]
except Exception as ex:
msg = "list-stat: \033[36m{} @ {}"
logger(msg.format(repr(ex), fsdec(abspath)))
except Exception as ex:
logger("{}: \033[31m{} @ {}".format(src, repr(ex), top))
def unescape_cookie(orig):
# mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn
ret = ""
@@ -492,11 +965,13 @@ def unescape_cookie(orig):
return ret
def guess_mime(url):
if url.endswith(".md"):
return ["text/plain; charset=UTF-8"]
def guess_mime(url, fallback="application/octet-stream"):
try:
_, ext = url.rsplit(".", 1)
except:
return fallback
return mimetypes.guess_type(url)
return MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback
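# e.g. guess_mime("a.opus") == "audio/ogg; codecs=opus" (MIMES override),
# guess_mime("a.jpg") == "image/jpeg" (stdlib), and guess_mime("noext") falls
# back to "application/octet-stream"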
def runcmd(*argv):
@@ -515,10 +990,25 @@ def chkcmd(*argv):
return sout, serr
def mchkcmd(argv, timeout=10):
if PY2:
with open(os.devnull, "wb") as f:
rv = sp.call(argv, stdout=f, stderr=f)
else:
rv = sp.call(argv, stdout=sp.DEVNULL, stderr=sp.DEVNULL, timeout=timeout)
if rv:
raise sp.CalledProcessError(rv, (argv[0], b"...", argv[-1]))
def gzip_orig_sz(fn):
with open(fsenc(fn), "rb") as f:
f.seek(-4, 2)
return struct.unpack(b"I", f.read(4))[0]
rv = f.read(4)
try:
return struct.unpack(b"I", rv)[0]
except:
return struct.unpack("I", rv)[0]
def py_desc():
@@ -528,7 +1018,11 @@ def py_desc():
if ofs > 0:
py_ver = py_ver[:ofs]
bitness = struct.calcsize(b"P") * 8
try:
bitness = struct.calcsize(b"P") * 8
except:
bitness = struct.calcsize("P") * 8
host_os = platform.system()
compiler = platform.python_compiler()
@@ -540,7 +1034,26 @@ def py_desc():
)
def align_tab(lines):
rows = []
ncols = 0
for ln in lines:
row = [x for x in ln.split(" ") if x]
ncols = max(ncols, len(row))
rows.append(row)
lens = [0] * ncols
for row in rows:
for n, col in enumerate(row):
lens[n] = max(lens[n], len(col))
return ["".join(x.ljust(y + 2) for x, y in zip(row, lens)) for row in rows]
class Pebkac(Exception):
def __init__(self, code, msg=None):
super(Pebkac, self).__init__(msg or HTTPCODE[code])
self.code = code
def __repr__(self):
return "Pebkac({}, {})".format(self.code, repr(self.args))

copyparty/web/Makefile (new file)
@@ -0,0 +1,12 @@
# run me to zopfli all the static files
# which should help on really slow connections
# but then why are you using copyparty in the first place
pk: $(addsuffix .gz, $(wildcard *.js *.css))
un: $(addsuffix .un, $(wildcard *.gz))
%.gz: %
pigz -11 -J 34 -I 5730 $<
%.un: %
pigz -d $<

copyparty/web/browser.css

@@ -1,3 +1,6 @@
:root {
--grid-sz: 10em;
}
* {
line-height: 1.2em;
}
@@ -6,7 +9,7 @@ html,body,tr,th,td,#files,a {
background: none;
font-weight: inherit;
font-size: inherit;
padding: none;
padding: 0;
border: none;
}
html {
@@ -39,15 +42,22 @@ body {
margin: 1.3em 0 0 0;
font-size: 1.4em;
}
#path #entree {
margin-left: -.7em;
}
#files {
border-collapse: collapse;
margin-top: 2em;
border-spacing: 0;
z-index: 1;
position: relative;
}
#files tbody a {
display: block;
padding: .3em 0;
}
a {
#files tbody div a {
color: #f5a;
}
a, #files tbody div a:last-child {
color: #fc5;
padding: .2em;
text-decoration: none;
@@ -55,6 +65,12 @@ a {
#files a:hover {
color: #fff;
background: #161616;
text-decoration: underline;
}
#files thead {
background: #333;
position: sticky;
top: 0;
}
#files thead a {
color: #999;
@@ -65,6 +81,7 @@ a {
}
#files thead th {
padding: .5em 1.3em .3em 1.3em;
cursor: pointer;
}
#files thead th:last-child {
background: #444;
@@ -81,6 +98,14 @@ a {
#files td {
margin: 0;
padding: 0 .5em;
border-bottom: 1px solid #111;
}
#files td+td+td {
max-width: 30em;
overflow: hidden;
}
#files tr+tr td {
border-top: 1px solid #383838;
}
#files tbody td:nth-child(3) {
font-family: monospace;
@@ -100,6 +125,9 @@ a {
padding-bottom: 1.3em;
border-bottom: .5em solid #444;
}
#files tbody tr td:last-child {
white-space: nowrap;
}
#files thead th[style] {
width: auto !important;
}
@@ -131,11 +159,70 @@ a {
.logue {
padding: .2em 1.5em;
}
a.play {
color: #e70;
.logue:empty {
display: none;
}
a.play.act {
color: #af0;
#pro.logue {
margin-bottom: .8em;
}
#epi.logue {
margin: .8em 0;
}
#srv_info {
opacity: .5;
font-size: .8em;
color: #fc5;
position: absolute;
top: .5em;
left: 2em;
}
#srv_info span {
color: #fff;
}
#files tbody a.play {
color: #e70;
padding: .2em;
margin: -.2em;
}
#files tbody a.play.act {
color: #840;
text-shadow: 0 0 .3em #b80;
}
#files tbody tr.sel td,
#ggrid a.sel,
html.light #ggrid a.sel {
color: #fff;
background: #925;
border-color: #c37;
}
#files tbody tr.sel:hover td,
#ggrid a.sel:hover,
html.light #ggrid a.sel:hover {
color: #fff;
background: #d39;
border-color: #d48;
text-shadow: 1px 1px 0 #804;
}
#ggrid a.sel,
html.light #ggrid a.sel {
border-top: 1px solid #d48;
box-shadow: 0 .1em 1.2em #b36;
transition: all 0.2s cubic-bezier(.2, 2.2, .5, 1); /* https://cubic-bezier.com/#.4,2,.7,1 */
}
#ggrid a.sel img {
opacity: .7;
box-shadow: 0 0 1em #b36;
filter: contrast(130%) brightness(107%);
}
#files tr.sel a {
color: #fff;
}
#files tr.sel a.play {
color: #fc5;
}
#files tr.sel a.play.act {
color: #fff;
text-shadow: 0 0 1px #fff;
}
#blocked {
position: fixed;
@@ -145,7 +232,7 @@ a.play.act {
height: 100%;
background: #333;
font-size: 2.5em;
z-index:99;
z-index: 99;
}
#blk_play,
#blk_abrt {
@@ -179,6 +266,7 @@ a.play.act {
bottom: -6em;
height: 6em;
width: 100%;
z-index: 3;
transition: bottom 0.15s;
}
#widget.open {
@@ -192,8 +280,11 @@ a.play.act {
height: 100%;
background: #3c3c3c;
}
#wtoggle {
cursor: url(/.cpr/dd/1.png), pointer;
#wtico {
cursor: url(/.cpr/dd/4.png), pointer;
animation: cursor 500ms;
}
#wtico:hover {
animation: cursor 500ms infinite;
}
@keyframes cursor {
@@ -201,7 +292,10 @@ a.play.act {
30% {cursor: url(/.cpr/dd/3.png), pointer}
50% {cursor: url(/.cpr/dd/4.png), pointer}
75% {cursor: url(/.cpr/dd/5.png), pointer}
85% {cursor: url(/.cpr/dd/1.png), pointer}
85% {cursor: url(/.cpr/dd/4.png), pointer}
}
@keyframes spin {
100% {transform: rotate(360deg)}
}
#wtoggle {
position: absolute;
@@ -219,6 +313,52 @@ a.play.act {
padding: .2em 0 0 .07em;
color: #fff;
}
#wzip, #wnp {
display: none;
margin-right: .3em;
padding-right: .3em;
border-right: .1em solid #555;
}
#wnp a {
position: relative;
font-size: .47em;
margin: 0 .1em;
top: -.4em;
}
#wnp a+a {
margin-left: .33em;
}
#wtoggle,
#wtoggle * {
line-height: 1em;
}
#wtoggle.np {
width: 5.5em;
}
#wtoggle.sel {
width: 6.4em;
}
#wtoggle.sel #wzip,
#wtoggle.np #wnp {
display: inline-block;
}
#wtoggle.sel.np #wnp {
display: none;
}
#wzip a {
font-size: .4em;
padding: 0 .3em;
margin: -.3em .2em;
position: relative;
display: inline-block;
}
#wzip a+a {
margin-left: .8em;
}
#wtoggle.sel #wzip #selzip {
top: -.6em;
padding: .4em .3em;
}
#barpos,
#barbuf {
position: absolute;
@@ -262,3 +402,623 @@ a.play.act {
width: calc(100% - 10.5em);
background: rgba(0,0,0,0.2);
}
@media (min-width: 80em) {
#barpos,
#barbuf {
width: calc(100% - 21em);
left: 9.8em;
top: .7em;
height: 1.6em;
bottom: auto;
}
#widget {
bottom: -3.2em;
height: 3.2em;
}
#pvol {
max-width: 9em;
}
}
.opview {
display: none;
}
.opview.act {
display: block;
}
#ops a {
color: #fc5;
font-size: 1.5em;
padding: .25em .3em;
margin: 0;
outline: none;
}
#ops a.act {
background: #281838;
border-radius: 0 0 .2em .2em;
border-bottom: .3em solid #d90;
box-shadow: 0 -.15em .2em #000 inset;
padding-bottom: .3em;
}
#ops i {
font-size: 1.5em;
}
#ops i:before {
content: 'x';
color: #282828;
text-shadow: 0 0 .08em #01a7e1;
position: relative;
}
#ops i:after {
content: 'x';
color: #282828;
text-shadow: 0 0 .08em #ff3f1a;
margin-left: -.35em;
font-size: 1.05em;
}
#ops,
.opbox {
border: 1px solid #3a3a3a;
box-shadow: 0 0 1em #222 inset;
}
#ops {
background: #333;
margin: 1.7em 1.5em 0 1.5em;
padding: .3em .6em;
border-radius: .3em;
border-width: .15em 0;
white-space: nowrap;
}
.opbox {
background: #2d2d2d;
margin: 1.5em 0 0 0;
padding: .5em;
border-radius: 0 1em 1em 0;
border-width: .15em .3em .3em 0;
max-width: 40em;
}
.opbox input {
margin: .5em;
}
.opview input[type=text] {
color: #fff;
background: #383838;
border: none;
box-shadow: 0 0 .3em #222;
border-bottom: 1px solid #fc5;
border-radius: .2em;
padding: .2em .3em;
}
input[type="checkbox"]+label {
color: #f5a;
}
input[type="checkbox"]:checked+label {
color: #fc5;
}
#srch_form {
border: 1px solid #3a3a3a;
box-shadow: 0 0 1em #222 inset;
background: #2d2d2d;
border-radius: .4em;
margin: 1.4em;
margin-bottom: 0;
padding: 0 .5em .5em 0;
}
#srch_form table {
display: inline-block;
}
#srch_form td {
padding: .6em .6em;
}
#srch_form td:first-child {
width: 3em;
padding-right: .2em;
text-align: right;
}
#op_search input {
margin: 0;
}
#srch_q {
white-space: pre;
color: #f80;
height: 1em;
margin: .2em 0 -1em 1.6em;
}
#tq_raw {
width: calc(100% - 2em);
margin: .3em 0 0 1.4em;
}
#tq_raw td+td {
width: 100%;
}
#op_search #q_raw {
width: 100%;
display: block;
}
#files td div span {
color: #fff;
padding: 0 .4em;
font-weight: bold;
font-style: italic;
}
#files td div a:hover {
background: #444;
color: #fff;
}
#files td div a {
display: inline-block;
white-space: nowrap;
}
#files td div a:last-child {
width: 100%;
}
#files td div {
border-collapse: collapse;
width: 100%;
}
#wrap {
margin-top: 2em;
}
#tree {
display: none;
position: absolute;
left: 0;
bottom: 0;
top: 7em;
overflow-y: auto;
-ms-scroll-chaining: none;
overscroll-behavior-y: none;
scrollbar-color: #eb0 #333;
}
#thx_ff {
padding: 5em 0;
}
#tree::-webkit-scrollbar-track,
#tree::-webkit-scrollbar {
background: #333;
}
#tree::-webkit-scrollbar-thumb {
background: #eb0;
}
#tree:hover {
z-index: 2;
}
#treeul {
position: relative;
left: -1.7em;
width: calc(100% + 1.3em);
}
.btn {
padding: .2em .4em;
font-size: 1.2em;
background: #2a2a2a;
box-shadow: 0 .1em .2em #222 inset;
border-radius: .3em;
margin: .2em;
position: relative;
top: -.2em;
}
.btn:hover {
background: #805;
}
.tgl.btn.on {
background: #fc4;
color: #400;
text-shadow: none;
}
#detree {
padding: .3em .5em;
font-size: 1.5em;
line-height: 1.5em;
}
#tree ul,
#tree li {
padding: 0;
margin: 0;
}
#tree ul {
border-left: .2em solid #555;
}
#tree li {
margin-left: 1em;
list-style: none;
border-top: 1px solid #4c4c4c;
border-bottom: 1px solid #222;
}
#tree li:last-child {
border-bottom: none;
}
#treeul a.hl {
color: #400;
background: #fc4;
border-radius: .3em;
text-shadow: none;
}
#treeul a {
display: inline-block;
}
#treeul a+a {
width: calc(100% - 2em);
background: #333;
line-height: 1em;
}
#treeul a+a:hover {
background: #222;
color: #fff;
}
#treeul a:first-child {
font-family: monospace, monospace;
}
.dumb_loader_thing {
display: inline-block;
margin: 1em .3em 1em 1em;
padding: 0 1.2em 0 0;
font-size: 4em;
animation: spin 1s linear infinite;
position: absolute;
z-index: 9;
}
#files .cfg {
display: none;
font-size: 2em;
white-space: nowrap;
}
#files th:hover .cfg,
#files th.min .cfg {
display: block;
width: 1em;
border-radius: .2em;
margin: -1.3em auto 0 auto;
background: #444;
}
#files th.min .cfg {
margin: -.6em;
}
#files>thead>tr>th.min span {
position: absolute;
transform: rotate(270deg);
background: linear-gradient(90deg, rgba(68,68,68,0), rgba(68,68,68,0.5) 70%, #444);
margin-left: -4.6em;
padding: .4em;
top: 5.4em;
width: 8em;
text-align: right;
letter-spacing: .04em;
}
#files td:nth-child(2n) {
color: #f5a;
}
#files td.min a {
display: none;
}
#files tr.play td,
#files tr.play div a {
background: #fc4;
border-color: transparent;
color: #400;
text-shadow: none;
}
#files tr.play a {
color: inherit;
}
#files tr.play a:hover {
color: #300;
background: #fea;
}
#op_cfg {
max-width: none;
margin-right: 1.5em;
}
#op_cfg>div>a {
line-height: 2em;
}
#op_cfg>div>span {
display: inline-block;
padding: .2em .4em;
}
#op_cfg h3 {
margin: .8em 0 0 .6em;
padding: 0;
border-bottom: 1px solid #555;
}
#opdesc {
display: none;
}
#ops:hover #opdesc {
display: block;
background: linear-gradient(0deg,#555, #4c4c4c 80%, #444);
box-shadow: 0 .3em 1em #222;
padding: 1em;
border-radius: .3em;
position: absolute;
z-index: 3;
top: 6em;
right: 1.5em;
}
#ops:hover #opdesc.off {
display: none;
}
#opdesc code {
background: #3c3c3c;
padding: .2em .3em;
border-top: 1px solid #777;
border-radius: .3em;
font-family: monospace, monospace;
line-height: 2em;
}
#griden.on+#thumbs {
opacity: .3;
}
#ghead {
background: #3c3c3c;
border: 1px solid #444;
border-radius: .3em;
padding: .5em;
margin: 0 1.5em 1em .4em;
position: sticky;
top: -.3em;
}
html.light #ghead {
background: #f7f7f7;
border-color: #ddd;
}
#ghead .btn {
position: relative;
top: 0;
}
#ggrid {
padding-top: .5em;
}
#ggrid a {
display: inline-block;
width: var(--grid-sz);
vertical-align: top;
overflow-wrap: break-word;
background: #383838;
border: 1px solid #444;
border-top: 1px solid #555;
box-shadow: 0 .1em .2em #222;
border-radius: .3em;
padding: .3em;
margin: .5em;
}
#ggrid a img {
border-radius: .2em;
max-width: var(--grid-sz);
max-height: calc(var(--grid-sz)/1.25);
margin: 0 auto;
display: block;
}
#ggrid a span {
padding: .2em .3em;
display: block;
}
#ggrid a:hover {
background: #444;
border-color: #555;
color: #fd9;
}
html.light #ggrid a {
background: #f7f7f7;
border-color: #ddd;
box-shadow: 0 .1em .2em #ddd;
}
html.light #ggrid a:hover {
background: #fff;
border-color: #ccc;
color: #015;
box-shadow: 0 .1em .5em #aaa;
}
#pvol,
#barbuf,
#barpos,
#u2conf label {
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
}
html.light {
color: #333;
background: #eee;
text-shadow: none;
}
html.light #ops,
html.light .opbox,
html.light #srch_form {
background: #f7f7f7;
box-shadow: 0 0 .3em #ddd;
border-color: #f7f7f7;
}
html.light #ops a.act {
box-shadow: 0 .2em .2em #ccc;
background: #fff;
border-color: #07a;
padding-top: .4em;
}
html.light #op_cfg h3 {
border-color: #ccc;
}
html.light .btn {
color: #666;
background: #ddd;
box-shadow: none;
}
html.light .btn:hover {
background: #caf;
}
html.light .tgl.btn.on {
background: #4a0;
color: #fff;
}
html.light #srv_info {
color: #c83;
text-shadow: 1px 1px 0 #fff;
}
html.light #srv_info span {
color: #000;
}
html.light #treeul a+a {
background: inherit;
color: #06a;
}
html.light #treeul a.hl {
background: #07a;
color: #fff;
}
html.light #tree li {
border-color: #ddd #fff #f7f7f7 #fff;
}
html.light #tree ul {
border-color: #ccc;
}
html.light a,
html.light #ops a,
html.light #files tbody div a:last-child {
color: #06a;
}
html.light #files tbody {
background: #f7f7f7;
}
html.light #files {
box-shadow: 0 0 .3em #ccc;
}
html.light #files thead th {
background: #eee;
}
html.light #files tr td {
border-top: 1px solid #ddd;
}
html.light #files td {
border-bottom: 1px solid #f7f7f7;
}
html.light #files tbody tr:last-child td {
border-bottom: .2em solid #ccc;
}
html.light #files td:nth-child(2n) {
color: #d38;
}
html.light #files tr:hover td {
background: #fff;
}
html.light #files tbody a.play {
color: #c0f;
}
html.light tr.play td {
background: #fc5;
}
html.light tr.play a {
color: #406;
}
html.light #files th:hover .cfg,
html.light #files th.min .cfg {
background: #ccc;
}
html.light #files > thead > tr > th.min span {
background: linear-gradient(90deg, rgba(204,204,204,0), rgba(204,204,204,0.5) 70%, #ccc);
}
html.light #blocked {
background: #eee;
}
html.light #blk_play a,
html.light #blk_abrt a {
background: #fff;
box-shadow: 0 .2em .4em #ddd;
}
html.light #widget a {
color: #fc5;
}
html.light #files tr.sel:hover td {
background: #c37;
}
html.light #files tr.sel td {
color: #fff;
}
html.light #files tr.sel a {
color: #fff;
}
html.light #files tr.sel a.play.act {
color: #fb0;
}
html.light input[type="checkbox"] + label {
color: #333;
}
html.light .opview input[type="text"] {
background: #fff;
color: #333;
box-shadow: 0 0 2px #888;
border-color: #38d;
}
html.light #ops:hover #opdesc {
background: #fff;
box-shadow: 0 .3em 1em #ccc;
}
html.light #opdesc code {
background: #060;
color: #fff;
}
html.light #u2tab a>span,
html.light #files td div span {
color: #000;
}
html.light #path {
background: #f7f7f7;
text-shadow: none;
box-shadow: 0 0 .3em #bbb;
}
html.light #path a {
color: #333;
}
html.light #path a:not(:last-child)::after {
border-color: #ccc;
background: none;
border-width: .1em .1em 0 0;
margin: -.2em .3em -.2em -.3em;
}
html.light #path a:hover {
background: none;
color: #60a;
}
html.light #files tbody div a {
color: #d38;
}
html.light #files a:hover,
html.light #files tr.sel a:hover {
color: #000;
background: #fff;
}
html.light #tree {
scrollbar-color: #a70 #ddd;
}
html.light #tree::-webkit-scrollbar-track,
html.light #tree::-webkit-scrollbar {
background: #ddd;
}
#tree::-webkit-scrollbar-thumb {
background: #da0;
}

copyparty/web/browser.html

@@ -7,69 +7,126 @@
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css{{ ts }}">
{%- if can_upload %}
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css{{ ts }}">
{%- endif %}
</head>
<body>
{%- if can_upload %}
<div id="ops">
<a href="#" data-dest="" data-desc="close submenu">---</a>
{%- if have_up2k_idx %}
<a href="#" data-perm="read" data-dest="search" data-desc="search for files by attributes, path/name, music tags, or any combination of those.&lt;br /&gt;&lt;br /&gt;&lt;code&gt;foo bar&lt;/code&gt; = must contain both foo and bar,&lt;br /&gt;&lt;code&gt;foo -bar&lt;/code&gt; = must contain foo but not bar,&lt;br /&gt;&lt;code&gt;^yana .opus$&lt;/code&gt; = must start with yana and have the opus extension">🔎</a>
<a href="#" data-dest="up2k" data-desc="up2k: upload files (if you have write-access) or toggle into the search-mode and drag files onto the search button to see if they exist somewhere on the server">🚀</a>
{%- else %}
<a href="#" data-perm="write" data-dest="up2k" data-desc="up2k: upload files with resume support (close your browser and drop the same files in later)">🚀</a>
{%- endif %}
<a href="#" data-perm="write" data-dest="bup" data-desc="bup: basic uploader, even supports netscape 4.0">🎈</a>
<a href="#" data-perm="write" data-dest="mkdir" data-desc="mkdir: create a new directory">📂</a>
<a href="#" data-perm="read write" data-dest="new_md" data-desc="new-md: create a new markdown document">📝</a>
<a href="#" data-perm="write" data-dest="msg" data-desc="msg: send a message to the server log">📟</a>
<a href="#" data-dest="cfg" data-desc="configuration options">⚙️</a>
<div id="opdesc"></div>
</div>
<div id="op_search" class="opview">
{%- if have_tags_idx %}
<div id="srch_form" class="tags"></div>
{%- else %}
<div id="srch_form"></div>
{%- endif %}
<div id="srch_q"></div>
</div>
{%- include 'upload.html' %}
{%- endif %}
<div id="op_cfg" class="opview opbox">
<h3>switches</h3>
<div>
<a id="tooltips" class="tgl btn" href="#">tooltips</a>
<a id="lightmode" class="tgl btn" href="#">lightmode</a>
<a id="griden" class="tgl btn" href="#">the grid</a>
<a id="thumbs" class="tgl btn" href="#">thumbs</a>
</div>
{%- if have_zip %}
<h3>folder download</h3>
<div id="arc_fmt"></div>
{%- endif %}
<h3>key notation</h3>
<div id="key_notation"></div>
</div>
<h1 id="path">
<a href="#" id="entree">🌲</a>
{%- for n in vpnodes %}
<a href="/{{ n[0] }}">{{ n[1] }}</a>
{%- endfor %}
</h1>
{%- if can_read %}
{%- if prologue %}
<div id="pro" class="logue">{{ prologue }}</div>
{%- endif %}
<div id="tree">
<a href="#" id="detree">🍞...</a>
<a href="#" class="btn" step="2" id="twobytwo">+</a>
<a href="#" class="btn" step="-2" id="twig">&ndash;</a>
<a href="#" class="tgl btn" id="dyntree">a</a>
<ul id="treeul"></ul>
<div id="thx_ff">&nbsp;</div>
</div>
<div id="wrap">
<div id="pro" class="logue">{{ logues[0] }}</div>
<table id="files">
<thead>
<tr>
<th></th>
<th>File Name</th>
<th>File Size</th>
<th>Date</th>
<th name="lead"><span>c</span></th>
<th name="href"><span>File Name</span></th>
<th name="sz" sort="int"><span>Size</span></th>
{%- for k in taglist %}
{%- if k.startswith('.') %}
<th name="tags/{{ k }}" sort="int"><span>{{ k[1:] }}</span></th>
{%- else %}
<th name="tags/{{ k }}"><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
{%- endif %}
{%- endfor %}
<th name="ext"><span>T</span></th>
<th name="ts"><span>Date</span></th>
</tr>
</thead>
<tbody>
{%- for f in files %}
<tr><td>{{ f[0] }}</td><td><a href="{{ f[1] }}">{{ f[2] }}</a></td><td>{{ f[3] }}</td><td>{{ f[4] }}</td></tr>
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
{%- if f.tags is defined %}
{%- for k in taglist %}
<td>{{ f.tags[k] }}</td>
{%- endfor %}
{%- endif %}
<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
{%- endfor %}
</tbody>
</table>
{%- if epilogue %}
<div id="epi" class="logue">{{ epilogue }}</div>
{%- endif %}
{%- endif %}
<div id="epi" class="logue">{{ logues[1] }}</div>
<h2><a href="?h">control-panel</a></h2>
<div id="widget">
<div id="wtoggle"></div>
<div id="widgeti">
<div id="pctl"><a href="#" id="bprev"></a><a href="#" id="bplay"></a><a href="#" id="bnext"></a></div>
<canvas id="pvol" width="288" height="38"></canvas>
<canvas id="barpos"></canvas>
<canvas id="barbuf"></canvas>
</div>
</div>
{%- if can_read %}
</div>
{%- if srv_info %}
<div id="srv_info"><span>{{ srv_info }}</span></div>
{%- endif %}
<div id="widget"></div>
<script>
var tag_order_cfg = {{ tag_order }};
</script>
<script src="/.cpr/util.js{{ ts }}"></script>
<script src="/.cpr/browser.js{{ ts }}"></script>
{%- endif %}
{%- if can_upload %}
<script src="/.cpr/up2k.js{{ ts }}"></script>
{%- endif %}
<script>
apply_perms({{ perms }});
</script>
</body>
</html>

File diff suppressed because it is too large

@@ -0,0 +1,60 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>{{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<style>
html{font-family:sans-serif}
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
a{display:block}
</style>
</head>
<body>
{%- if srv_info %}
<p><span>{{ srv_info }}</span></p>
{%- endif %}
{%- if have_b_u %}
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="bput" />
<input type="file" name="f" multiple /><br />
<input type="submit" value="start upload" />
</form>
<br />
{%- endif %}
{%- if logues[0] %}
<div>{{ logues[0] }}</div><br />
{%- endif %}
<table id="files">
<thead>
<tr>
<th name="lead"><span>c</span></th>
<th name="href"><span>File Name</span></th>
<th name="sz" sort="int"><span>Size</span></th>
<th name="ts"><span>Date</span></th>
</tr>
</thead>
<tbody>
<tr><td></td><td><a href="../{{ url_suf }}">parent folder</a></td><td>-</td><td>-</td></tr>
{%- for f in files %}
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}{{ url_suf }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.dt }}</td></tr>
{%- endfor %}
</tbody>
</table>
{%- if logues[1] %}
<div>{{ logues[1] }}</div><br />
{%- endif %}
<h2><a href="{{ url_suf }}{{ url_suf and '&amp;' or '?' }}h">control-panel</a></h2>
</body>
</html>

Binary file not shown (image, 248 B before)

@@ -1,3 +1,7 @@
@font-face {
font-family: 'scp';
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
}
html, body {
color: #333;
background: #eee;
@@ -9,6 +13,7 @@ html, body {
}
#mw {
margin: 0 auto;
padding: 0 1.5em;
}
pre, code, a {
color: #480;
@@ -22,7 +27,7 @@ code {
font-size: .96em;
}
pre, code {
font-family: monospace, monospace;
font-family: 'scp', monospace, monospace;
white-space: pre-wrap;
word-break: break-all;
}
@@ -42,9 +47,12 @@ pre code {
pre code:last-child {
border-bottom: none;
}
pre code:before {
pre code::before {
content: counter(precode);
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
display: inline-block;
text-align: right;
font-size: .75em;
@@ -104,8 +112,12 @@ h2 a, h4 a, h6 a {
#mp ol>li {
margin: .7em 0;
}
strong {
color: #000;
}
p>em,
li>em {
li>em,
td>em {
color: #c50;
padding: .1em;
border-bottom: .1em solid #bbb;
@@ -168,14 +180,12 @@ small {
}
table {
border-collapse: collapse;
margin: 1em 0;
}
td {
th, td {
padding: .2em .5em;
border: .12em solid #aaa;
}
th {
border: .12em solid #aaa;
}
blink {
animation: blinker .7s cubic-bezier(.9, 0, .1, 1) infinite;
}
@@ -198,13 +208,15 @@ blink {
height: 100%;
}
#mw {
padding: 0 1em;
margin: 0 auto;
right: 0;
}
#mp {
max-width: 54em;
max-width: 52em;
margin-bottom: 6em;
word-break: break-word;
overflow-wrap: break-word;
word-wrap: break-word; /*ie*/
}
a {
color: #fff;
@@ -238,12 +250,6 @@ blink {
z-index: 10;
width: calc(100% - 1em);
}
#mn.undocked {
position: fixed;
padding: 1.7em 0 1.5em 1em;
box-shadow: 0 0 .5em rgba(0, 0, 0, 0.3);
background: #f7f7f7;
}
#mn a {
color: #444;
background: none;
@@ -261,7 +267,7 @@ blink {
#mn a:last-child {
padding-right: .5em;
}
#mn a:not(:last-child):after {
#mn a:not(:last-child)::after {
content: '';
width: 1.05em;
height: 1.05em;
@@ -290,6 +296,32 @@ blink {
text-decoration: underline;
border: none;
}
#mh a:hover {
color: #000;
background: #ddd;
}
#toolsbox {
overflow: hidden;
display: inline-block;
background: #eee;
height: 1.5em;
padding: 0 .2em;
margin: 0 .2em;
position: absolute;
}
#toolsbox.open {
height: auto;
overflow: visible;
background: #eee;
box-shadow: 0 .2em .2em #ccc;
padding-bottom: .2em;
}
#toolsbox a {
display: block;
}
#toolsbox a+a {
text-decoration: none;
}
@@ -333,8 +365,12 @@ blink {
html.dark #m>ol {
border-color: #555;
}
html.dark strong {
color: #fff;
}
html.dark p>em,
html.dark li>em {
html.dark li>em,
html.dark td>em {
color: #f94;
border-color: #666;
}
@@ -355,7 +391,7 @@ blink {
background: #282828;
border: .07em dashed #444;
}
html.dark #mn a:not(:last-child):after {
html.dark #mn a:not(:last-child)::after {
border-color: rgba(255,255,255,0.3);
}
html.dark #mn a {
@@ -372,21 +408,32 @@ blink {
color: #ccc;
background: none;
}
html.dark #mh a:hover {
background: #333;
color: #fff;
}
html.dark #toolsbox {
background: #222;
}
html.dark #toolsbox.open {
box-shadow: 0 .2em .2em #069;
border-radius: 0 0 .4em .4em;
}
}
@media screen and (min-width: 70em) {
@media screen and (min-width: 66em) {
#mw {
position: fixed;
overflow-y: auto;
left: 14em;
left: calc(100% - 57em);
left: calc(100% - 55em);
max-width: none;
bottom: 0;
scrollbar-color: #eb0 #f7f7f7;
}
#toc {
width: 13em;
width: calc(100% - 57.3em);
width: calc(100% - 55.3em);
max-width: 30em;
background: #eee;
position: fixed;
@@ -433,41 +480,117 @@ blink {
width: .8em;
}
html.dark #toc::-webkit-scrollbar-thumb {
background: #eb0;
}
html.dark #mn.undocked {
box-shadow: 0 0 .5em #555;
border: none;
background: #0a0a0a;
background: #b80;
}
}
@media screen and (min-width: 87.5em) {
@media screen and (min-width: 85.5em) {
#toc { width: 30em }
#mw { left: 30.5em }
}
@media print {
@page {
size: A4;
padding: 0;
margin: .5in .6in;
mso-header-margin: .6in;
mso-footer-margin: .6in;
mso-paper-source: 0;
}
a {
color: #079;
text-decoration: none;
border-bottom: .07em solid #4ac;
padding: 0 .3em;
}
#toc {
margin: 0 !important;
}
#toc>ul {
border-left: .1em solid #84c4dd;
}
#mn, #mh {
display: none;
}
}
html, body, #toc, #mw {
margin: 0 !important;
word-break: break-word;
width: 52em;
}
#toc {
margin-left: 1em !important;
}
#toc a {
color: #000 !important;
}
#toc a::after {
/* hopefully supported by browsers eventually */
content: leader('.') target-counter(attr(href), page);
}
a[ctr]::before {
content: attr(ctr) '. ';
}
h1 {
margin: 2em 0;
}
h2 {
margin: 2em 0 0 0;
}
h1, h2, h3 {
page-break-inside: avoid;
}
h1::after,
h2::after,
h3::after {
content: 'orz';
color: transparent;
display: block;
line-height: 1em;
padding: 4em 0 0 0;
margin: 0 0 -5em 0;
}
p {
page-break-inside: avoid;
}
table {
page-break-inside: auto;
}
tr {
page-break-inside: avoid;
page-break-after: auto;
}
thead {
display: table-header-group;
}
tfoot {
display: table-footer-group;
}
#mp a.vis::after {
content: ' (' attr(href) ')';
border-bottom: 1px solid #bbb;
color: #444;
}
blockquote {
border-color: #555;
}
code {
border-color: #bbb;
}
pre, pre code {
border-color: #999;
}
pre code::before {
color: #058;
}
/*
*[data-ln]:before {
content: attr(data-ln);
font-size: .8em;
margin: 0 .4em;
color: #f0c;
html.dark a {
color: #000;
}
html.dark pre,
html.dark code {
color: #240;
}
html.dark p>em,
html.dark li>em,
html.dark td>em {
color: #940;
}
}
*/

copyparty/web/md.html

@@ -17,15 +17,23 @@
<a id="save" href="?edit">save</a>
<a id="sbs" href="#">sbs</a>
<a id="nsbs" href="#">editor</a>
<a id="help" href="#">help</a>
<div id="toolsbox">
<a id="tools" href="#">tools</a>
<a id="fmt_table" href="#">prettify table (ctrl-k)</a>
<a id="iter_uni" href="#">non-ascii: iterate (ctrl-u)</a>
<a id="mark_uni" href="#">non-ascii: markup</a>
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
<a id="help" href="#">help</a>
</div>
{%- else %}
<a href="?edit">edit (basic)</a>
<a href="?edit2">edit (fancy)</a>
<a href="?raw">view raw</a>
{%- endif %}
</div>
<div id="toc"></div>
<div id="mtw">
<textarea id="mt">{{ md }}</textarea>
<textarea id="mt" autocomplete="off">{{ md }}</textarea>
</div>
<div id="mw">
<div id="ml">
@@ -39,16 +47,19 @@
{%- if edit %}
<div id="helpbox">
<textarea>
<textarea autocomplete="off">
write markdown (html is permitted)
write markdown (most html is 🙆 too)
### hotkey list
## hotkey list
* `Ctrl-S` to save
* `Ctrl-E` to toggle mode
* `Ctrl-K` to prettyprint a table
* `Ctrl-U` to iterate non-ascii chars
* `Ctrl-H` / `Ctrl-Shift-H` to create a header
* `TAB` / `Shift-TAB` to indent/dedent a selection
### toolbar
## toolbar
1. toggle dark mode
2. show/hide navigation bar
3. save changes on server
@@ -56,16 +67,68 @@ write markdown (html is permitted)
5. toggle editor/preview
6. this thing :^)
.
## markdown
|||
|--|--|
|`**bold**`|**bold**|
|`_italic_`|_italic_|
|`~~strike~~`|~~strike~~|
|`` `code` ``|`code`|
|`[](#hotkey-list)`|[](#hotkey-list)|
|`[](/foo/bar.md#header)`|[](/foo/bar.md#header)|
|`<blink>💯</blink>`|<blink>💯</blink>|
## tables
|left-aligned|centered|right-aligned
| ---------- | :----: | ----------:
|one |two |three
|left-aligned|centered|right-aligned
| ---------- | :----: | ----------:
|one |two |three
## lists
* one
* two
1. one
1. two
* one
* two
1. one
1. two
## headers
# level 1
## level 2
### level 3
## quote
> hello
> hello
## codeblock
four spaces (no tab pls)
## code in lists
* foo
bar
six spaces total
* foo
bar
six spaces total
.
</textarea>
</div>
{%- endif %}
<script>
var link_md_as_html = false; // TODO (does nothing)
var last_modified = {{ lastmod }};
var md_opt = {
link_md_as_html: false,
allow_plugins: {{ md_plug }},
modpoll_freq: {{ md_chk_rate }}
};
(function () {
var btn = document.getElementById("lightswitch");
@@ -75,24 +138,18 @@ var last_modified = {{ lastmod }};
document.documentElement.setAttribute("class", dark ? "dark" : "");
btn.innerHTML = "go " + (dark ? "light" : "dark");
if (window.localStorage)
localStorage.setItem('darkmode', dark ? 1 : 0);
localStorage.setItem('lightmode', dark ? 0 : 1);
};
btn.onclick = toggle;
if (window.localStorage && localStorage.getItem('darkmode') == 1)
if (window.localStorage && localStorage.getItem('lightmode') != 1)
toggle();
})();
if (!String.startsWith) {
String.prototype.startsWith = function(s, i) {
i = i>0 ? i|0 : 0;
return this.substring(i, i + s.length) === s;
};
}
</script>
<script src="/.cpr/deps/marked.full.js"></script>
<script src="/.cpr/util.js"></script>
<script src="/.cpr/deps/marked.js"></script>
<script src="/.cpr/md.js"></script>
{%- if edit %}
<script src="/.cpr/md2.js"></script>
<script src="/.cpr/md2.js"></script>
{%- endif %}
</body></html>

copyparty/web/md.js

@@ -1,10 +1,12 @@
var dom_toc = document.getElementById('toc');
var dom_wrap = document.getElementById('mw');
var dom_hbar = document.getElementById('mh');
var dom_nav = document.getElementById('mn');
var dom_pre = document.getElementById('mp');
var dom_src = document.getElementById('mt');
var dom_navtgl = document.getElementById('navtoggle');
"use strict";
var dom_toc = ebi('toc');
var dom_wrap = ebi('mw');
var dom_hbar = ebi('mh');
var dom_nav = ebi('mn');
var dom_pre = ebi('mp');
var dom_src = ebi('mt');
var dom_navtgl = ebi('navtoggle');
// chrome 49 needs this
@@ -18,6 +20,10 @@ var dbg = function () { };
// dbg = console.log
// plugins
var md_plug = {};
function hesc(txt) {
return txt.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
}
@@ -30,11 +36,24 @@ function cls(dom, name, add) {
}
function static(obj) {
function statify(obj) {
return JSON.parse(JSON.stringify(obj));
}
// dodge browser issues
(function () {
var ua = navigator.userAgent;
if (ua.indexOf(') Gecko/') !== -1 && /Linux| Mac /.exec(ua)) {
// necessary on ff-68.7 at least
var s = mknod('style');
s.innerHTML = '@page { margin: .5in .6in .8in .6in; }';
console.log(s.innerHTML);
document.head.appendChild(s);
}
})();
// add navbar
(function () {
var n = document.location + '';
@@ -46,7 +65,7 @@ function static(obj) {
if (a > 0)
loc.push(n[a]);
var dec = hesc(decodeURIComponent(n[a]));
var dec = hesc(uricom_dec(n[a])[0]);
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
}
@@ -141,17 +160,125 @@ function copydom(src, dst, lv) {
}
function convert_markdown(md_text) {
marked.setOptions({
function md_plug_err(ex, js) {
var errbox = ebi('md_errbox');
if (errbox)
errbox.parentNode.removeChild(errbox);
if (!ex)
return;
var msg = (ex + '').split('\n')[0];
var ln = ex.lineNumber;
var o = null;
if (ln) {
msg = "Line " + ln + ", " + msg;
var lns = js.split('\n');
if (ln < lns.length) {
o = mknod('span');
o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block';
o.textContent = lns[ln - 1];
}
}
errbox = mknod('div');
errbox.setAttribute('id', 'md_errbox');
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
errbox.textContent = msg;
errbox.onclick = function () {
alert('' + ex.stack);
};
if (o) {
errbox.appendChild(o);
errbox.style.padding = '.25em .5em';
}
dom_nav.appendChild(errbox);
try {
console.trace();
}
catch (ex2) { }
}
function load_plug(md_text, plug_type) {
if (!md_opt.allow_plugins)
return md_text;
var find = '\n```copyparty_' + plug_type + '\n';
var ofs = md_text.indexOf(find);
if (ofs === -1)
return md_text;
var ofs2 = md_text.indexOf('\n```', ofs + 1);
if (ofs2 == -1)
return md_text;
var js = md_text.slice(ofs + find.length, ofs2 + 1);
var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
var old_plug = md_plug[plug_type];
if (!old_plug || old_plug[1] != js) {
js = 'const x = { ' + js + ' }; x;';
try {
var x = eval(js);
}
catch (ex) {
md_plug[plug_type] = null;
md_plug_err(ex, js);
return md;
}
if (x['ctor']) {
x['ctor']();
delete x['ctor'];
}
md_plug[plug_type] = [x, js];
}
return md;
}
function convert_markdown(md_text, dest_dom) {
md_text = md_text.replace(/\r/g, '');
md_plug_err(null);
md_text = load_plug(md_text, 'pre');
md_text = load_plug(md_text, 'post');
var marked_opts = {
//headerPrefix: 'h-',
breaks: true,
gfm: true
});
var md_html = marked(md_text);
};
var ext = md_plug['pre'];
if (ext)
Object.assign(marked_opts, ext[0]);
try {
var md_html = marked(md_text, marked_opts);
}
catch (ex) {
if (ext)
md_plug_err(ex, ext[1]);
throw ex;
}
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
var nodes = md_dom.getElementsByTagName('a');
for (var a = nodes.length - 1; a >= 0; a--) {
var href = nodes[a].getAttribute('href');
var txt = nodes[a].textContent;
if (!txt)
nodes[a].textContent = href;
else if (href !== txt)
nodes[a].setAttribute('class', 'vis');
}
// todo-lists (should probably be a marked extension)
var nodes = md_dom.getElementsByTagName('input');
nodes = md_dom.getElementsByTagName('input');
for (var a = nodes.length - 1; a >= 0; a--) {
var dom_box = nodes[a];
if (dom_box.getAttribute('type') !== 'checkbox')
@@ -172,7 +299,7 @@ function convert_markdown(md_text) {
}
// separate <code> for each line in <pre>
var nodes = md_dom.getElementsByTagName('pre');
nodes = md_dom.getElementsByTagName('pre');
for (var a = nodes.length - 1; a >= 0; a--) {
var el = nodes[a];
@@ -185,7 +312,7 @@ function convert_markdown(md_text) {
continue;
var nline = parseInt(el.getAttribute('data-ln')) + 1;
var lines = el.innerHTML.replace(/\r?\n<\/code>$/i, '</code>').split(/\r?\n/g);
var lines = el.innerHTML.replace(/\n<\/code>$/i, '</code>').split(/\n/g);
for (var b = 0; b < lines.length - 1; b++)
lines[b] += '</code>\n<code data-ln="' + (nline + b) + '">';
@@ -218,18 +345,36 @@ function convert_markdown(md_text) {
el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>';
}
copydom(md_dom, dom_pre, 0);
ext = md_plug['post'];
if (ext && ext[0].render)
try {
ext[0].render(md_dom);
}
catch (ex) {
md_plug_err(ex, ext[1]);
}
copydom(md_dom, dest_dom, 0);
if (ext && ext[0].render2)
try {
ext[0].render2(dest_dom);
}
catch (ex) {
md_plug_err(ex, ext[1]);
}
}
function init_toc() {
var loader = document.getElementById('ml');
var loader = ebi('ml');
loader.parentNode.removeChild(loader);
var anchors = []; // list of toc entries, complex objects
var anchor = null; // current toc node
var html = []; // generated toc html
var lv = 0; // current indentation level in the toc html
var ctr = [0, 0, 0, 0, 0, 0];
var manip_nodes_dyn = dom_pre.getElementsByTagName('*');
var manip_nodes = [];
@@ -250,8 +395,18 @@ function init_toc() {
html.push('</ul>');
lv--;
}
ctr[lv - 1]++;
for (var b = lv; b < 6; b++)
ctr[b] = 0;
html.push('<li>' + elm.innerHTML + '</li>');
elm.childNodes[0].setAttribute('ctr', ctr.slice(0, lv).join('.'));
var elm2 = elm.cloneNode(true);
elm2.childNodes[0].textContent = elm.textContent;
while (elm2.childNodes.length > 1)
elm2.removeChild(elm2.childNodes[1]);
html.push('<li>' + elm2.innerHTML + '</li>');
if (anchor != null)
anchors.push(anchor);
@@ -333,7 +488,7 @@ function init_toc() {
// "main" :p
convert_markdown(dom_src.value);
convert_markdown(dom_src.value, dom_pre);
var toc = init_toc();
@@ -365,45 +520,13 @@ var redraw = (function () {
dom_navtgl.onclick = function () {
var timeout = null;
function show_nav(e) {
if (e && e.target == dom_hbar && e.pageX && e.pageX < dom_hbar.offsetWidth / 2)
return;
clearTimeout(timeout);
dom_nav.style.display = 'block';
}
function hide_nav() {
clearTimeout(timeout);
timeout = setTimeout(function () {
dom_nav.style.display = 'none';
}, 30);
}
var hidden = dom_navtgl.innerHTML == 'hide nav';
dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav';
if (hidden) {
dom_nav.setAttribute('class', 'undocked');
dom_nav.style.display = 'none';
dom_nav.style.top = dom_hbar.offsetHeight + 'px';
dom_nav.onmouseenter = show_nav;
dom_nav.onmouseleave = hide_nav;
dom_hbar.onmouseenter = show_nav;
dom_hbar.onmouseleave = hide_nav;
}
else {
dom_nav.setAttribute('class', '');
dom_nav.style.display = 'block';
dom_nav.style.top = '0';
dom_nav.onmouseenter = null;
dom_nav.onmouseleave = null;
dom_hbar.onmouseenter = null;
dom_hbar.onmouseleave = null;
}
if (window.localStorage)
localStorage.setItem('hidenav', hidden ? 1 : 0);
dom_nav.style.display = hidden ? 'none' : 'block';
swrite('hidenav', hidden ? 1 : 0);
redraw();
};
if (window.localStorage && localStorage.getItem('hidenav') == 1)
if (sread('hidenav') == 1)
dom_navtgl.onclick();

copyparty/web/md2.css

@@ -1,108 +1,125 @@
#toc {
display: none;
display: none;
}
#mtw {
display: block;
position: fixed;
left: 0;
bottom: 0;
width: calc(100% - 58em);
display: block;
position: fixed;
left: .5em;
bottom: 0;
width: calc(100% - 56em);
}
#mw {
left: calc(100% - 57em);
overflow-y: auto;
position: fixed;
bottom: 0;
left: calc(100% - 55em);
overflow-y: auto;
position: fixed;
bottom: 0;
}
/* single-screen */
#mtw.preview,
#mw.editor {
opacity: 0;
z-index: 1;
opacity: 0;
z-index: 1;
}
#mw.preview,
#mtw.editor {
z-index: 5;
z-index: 5;
}
#mtw.single,
#mw.single {
margin: 0;
left: 1em;
left: max(1em, calc((100% - 58em) / 2));
margin: 0;
left: 1em;
left: max(1em, calc((100% - 56em) / 2));
}
#mtw.single {
width: 57em;
width: min(57em, calc(100% - 2em));
width: 55em;
width: min(55em, calc(100% - 2em));
}
#mp {
position: relative;
position: relative;
}
#mt, #mtr {
width: 100%;
height: calc(100% - 5px);
color: #444;
background: #f7f7f7;
border: 1px solid #999;
outline: none;
padding: 0;
margin: 0;
font-family: 'consolas', monospace, monospace;
white-space: pre-wrap;
word-break: break-word;
overflow-wrap: break-word;
word-wrap: break-word; /*ie*/
overflow-y: scroll;
line-height: 1.3em;
font-size: .9em;
position: relative;
scrollbar-color: #eb0 #f7f7f7;
width: 100%;
height: calc(100% - 1px);
color: #444;
background: #f7f7f7;
border: 1px solid #999;
outline: none;
padding: 0;
margin: 0;
font-family: 'consolas', monospace, monospace;
white-space: pre-wrap;
word-break: break-word;
overflow-wrap: break-word;
word-wrap: break-word; /*ie*/
overflow-y: scroll;
line-height: 1.3em;
font-size: .9em;
position: relative;
scrollbar-color: #eb0 #f7f7f7;
}
html.dark #mt {
color: #eee;
background: #222;
border: 1px solid #777;
scrollbar-color: #b80 #282828;
color: #eee;
background: #222;
border: 1px solid #777;
scrollbar-color: #b80 #282828;
}
#mtr {
position: absolute;
top: 0;
left: 0;
position: absolute;
top: 0;
left: 0;
}
#save.force-save {
color: #400;
background: #f97;
border-radius: .15em;
color: #400;
background: #f97;
border-radius: .15em;
}
html.dark #save.force-save {
color: #fca;
background: #720;
}
#save.disabled {
opacity: .4;
opacity: .4;
}
#helpbox,
#toast {
background: #f7f7f7;
border-radius: .4em;
z-index: 9001;
}
#helpbox {
display: none;
position: fixed;
background: #f7f7f7;
box-shadow: 0 .5em 2em #777;
border-radius: .4em;
padding: 2em;
top: 4em;
overflow-y: auto;
height: calc(100% - 12em);
left: calc(50% - 15em);
right: 0;
width: 30em;
z-index: 9001;
display: none;
position: fixed;
padding: 2em;
top: 4em;
overflow-y: auto;
box-shadow: 0 .5em 2em #777;
height: calc(100% - 12em);
left: calc(50% - 15em);
right: 0;
width: 30em;
}
#helpclose {
display: block;
display: block;
}
html.dark #helpbox {
background: #222;
box-shadow: 0 .5em 2em #444;
border: 1px solid #079;
border-width: 1px 0;
box-shadow: 0 .5em 2em #444;
}
html.dark #helpbox,
html.dark #toast {
background: #222;
border: 1px solid #079;
border-width: 1px 0;
}
#toast {
font-weight: bold;
text-align: center;
padding: .6em 0;
position: fixed;
top: 30%;
transition: opacity 0.2s ease-in-out;
opacity: 1;
}
# mt {opacity: .5;top:1px}


@@ -1,16 +1,25 @@
"use strict";
// server state
var server_md = dom_src.value;
// the non-ascii whitelist
var esc_uni_whitelist = '\\n\\t\\x20-\\x7eÆØÅæøå';
var js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
// dom nodes
var dom_swrap = document.getElementById('mtw');
var dom_sbs = document.getElementById('sbs');
var dom_nsbs = document.getElementById('nsbs');
var dom_swrap = ebi('mtw');
var dom_sbs = ebi('sbs');
var dom_nsbs = ebi('nsbs');
var dom_tbox = ebi('toolsbox');
var dom_ref = (function () {
var d = document.createElement('div');
var d = mknod('div');
d.setAttribute('id', 'mtr');
dom_swrap.appendChild(d);
d = document.getElementById('mtr');
d = ebi('mtr');
// hide behind the textarea (offsetTop is not computed if display:none)
dom_src.style.zIndex = '4';
d.style.zIndex = '3';
@@ -19,14 +28,12 @@ var dom_ref = (function () {
// line->scrollpos maps
var map_src = [];
var map_pre = [];
function genmap(dom) {
function genmapq(dom, query) {
var ret = [];
var last_y = -1;
var parent_y = 0;
var parent_n = null;
var nodes = dom.querySelectorAll('*[data-ln]');
var nodes = dom.querySelectorAll(query);
for (var a = 0; a < nodes.length; a++) {
var n = nodes[a];
var ln = parseInt(n.getAttribute('data-ln'));
@@ -35,7 +42,7 @@ function genmap(dom) {
var y = 0;
var par = n.offsetParent;
if (par != parent_n) {
if (par && par != parent_n) {
while (par && par != dom) {
y += par.offsetTop;
par = par.offsetParent;
@@ -49,7 +56,7 @@ function genmap(dom) {
while (ln > ret.length)
ret.push(null);
var y = parent_y + n.offsetTop;
y = parent_y + n.offsetTop;
if (y <= last_y)
//console.log('awawa');
continue;
@@ -60,6 +67,25 @@ function genmap(dom) {
}
return ret;
}
var map_src = [];
var map_pre = [];
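// reuse the previous map if the final mapped line is still within 24px of its old y-position; otherwise rebuild it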
function genmap(dom, oldmap) {
var find = nlines;
while (oldmap && find-- > 0) {
var tmap = genmapq(dom, '*[data-ln="' + find + '"]');
if (!tmap || !tmap.length)
continue;
var cy = tmap[find];
var oy = parseInt(oldmap[find]);
if (cy + 24 > oy && cy - 24 < oy)
return oldmap;
console.log('map regen', dom.getAttribute('id'), find, oy, cy, oy - cy);
break;
}
return genmapq(dom, '*[data-ln]');
}
// input handler
@@ -68,9 +94,9 @@ var nlines = 0;
var draw_md = (function () {
var delay = 1;
function draw_md() {
var t0 = new Date().getTime();
var t0 = Date.now();
var src = dom_src.value;
convert_markdown(src);
convert_markdown(src, dom_pre);
var lines = hesc(src).replace(/\r/g, "").split('\n');
nlines = lines.length;
@@ -79,13 +105,13 @@ var draw_md = (function () {
html.push('<span data-ln="' + (a + 1) + '">' + lines[a] + "</span>");
dom_ref.innerHTML = html.join('\n');
map_src = genmap(dom_ref);
map_pre = genmap(dom_pre);
map_src = genmap(dom_ref, map_src);
map_pre = genmap(dom_pre, map_pre);
cls(document.getElementById('save'), 'disabled', src == server_md);
cls(ebi('save'), 'disabled', src == server_md);
var t1 = new Date().getTime();
delay = t1 - t0 > 150 ? 25 : 1;
var t1 = Date.now();
delay = t1 - t0 > 100 ? 25 : 1;
}
var timeout = null;
@@ -108,8 +134,8 @@ redraw = (function () {
dom_wrap.style.top = y;
dom_swrap.style.top = y;
dom_ref.style.width = getComputedStyle(dom_src).offsetWidth + 'px';
map_src = genmap(dom_ref);
map_pre = genmap(dom_pre);
map_src = genmap(dom_ref, map_src);
map_pre = genmap(dom_pre, map_pre);
dbg(document.body.clientWidth + 'x' + document.body.clientHeight);
}
function setsbs() {
@@ -118,7 +144,7 @@ redraw = (function () {
onresize();
}
function modetoggle() {
mode = dom_nsbs.innerHTML;
var mode = dom_nsbs.innerHTML;
dom_nsbs.innerHTML = mode == 'editor' ? 'preview' : 'editor';
mode += ' single';
dom_wrap.setAttribute('class', mode);
@@ -147,14 +173,14 @@ redraw = (function () {
dst.scrollTop = 0;
return;
}
if (y + 8 + src.clientHeight > src.scrollHeight) {
if (y + 48 + src.clientHeight > src.scrollHeight) {
dst.scrollTop = dst.scrollHeight - dst.clientHeight;
return;
}
y += src.clientHeight / 2;
var sy1 = -1, sy2 = -1, dy1 = -1, dy2 = -1;
for (var a = 1; a < nlines + 1; a++) {
if (srcmap[a] === null || dstmap[a] === null)
if (srcmap[a] == null || dstmap[a] == null)
continue;
if (srcmap[a] > y) {
@@ -197,14 +223,108 @@ redraw = (function () {
})();
// modification checker
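// periodically re-fetches the document (?raw) and warns if the server copy changed while editing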
function Modpoll() {
this.skip_one = true;
this.disabled = false;
this.periodic = function () {
var that = this;
setTimeout(function () {
that.periodic();
}, 1000 * md_opt.modpoll_freq);
var skip = null;
if (ebi('toast'))
skip = 'toast';
else if (this.skip_one)
skip = 'saved';
else if (this.disabled)
skip = 'disabled';
if (skip) {
console.log('modpoll skip, ' + skip);
this.skip_one = false;
return;
}
console.log('modpoll...');
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
var xhr = new XMLHttpRequest();
xhr.modpoll = this;
xhr.open('GET', url, true);
xhr.responseType = 'text';
xhr.onreadystatechange = this.cb;
xhr.send();
}
this.cb = function () {
if (this.modpoll.disabled || this.modpoll.skip_one) {
console.log('modpoll abort');
return;
}
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.status !== 200) {
console.log('modpoll err ' + this.status + ": " + this.responseText);
return;
}
if (!this.responseText)
return;
var server_ref = server_md.replace(/\r/g, '');
var server_now = this.responseText.replace(/\r/g, '');
if (server_ref != server_now) {
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
this.modpoll.disabled = true;
var msg = [
"The document has changed on the server.<br />" +
"The changes will NOT be loaded into your editor automatically.",
"Press F5 or CTRL-R to refresh the page,<br />" +
"replacing your document with the server copy.",
"You can click this message to ignore and contnue."
];
return toast(false, "box-shadow:0 1em 2em rgba(64,64,64,0.8);font-weight:normal",
36, "<p>" + msg.join('</p>\n<p>') + '</p>');
}
console.log('modpoll eq');
}
if (md_opt.modpoll_freq > 0)
this.periodic();
return this;
}
var modpoll = new Modpoll();
window.onbeforeunload = function (e) {
if ((ebi("save").getAttribute('class') + '').indexOf('disabled') >= 0)
return; //nice (todo)
e.preventDefault(); //ff
e.returnValue = ''; //chrome
};
// save handler
function save(e) {
if (e) e.preventDefault();
var save_btn = document.getElementById("save"),
var save_btn = ebi("save"),
save_cls = save_btn.getAttribute('class') + '';
if (save_cls.indexOf('disabled') >= 0) {
alert('there is nothing to save');
toast(true, ";font-size:2em;color:#c90", 9, "no changes");
return;
}
@@ -221,13 +341,15 @@ function save(e) {
fd.append("lastmod", (force ? -1 : last_modified));
fd.append("body", txt);
var url = (document.location + '').split('?')[0] + '?raw';
var url = (document.location + '').split('?')[0];
var xhr = new XMLHttpRequest();
xhr.open('POST', url, true);
xhr.responseType = 'text';
xhr.onreadystatechange = save_cb;
xhr.btn = save_btn;
xhr.txt = txt;
modpoll.skip_one = true; // skip one iteration while we save
xhr.send(fd);
}
@@ -272,19 +394,24 @@ function save_cb() {
this.btn.classList.remove('force-save');
//alert('save OK -- wrote ' + r.size + ' bytes.\n\nsha512: ' + r.sha512);
run_savechk(r.lastmod, this.txt, this.btn, 0);
}
function run_savechk(lastmod, txt, btn, ntry) {
// download the saved doc from the server and compare
var url = (document.location + '').split('?')[0] + '?raw';
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
var xhr = new XMLHttpRequest();
xhr.open('GET', url, true);
xhr.responseType = 'text';
xhr.onreadystatechange = save_chk;
xhr.btn = this.save_btn;
xhr.txt = this.txt;
xhr.lastmod = r.lastmod;
xhr.onreadystatechange = savechk_cb;
xhr.lastmod = lastmod;
xhr.txt = txt;
xhr.btn = btn;
xhr.ntry = ntry;
xhr.send();
}
function save_chk() {
function savechk_cb() {
if (this.readyState != XMLHttpRequest.DONE)
return;
@@ -296,6 +423,14 @@ function save_chk() {
var doc1 = this.txt.replace(/\r\n/g, "\n");
var doc2 = this.responseText.replace(/\r\n/g, "\n");
if (doc1 != doc2) {
var that = this;
if (that.ntry < 10) {
// qnap funny, try a few more times
setTimeout(function () {
run_savechk(that.lastmod, that.txt, that.btn, that.ntry + 1)
}, 100);
return;
}
alert(
'Error! The document on the server does not appear to have saved correctly (your editor contents and the server copy are not identical). Place the document on your clipboard for now and check the server logs for hints\n\n' +
'Length: yours=' + doc1.length + ', server=' + doc2.length
@@ -308,18 +443,44 @@ function save_chk() {
last_modified = this.lastmod;
server_md = this.txt;
draw_md();
toast(true, ";font-size:6em;font-family:serif;color:#9b4", 4,
'OK✔<span style="font-size:.2em;color:#999;position:absolute">' + this.ntry + '</span>');
var ok = document.createElement('div');
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
ok.innerHTML = 'OK✔';
var parent = document.getElementById('m');
document.documentElement.appendChild(ok);
setTimeout(function () {
ok.style.opacity = 0;
}, 500);
setTimeout(function () {
modpoll.disabled = false;
}
function toast(autoclose, style, width, msg) {
var ok = ebi("toast");
if (ok)
ok.parentNode.removeChild(ok);
}, 750);
style = "width:" + width + "em;left:calc(50% - " + (width / 2) + "em);" + style;
ok = mknod('div');
ok.setAttribute('id', 'toast');
ok.setAttribute('style', style);
ok.innerHTML = msg;
var parent = ebi('m');
document.documentElement.appendChild(ok);
var hide = function (delay) {
delay = delay || 0;
setTimeout(function () {
ok.style.opacity = 0;
}, delay);
setTimeout(function () {
if (ok.parentNode)
ok.parentNode.removeChild(ok);
}, delay + 250);
}
ok.onclick = function () {
hide(0);
};
if (autoclose)
hide(500);
}
@@ -392,6 +553,9 @@ function setsel(s) {
dom_src.value = [s.pre, s.sel, s.post].join('');
dom_src.setSelectionRange(s.car, s.cdr, dom_src.selectionDirection);
dom_src.oninput();
// support chrome:
dom_src.blur();
dom_src.focus();
}
@@ -465,28 +629,44 @@ function md_newline() {
var s = linebounds(true),
ln = s.md.substring(s.n1, s.n2),
m1 = /^( *)([0-9]+)(\. +)/.exec(ln),
m2 = /^[ \t>+-]*(\* )?/.exec(ln);
m2 = /^[ \t>+-]*(\* )?/.exec(ln),
drop = dom_src.selectionEnd - dom_src.selectionStart;
var pre = m2[0];
if (m1 !== null)
pre = m1[1] + (parseInt(m1[2]) + 1) + m1[3];
if (pre.length > s.car - s.n1)
// in gutter, do nothing
return true;
s.pre = s.md.substring(0, s.car) + '\n' + pre;
s.sel = '';
s.post = s.md.substring(s.car);
s.post = s.md.substring(s.car + drop);
s.car = s.cdr = s.pre.length;
setsel(s);
return false;
}
// backspace
function md_backspace() {
var s = linebounds(true),
ln = s.md.substring(s.n1, s.n2),
m = /^[ \t>+-]*(\* )?([0-9]+\. +)?/.exec(ln);
o0 = dom_src.selectionStart,
left = s.md.slice(s.n1, o0),
m = /^[ \t>+-]*(\* )?([0-9]+\. +)?/.exec(left);
// if car is in whitespace area, do nothing
if (/^\s*$/.test(left))
return true;
// same if selection
if (o0 != dom_src.selectionEnd)
return true;
// same if line is all-whitespace or non-markup
var v = m[0].replace(/[^ ]/g, " ");
if (v === m[0] || v.length !== ln.length)
if (v === m[0] || v.length !== left.length)
return true;
s.pre = s.md.substring(0, s.n1) + v;
@@ -498,6 +678,248 @@ function md_backspace() {
}
// paragraph jump
function md_p_jump(down) {
var txt = dom_src.value,
ofs = dom_src.selectionStart;
if (down) {
while (txt[ofs] == '\n' && --ofs > 0);
ofs = txt.indexOf("\n\n", ofs);
if (ofs < 0)
ofs = txt.length - 1;
while (txt[ofs] == '\n' && ++ofs < txt.length - 1);
}
else {
txt += '\n\n';
while (ofs > 1 && txt[ofs - 1] == '\n') ofs--;
ofs = Math.max(0, txt.lastIndexOf("\n\n", ofs - 1));
while (txt[ofs] == '\n' && ++ofs < txt.length - 1);
}
dom_src.setSelectionRange(ofs, ofs, "none");
}
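// regex flavor of lastIndexOf: returns the highest ofs where ptn matches txt.slice(ofs, end), or -1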
function reLastIndexOf(txt, ptn, end) {
var ofs = (typeof end !== 'undefined') ? end : txt.length;
end = ofs;
while (ofs >= 0) {
var sub = txt.slice(ofs, end);
if (ptn.test(sub))
return ofs;
ofs--;
}
return -1;
}
// table formatter
function fmt_table(e) {
if (e) e.preventDefault();
//dom_tbox.setAttribute('class', '');
var txt = dom_src.value,
ofs = dom_src.selectionStart,
//o0 = txt.lastIndexOf('\n\n', ofs),
//o1 = txt.indexOf('\n\n', ofs);
o0 = reLastIndexOf(txt, /\n\s*\n/m, ofs),
o1 = txt.slice(ofs).search(/\n\s*\n|\n\s*$/m);
// note \s contains \n but it's fine
if (o0 < 0)
o0 = 0;
else {
// seek past the hit
var m = /\n\s*\n/m.exec(txt.slice(o0));
o0 += m[0].length;
}
o1 = o1 < 0 ? txt.length : o1 + ofs;
var err = 'cannot format table due to ',
tab = txt.slice(o0, o1).split(/\s*\n/),
re_ind = /^\s*/,
ind = tab[1].match(re_ind)[0],
r0_ind = tab[0].slice(0, ind.length),
lpipe = tab[1].indexOf('|') < tab[1].indexOf('-'),
rpipe = tab[1].lastIndexOf('|') > tab[1].lastIndexOf('-'),
re_lpipe = lpipe ? /^\s*\|\s*/ : /^\s*/,
re_rpipe = rpipe ? /\s*\|\s*$/ : /\s*$/,
ncols;
// the second row defines the table,
// need to process that first
var tmp = tab[0];
tab[0] = tab[1];
tab[1] = tmp;
for (var a = 0; a < tab.length; a++) {
var row_name = (a == 1) ? 'header' : 'row#' + (a + 1);
var ind2 = tab[a].match(re_ind)[0];
if (ind != ind2 && a != 1) // the table can be a list entry or something, ignore [0]
return alert(err + 'indentation mismatch on row#2 and ' + row_name + ',\n' + tab[a]);
var t = tab[a].slice(ind.length);
t = t.replace(re_lpipe, "");
t = t.replace(re_rpipe, "");
tab[a] = t.split(/\s*\|\s*/g);
if (a == 0)
ncols = tab[a].length;
else if (ncols < tab[a].length)
return alert(err + 'num.columns(' + row_name + ') exceeding row#2; ' + ncols + ' < ' + tab[a].length);
// if row has less columns than row2, fill them in
while (tab[a].length < ncols)
tab[a].push('');
}
// aight now swap em back
tmp = tab[0];
tab[0] = tab[1];
tab[1] = tmp;
var re_align = /^ *(:?)-+(:?) *$/;
var align = [];
for (var col = 0; col < tab[1].length; col++) {
var m = tab[1][col].match(re_align);
if (!m)
return alert(err + 'invalid column specification, row#2, col ' + (col + 1) + ', [' + tab[1][col] + ']');
if (m[2]) {
if (m[1])
align.push('c');
else
align.push('r');
}
else
align.push('l');
}
var pad = [];
var tmax = 0;
for (var col = 0; col < ncols; col++) {
var max = 0;
for (var row = 0; row < tab.length; row++)
if (row != 1)
max = Math.max(max, tab[row][col].length);
var s = '';
for (var n = 0; n < max; n++)
s += ' ';
pad.push(s);
tmax = Math.max(max, tmax);
}
var dashes = '';
for (var a = 0; a < tmax; a++)
dashes += '-';
var ret = [];
for (var row = 0; row < tab.length; row++) {
var ln = [];
for (var col = 0; col < tab[row].length; col++) {
var p = pad[col];
var s = tab[row][col];
if (align[col] == 'l') {
s = (s + p).slice(0, p.length);
}
else if (align[col] == 'r') {
s = (p + s).slice(-p.length);
}
else {
var pt = p.length - s.length;
var pl = p.slice(0, Math.floor(pt / 2));
var pr = p.slice(0, pt - pl.length);
s = pl + s + pr;
}
if (row == 1) {
if (align[col] == 'l')
s = dashes.slice(0, p.length);
else if (align[col] == 'r')
s = dashes.slice(0, p.length - 1) + ':';
else
s = ':' + dashes.slice(0, p.length - 2) + ':';
}
ln.push(s);
}
ret.push(ind + '| ' + ln.join(' | ') + ' |');
}
// restore any markup in the row0 gutter
ret[0] = r0_ind + ret[0].slice(ind.length);
ret = {
"pre": txt.slice(0, o0),
"sel": ret.join('\n'),
"post": txt.slice(o1),
"car": o0,
"cdr": o0
};
setsel(ret);
}
// show unicode
function mark_uni(e) {
if (e) e.preventDefault();
dom_tbox.setAttribute('class', '');
var txt = dom_src.value,
ptn = new RegExp('([^' + js_uni_whitelist + ']+)', 'g'),
mod = txt.replace(/\r/g, "").replace(ptn, "\u2588\u2770$1\u2771");
if (txt == mod) {
alert('no results; no modifications were made');
return;
}
dom_src.value = mod;
}
// iterate unicode
function iter_uni(e) {
if (e) e.preventDefault();
var txt = dom_src.value,
ofs = dom_src.selectionDirection == "forward" ? dom_src.selectionEnd : dom_src.selectionStart,
re = new RegExp('([^' + js_uni_whitelist + ']+)'),
m = re.exec(txt.slice(ofs));
if (!m) {
alert('no more hits from cursor onwards');
return;
}
ofs += m.index;
dom_src.setSelectionRange(ofs, ofs + m[0].length, "forward");
dom_src.oninput();
// support chrome:
dom_src.blur();
dom_src.focus();
}
// configure whitelist
function cfg_uni(e) {
if (e) e.preventDefault();
var reply = prompt("unicode whitelist", esc_uni_whitelist);
if (reply === null)
return;
esc_uni_whitelist = reply;
js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
}
// hotkeys / toolbar
(function () {
function keydown(ev) {
@@ -509,6 +931,11 @@ function md_backspace() {
save();
return false;
}
if (ev.code == "Escape" || kc == 27) {
var d = ebi('helpclose');
if (d)
d.click();
}
if (document.activeElement == dom_src) {
if (ev.code == "Tab" || kc == 9) {
md_indent(ev.shiftKey);
@@ -523,8 +950,7 @@ function md_backspace() {
return false;
}
if (!ctrl && !ev.shiftKey && (ev.code == "Enter" || kc == 13)) {
md_newline();
return false;
return md_newline();
}
if (ctrl && (ev.code == "KeyZ" || kc == 90)) {
if (ev.shiftKey)
@@ -541,27 +967,63 @@ function md_backspace() {
if (!ctrl && !ev.shiftKey && kc == 8) {
return md_backspace();
}
if (ctrl && (ev.code == "KeyK")) {
fmt_table();
return false;
}
if (ctrl && (ev.code == "KeyU")) {
iter_uni();
return false;
}
if (ctrl && (ev.code == "KeyE")) {
dom_nsbs.click();
//fmt_table();
return false;
}
var up = ev.code == "ArrowUp" || kc == 38;
var dn = ev.code == "ArrowDown" || kc == 40;
if (ctrl && (up || dn)) {
md_p_jump(dn);
return false;
}
}
}
document.onkeydown = keydown;
document.getElementById('save').onclick = save;
ebi('save').onclick = save;
})();
document.getElementById('help').onclick = function (e) {
ebi('tools').onclick = function (e) {
if (e) e.preventDefault();
var dom = document.getElementById('helpbox');
var is_open = dom_tbox.getAttribute('class') != 'open';
dom_tbox.setAttribute('class', is_open ? 'open' : '');
};
ebi('help').onclick = function (e) {
if (e) e.preventDefault();
dom_tbox.setAttribute('class', '');
var dom = ebi('helpbox');
var dtxt = dom.getElementsByTagName('textarea');
if (dtxt.length > 0)
dom.innerHTML = '<a href="#" id="helpclose">close</a>' + marked(dtxt[0].value);
if (dtxt.length > 0) {
convert_markdown(dtxt[0].value, dom);
dom.innerHTML = '<a href="#" id="helpclose">close</a>' + dom.innerHTML;
}
dom.style.display = 'block';
document.getElementById('helpclose').onclick = function () {
ebi('helpclose').onclick = function () {
dom.style.display = 'none';
};
};
ebi('fmt_table').onclick = fmt_table;
ebi('mark_uni').onclick = mark_uni;
ebi('iter_uni').onclick = iter_uni;
ebi('cfg_uni').onclick = cfg_uni;
// blame steen
action_stack = (function () {
var hist = {
@@ -587,7 +1049,7 @@ action_stack = (function () {
var p1 = from.length,
p2 = to.length;
while (p1 --> 0 && p2 --> 0)
while (p1-- > 0 && p2-- > 0)
if (from[p1] != to[p2])
break;
@@ -631,7 +1093,7 @@ action_stack = (function () {
dom_src.value = ref;
dom_src.setSelectionRange(cpos, cpos);
ignore = true; // all browsers
draw_md();
dom_src.oninput();
return true;
}
@@ -667,13 +1129,12 @@ action_stack = (function () {
ref = newtxt;
dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length);
if (hist.un.length > 0)
dbg(static(hist.un.slice(-1)[0]));
dbg(statify(hist.un.slice(-1)[0]));
if (hist.re.length > 0)
dbg(static(hist.re.slice(-1)[0]));
dbg(statify(hist.re.slice(-1)[0]));
}
return {
push: push,
undo: undo,
redo: redo,
push: schedule_push,
@@ -681,14 +1142,3 @@ action_stack = (function () {
_ref: ref
}
})();
/*
document.getElementById('help').onclick = function () {
var c1 = getComputedStyle(dom_src).cssText.split(';');
var c2 = getComputedStyle(dom_ref).cssText.split(';');
var max = Math.min(c1.length, c2.length);
for (var a = 0; a < max; a++)
if (c1[a] !== c2[a])
console.log(c1[a] + '\n' + c2[a]);
}
*/


@@ -8,68 +8,58 @@ html .editor-toolbar>i.separator { border-left: 1px solid #ccc; }
html .editor-toolbar.disabled-for-preview>button:not(.no-disable) { opacity: .35 }
html {
line-height: 1.5em;
line-height: 1.5em;
}
html, body {
margin: 0;
padding: 0;
min-height: 100%;
font-family: sans-serif;
background: #f7f7f7;
color: #333;
margin: 0;
padding: 0;
min-height: 100%;
font-family: sans-serif;
background: #f7f7f7;
color: #333;
}
#mn {
font-weight: normal;
margin: 1.3em 0 .7em 1em;
font-weight: normal;
margin: 1.3em 0 .7em 1em;
}
#mn a {
color: #444;
margin: 0 0 0 -.2em;
padding: 0 0 0 .4em;
text-decoration: none;
/* ie: */
border-bottom: .1em solid #777\9;
margin-right: 1em\9;
color: #444;
margin: 0 0 0 -.2em;
padding: 0 0 0 .4em;
text-decoration: none;
/* ie: */
border-bottom: .1em solid #777\9;
margin-right: 1em\9;
}
#mn a:first-child {
padding-left: .5em;
padding-left: .5em;
}
#mn a:last-child {
padding-right: .5em;
padding-right: .5em;
}
#mn a:not(:last-child):after {
content: '';
width: 1.05em;
height: 1.05em;
margin: -.2em .3em -.2em -.4em;
display: inline-block;
border: 1px solid rgba(0,0,0,0.2);
border-width: .2em .2em 0 0;
transform: rotate(45deg);
content: '';
width: 1.05em;
height: 1.05em;
margin: -.2em .3em -.2em -.4em;
display: inline-block;
border: 1px solid rgba(0,0,0,0.2);
border-width: .2em .2em 0 0;
transform: rotate(45deg);
}
#mn a:hover {
color: #000;
text-decoration: underline;
color: #000;
text-decoration: underline;
}
html .editor-toolbar>button.disabled {
opacity: .35;
pointer-events: none;
opacity: .35;
pointer-events: none;
}
html .editor-toolbar>button.save.force-save {
background: #f97;
background: #f97;
}
/*
*[data-ln]:before {
content: attr(data-ln);
font-size: .8em;
margin: 0 .4em;
color: #f0c;
}
.cm-header { font-size: .4em !important }
*/
@@ -101,29 +91,29 @@ html .editor-toolbar>button.save.force-save {
line-height: 1.1em;
}
.mdo a {
color: #fff;
background: #39b;
text-decoration: none;
padding: 0 .3em;
border: none;
border-bottom: .07em solid #079;
color: #fff;
background: #39b;
text-decoration: none;
padding: 0 .3em;
border: none;
border-bottom: .07em solid #079;
}
.mdo h2 {
color: #fff;
background: #555;
margin-top: 2em;
border-bottom: .22em solid #999;
border-top: none;
color: #fff;
background: #555;
margin-top: 2em;
border-bottom: .22em solid #999;
border-top: none;
}
.mdo h1 {
color: #fff;
background: #444;
font-weight: normal;
border-top: .4em solid #fb0;
border-bottom: .4em solid #777;
border-radius: 0 1em 0 1em;
margin: 3em 0 1em 0;
padding: .5em 0;
color: #fff;
background: #444;
font-weight: normal;
border-top: .4em solid #fb0;
border-bottom: .4em solid #777;
border-radius: 0 1em 0 1em;
margin: 3em 0 1em 0;
padding: .5em 0;
}
h1, h2 {
line-height: 1.5em;
@@ -160,8 +150,12 @@ h2 {
.mdo ol>li {
margin: .7em 0;
}
strong {
color: #000;
}
p>em,
li>em {
li>em,
td>em {
color: #c50;
padding: .1em;
border-bottom: .1em solid #bbb;
@@ -193,14 +187,14 @@ th {
/* mde support */
.mdo {
padding: 1em;
background: #f7f7f7;
padding: 1em;
background: #f7f7f7;
}
html.dark .mdo {
background: #1c1c1c;
background: #1c1c1c;
}
.CodeMirror {
background: #f7f7f7;
background: #f7f7f7;
}
@@ -210,104 +204,108 @@ html.dark .mdo {
/* darkmode */
html.dark .mdo,
html.dark .CodeMirror {
border-color: #222;
border-color: #222;
}
html.dark,
html.dark body,
html.dark .CodeMirror {
background: #222;
color: #ccc;
background: #222;
color: #ccc;
}
html.dark .CodeMirror-cursor {
border-color: #fff;
border-color: #fff;
}
html.dark .CodeMirror-selected {
box-shadow: 0 0 1px #0cf inset;
box-shadow: 0 0 1px #0cf inset;
}
html.dark .CodeMirror-selected,
html.dark .CodeMirror-selectedtext {
border-radius: .1em;
background: #246;
color: #fff;
border-radius: .1em;
background: #246;
color: #fff;
}
html.dark .mdo a {
background: #057;
background: #057;
}
html.dark .mdo h1 a, html.dark .mdo h4 a,
html.dark .mdo h2 a, html.dark .mdo h5 a,
html.dark .mdo h3 a, html.dark .mdo h6 a {
color: inherit;
background: none;
color: inherit;
background: none;
}
html.dark pre,
html.dark code {
color: #8c0;
background: #1a1a1a;
border: .07em solid #333;
color: #8c0;
background: #1a1a1a;
border: .07em solid #333;
}
html.dark .mdo ul,
html.dark .mdo ol {
border-color: #444;
border-color: #444;
}
html.dark .mdo>ul,
html.dark .mdo>ol {
border-color: #555;
border-color: #555;
}
html.dark strong {
color: #fff;
}
html.dark p>em,
html.dark li>em {
color: #f94;
border-color: #666;
html.dark li>em,
html.dark td>em {
color: #f94;
border-color: #666;
}
html.dark h1 {
background: #383838;
border-top: .4em solid #b80;
border-bottom: .4em solid #4c4c4c;
background: #383838;
border-top: .4em solid #b80;
border-bottom: .4em solid #4c4c4c;
}
html.dark h2 {
background: #444;
border-bottom: .22em solid #555;
background: #444;
border-bottom: .22em solid #555;
}
html.dark td,
html.dark th {
border-color: #444;
border-color: #444;
}
html.dark blockquote {
background: #282828;
border: .07em dashed #444;
background: #282828;
border: .07em dashed #444;
}
html.dark #mn a {
color: #ccc;
color: #ccc;
}
html.dark #mn a:not(:last-child):after {
border-color: rgba(255,255,255,0.3);
border-color: rgba(255,255,255,0.3);
}
html.dark .editor-toolbar {
border-color: #2c2c2c;
background: #1c1c1c;
border-color: #2c2c2c;
background: #1c1c1c;
}
html.dark .editor-toolbar>i.separator {
border-left: 1px solid #444;
border-right: 1px solid #111;
border-left: 1px solid #444;
border-right: 1px solid #111;
}
html.dark .editor-toolbar>button {
margin-left: -1px; border: 1px solid rgba(255,255,255,0.1);
color: #aaa;
margin-left: -1px; border: 1px solid rgba(255,255,255,0.1);
color: #aaa;
}
html.dark .editor-toolbar>button:hover {
color: #333;
color: #333;
}
html.dark .editor-toolbar>button.active {
color: #333;
border-color: #ec1;
background: #c90;
color: #333;
border-color: #ec1;
background: #c90;
}
html.dark .editor-toolbar::after,
html.dark .editor-toolbar::before {
background: none;
background: none;
}


@@ -17,28 +17,33 @@
</div>
</div>
<div id="m">
<textarea id="mt" style="display:none">{{ md }}</textarea>
<textarea id="mt" style="display:none" autocomplete="off">{{ md }}</textarea>
</div>
</div>
<script>
var link_md_as_html = false; // TODO (does nothing)
var last_modified = {{ lastmod }};
var md_opt = {
link_md_as_html: false,
allow_plugins: {{ md_plug }},
modpoll_freq: {{ md_chk_rate }}
};
var lightswitch = (function () {
var fun = function () {
var dark = !!!document.documentElement.getAttribute("class");
var dark = !document.documentElement.getAttribute("class");
document.documentElement.setAttribute("class", dark ? "dark" : "");
if (window.localStorage)
localStorage.setItem('darkmode', dark ? 1 : 0);
localStorage.setItem('lightmode', dark ? 0 : 1);
};
if (window.localStorage && localStorage.getItem('darkmode') == 1)
if (window.localStorage && localStorage.getItem('lightmode') != 1)
fun();
return fun;
})();
</script>
<script src="/.cpr/deps/easymde.full.js"></script>
<script src="/.cpr/util.js"></script>
<script src="/.cpr/deps/easymde.js"></script>
<script src="/.cpr/mde.js"></script>
</body></html>


@@ -1,7 +1,9 @@
var dom_wrap = document.getElementById('mw');
var dom_nav = document.getElementById('mn');
var dom_doc = document.getElementById('m');
var dom_md = document.getElementById('mt');
"use strict";
var dom_wrap = ebi('mw');
var dom_nav = ebi('mn');
var dom_doc = ebi('m');
var dom_md = ebi('mt');
(function () {
var n = document.location + '';
@@ -13,7 +15,7 @@ var dom_md = document.getElementById('mt');
if (a > 0)
loc.push(n[a]);
var dec = decodeURIComponent(n[a]).replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
var dec = uricom_dec(n[a])[0].replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
}
@@ -63,13 +65,13 @@ var mde = (function () {
mde.codemirror.on("change", function () {
md_changed(mde);
});
var loader = document.getElementById('ml');
var loader = ebi('ml');
loader.parentNode.removeChild(loader);
return mde;
})();
function set_jumpto() {
document.querySelector('.editor-preview-side').onclick = jumpto;
QS('.editor-preview-side').onclick = jumpto;
}
function jumpto(ev) {
@@ -92,7 +94,7 @@ function md_changed(mde, on_srv) {
window.md_saved = mde.value();
var md_now = mde.value();
var save_btn = document.querySelector('.editor-toolbar button.save');
var save_btn = QS('.editor-toolbar button.save');
if (md_now == window.md_saved)
save_btn.classList.add('disabled');
@@ -103,7 +105,7 @@ function md_changed(mde, on_srv) {
}
function save(mde) {
var save_btn = document.querySelector('.editor-toolbar button.save');
var save_btn = QS('.editor-toolbar button.save');
if (save_btn.classList.contains('disabled')) {
alert('there is nothing to save');
return;
@@ -121,7 +123,7 @@ function save(mde) {
fd.append("lastmod", (force ? -1 : last_modified));
fd.append("body", txt);
var url = (document.location + '').split('?')[0] + '?raw';
var url = (document.location + '').split('?')[0];
var xhr = new XMLHttpRequest();
xhr.open('POST', url, true);
xhr.responseType = 'text';
@@ -210,10 +212,10 @@ function save_chk() {
last_modified = this.lastmod;
md_changed(this.mde, true);
var ok = document.createElement('div');
var ok = mknod('div');
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
ok.innerHTML = 'OK✔';
var parent = document.getElementById('m');
var parent = ebi('m');
document.documentElement.appendChild(ok);
setTimeout(function () {
ok.style.opacity = 0;


@@ -3,7 +3,7 @@ html,body,tr,th,td,#files,a {
background: none;
font-weight: inherit;
font-size: inherit;
padding: none;
padding: 0;
border: none;
}
html {
@@ -20,8 +20,8 @@ body {
padding-bottom: 5em;
}
#box {
padding: .5em 1em;
background: #2c2c2c;
padding: .5em 1em;
background: #2c2c2c;
}
pre {
font-family: monospace, monospace;


@@ -26,6 +26,13 @@ a {
border-radius: .2em;
padding: .2em .8em;
}
td, th {
padding: .3em .6em;
text-align: left;
}
.btns {
margin: 1em 0;
}
html.dark,


@@ -13,23 +13,44 @@
<div id="wrap">
<p>hello {{ this.uname }}</p>
{%- if avol %}
<h1>admin panel:</h1>
<table>
<thead><tr><th>vol</th><th>action</th><th>status</th></tr></thead>
<tbody>
{% for mp in avol %}
{%- if mp in vstate and vstate[mp] %}
<tr><td><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a href="{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
{%- endif %}
{% endfor %}
</tbody>
</table>
<div class="btns">
<a href="{{ avol[0] }}?stack">dump stack</a>
</div>
{%- endif %}
{%- if rvol %}
<h1>you can browse these:</h1>
<ul>
{% for mp in rvol %}
<li><a href="/{{ mp }}">/{{ mp }}</a></li>
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
{% endfor %}
</ul>
{%- endif %}
{%- if wvol %}
<h1>you can upload to:</h1>
<ul>
{% for mp in wvol %}
<li><a href="/{{ mp }}">/{{ mp }}</a></li>
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
{% endfor %}
</ul>
{%- endif %}
<h1>login for more:</h1>
<ul>
<form method="post" enctype="multipart/form-data" action="/">
<form method="post" enctype="multipart/form-data" action="/{{ url_suf }}">
<input type="hidden" name="act" value="login" />
<input type="password" name="cppwd" />
<input type="submit" value="Login" />
@@ -38,7 +59,7 @@
</div>
<script>
if (window.localStorage && localStorage.getItem('darkmode') == 1)
if (window.localStorage && localStorage.getItem('lightmode') != 1)
document.documentElement.setAttribute("class", "dark");
</script>

File diff suppressed because it is too large


@@ -1,92 +1,4 @@
.opview {
display: none;
}
.opview.act {
display: block;
}
#ops a {
color: #fc5;
font-size: 1.5em;
padding: 0 .3em;
margin: 0;
outline: none;
}
#ops a.act {
text-decoration: underline;
}
/*
#ops a+a:after,
#ops a:first-child:after {
content: 'x';
color: #282828;
text-shadow: 0 0 .08em #01a7e1;
margin-left: .3em;
position: relative;
}
#ops a+a:before {
content: 'x';
color: #282828;
text-shadow: 0 0 .08em #ff3f1a;
margin-right: .3em;
margin-left: -.3em;
}
#ops a:last-child:after {
content: '';
}
#ops a.act:before,
#ops a.act:after {
text-decoration: none !important;
}
*/
#ops i {
font-size: 1.5em;
}
#ops i:before {
content: 'x';
color: #282828;
text-shadow: 0 0 .08em #01a7e1;
position: relative;
}
#ops i:after {
content: 'x';
color: #282828;
text-shadow: 0 0 .08em #ff3f1a;
margin-left: -.35em;
font-size: 1.05em;
}
#ops,
.opbox {
border: 1px solid #3a3a3a;
box-shadow: 0 0 1em #222 inset;
}
#ops {
display: none;
background: #333;
margin: 1.7em 1.5em 0 1.5em;
padding: .3em .6em;
border-radius: .3em;
border-width: .15em 0;
}
.opbox {
background: #2d2d2d;
margin: 1.5em 0 0 0;
padding: .5em;
border-radius: 0 1em 1em 0;
border-width: .15em .3em .3em 0;
max-width: 40em;
}
.opbox input {
margin: .5em;
}
.opbox input[type=text] {
color: #fff;
background: #383838;
border: none;
box-shadow: 0 0 .3em #222;
border-bottom: 1px solid #fc5;
border-radius: .2em;
padding: .2em .3em;
}
#op_up2k {
padding: 0 1em 1em 1em;
}
@@ -94,6 +6,9 @@
position: absolute;
top: 0;
left: 0;
width: 2px;
height: 2px;
overflow: hidden;
}
#u2form input {
background: #444;
@@ -104,10 +19,10 @@
color: #f87;
padding: .5em;
}
#u2form {
width: 2px;
height: 2px;
overflow: hidden;
#u2err.msg {
color: #999;
padding: .5em;
font-size: .9em;
}
#u2btn {
color: #eee;
@@ -117,17 +32,32 @@
background: linear-gradient(to bottom, #367 0%, #489 50%, #38788a 51%, #367 100%);
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#489', endColorstr='#38788a', GradientType=0);
text-decoration: none;
line-height: 1.5em;
line-height: 1.3em;
border: 1px solid #222;
border-radius: .4em;
text-align: center;
font-size: 2em;
margin: 1em auto;
padding: 1em 0;
width: 12em;
font-size: 1.5em;
margin: .5em auto;
padding: .8em 0;
width: 16em;
cursor: pointer;
box-shadow: .4em .4em 0 #111;
}
#op_up2k.srch #u2btn {
background: linear-gradient(to bottom, #ca3 0%, #fd8 50%, #fc6 51%, #b92 100%);
text-shadow: 1px 1px 1px #fc6;
color: #333;
}
#u2conf #u2btn {
margin: -1.5em 0;
padding: .8em 0;
width: 100%;
max-width: 12em;
display: inline-block;
}
#u2conf #u2btn_cw {
text-align: right;
}
#u2notbtn {
display: none;
text-align: center;
@@ -142,6 +72,9 @@
width: calc(100% - 2em);
max-width: 100em;
}
#op_up2k.srch #u2tab {
max-width: none;
}
#u2tab td {
border: 1px solid #ccc;
border-width: 0 0px 1px 0;
@@ -149,16 +82,61 @@
}
#u2tab td:nth-child(2) {
width: 5em;
white-space: nowrap;
}
#u2tab td:nth-child(3) {
width: 40%;
}
#u2tab tr+tr:hover td {
#op_up2k.srch #u2tab td:nth-child(3) {
font-family: sans-serif;
width: auto;
}
#u2tab tbody tr:hover td {
background: #222;
}
#u2cards {
padding: 1em 0 .3em 1em;
margin: 1.5em auto -2.5em auto;
white-space: nowrap;
text-align: center;
overflow: hidden;
}
#u2cards.w {
width: 45em;
text-align: left;
}
#u2cards a {
padding: .2em 1em;
border: 1px solid #777;
border-width: 0 0 1px 0;
background: linear-gradient(to bottom, #333, #222);
}
#u2cards a:first-child {
border-radius: .4em 0 0 0;
}
#u2cards a:last-child {
border-radius: 0 .4em 0 0;
}
#u2cards a.act {
padding-bottom: .5em;
border-width: 1px 1px .1em 1px;
border-radius: .3em .3em 0 0;
margin-left: -1px;
background: linear-gradient(to bottom, #464, #333 80%);
box-shadow: 0 -.17em .67em #280;
border-color: #7c5 #583 #333 #583;
position: relative;
color: #fd7;
}
#u2cards span {
color: #fff;
}
#u2conf {
margin: 1em auto;
width: 26em;
width: 30em;
}
#u2conf.has_btn {
width: 48em;
}
#u2conf * {
text-align: center;
@@ -169,12 +147,16 @@
outline: none;
}
#u2conf .txtbox {
width: 4em;
width: 3em;
color: #fff;
background: #444;
border: 1px solid #777;
font-size: 1.2em;
padding: .15em 0;
height: 1.05em;
}
#u2conf .txtbox.err {
background: #922;
}
#u2conf a {
color: #fff;
@@ -183,39 +165,137 @@
border-radius: .1em;
font-size: 1.5em;
padding: .1em 0;
margin: 0 -.25em;
margin: 0 -1px;
width: 1.5em;
height: 1em;
display: inline-block;
position: relative;
line-height: 1em;
bottom: -.08em;
bottom: -0.08em;
}
#u2conf input+a {
background: #d80;
}
#u2conf label {
font-size: 1.6em;
width: 2em;
height: 1em;
padding: .4em 0;
display: block;
border-radius: .25em;
}
#u2conf input[type="checkbox"] {
position: relative;
opacity: .02;
top: 2em;
}
#u2conf input[type="checkbox"]+label {
position: relative;
background: #603;
border-bottom: .2em solid #a16;
box-shadow: 0 .1em .3em #a00 inset;
}
#u2conf input[type="checkbox"]:checked+label {
background: #6a1;
border-bottom: .2em solid #efa;
box-shadow: 0 .1em .5em #0c0;
}
#u2conf input[type="checkbox"]+label:hover {
box-shadow: 0 .1em .3em #fb0;
border-color: #fb0;
}
#op_up2k.srch #u2conf td:nth-child(1)>*,
#op_up2k.srch #u2conf td:nth-child(2)>*,
#op_up2k.srch #u2conf td:nth-child(3)>* {
background: #777;
border-color: #ccc;
box-shadow: none;
opacity: .2;
}
#u2cdesc {
position: absolute;
width: 34em;
left: calc(50% - 15em);
background: #222;
border: 0 solid #555;
text-align: center;
overflow: hidden;
margin: 0 -2em;
padding: 0 1em;
height: 0;
opacity: .1;
transition: all 0.14s ease-in-out;
box-shadow: 0 .2em .5em #222;
border-radius: .4em;
z-index: 1;
}
#u2cdesc.show {
padding: 1em;
height: auto;
border-width: .2em 0;
opacity: 1;
}
#u2foot {
color: #fff;
font-style: italic;
}
#u2foot span {
color: #999;
font-size: .9em;
}
#u2footfoot {
margin-bottom: -1em;
}
.prog {
font-family: monospace;
}
.prog>div {
display: inline-block;
position: relative;
overflow: hidden;
margin: 0;
padding: 0;
height: 1.1em;
margin-bottom: -.15em;
box-shadow: -1px -1px 0 inset rgba(255,255,255,0.1);
#u2tab a>span {
font-weight: bold;
font-style: italic;
color: #fff;
padding-left: .2em;
}
.prog>div>div {
width: 0%;
position: absolute;
left: 0;
top: 0;
bottom: 0;
background: #0a0;
#u2cleanup {
float: right;
margin-bottom: -.3em;
}
html.light #u2btn {
box-shadow: .4em .4em 0 #ccc;
}
html.light #u2cards span {
color: #000;
}
html.light #u2cards a {
background: linear-gradient(to bottom, #eee, #fff);
}
html.light #u2cards a.act {
color: #037;
background: inherit;
box-shadow: 0 -.17em .67em #0ad;
border-color: #09c #05a #eee #05a;
}
html.light #u2conf .txtbox {
background: #fff;
color: #444;
}
html.light #u2conf .txtbox.err {
background: #f96;
color: #300;
}
html.light #u2cdesc {
background: #fff;
border: none;
}
html.light #op_up2k.srch #u2btn {
border-color: #a80;
}
html.light #u2foot {
color: #000;
}
html.light #u2tab tbody tr:hover td {
background: #fff;
}


@@ -1,13 +1,7 @@
<div id="ops"><a
href="#" data-dest="">---</a><i></i><a
href="#" data-dest="up2k">up2k</a><i></i><a
href="#" data-dest="bup">bup</a><i></i><a
href="#" data-dest="mkdir">mkdir</a><i></i><a
href="#" data-dest="new_md">new.md</a></div>
<div id="op_bup" class="opview opbox act">
<div id="u2err"></div>
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="bput" />
<input type="file" name="f" multiple><br />
<input type="submit" value="start upload">
@@ -15,7 +9,7 @@
</div>
<div id="op_mkdir" class="opview opbox act">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="mkdir" />
<input type="text" name="name" size="30">
<input type="submit" value="mkdir">
@@ -23,48 +17,87 @@
</div>
<div id="op_new_md" class="opview opbox">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="new_md" />
<input type="text" name="name" size="30">
<input type="submit" value="create doc">
</form>
</div>
<div id="op_msg" class="opview opbox act">
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
<input type="text" name="msg" size="30">
<input type="submit" value="send msg">
</form>
</div>
<div id="op_up2k" class="opview">
<form id="u2form" method="post" enctype="multipart/form-data" onsubmit="return false;"></form>
<table id="u2conf">
<tr>
<td>parallel uploads</td>
<td><br />parallel uploads:</td>
<td rowspan="2">
<input type="checkbox" id="multitask" />
<label for="multitask" alt="continue hashing other files while uploading">🏃</label>
</td>
<td rowspan="2">
<input type="checkbox" id="ask_up" />
<label for="ask_up" alt="ask for confirmation befofre upload starts">💭</label>
</td>
<td rowspan="2">
<input type="checkbox" id="flag_en" />
<label for="flag_en" alt="ensure only one tab is uploading at a time $N (other tabs must have this enabled too)">💤</label>
</td>
{%- if have_up2k_idx %}
<td data-perm="read" rowspan="2">
<input type="checkbox" id="fsearch" />
<label for="fsearch" alt="don't actually upload, instead check if the files already $N exist on the server (will scan all folders you can read)">🔎</label>
</td>
{%- endif %}
<td data-perm="read" rowspan="2" id="u2btn_cw"></td>
</tr>
<tr>
<td>
<a href="#" id="nthread_sub">&ndash;</a>
<input class="txtbox" id="nthread" value="2" />
<a href="#" id="nthread_add">+</a>
</td>
<td rowspan="2">
<input type="checkbox" id="multitask" />
<label for="multitask">hash while<br />uploading</label>
<a href="#" id="nthread_sub">&ndash;</a><input
class="txtbox" id="nthread" value="2"/><a
href="#" id="nthread_add">+</a><br />&nbsp;
</td>
</tr>
</table>
<div id="u2cdesc"></div>
<div id="u2notbtn"></div>
<div id="u2btn">
drop files here<br />
(or click me)
<div id="u2btn_ct">
<div id="u2btn">
<span id="u2bm"></span><br />
drag/drop files<br />
and folders here<br />
(or click me)
</div>
</div>
<div id="u2cards">
<a href="#" act="ok">ok <span>0</span></a><a
href="#" act="ng">ng <span>0</span></a><a
href="#" act="done">done <span>0</span></a><a
href="#" act="bz" class="act">busy <span>0</span></a><a
href="#" act="q">que <span>0</span></a>
</div>
<table id="u2tab">
<tr>
<td>filename</td>
<td>status</td>
<td>progress</td>
</tr>
<thead>
<tr>
<td>filename</td>
<td>status</td>
<td>progress<a href="#" id="u2cleanup">cleanup</a></td>
</tr>
</thead>
<tbody></tbody>
</table>
<p id="u2foot"></p>
<p>( if you don't need lastmod timestamps, resumable uploads or progress bars just use the <a href="#" id="u2nope">basic uploader</a>)</p>
<p id="u2footfoot" data-perm="write">( you can use the <a href="#" id="u2nope">basic uploader</a> if you don't need lastmod timestamps, resumable uploads, or progress bars )</p>
</div>

copyparty/web/util.js Normal file

@@ -0,0 +1,530 @@
"use strict";
if (!window['console'])
window['console'] = {
"log": function (msg) { }
};
var clickev = window.Touch ? 'touchstart' : 'click',
ANDROID = /(android)/i.test(navigator.userAgent);
// error handler for mobile devices
function hcroak(msg) {
document.body.innerHTML = msg;
window.onerror = undefined;
throw 'fatal_err';
}
function croak(msg) {
document.body.textContent = msg;
window.onerror = undefined;
throw msg;
}
function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) {
return {
'&': '&amp;',
'"': '&quot;',
'<': '&lt;',
'>': '&gt;'
}[c];
});
}
function vis_exh(msg, url, lineNo, columnNo, error) {
window.onerror = undefined;
window['vis_exh'] = null;
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
if (error) {
var find = ['desc', 'stack', 'trace'];
for (var a = 0; a < find.length; a++)
if (String(error[find[a]]) !== 'undefined')
html.push('<h2>' + find[a] + '</h2>' +
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
}
document.body.style.fontSize = '0.8em';
document.body.style.padding = '0 1em 1em 1em';
hcroak(html.join('\n'));
}
var ebi = document.getElementById.bind(document),
QS = document.querySelector.bind(document),
QSA = document.querySelectorAll.bind(document),
mknod = document.createElement.bind(document);
function ev(e) {
e = e || window.event;
if (!e)
return;
if (e.preventDefault)
e.preventDefault()
if (e.stopPropagation)
e.stopPropagation();
e.returnValue = false;
return e;
}
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
if (!String.prototype.endsWith) {
String.prototype.endsWith = function (search, this_len) {
if (this_len === undefined || this_len > this.length) {
this_len = this.length;
}
return this.substring(this_len - search.length, this_len) === search;
};
}
if (!String.prototype.startsWith) {
String.prototype.startsWith = function (s, i) {
i = i > 0 ? i | 0 : 0;
return this.substring(i, i + s.length) === s;
};
}
// https://stackoverflow.com/a/950146
function import_js(url, cb) {
var head = document.head || document.getElementsByTagName('head')[0];
var script = mknod('script');
script.type = 'text/javascript';
script.src = url;
script.onreadystatechange = cb;
script.onload = cb;
head.appendChild(script);
}
var crctab = (function () {
var c, tab = [];
for (var n = 0; n < 256; n++) {
c = n;
for (var k = 0; k < 8; k++) {
c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1));
}
tab[n] = c;
}
return tab;
})();
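// crc32 of a string using the table above, returned as lowercase hex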
function crc32(str) {
var crc = 0 ^ (-1);
for (var i = 0; i < str.length; i++) {
crc = (crc >>> 8) ^ crctab[(crc ^ str.charCodeAt(i)) & 0xFF];
}
return ((crc ^ (-1)) >>> 0).toString(16);
}
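// add (truthy), remove (falsy), or toggle ('t') the css class cls on obj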
function clmod(obj, cls, add) {
var re = new RegExp('\\s*\\b' + cls + '\\s*\\b', 'g');
if (add == 't')
add = !re.test(obj.className);
obj.className = obj.className.replace(re, ' ') + (add ? ' ' + cls : '');
}
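// sort a file listing according to the sort rules persisted in localStorage as 'fsort'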
function sortfiles(nodes) {
var sopts = jread('fsort', [["href", 1, ""]]);
try {
var is_srch = false;
if (nodes[0]['rp']) {
is_srch = true;
for (var b = 0, bb = nodes.length; b < bb; b++)
nodes[b].ext = nodes[b].rp.split('.').pop();
for (var b = 0; b < sopts.length; b++)
if (sopts[b][0] == 'href')
sopts[b][0] = 'rp';
}
for (var a = sopts.length - 1; a >= 0; a--) {
var name = sopts[a][0], rev = sopts[a][1], typ = sopts[a][2];
if (!name)
continue;
if (name == 'ts')
typ = 'int';
if (name.indexOf('tags/') === 0) {
name = name.slice(5);
for (var b = 0, bb = nodes.length; b < bb; b++)
nodes[b]._sv = nodes[b].tags[name];
}
else {
for (var b = 0, bb = nodes.length; b < bb; b++) {
var v = nodes[b][name];
if ((v + '').indexOf('<a ') === 0)
v = v.split('>')[1];
else if (name == "href" && v) {
if (v.slice(-1) == '/')
v = '\t' + v;
v = uricom_dec(v)[0]
}
nodes[b]._sv = v;
}
}
var onodes = nodes.map(function (x) { return x; });
nodes.sort(function (n1, n2) {
var v1 = n1._sv,
v2 = n2._sv;
if (v1 === undefined) {
if (v2 === undefined) {
return onodes.indexOf(n1) - onodes.indexOf(n2);
}
return -1 * rev;
}
if (v2 === undefined) return 1 * rev;
var ret = rev * (typ == 'int' ? (v1 - v2) : (v1.localeCompare(v2)));
if (ret === 0)
ret = onodes.indexOf(n1) - onodes.indexOf(n2);
return ret;
});
}
for (var b = 0, bb = nodes.length; b < bb; b++) {
delete nodes[b]._sv;
if (is_srch)
delete nodes[b].ext;
}
}
catch (ex) {
console.log("failed to apply sort config: " + ex);
console.log("resetting fsort " + sread('fsort'))
localStorage.removeItem('fsort');
}
return nodes;
}
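// sort the table body by the given column and persist the new rule into 'fsort'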
function sortTable(table, col, cb) {
var tb = table.tBodies[0],
th = table.tHead.rows[0].cells,
tr = Array.prototype.slice.call(tb.rows, 0),
i, reverse = th[col].className.indexOf('sort1') !== -1 ? -1 : 1;
for (var a = 0, thl = th.length; a < thl; a++)
th[a].className = th[a].className.replace(/ *sort-?1 */, " ");
th[col].className += ' sort' + reverse;
var stype = th[col].getAttribute('sort');
try {
var nrules = [], rules = jread("fsort", []);
rules.unshift([th[col].getAttribute('name'), reverse, stype || '']);
for (var a = 0; a < rules.length; a++) {
var add = true;
for (var b = 0; b < a; b++)
if (rules[a][0] == rules[b][0])
add = false;
if (add)
nrules.push(rules[a]);
if (nrules.length >= 10)
break;
}
jwrite("fsort", nrules);
}
catch (ex) {
console.log("failed to persist sort rules, resetting: " + ex);
jwrite("fsort", null);
}
var vl = [];
for (var a = 0; a < tr.length; a++) {
var cell = tr[a].cells[col];
if (!cell) {
vl.push([null, a]);
continue;
}
var v = cell.getAttribute('sortv') || cell.textContent.trim();
if (stype == 'int') {
v = parseInt(v.replace(/[, ]/g, '')) || 0;
}
vl.push([v, a]);
}
vl.sort(function (a, b) {
a = a[0];
b = b[0];
if (a === null)
return -1;
if (b === null)
return 1;
if (stype == 'int') {
return reverse * (a - b);
}
return reverse * (a.localeCompare(b));
});
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[vl[i][1]]);
if (cb) cb();
}
function makeSortable(table, cb) {
var th = table.tHead, i;
th && (th = th.rows[0]) && (th = th.cells);
if (th) i = th.length;
else return; // if no `<thead>` then do nothing
while (--i >= 0) (function (i) {
th[i].onclick = function (e) {
ev(e);
sortTable(table, i, cb);
};
}(i));
}
(function () {
var ops = QSA('#ops>a');
for (var a = 0; a < ops.length; a++) {
ops[a].onclick = opclick;
}
})();
function opclick(e) {
ev(e);
var dest = this.getAttribute('data-dest');
goto(dest);
swrite('opmode', dest || null);
var input = QS('.opview.act input:not([type="hidden"])')
if (input)
input.focus();
}
function goto(dest) {
var obj = QSA('.opview.act');
for (var a = obj.length - 1; a >= 0; a--)
clmod(obj[a], 'act');
obj = QSA('#ops>a');
for (var a = obj.length - 1; a >= 0; a--)
clmod(obj[a], 'act');
if (dest) {
var ui = ebi('op_' + dest);
clmod(ui, 'act', true);
QS('#ops>a[data-dest=' + dest + ']').className += " act";
var fn = window['goto_' + dest];
if (fn)
fn();
}
if (window['treectl'])
treectl.onscroll();
}
(function () {
goto();
var op = sread('opmode');
if (op !== null && op !== '.')
try {
goto(op);
}
catch (ex) { }
})();
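// split a file path into one escaped breadcrumb link per segment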
function linksplit(rp) {
var ret = [];
var apath = '/';
if (rp && rp.charAt(0) == '/')
rp = rp.slice(1);
while (rp) {
var link = rp;
var ofs = rp.indexOf('/');
if (ofs === -1) {
rp = null;
}
else {
link = rp.slice(0, ofs + 1);
rp = rp.slice(ofs + 1);
}
var vlink = esc(link),
elink = uricom_enc(link);
if (link.indexOf('/') !== -1) {
vlink = vlink.slice(0, -1) + '<span>/</span>';
elink = elink.slice(0, -3) + '/';
}
ret.push('<a href="' + apath + elink + '">' + vlink + '</a>');
apath += elink;
}
return ret;
}
function uricom_enc(txt, do_fb_enc) {
try {
return encodeURIComponent(txt);
}
catch (ex) {
console.log("uce-err [" + txt + "]");
if (do_fb_enc)
return esc(txt);
return txt;
}
}
function uricom_dec(txt) {
try {
return [decodeURIComponent(txt), true];
}
catch (ex) {
console.log("ucd-err [" + txt + "]");
return [txt, false];
}
}
function get_evpath() {
var ret = document.location.pathname;
if (ret.indexOf('/') !== 0)
ret = '/' + ret;
if (ret.lastIndexOf('/') !== ret.length - 1)
ret += '/';
return ret;
}
function get_vpath() {
return uricom_dec(get_evpath())[0];
}
function unix2iso(ts) {
return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
}
function s2ms(s) {
s = Math.floor(s);
var m = Math.floor(s / 60);
return m + ":" + ("0" + (s - m * 60)).slice(-2);
}
function has(haystack, needle) {
for (var a = 0; a < haystack.length; a++)
if (haystack[a] == needle)
return true;
return false;
}
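// localStorage helpers: sread/swrite store plain strings (null/undefined removes the key), jread/jwrite wrap them in JSON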
function sread(key) {
if (window.localStorage)
return localStorage.getItem(key);
return null;
}
function swrite(key, val) {
if (window.localStorage) {
if (val === undefined || val === null)
localStorage.removeItem(key);
else
localStorage.setItem(key, val);
}
}
function jread(key, fb) {
var str = sread(key);
if (!str)
return fb;
return JSON.parse(str);
}
function jwrite(key, val) {
if (!val)
swrite(key);
else
swrite(key, JSON.stringify(val));
}
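// config getters: read a numeric (icfg/fcfg) or boolean (bcfg) setting from localStorage, falling back to the matching DOM element or the given default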
function icfg_get(name, defval) {
return parseInt(fcfg_get(name, defval));
}
function fcfg_get(name, defval) {
var o = ebi(name);
var val = parseFloat(sread(name));
if (isNaN(val))
return parseFloat(o ? o.value : defval);
if (o)
o.value = val;
return val;
}
function bcfg_get(name, defval) {
var o = ebi(name);
if (!o)
return defval;
var val = sread(name);
if (val === null)
val = defval;
else
val = (val == '1');
bcfg_upd_ui(name, val);
return val;
}
function bcfg_set(name, val) {
swrite(name, val ? '1' : '0');
bcfg_upd_ui(name, val);
return val;
}
function bcfg_upd_ui(name, val) {
var o = ebi(name);
if (!o)
return;
if (o.getAttribute('type') == 'checkbox')
o.checked = val;
else if (o) {
clmod(o, 'on', val);
}
}
function hist_push(url) {
console.log("h-push " + url);
history.pushState(url, url, url);
}
function hist_replace(url) {
console.log("h-repl " + url);
history.replaceState(url, url, url);
}


@@ -32,9 +32,13 @@ r
# and a folder where anyone can upload
# but nobody can see the contents
# and set the e2d flag to enable the uploads database
# and set the nodupe flag to reject duplicate uploads
/home/ed/inc
/dump
w
c e2d
c nodupe
# this entire config file can be replaced with these arguments:
# -u ed:123 -u k:k -v .::r:aed -v priv:priv:rk:aed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w

docs/minimal-up2k.html Normal file

@@ -0,0 +1,32 @@
<!--
save this as .epilogue.html inside a write-only folder to declutter the UI; it makes it look like
https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png
-->
<style>
/* make the up2k ui REALLY minimal by hiding a bunch of stuff: */
#ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
#u2cards /* and the upload progress tabs */
{display: none !important} /* do it! */
/* add some margins because now it's weird */
.opview {margin-top: 2.5em}
#op_up2k {margin-top: 3em}
/* and embiggen the upload button */
#u2conf #u2btn, #u2btn {padding:1.5em 0}
/* adjust the button area a bit */
#u2conf.has_btn {width: 35em !important; margin: 5em auto}
</style>
<a href="#" onclick="this.parentNode.innerHTML='';">show advanced options</a>

docs/music-analysis.sh Normal file

@@ -0,0 +1,242 @@
#!/bin/bash
echo please dont actually run this as a script
exit 1
# dependency-heavy, not particularly good fit
pacman -S llvm10
python3 -m pip install --user librosa
git clone https://github.com/librosa/librosa.git
# correct bpm for tracks with bad tags
br='
/Trip Trip Trip\(Hardcore Edit\).mp3/ {v=176}
/World!!.BIG_SOS/ {v=175}
/\/08\..*\(BIG_SOS Bootleg\)\.mp3/ {v=175}
/もってけ!セーラ服.Asterisk DnB/ {v=175}
/Rondo\(Asterisk DnB Re.mp3/ {v=175}
/Ray Nautica 175 Edit/ {v=175;x="thunk"}
/TOKIMEKI Language.Jauz/ {v=174}
/YUPPUN Hardcore Remix\).mp3/ {v=174;x="keeps drifting"}
/(èâAâï.î╧ûδ|バーチャリアル.狐耶)J-Core Remix\).mp3/ {v=172;x="hard"}
/lucky train..Freezer/ {v=170}
/Alf zero Bootleg ReMix/ {v=170}
/Prisoner of Love.Kacky/ {v=170}
/火炎 .Qota/ {v=170}
/\(hu-zin Bootleg\)\.mp3/ {v=170}
/15. STRAIGHT BET\(Milynn Bootleg\)\.mp3/ {v=170}
/\/13.*\(Milynn Bootleg\)\.mp3/ {v=167;x="way hard"}
/COLOR PLANET .10SAI . nijikon Remix\)\.mp3/ {v=165}
/11\. (朝はご飯派|Æ⌐é═é▓ö╤öh)\.mp3/ {v=162}
/09\. Where.s the core/ {v=160}
/PLANET\(Koushif Jersey Club Bootleg\)remaster.mp3/ {v=160;x="starts ez turns bs"}
/kened Soul - Madeon x Angel Beats!.mp3/ {v=160}
/Dear Moments\(Mother Harlot Bootleg\)\.mp3/ {v=150}
/POWER.Ringos UKG/ {v=140}
/ブルー・フィールド\(Ringos UKG Remix\).mp3/ {v=135}
/プラチナジェット.Ringo Remix..mp3/ {v=131.2}
/Mirrorball Love \(TKM Bootleg Mix\).mp3/ {v=130}
/Photon Melodies \(TKM Bootleg Mix\).mp3/ {v=128}
/Trap of Love \(TKM Bootleg Mix\).mp3/ {v=128}
/One Step \(TKM Bootleg Mix\)\.mp3/ {v=126}
/04 (トリカムイ岩|âgâèâJâÇâCèΓ).mp3/ {v=125}
/Get your Wish \(NAWN REMIX\)\.mp3/ {v=95}
/Flicker .Nitro Fun/ {v=92}
/\/14\..*suicat Remix/ {v=85.5;x="tricky"}
/Yanagi Nagi - Harumodoki \(EO Remix\)\.mp3/ {v=150}
/Azure - Nicology\.mp3/ {v=128;x="off by 5 how"}
'
# runfun: runs the afun defined below on each track, collects/grades the results
runfun() { cores=8; touch run; rm -f /dev/shm/mres.*; t00=$(date +%s); tbc() { bc | sed -r 's/(\.[0-9]{2}).*/\1/'; }; for ((core=0; core<$cores; core++)); do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db 'select dur.w, dur.v, bpm.v from mt bpm join mt dur on bpm.w = dur.w where bpm.k = ".bpm" and dur.k = ".dur" order by dur.w' | uniq -w16 | while IFS=\| read w dur bpm; do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db "select rd, fn from up where substr(w,1,16) = '$w'" | sed -r "s/^/$bpm /"; done | grep mir/cr | tr \| / | awk '{v=$1;sub(/[^ ]+ /,"")} '"$br"' {printf "%s %s\n",v,$0}' | while read bpm fn; do [ -e run ] || break; n=$((n+1)); ncore=$((n%cores)); [ $ncore -eq $core ] || continue; t0=$(date +%s.%N); (afun || exit 1; t=$(date +%s.%N); td=$(echo "scale=3; $t - $t0" | tbc); bd=$(echo "scale=3; $bpm / $py" | tbc); printf '%4s sec, %4s orig, %6s py, %4s div, %s\n' $td $bpm $py $bd "$fn") | tee -a /dev/shm/mres.$ncore; rv=${PIPESTATUS[0]}; [ $rv -eq 0 ] || { echo "FAULT($rv): $fn"; }; done & done; wait 2>/dev/null; cat /dev/shm/mres.* | awk 'function prt(c) {printf "\033[3%sm%s\033[0m\n",c,$0} $8!="div,"{next} $5!~/^[0-9\.]+/{next} {meta=$3;det=$5;div=meta/det} div<0.7{det/=2} div>1.3{det*=2} {idet=sprintf("%.0f",det)} {idiff=idet-meta} meta>idet{idiff=meta-idet} idiff==0{n0++;prt(6);next} idiff==1{n1++;prt(3);next} idiff>10{nx++;prt(1);next} {n10++;prt(5)} END {printf "ok: %d 1off: %2s (%3s) 10off: %2s (%3s) fail: %2s\n",n0,n1,n0+n1,n10,n0+n1+n10,nx}'; te=$(date +%s); echo $((te-t00)) sec spent; }
# ok: 8 1off: 62 ( 70) 10off: 86 (156) fail: 25 # 105 sec, librosa @ 8c archvm on 3700x w10
# ok: 4 1off: 59 ( 63) 10off: 65 (128) fail: 53 # using original tags (bad)
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -t 60 /dev/shm/$core.wav || return 1; py="$(/home/ed/src/librosa/examples/beat_tracker.py /dev/shm/$core.wav x 2>&1 | awk 'BEGIN {v=1} /^Estimated tempo: /{v=$3} END {print v}')"; } runfun
# ok: 119 1off: 5 (124) 10off: 8 (132) fail: 49 # 51 sec, vamp-example-fixedtempo
# ok: 109 1off: 4 (113) 10off: 9 (122) fail: 59 # bad-tags
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 22050 -f f32le /dev/shm/$core.pcm || return 1; py="$(python3 -c 'import vamp; import numpy as np; f = open("/dev/shm/'$core'.pcm", "rb"); d = np.fromfile(f, dtype=np.float32); c = vamp.collect(d, 22050, "vamp-example-plugins:fixedtempo", parameters={"maxdflen":40}); print(c["list"][0]["label"].split(" ")[0])')"; }; runfun
# ok: 102 1off: 61 (163) 10off: 12 (175) fail: 6 # 61 sec, vamp-qm-tempotracker
# ok: 80 1off: 48 (128) 10off: 11 (139) fail: 42 # bad-tags
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 22050 -f f32le /dev/shm/$core.pcm || return 1; py="$(python3 -c 'import vamp; import numpy as np; f = open("/dev/shm/'$core'.pcm", "rb"); d = np.fromfile(f, dtype=np.float32); c = vamp.collect(d, 22050, "qm-vamp-plugins:qm-tempotracker", parameters={"inputtempo":150}); v = [float(x["label"].split(" ")[0]) for x in c["list"] if x["label"]]; v = list(sorted(v))[len(v)//4:-len(v)//4]; print(round(sum(v) / len(v), 1))')"; }; runfun
# ok: 133 1off: 32 (165) 10off: 12 (177) fail: 3 # 51 sec, vamp-beatroot
# ok: 101 1off: 22 (123) 10off: 16 (139) fail: 39 # bad-tags
# note: some tracks fully fail to analyze (unlike the others which always provide a guess)
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 22050 -f f32le /dev/shm/$core.pcm || return 1; py="$(python3 -c 'import vamp; import numpy as np; f = open("/dev/shm/'$core'.pcm", "rb"); d = np.fromfile(f, dtype=np.float32); c = vamp.collect(d, 22050, "beatroot-vamp:beatroot"); cl=c["list"]; print(round(60*((len(cl)-1)/(float(cl[-1]["timestamp"]-cl[1]["timestamp"]))), 2))')"; }; runfun
# ok: 124 1off: 9 (133) 10off: 40 (173) fail: 8 # 231 sec, essentia/full
# ok: 109 1off: 8 (117) 10off: 22 (139) fail: 42 # bad-tags
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 /dev/shm/$core.wav || return 1; py="$(python3 -c 'import essentia; import essentia.standard as es; fe, fef = es.MusicExtractor(lowlevelStats=["mean", "stdev"], rhythmStats=["mean", "stdev"], tonalStats=["mean", "stdev"])("/dev/shm/'$core'.wav"); print("{:.2f}".format(fe["rhythm.bpm"]))')"; }; runfun
# ok: 113 1off: 18 (131) 10off: 46 (177) fail: 4 # 134 sec, essentia/re2013
# ok: 101 1off: 15 (116) 10off: 26 (142) fail: 39 # bad-tags
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 /dev/shm/$core.wav || return 1; py="$(python3 -c 'from essentia.standard import *; a=MonoLoader(filename="/dev/shm/'$core'.wav")(); bpm,beats,confidence,_,intervals=RhythmExtractor2013(method="multifeature")(a); print("{:.2f}".format(bpm))')"; }; runfun
########################################################################
##
## key detection
##
########################################################################
# console scriptlet reusing keytabs from browser.js
var m=''; for (var a=0; a<24; a++) m += 's/\\|(' + maps["traktor_sharps"][a].trim() + "|" + maps["rekobo_classic"][a].trim() + "|" + maps["traktor_musical"][a].trim() + "|" + maps["traktor_open"][a].trim() + ')$/|' + maps["rekobo_alnum"][a].trim() + '/;'; console.log(m);
# translate to camelot
re='s/\|(B|B|B|6d)$/|1B/;s/\|(F#|F#|Gb|7d)$/|2B/;s/\|(C#|Db|Db|8d)$/|3B/;s/\|(G#|Ab|Ab|9d)$/|4B/;s/\|(D#|Eb|Eb|10d)$/|5B/;s/\|(A#|Bb|Bb|11d)$/|6B/;s/\|(F|F|F|12d)$/|7B/;s/\|(C|C|C|1d)$/|8B/;s/\|(G|G|G|2d)$/|9B/;s/\|(D|D|D|3d)$/|10B/;s/\|(A|A|A|4d)$/|11B/;s/\|(E|E|E|5d)$/|12B/;s/\|(G#m|Abm|Abm|6m)$/|1A/;s/\|(D#m|Ebm|Ebm|7m)$/|2A/;s/\|(A#m|Bbm|Bbm|8m)$/|3A/;s/\|(Fm|Fm|Fm|9m)$/|4A/;s/\|(Cm|Cm|Cm|10m)$/|5A/;s/\|(Gm|Gm|Gm|11m)$/|6A/;s/\|(Dm|Dm|Dm|12m)$/|7A/;s/\|(Am|Am|Am|1m)$/|8A/;s/\|(Em|Em|Em|2m)$/|9A/;s/\|(Bm|Bm|Bm|3m)$/|10A/;s/\|(F#m|F#m|Gbm|4m)$/|11A/;s/\|(C#m|Dbm|Dbm|5m)$/|12A/;'
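# (added note, not in the original) the sed program above is just this key -> camelot table,
# with the flat/alternate spellings (Gb, Db, ...) and the numeric open-key codes folded in:
#   B:1B  F#:2B  C#:3B  G#:4B  D#:5B  A#:6B  F:7B  C:8B  G:9B  D:10B  A:11B  E:12B
#   G#m:1A  D#m:2A  A#m:3A  Fm:4A  Cm:5A  Gm:6A  Dm:7A  Am:8A  Em:9A  Bm:10A  F#m:11A  C#m:12A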
# runner/wrapper
runfun() { cores=8; touch run; tbc() { bc | sed -r 's/(\.[0-9]{2}).*/\1/'; }; for ((core=0; core<$cores; core++)); do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db 'select dur.w, dur.v, key.v from mt key join mt dur on key.w = dur.w where key.k = "key" and dur.k = ".dur" order by dur.w' | uniq -w16 | grep -vE '(Off-Key|None)$' | sed -r "s/ //g;$re" | uniq -w16 | while IFS=\| read w dur bpm; do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db "select rd, fn from up where substr(w,1,16) = '$w'" | sed -r "s/^/$bpm /"; done| grep mir/cr | tr \| / | while read key fn; do [ -e run ] || break; n=$((n+1)); ncore=$((n%cores)); [ $ncore -eq $core ] || continue; t0=$(date +%s.%N); (afun || exit 1; t=$(date +%s.%N); td=$(echo "scale=3; $t - $t0" | tbc); [ "$key" = "$py" ] && c=2 || c=5; printf '%4s sec, %4s orig, \033[3%dm%4s py,\033[0m %s\n' $td "$key" $c "$py" "$fn") || break; done & done; time wait 2>/dev/null; }
# ok: 26 1off: 10 2off: 1 fail: 3 # 15 sec, keyfinder
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 -t 60 /dev/shm/$core.wav || break; py="$(python3 -c 'import sys; import keyfinder; print(keyfinder.key(sys.argv[1]).camelot())' "/dev/shm/$core.wav")"; }; runfun
# https://github.com/MTG/essentia/raw/master/src/examples/tutorial/example_key_by_steps_streaming.py
# https://essentia.upf.edu/reference/std_Key.html # edma edmm braw bgate
sed -ri 's/^(key = Key\().*/\1profileType="bgate")/' example_key_by_steps_streaming.py
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 -t 60 /dev/shm/$core.wav || break; py="$(python3 example_key_by_steps_streaming.py /dev/shm/$core.{wav,yml} 2>/dev/null | sed -r "s/ major//;s/ minor/m/;s/^/|/;$re;s/.//")"; }; runfun
########################################################################
##
## misc
##
########################################################################
python3 -m pip install --user vamp
import librosa
d, r = librosa.load('/dev/shm/0.wav')
d.dtype
# dtype('float32')
d.shape
# (1323000,)
d
# array([-1.9614939e-08, 1.8037968e-08, -1.4106059e-08, ...,
# 1.2024145e-01, 2.7462116e-01, 1.6202132e-01], dtype=float32)
import vamp
c = vamp.collect(d, r, "vamp-example-plugins:fixedtempo")
c
# {'list': [{'timestamp': 0.005804988, 'duration': 9.999092971, 'label': '110.0 bpm', 'values': array([109.98116], dtype=float32)}]}
ffmpeg -ss 48 -i /mnt/Users/ed/Music/mir/cr-a/'I Beg You(ths Bootleg).wav' -ac 1 -ar 22050 -f f32le -t 60 /dev/shm/f32.pcm
import numpy as np
f = open('/dev/shm/f32.pcm', 'rb')
d = np.fromfile(f, dtype=np.float32)
d
array([-0.17803933, -0.27206388, -0.41586545, ..., -0.04940119,
-0.0267825 , -0.03564296], dtype=float32)
d = np.reshape(d, [1, -1])
d
array([[-0.17803933, -0.27206388, -0.41586545, ..., -0.04940119,
-0.0267825 , -0.03564296]], dtype=float32)
import vampyhost
print("\n".join(vampyhost.list_plugins()))
mvamp:marsyas_bextract_centroid
mvamp:marsyas_bextract_lpcc
mvamp:marsyas_bextract_lsp
mvamp:marsyas_bextract_mfcc
mvamp:marsyas_bextract_rolloff
mvamp:marsyas_bextract_scf
mvamp:marsyas_bextract_sfm
mvamp:marsyas_bextract_zero_crossings
mvamp:marsyas_ibt
mvamp:zerocrossing
qm-vamp-plugins:qm-adaptivespectrogram
qm-vamp-plugins:qm-barbeattracker
qm-vamp-plugins:qm-chromagram
qm-vamp-plugins:qm-constantq
qm-vamp-plugins:qm-dwt
qm-vamp-plugins:qm-keydetector
qm-vamp-plugins:qm-mfcc
qm-vamp-plugins:qm-onsetdetector
qm-vamp-plugins:qm-segmenter
qm-vamp-plugins:qm-similarity
qm-vamp-plugins:qm-tempotracker
qm-vamp-plugins:qm-tonalchange
qm-vamp-plugins:qm-transcription
vamp-aubio:aubiomelenergy
vamp-aubio:aubiomfcc
vamp-aubio:aubionotes
vamp-aubio:aubioonset
vamp-aubio:aubiopitch
vamp-aubio:aubiosilence
vamp-aubio:aubiospecdesc
vamp-aubio:aubiotempo
vamp-example-plugins:amplitudefollower
vamp-example-plugins:fixedtempo
vamp-example-plugins:percussiononsets
vamp-example-plugins:powerspectrum
vamp-example-plugins:spectralcentroid
vamp-example-plugins:zerocrossing
vamp-rubberband:rubberband
plug = vampyhost.load_plugin("vamp-example-plugins:fixedtempo", 22050, 0)
plug.info
{'apiVersion': 2, 'pluginVersion': 1, 'identifier': 'fixedtempo', 'name': 'Simple Fixed Tempo Estimator', 'description': 'Study a short section of audio and estimate its tempo, assuming the tempo is constant', 'maker': 'Vamp SDK Example Plugins', 'copyright': 'Code copyright 2008 Queen Mary, University of London. Freely redistributable (BSD license)'}
plug = vampyhost.load_plugin("qm-vamp-plugins:qm-tempotracker", 22050, 0)
from pprint import pprint; pprint(plug.parameters)
for c in plug.parameters: print("{} \033[36m{} [\033[33m{}\033[36m] = {}\033[0m".format(c["identifier"], c["name"], "\033[36m, \033[33m".join(c["valueNames"]), c["valueNames"][int(c["defaultValue"])])) if "valueNames" in c else print("{} \033[36m{} [\033[33m{}..{}\033[36m] = {}\033[0m".format(c["identifier"], c["name"], c["minValue"], c["maxValue"], c["defaultValue"]))
beatroot-vamp:beatroot
cl=c["list"]; 60*((len(cl)-1)/(float(cl[-1]["timestamp"]-cl[1]["timestamp"])))
ffmpeg -ss 48 -i /mnt/Users/ed/Music/mir/cr-a/'I Beg You(ths Bootleg).wav' -ac 1 -ar 22050 -f f32le -t 60 /dev/shm/f32.pcm
# 128 bpm, key 5A Cm
import vamp
import numpy as np
f = open('/dev/shm/f32.pcm', 'rb')
d = np.fromfile(f, dtype=np.float32)
c = vamp.collect(d, 22050, "vamp-example-plugins:fixedtempo", parameters={"maxdflen":40})
c["list"][0]["label"]
# 127.6 bpm
c = vamp.collect(d, 22050, "qm-vamp-plugins:qm-tempotracker", parameters={"inputtempo":150})
print("\n".join([v["label"] for v in c["list"] if v["label"]]))
v = [float(x["label"].split(' ')[0]) for x in c["list"] if x["label"]]
v = list(sorted(v))[len(v)//4:-len(v)//4]
v = sum(v) / len(v)
# 128.1 bpm

View File

@@ -3,6 +3,21 @@ echo not a script
exit 1
##
## delete all partial uploads
## (supports linux/macos, probably windows+msys2)
gzip -d < .hist/up2k.snap | jq -r '.[].tnam' | while IFS= read -r f; do rm -f -- "$f"; done
gzip -d < .hist/up2k.snap | jq -r '.[].name' | while IFS= read -r f; do wc -c -- "$f" | grep -qiE '^[^0-9a-z]*0' && rm -f -- "$f"; done
##
## detect partial uploads based on file contents
## (in case of context loss or old copyparties)
echo; find -type f | while IFS= read -r x; do printf '\033[A\033[36m%s\033[K\033[0m\n' "$x"; tail -c$((1024*1024)) <"$x" | xxd -a | awk 'NR==1&&/^[0: ]+.{16}$/{next} NR==2&&/^\*$/{next} NR==3&&/^[0f]+: [0 ]+65 +.{16}$/{next} {e=1} END {exit e}' || continue; printf '\033[A\033[31msus:\033[33m %s \033[0m\n\n' "$x"; done
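# (added sketch, not in the original notes) a blunter python take on the same idea:
# just flag files whose last 1 MiB is entirely NUL bytes
# (the xxd/awk one-liner above is slightly more lenient about the very last bytes)
import os, sys
for fn in sys.argv[1:]:
    with open(fn, 'rb') as f:
        f.seek(max(0, os.path.getsize(fn) - 1024 * 1024))
        tail = f.read()
    if tail and not tail.strip(b'\0'):
        print('sus:', fn)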
##
## create a test payload
@@ -13,7 +28,7 @@ head -c $((2*1024*1024*1024)) /dev/zero | openssl enc -aes-256-ctr -pass pass:hu
## testing multiple parallel uploads
## usage: para | tee log
para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}; do for ((n=0;n<s;n++)); do curl -sF "act=bput" -F "f=@garbage.file" http://127.0.0.1:1234/ 2>&1 & done; wait; echo; done; done; }
para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}; do for ((n=0;n<s;n++)); do curl -sF "act=bput" -F "f=@garbage.file" http://127.0.0.1:3923/ 2>&1 & done; wait; echo; done; done; }
##
@@ -36,13 +51,13 @@ for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qwe,rty;asd
fn=$(printf '\xba\xdc\xab.cab')
echo asdf > "$fn"
curl --cookie cppwd=wark -sF "act=bput" -F "f=@$fn" http://127.0.0.1:1234/moji/%ED%91/
curl --cookie cppwd=wark -sF "act=bput" -F "f=@$fn" http://127.0.0.1:3923/moji/%ED%91/
##
## test compression
wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:1234/.cpr/deps/ogv.js -O- | md5sum; p=~ed/dev/copyparty/copyparty/web/deps/ogv.js.gz; md5sum $p; gzip -d < $p | md5sum
wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:3923/.cpr/deps/ogv.js -O- | md5sum; p=~ed/dev/copyparty/copyparty/web/deps/ogv.js.gz; md5sum $p; gzip -d < $p | md5sum
##
@@ -52,6 +67,50 @@ wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:123
shab64() { sp=$1; f="$2"; v=0; sz=$(stat -c%s "$f"); while true; do w=$((v+sp*1024*1024)); printf $(tail -c +$((v+1)) "$f" | head -c $((w-v)) | sha512sum | cut -c-64 | sed -r 's/ .*//;s/(..)/\\x\1/g') | base64 -w0 | cut -c-43 | tr '+/' '-_'; v=$w; [ $v -lt $sz ] || break; done; }
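# (added sketch, not in the original notes) the same chunk-hashing in python, handy for cross-checking;
# usage: shab64_py(8, 'garbage.file')
import base64, hashlib
def shab64_py(sp, fn):
    # sp = chunksize in MiB, fn = path; prints one base64 hash per chunk like the bash helper above
    with open(fn, 'rb') as f:
        while True:
            buf = f.read(sp * 1024 * 1024)
            if not buf:
                break
            print(base64.urlsafe_b64encode(hashlib.sha512(buf).digest()[:32]).decode()[:43])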
##
## poll url for performance issues
command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (time wget http://127.0.0.1:3923/?ls -qO- | jq -C '.files[]|{sz:.sz,ta:.tags.artist,tb:.tags.".bpm"}|del(.[]|select(.==null))' | awk -F\" '/"/{t[$2]++} END {for (k in t){v=t[k];p=sprintf("%" (v+1) "s",v);gsub(/ /,"#",p);printf "\033[36m%s\033[33m%s ",k,p}}') 2>&1 | awk -v ts=$t 'NR==1{t1=$0} NR==2{sub(/.*0m/,"");sub(/s$/,"");t2=$0;c=2; if(t2>0.3){c=3} if(t2>0.8){c=1} } END{sub(/[0-9]{6}$/,"",ts);printf "%s \033[3%dm%s %s\033[0m\n",ts,c,t2,t1}'; sleep 0.1 || break; done
##
## js oneliners
# get all up2k search result URLs
var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n"));
##
## bash oneliners
# get the size and video-id of all youtube vids in folder, assuming filename ends with -id.ext, and create a copyparty search query
find -maxdepth 1 -printf '%s %p\n' | sort -n | awk '!/-([0-9a-zA-Z_-]{11})\.(mkv|mp4|webm)$/{next} {sub(/\.[^\.]+$/,"");n=length($0);v=substr($0,n-10);print $1, v}' | tee /dev/stderr | awk 'BEGIN {p="("} {printf("%s name like *-%s.* ",p,$2);p="or"} END {print ")\n"}' | cat >&2
##
## sqlite3 stuff
# find dupe metadata keys
sqlite3 up2k.db 'select mt1.w, mt1.k, mt1.v, mt2.v from mt mt1 inner join mt mt2 on mt1.w = mt2.w where mt1.k = mt2.k and mt1.rowid != mt2.rowid'
# partial reindex by deleting all tags for a list of files
time sqlite3 up2k.db 'select mt1.w from mt mt1 inner join mt mt2 on mt1.w = mt2.w where mt1.k = +mt2.k and mt1.rowid != mt2.rowid' > warks
cat warks | while IFS= read -r x; do sqlite3 up2k.db "delete from mt where w = '$x'"; done
# dump all dbs
find -iname up2k.db | while IFS= read -r x; do sqlite3 "$x" 'select substr(w,1,12), rd, fn from up' | sed -r 's/\|/ \| /g' | while IFS= read -r y; do printf '%s | %s\n' "$x" "$y"; done; done
##
## media
# split track into test files
e=6; s=10; d=~/dev/copyparty/srv/aus; n=1; p=0; e=$((e*60)); rm -rf $d; mkdir $d; while true; do ffmpeg -hide_banner -ss $p -i 'nervous_testpilot - office.mp3' -c copy -t $s $d/$(printf %04d $n).mp3; n=$((n+1)); p=$((p+s)); [ $p -gt $e ] && break; done
-v srv/aus:aus:r:ce2dsa:ce2ts:cmtp=fgsfds=bin/mtag/sleep.py
sqlite3 .hist/up2k.db 'select * from mt where k="fgsfds" or k="t:mtp"' | tee /dev/stderr | wc -l
##
## vscode
@@ -81,6 +140,19 @@ for d in /usr /var; do find $d -type f -size +30M 2>/dev/null; done | while IFS=
brew install python@2
pip install virtualenv
# readme toc
cat README.md | awk '!/^#/{next} {lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab)} {printf "%" ((lv-1)*4+1) "s [%s](#%s)\n", "*",$0,bab}'
# fix firefox phantom breakpoints,
# suggestions from bugtracker, doesnt work (debugger is not attachable)
devtools settings >> advanced >> enable browser chrome debugging + enable remote debugging
burger > developer >> browser toolbox (ctrl-alt-shift-i)
iframe btn topright >> chrome://devtools/content/debugger/index.html
dbg.asyncStore.pendingBreakpoints = {}
# fix firefox phantom breakpoints
about:config >> devtools.debugger.prefs-schema-version = -1
##
## http 206
@@ -99,3 +171,26 @@ Range: bytes=24- "yz" Content-Range: bytes 24-25/26
Range: bytes=25-29 "z" Content-Range: bytes 25-25/26
Range: bytes=26- Content-Range: bytes */26
HTTP/1.1 416 Requested Range Not Satisfiable
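# (added, not in the original notes) poking at the ranges above from python;
# assumes a 26-byte test file "abc.txt" at the server root
import urllib.request
req = urllib.request.Request('http://127.0.0.1:3923/abc.txt', headers={'Range': 'bytes=24-'})
with urllib.request.urlopen(req) as r:
    print(r.status, r.headers['Content-Range'], repr(r.read()))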
##
## md perf
var tsh = [];
function convert_markdown(md_text, dest_dom) {
    tsh.push(Date.now());
    while (tsh.length > 10)
        tsh.shift();

    if (tsh.length > 1) {
        var end = tsh.slice(-2);
        console.log("render", end.pop() - end.pop(), (tsh[tsh.length - 1] - tsh[0]) / (tsh.length - 1));
    }
    // (the rest of the original convert_markdown body continues unchanged)
##
## tmpfiles.d meme
mk() { rm -rf /tmp/foo; sudo -u ed bash -c 'mkdir /tmp/foo; echo hi > /tmp/foo/bar'; }
mk && t0="$(date)" && while true; do date -s "$(date '+ 1 hour')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
mk && sudo -u ed flock /tmp/foo sleep 40 & sleep 1; ps aux | grep -E 'sleep 40$' && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
mk && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; tar -cf/dev/null /tmp/foo; done; echo "$t0"

docs/nuitka.txt Normal file

@@ -0,0 +1,82 @@
# recipe for building an exe with nuitka (extreme jank edition)
#
# NOTE: win7 and win10 builds both work on win10 but
# on win7 they immediately c0000005 in kernelbase.dll
#
# first install python-3.6.8-amd64.exe
# [x] add to path
#
# copypaste the rest of this file into cmd
rem from pypi
cd \users\ed\downloads
python -m pip install --user Nuitka-0.6.14.7.tar.gz
rem https://github.com/brechtsanders/winlibs_mingw/releases/download/10.2.0-11.0.0-8.0.0-r5/winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip
mkdir C:\Users\ed\AppData\Local\Nuitka\
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\10.2.0-11.0.0-8.0.0-r5\
copy c:\users\ed\downloads\winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\10.2.0-11.0.0-8.0.0-r5\winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip
rem https://github.com/ccache/ccache/releases/download/v3.7.12/ccache-3.7.12-windows-32.zip
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\v3.7.12\
copy c:\users\ed\downloads\ccache-3.7.12-windows-32.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\v3.7.12\ccache-3.7.12-windows-32.zip
rem https://dependencywalker.com/depends22_x64.zip
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\x86_64\
copy c:\users\ed\downloads\depends22_x64.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\x86_64\depends22_x64.zip
cd \
rd /s /q %appdata%\..\local\temp\pe-copyparty
cd \users\ed\downloads
python copyparty-sfx.py -h
cd %appdata%\..\local\temp\pe-copyparty\copyparty
python
import os, re
os.rename('../dep-j2/jinja2', '../jinja2')
os.rename('../dep-j2/markupsafe', '../markupsafe')
print("# nuitka dies if .__init__.stuff is imported")
with open('__init__.py','r',encoding='utf-8') as f:
    t1 = f.read()
with open('util.py','r',encoding='utf-8') as f:
    t2 = f.read().split('\n')[3:]
t2 = [x for x in t2 if 'from .__init__' not in x]
t = t1 + '\n'.join(t2)
with open('__init__.py','w',encoding='utf-8') as f:
    f.write('\n')
with open('util.py','w',encoding='utf-8') as f:
    f.write(t)
print("# local-imports fail, prefix module names")
ptn = re.compile(r'^( *from )(\.[^ ]+ import .*)')
for d, _, fs in os.walk('.'):
    for f in fs:
        fp = os.path.join(d, f)
        if not fp.endswith('.py'):
            continue
        t = ''
        with open(fp,'r',encoding='utf-8') as f:
            for ln in [x.rstrip('\r\n') for x in f]:
                m = ptn.match(ln)
                if not m:
                    t += ln + '\n'
                    continue
                p1, p2 = m.groups()
                t += "{}copyparty{}\n".format(p1, p2).replace("__init__", "util")
        with open(fp,'w',encoding='utf-8') as f:
            f.write(t)
exit()
cd ..
rd /s /q bout & python -m nuitka --standalone --onefile --windows-onefile-tempdir --python-flag=no_site --assume-yes-for-downloads --include-data-dir=copyparty\web=copyparty\web --include-data-dir=copyparty\res=copyparty\res --run --output-dir=bout --mingw64 --include-package=markupsafe --include-package=jinja2 copyparty

View File

@@ -0,0 +1,35 @@
diff --git a/copyparty/httpcli.py b/copyparty/httpcli.py
index 2d3c1ad..e1e85a0 100644
--- a/copyparty/httpcli.py
+++ b/copyparty/httpcli.py
@@ -864,6 +864,30 @@ class HttpCli(object):
#
# send reply
+ try:
+ fakefn = self.conn.hsrv.fakefn
+ fakectr = self.conn.hsrv.fakectr
+ fakedata = self.conn.hsrv.fakedata
+ except:
+ fakefn = b''
+ fakectr = 0
+ fakedata = b''
+
+ self.log('\n{} {}\n{}'.format(fakefn, fakectr, open_args[0]))
+ if fakefn == open_args[0] and fakectr > 0:
+ self.reply(fakedata, mime=guess_mime(req_path)[0])
+ self.conn.hsrv.fakectr = fakectr - 1
+ else:
+ with open_func(*open_args) as f:
+ fakedata = f.read()
+
+ self.conn.hsrv.fakefn = open_args[0]
+ self.conn.hsrv.fakedata = fakedata
+ self.conn.hsrv.fakectr = 15
+ self.reply(fakedata, mime=guess_mime(req_path)[0])
+
+ return True
+
self.out_headers["Accept-Ranges"] = "bytes"
self.send_headers(
length=upper - lower,

docs/rclone.md Normal file

@@ -0,0 +1,62 @@
# using rclone to mount a remote copyparty server as a local filesystem
speed estimates with server and client on the same win10 machine:
* `1070 MiB/s` with rclone as both server and client
* `570 MiB/s` with rclone-client and `copyparty -ed -j16` as server
* `220 MiB/s` with rclone-client and `copyparty -ed` as server
* `100 MiB/s` with [../bin/copyparty-fuse.py](../bin/copyparty-fuse.py) as client
when server is on another machine (1gbit LAN),
* `75 MiB/s` with [../bin/copyparty-fuse.py](../bin/copyparty-fuse.py) as client
* `92 MiB/s` with rclone-client and `copyparty -ed` as server
* `103 MiB/s` (connection max) with `copyparty -ed -j16` and all the others
# creating the config file
if you want to use password auth, add `headers = Cookie,cppwd=fgsfds` below
### on windows clients:
```
(
echo [cpp]
echo type = http
echo url = http://127.0.0.1:3923/
) > %userprofile%\.config\rclone\rclone.conf
```
also install the windows dependencies: [winfsp](https://github.com/billziss-gh/winfsp/releases/latest)
### on unix clients:
```
cat > ~/.config/rclone/rclone.conf <<'EOF'
[cpp]
type = http
url = http://127.0.0.1:3923/
EOF
```
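with password auth enabled (the `headers` line mentioned above; `fgsfds` is just a placeholder password), the `[cpp]` section ends up looking like this:
```
[cpp]
type = http
url = http://127.0.0.1:3923/
headers = Cookie,cppwd=fgsfds
```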
# mounting the copyparty server locally
```
rclone.exe mount --vfs-cache-max-age 5s --attr-timeout 5s --dir-cache-time 5s cpp: Z:
```
# use rclone as server too, replacing copyparty
feels out of place but is too good not to mention
```
rclone.exe serve http --read-only .
```
* `webdav` gives write-access but `http` is twice as fast
* `ftp` is buggy, avoid
# bugs
* rclone-client throws an exception if you try to read an empty file (should return zero bytes)

docs/unirange.py Normal file

@@ -0,0 +1,10 @@
v = "U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+2000-206F, U+2074, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD"
for v in v.split(","):
if "+" in v:
v = v.split("+")[1]
if "-" in v:
lo, hi = v.split("-")
else:
lo = hi = v
for v in range(int(lo, 16), int(hi, 16) + 1):
print("{:4x} [{}]".format(v, chr(v)))

scripts/copyparty-repack.sh Executable file

@@ -0,0 +1,130 @@
#!/bin/bash
repacker=1
set -e
# -- download latest copyparty (source.tgz and sfx),
# -- build minimal sfx versions,
# -- create a .tar.gz bundle
#
# convenient for deploying updates to inconvenient locations
# (and those are usually linux so bash is good inaff)
# (but that said this even has macos support)
#
# bundle will look like:
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty
# -rw-r--r-- 0 ed ed 491318 Nov 19 00:40 copyparty-extras/copyparty-0.5.4.tar.gz
# -rwxr-xr-x 0 ed ed 30254 Nov 17 23:58 copyparty-extras/copyparty-fuse.py
# -rwxr-xr-x 0 ed ed 481403 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.sh
# -rwxr-xr-x 0 ed ed 506043 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.py
# -rwxr-xr-x 0 ed ed 167699 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.sh
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py
command -v gnutar && tar() { gnutar "$@"; }
command -v gtar && tar() { gtar "$@"; }
command -v gsed && sed() { gsed "$@"; }
td="$(mktemp -d)"
od="$(pwd)"
cd "$td"
pwd
dl_text() {
command -v curl >/dev/null && exec curl "$@"
exec wget -O- "$@"
}
dl_files() {
command -v curl >/dev/null && exec curl -L --remote-name-all "$@"
exec wget "$@"
}
export -f dl_files
# if cache exists, use that instead of bothering github
cache="$od/.copyparty-repack.cache"
[ -e "$cache" ] &&
tar -xf "$cache" ||
{
# get download links from github
dl_text https://api.github.com/repos/9001/copyparty/releases/latest |
(
# prefer jq if available
jq -r '.assets[]|select(.name|test("-sfx|tar.gz")).browser_download_url' ||
# fallback to awk (sorry)
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
) |
tee /dev/stderr |
tr -d '\r' | tr '\n' '\0' |
xargs -0 bash -c 'dl_files "$@"' _
tar -czf "$cache" *
}
# move src into copyparty-extras/,
# move sfx into copyparty-extras/sfx-full/
mkdir -p copyparty-extras/sfx-{full,lite}
mv copyparty-sfx.* copyparty-extras/sfx-full/
mv copyparty-*.tar.gz copyparty-extras/
# unpack the source code
( cd copyparty-extras/
tar -xf *.tar.gz
)
# use repacker from release if that is newer
p_other=copyparty-extras/copyparty-*/scripts/copyparty-repack.sh
other=$(awk -F= 'BEGIN{v=-1} NR<10&&/^repacker=/{v=$NF} END{print v}' <$p_other)
[ $repacker -lt $other ] &&
cat $p_other >"$od/$0" && cd "$od" && rm -rf "$td" && exec "$0" "$@"
# now drop the cache
rm -f "$cache"
# fix permissions
chmod 755 \
copyparty-extras/sfx-full/* \
copyparty-extras/copyparty-*/{scripts,bin}/*
# extract and repack the sfx with less features enabled
( cd copyparty-extras/sfx-full/
./copyparty-sfx.py -h
cd ../copyparty-*/
./scripts/make-sfx.sh re no-ogv no-cm
)
# put new sfx into copyparty-extras/sfx-lite/,
# fuse client into copyparty-extras/,
# copy lite-sfx.py to ./copyparty,
# delete extracted source code
( cd copyparty-extras/
mv copyparty-*/dist/* sfx-lite/
mv copyparty-*/bin/copyparty-fuse.py .
cp -pv sfx-lite/copyparty-sfx.py ../copyparty
rm -rf copyparty-{0..9}*.*.*{0..9}
)
# and include the repacker itself too
cp -av "$od/$0" copyparty-extras/ ||
cp -av "$0" copyparty-extras/ ||
true
# create the bundle
fn=copyparty-$(date +%Y-%m%d-%H%M%S).tgz
tar -czvf "$od/$fn" *
cd "$od"
rm -rf "$td"
echo
echo "done, here's your bundle:"
ls -al "$fn"

View File

@@ -1,20 +1,27 @@
FROM alpine:3.11
FROM alpine:3.13
WORKDIR /z
ENV ver_asmcrypto=2821dd1dedd1196c378f5854037dda5c869313f3 \
ver_markdownit=10.0.0 \
ver_showdown=1.9.1 \
ver_marked=1.0.0 \
ver_ogvjs=1.6.1 \
ver_mde=2.10.1 \
ver_codemirror=5.53.2 \
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_marked=1.1.0 \
ver_ogvjs=1.8.0 \
ver_mde=2.14.0 \
ver_codemirror=5.59.3 \
ver_fontawesome=5.13.0 \
ver_zopfli=1.0.3
# download
RUN apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev \
# TODO
# sha512.hw.js https://github.com/Daninet/hash-wasm
# sha512.kc.js https://github.com/chm-diederichs/sha3-wasm
# awk '/HMAC state/{o=1} /var HEAP/{o=0} /function hmac_reset/{o=1} /return \{/{o=0} /var __extends =/{o=1} /var Hash =/{o=0} /hmac_|pbkdf2_/{next} o{next} {gsub(/IllegalStateError/,"Exception")} {sub(/^ +/,"");sub(/^\/\/ .*/,"");sub(/;$/," ;")} 1' <sha512.ac.js.orig >sha512.ac.js; for fn in sha512.ac.js.orig sha512.ac.js; do wc -c <$fn; wc -c <$fn.gz ; for n in {1..9}; do printf '%8d %d bz\n' $(bzip2 -c$n <$fn | wc -c) $n; done; done
# download;
# the scp url is the latin subset from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
RUN mkdir -p /z/dist/no-pk \
&& wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \
&& apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \
&& wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \
&& wget https://github.com/asmcrypto/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
&& wget https://github.com/openpgpjs/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
&& wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \
&& wget https://github.com/codemirror/CodeMirror/archive/$ver_codemirror.tar.gz -O codemirror.tgz \
@@ -36,23 +43,7 @@ RUN apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzi
&& npm install \
&& npm i gulp-cli -g ) \
&& unzip fontawesome.zip \
&& tar -xf zopfli.tgz \
&& mkdir -p /z/dist/no-pk
# uncomment if you wanna test the abandoned markdown converters
#ENV build_abandoned=1
RUN [ $build_abandoned ] || exit 0; \
git clone --depth 1 --branch $ver_showdown https://github.com/showdownjs/showdown/ \
&& wget https://github.com/markdown-it/markdown-it/archive/$ver_markdownit.tar.gz -O markdownit.tgz \
&& (cd showdown \
&& npm install \
&& npm i grunt -g ) \
&& (tar -xf markdownit.tgz \
&& cd markdown-it-$ver_markdownit \
&& npm install )
&& tar -xf zopfli.tgz
# build fonttools (which needs zopfli)
@@ -65,6 +56,7 @@ RUN tar -xf zopfli.tgz \
-S . \
&& make -C build \
&& make -C build install \
&& python3 -m ensurepip \
&& python3 -m pip install fonttools zopfli
@@ -80,31 +72,27 @@ RUN cd ogvjs-$ver_ogvjs \
&& cp -pv \
ogv.js \
ogv-worker-audio.js \
ogv-demuxer-ogg.js \
ogv-demuxer-ogg-wasm.js \
ogv-demuxer-ogg-wasm.wasm \
ogv-demuxer-webm.js \
ogv-demuxer-webm-wasm.js \
ogv-demuxer-webm-wasm.wasm \
ogv-decoder-audio-opus.js \
ogv-decoder-audio-opus-wasm.js \
ogv-decoder-audio-opus-wasm.wasm \
ogv-decoder-audio-vorbis.js \
ogv-decoder-audio-vorbis-wasm.js \
ogv-decoder-audio-vorbis-wasm.wasm \
dynamicaudio.swf \
/z/dist
# ogv-demuxer-ogg.js \
# ogv-demuxer-webm.js \
# ogv-decoder-audio-opus.js \
# ogv-decoder-audio-vorbis.js \
# dynamicaudio.swf \
# build marked
RUN wget https://github.com/markedjs/marked/commit/5c166d4164791f643693478e4ac094d63d6e0c9a.patch -O marked-git-1.patch \
&& wget https://patch-diff.githubusercontent.com/raw/markedjs/marked/pull/1652.patch -O marked-git-2.patch
COPY marked.patch /z/
COPY marked-ln.patch /z/
RUN cd marked-$ver_marked \
&& patch -p1 < /z/marked-git-1.patch \
&& patch -p1 < /z/marked-git-2.patch \
&& patch -p1 < /z/marked-ln.patch \
&& patch -p1 < /z/marked.patch \
&& npm run build \
@@ -138,57 +126,10 @@ RUN cd easy-markdown-editor-$ver_mde \
&& patch -p1 < /z/easymde-ln.patch \
&& gulp \
&& cp -pv dist/easymde.min.css /z/dist/easymde.css \
&& cp -pv dist/easymde.min.js /z/dist/easymde.js \
&& sed -ri '/pipe.terser/d; /cleanCSS/d' gulpfile.js \
&& gulp \
&& cp -pv dist/easymde.min.css /z/dist/easymde.full.css \
&& cp -pv dist/easymde.min.js /z/dist/easymde.full.js
&& cp -pv dist/easymde.min.js /z/dist/easymde.js
# build showdown (abandoned; disabled by default)
COPY showdown.patch /z/
RUN [ $build_abandoned ] || exit 0; \
cd showdown \
&& rm -rf bin dist \
# # remove ellipsis plugin \
&& rm \
src/subParsers/ellipsis.js \
test/cases/ellipsis* \
# # remove html-to-md converter \
&& rm \
test/node/testsuite.makemd.js \
test/node/showdown.Converter.makeMarkdown.js \
# # remove emojis \
&& rm src/subParsers/emoji.js \
&& awk '/^showdown.helper.emojis/ {o=1} !o; /^\}/ {o=0}' \
>f <src/helpers.js \
&& mv f src/helpers.js \
&& rm -rf test/features/emojis \
# # remove ghmentions \
&& rm test/features/ghMentions.* \
# # remove option descriptions \
&& sed -ri '/descri(ption|be): /d' src/options.js \
&& patch -p1 < /z/showdown.patch
RUN [ $build_abandoned ] || exit 0; \
cd showdown \
&& grunt build \
&& sed -ri '/sourceMappingURL=showdown.min.js.map/d' dist/showdown.min.js \
&& mv dist/showdown.min.js /z/dist/showdown.js \
&& ls -al /z/dist/showdown.js
# build markdownit (abandoned; disabled by default)
COPY markdown-it.patch /z/
RUN [ $build_abandoned ] || exit 0; \
cd markdown-it-$ver_markdownit \
&& patch -p1 < /z/markdown-it.patch \
&& make browserify \
&& cp -pv dist/markdown-it.min.js /z/dist/markdown-it.js \
&& cp -pv dist/markdown-it.js /z/dist/markdown-it-full.js
# build fontawesome
# build fontawesome and scp
COPY mini-fa.sh /z
COPY mini-fa.css /z
RUN /bin/ash /z/mini-fa.sh
@@ -203,38 +144,6 @@ RUN cd /z/dist \
&& rmdir no-pk
# showdown: abandoned due to code-blocks in lists failing
# 22770 orig
# 12154 no-emojis
# 12134 no-srcmap
# 11189 no-descriptions
# 11152 no-ellipsis
# 10617 no-this.makeMd
# 9569 no-extensions
# 9537 no-extensions
# 9410 no-mentions
# markdown-it: abandoned because no header anchors (and too big)
# 32322 107754 orig (wowee)
# 19619 21392 71540 less entities
# marked:
# 9253 29773 orig
# 9159 29633 no copyright (reverted)
# 9040 29057 no sanitize
# 8870 28631 no email-mangle
# so really not worth it, just drop the patch when that stops working
# easymde:
# 91836 orig
# 88635 no spellcheck
# 88392 no urlRE
# 85651 less bidi
# 82855 less mode meta
# d=/home/ed/dev/copyparty/scripts/deps-docker/; tar -cf ../x . && ssh root@$bip "cd $d && tar -xv >&2 && make >&2 && tar -cC ../../copyparty/web deps" <../x | (cd ../../copyparty/web/; cat > the.tgz; tar -xvf the.tgz)
# git diff -U2 --no-index marked-1.1.0-orig/ marked-1.1.0-edit/ -U2 | sed -r '/^index /d;s`^(diff --git a/)[^/]+/(.* b/)[^/]+/`\1\2`; s`^(---|\+\+\+) ([ab]/)[^/]+/`\1 \2`' > ../dev/copyparty/scripts/deps-docker/marked-ln.patch
# d=/home/ed/dev/copyparty/scripts/deps-docker/; tar -cf ../x . && ssh root@$bip "cd $d && tar -xv >&2 && make >&2 && tar -cC ../../copyparty/web deps" <../x | (cd ../../copyparty/web/; cat > the.tgz; tar -xvf the.tgz; rm the.tgz)
# gzip -dkf ../dev/copyparty/copyparty/web/deps/deps/marked.full.js.gz && diff -NarU2 ../dev/copyparty/copyparty/web/deps/{,deps/}marked.full.js

View File

@@ -1,6 +1,6 @@
diff -NarU2 CodeMirror-orig/mode/gfm/gfm.js CodeMirror-edit/mode/gfm/gfm.js
--- CodeMirror-orig/mode/gfm/gfm.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/mode/gfm/gfm.js 2020-05-02 02:13:32.142131800 +0200
diff -NarU2 codemirror-5.59.3-orig/mode/gfm/gfm.js codemirror-5.59.3/mode/gfm/gfm.js
--- codemirror-5.59.3-orig/mode/gfm/gfm.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/mode/gfm/gfm.js 2021-02-21 20:42:02.166174775 +0000
@@ -97,5 +97,5 @@
}
}
@@ -15,9 +15,9 @@ diff -NarU2 CodeMirror-orig/mode/gfm/gfm.js CodeMirror-edit/mode/gfm/gfm.js
+ }*/
stream.next();
return null;
diff -NarU2 CodeMirror-orig/mode/meta.js CodeMirror-edit/mode/meta.js
--- CodeMirror-orig/mode/meta.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/mode/meta.js 2020-05-02 03:56:58.852408400 +0200
diff -NarU2 codemirror-5.59.3-orig/mode/meta.js codemirror-5.59.3/mode/meta.js
--- codemirror-5.59.3-orig/mode/meta.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/mode/meta.js 2021-02-21 20:42:54.798742821 +0000
@@ -13,4 +13,5 @@
CodeMirror.modeInfo = [
@@ -28,7 +28,7 @@ diff -NarU2 CodeMirror-orig/mode/meta.js CodeMirror-edit/mode/meta.js
{name: "Gas", mime: "text/x-gas", mode: "gas", ext: ["s"]},
{name: "Gherkin", mime: "text/x-feature", mode: "gherkin", ext: ["feature"]},
+ */
{name: "GitHub Flavored Markdown", mime: "text/x-gfm", mode: "gfm", file: /^(readme|contributing|history).md$/i},
{name: "GitHub Flavored Markdown", mime: "text/x-gfm", mode: "gfm", file: /^(readme|contributing|history)\.md$/i},
+ /*
{name: "Go", mime: "text/x-go", mode: "go", ext: ["go"]},
{name: "Groovy", mime: "text/x-groovy", mode: "groovy", ext: ["groovy", "gradle"], file: /^Jenkinsfile$/},
@@ -56,16 +56,16 @@ diff -NarU2 CodeMirror-orig/mode/meta.js CodeMirror-edit/mode/meta.js
+ /*
{name: "XQuery", mime: "application/xquery", mode: "xquery", ext: ["xy", "xquery"]},
{name: "Yacas", mime: "text/x-yacas", mode: "yacas", ext: ["ys"]},
@@ -171,4 +180,5 @@
{name: "xu", mime: "text/x-xu", mode: "mscgen", ext: ["xu"]},
{name: "msgenny", mime: "text/x-msgenny", mode: "mscgen", ext: ["msgenny"]}
@@ -172,4 +181,5 @@
{name: "msgenny", mime: "text/x-msgenny", mode: "mscgen", ext: ["msgenny"]},
{name: "WebAssembly", mime: "text/webassembly", mode: "wast", ext: ["wat", "wast"]},
+ */
];
// Ensure all modes have a mime property for backwards compatibility
diff -NarU2 CodeMirror-orig/src/display/selection.js CodeMirror-edit/src/display/selection.js
--- CodeMirror-orig/src/display/selection.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/src/display/selection.js 2020-05-02 03:27:30.144662800 +0200
@@ -83,29 +83,21 @@
diff -NarU2 codemirror-5.59.3-orig/src/display/selection.js codemirror-5.59.3/src/display/selection.js
--- codemirror-5.59.3-orig/src/display/selection.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/display/selection.js 2021-02-21 20:44:14.860894328 +0000
@@ -84,29 +84,21 @@
let order = getOrder(lineObj, doc.direction)
iterateBidiSections(order, fromArg || 0, toArg == null ? lineLen : toArg, (from, to, dir, i) => {
- let ltr = dir == "ltr"
@@ -105,24 +105,24 @@ diff -NarU2 CodeMirror-orig/src/display/selection.js CodeMirror-edit/src/display
+ botRight = openEnd && last ? rightSide : toPos.right
add(topLeft, fromPos.top, topRight - topLeft, fromPos.bottom)
if (fromPos.bottom < toPos.top) add(leftSide, fromPos.bottom, null, toPos.top)
diff -NarU2 CodeMirror-orig/src/input/ContentEditableInput.js CodeMirror-edit/src/input/ContentEditableInput.js
--- CodeMirror-orig/src/input/ContentEditableInput.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/src/input/ContentEditableInput.js 2020-05-02 03:33:05.707995500 +0200
@@ -391,4 +391,5 @@
diff -NarU2 codemirror-5.59.3-orig/src/input/ContentEditableInput.js codemirror-5.59.3/src/input/ContentEditableInput.js
--- codemirror-5.59.3-orig/src/input/ContentEditableInput.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/input/ContentEditableInput.js 2021-02-21 20:44:33.273953867 +0000
@@ -399,4 +399,5 @@
let info = mapFromLineView(view, line, pos.line)
+ /*
let order = getOrder(line, cm.doc.direction), side = "left"
if (order) {
@@ -396,4 +397,5 @@
@@ -404,4 +405,5 @@
side = partPos % 2 ? "right" : "left"
}
+ */
let result = nodeAndOffsetInLineMap(info.map, pos.ch, side)
result.offset = result.collapse == "right" ? result.end : result.start
diff -NarU2 CodeMirror-orig/src/input/movement.js CodeMirror-edit/src/input/movement.js
--- CodeMirror-orig/src/input/movement.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/src/input/movement.js 2020-05-02 03:31:19.710773500 +0200
diff -NarU2 codemirror-5.59.3-orig/src/input/movement.js codemirror-5.59.3/src/input/movement.js
--- codemirror-5.59.3-orig/src/input/movement.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/input/movement.js 2021-02-21 20:45:12.763093671 +0000
@@ -15,4 +15,5 @@
export function endOfLine(visually, cm, lineObj, lineNo, dir) {
@@ -146,9 +146,9 @@ diff -NarU2 CodeMirror-orig/src/input/movement.js CodeMirror-edit/src/input/move
return null
+ */
}
diff -NarU2 CodeMirror-orig/src/line/line_data.js CodeMirror-edit/src/line/line_data.js
--- CodeMirror-orig/src/line/line_data.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/src/line/line_data.js 2020-05-02 03:17:02.785065000 +0200
diff -NarU2 codemirror-5.59.3-orig/src/line/line_data.js codemirror-5.59.3/src/line/line_data.js
--- codemirror-5.59.3-orig/src/line/line_data.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/line/line_data.js 2021-02-21 20:45:36.472549599 +0000
@@ -79,6 +79,6 @@
// Optionally wire in some hacks into the token-rendering
// algorithm, to deal with browser quirks.
@@ -158,9 +158,9 @@ diff -NarU2 CodeMirror-orig/src/line/line_data.js CodeMirror-edit/src/line/line_
+ // builder.addToken = buildTokenBadBidi(builder.addToken, order)
builder.map = []
let allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line)
diff -NarU2 CodeMirror-orig/src/measurement/position_measurement.js CodeMirror-edit/src/measurement/position_measurement.js
--- CodeMirror-orig/src/measurement/position_measurement.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/src/measurement/position_measurement.js 2020-05-02 03:35:20.674159600 +0200
diff -NarU2 codemirror-5.59.3-orig/src/measurement/position_measurement.js codemirror-5.59.3/src/measurement/position_measurement.js
--- codemirror-5.59.3-orig/src/measurement/position_measurement.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/measurement/position_measurement.js 2021-02-21 20:50:52.372945293 +0000
@@ -380,5 +380,6 @@
sticky = "after"
}
@@ -199,9 +199,9 @@ diff -NarU2 CodeMirror-orig/src/measurement/position_measurement.js CodeMirror-e
+*/
let measureText
diff -NarU2 CodeMirror-orig/src/util/bidi.js CodeMirror-edit/src/util/bidi.js
--- CodeMirror-orig/src/util/bidi.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/src/util/bidi.js 2020-05-02 03:12:44.418649800 +0200
diff -NarU2 codemirror-5.59.3-orig/src/util/bidi.js codemirror-5.59.3/src/util/bidi.js
--- codemirror-5.59.3-orig/src/util/bidi.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/util/bidi.js 2021-02-21 20:52:18.168092225 +0000
@@ -4,5 +4,5 @@
export function iterateBidiSections(order, from, to, f) {
@@ -239,20 +239,19 @@ diff -NarU2 CodeMirror-orig/src/util/bidi.js CodeMirror-edit/src/util/bidi.js
+ var fun = function(str, direction) {
let outerType = direction == "ltr" ? "L" : "R"
@@ -204,12 +210,16 @@
@@ -204,5 +210,11 @@
return direction == "rtl" ? order.reverse() : order
}
-})()
+ return function(str, direction) {
+ var ret = fun(str, direction);
+ console.log("bidiOrdering inner ([%s], %s) => [%s]", str, direction, ret);
+ return ret;
+ }
+})()
})()
+*/
// Get the bidi ordering for the given line (and cache it). Returns
// false for lines that are fully left-to-right, and an array of
@@ -210,6 +222,4 @@
// BidiSpan objects otherwise.
export function getOrder(line, direction) {
- let order = line.order
@@ -260,9 +259,9 @@ diff -NarU2 CodeMirror-orig/src/util/bidi.js CodeMirror-edit/src/util/bidi.js
- return order
+ return false;
}
diff -NarU2 CodeMirror-orig/src/util/feature_detection.js CodeMirror-edit/src/util/feature_detection.js
--- CodeMirror-orig/src/util/feature_detection.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/src/util/feature_detection.js 2020-05-02 03:16:21.085621400 +0200
diff -NarU2 codemirror-5.59.3-orig/src/util/feature_detection.js codemirror-5.59.3/src/util/feature_detection.js
--- codemirror-5.59.3-orig/src/util/feature_detection.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/util/feature_detection.js 2021-02-21 20:49:22.191269270 +0000
@@ -25,4 +25,5 @@
}

View File

@@ -1,33 +1,57 @@
diff -NarU2 easymde-orig/gulpfile.js easymde-mod1/gulpfile.js
--- easymde-orig/gulpfile.js 2020-04-06 14:09:36.000000000 +0200
+++ easymde-mod1/gulpfile.js 2020-05-01 14:33:52.260175200 +0200
diff -NarU2 easy-markdown-editor-2.14.0-orig/gulpfile.js easy-markdown-editor-2.14.0/gulpfile.js
--- easy-markdown-editor-2.14.0-orig/gulpfile.js 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/gulpfile.js 2021-02-21 20:55:37.134701007 +0000
@@ -25,5 +25,4 @@
'./node_modules/codemirror/lib/codemirror.css',
'./src/css/*.css',
- './node_modules/codemirror-spell-checker/src/css/spell-checker.css',
];
diff -NarU2 easymde-orig/package.json easymde-mod1/package.json
--- easymde-orig/package.json 2020-04-06 14:09:36.000000000 +0200
+++ easymde-mod1/package.json 2020-05-01 14:33:57.189975800 +0200
diff -NarU2 easy-markdown-editor-2.14.0-orig/package.json easy-markdown-editor-2.14.0/package.json
--- easy-markdown-editor-2.14.0-orig/package.json 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/package.json 2021-02-21 20:55:47.761190082 +0000
@@ -21,5 +21,4 @@
"dependencies": {
"codemirror": "^5.52.2",
"codemirror": "^5.59.2",
- "codemirror-spell-checker": "1.1.2",
"marked": "^0.8.2"
"marked": "^2.0.0"
},
diff -NarU2 easymde-orig/src/js/easymde.js easymde-mod1/src/js/easymde.js
--- easymde-orig/src/js/easymde.js 2020-04-06 14:09:36.000000000 +0200
+++ easymde-mod1/src/js/easymde.js 2020-05-01 14:34:19.878774400 +0200
@@ -11,5 +11,4 @@
diff -NarU2 easy-markdown-editor-2.14.0-orig/src/js/easymde.js easy-markdown-editor-2.14.0/src/js/easymde.js
--- easy-markdown-editor-2.14.0-orig/src/js/easymde.js 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/src/js/easymde.js 2021-02-21 20:57:09.143171536 +0000
@@ -12,5 +12,4 @@
require('codemirror/mode/gfm/gfm.js');
require('codemirror/mode/xml/xml.js');
-var CodeMirrorSpellChecker = require('codemirror-spell-checker');
var marked = require('marked/lib/marked');
@@ -1889,18 +1888,7 @@
@@ -1762,9 +1761,4 @@
options.autosave.uniqueId = options.autosave.unique_id;
- // If overlay mode is specified and combine is not provided, default it to true
- if (options.overlayMode && options.overlayMode.combine === undefined) {
- options.overlayMode.combine = true;
- }
-
// Update this options
this.options = options;
@@ -2003,28 +1997,7 @@
var mode, backdrop;
- // CodeMirror overlay mode
- if (options.overlayMode) {
- CodeMirror.defineMode('overlay-mode', function(config) {
- return CodeMirror.overlayMode(CodeMirror.getMode(config, options.spellChecker !== false ? 'spell-checker' : 'gfm'), options.overlayMode.mode, options.overlayMode.combine);
- });
-
- mode = 'overlay-mode';
- backdrop = options.parsingConfig;
- backdrop.gitHubSpice = false;
- } else {
mode = options.parsingConfig;
mode.name = 'gfm';
mode.gitHubSpice = false;
- }
- if (options.spellChecker !== false) {
- mode = 'spell-checker';
- backdrop = options.parsingConfig;
@@ -37,16 +61,28 @@ diff -NarU2 easymde-orig/src/js/easymde.js easymde-mod1/src/js/easymde.js
- CodeMirrorSpellChecker({
- codeMirrorInstance: CodeMirror,
- });
- } else {
mode = options.parsingConfig;
mode.name = 'gfm';
mode.gitHubSpice = false;
- }
// eslint-disable-next-line no-unused-vars
@@ -1927,5 +1915,4 @@
configureMouse: configureMouse,
inputStyle: (options.inputStyle != undefined) ? options.inputStyle : isMobile() ? 'contenteditable' : 'textarea',
- spellcheck: (options.nativeSpellcheck != undefined) ? options.nativeSpellcheck : true,
});
diff -NarU2 easy-markdown-editor-2.14.0-orig/types/easymde.d.ts easy-markdown-editor-2.14.0/types/easymde.d.ts
--- easy-markdown-editor-2.14.0-orig/types/easymde.d.ts 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/types/easymde.d.ts 2021-02-21 20:57:42.492620979 +0000
@@ -160,9 +160,4 @@
}
- interface OverlayModeOptions {
- mode: CodeMirror.Mode<any>
- combine?: boolean
- }
-
interface Options {
autoDownloadFontAwesome?: boolean;
@@ -214,7 +209,5 @@
promptTexts?: PromptTexts;
- syncSideBySidePreviewScroll?: boolean;
-
- overlayMode?: OverlayModeOptions
+ syncSideBySidePreviewScroll?: boolean
}
}

View File

@@ -35,7 +35,7 @@ add data-ln="%d" to most tags, %d is the source markdown line
+ // this.ln will be bumped by recursive calls into this func;
+ // reset the count and rely on the outermost token's raw only
+ ln = this.ln;
+
+
// newline
if (token = this.tokenizer.space(src)) {
src = src.substring(token.raw.length);
@@ -234,7 +234,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
- return '<pre><code>'
+ return '<pre' + this.ln + '><code>'
+ (escaped ? code : escape(code, true))
+ '</code></pre>';
+ '</code></pre>\n';
}
- return '<pre><code class="'

View File

@@ -1,7 +1,141 @@
diff -NarU1 marked-1.0.0-orig/src/defaults.js marked-1.0.0-edit/src/defaults.js
--- marked-1.0.0-orig/src/defaults.js 2020-04-21 01:03:48.000000000 +0000
+++ marked-1.0.0-edit/src/defaults.js 2020-04-25 19:16:56.124621393 +0000
@@ -9,10 +9,6 @@
diff --git a/src/Lexer.js b/src/Lexer.js
--- a/src/Lexer.js
+++ b/src/Lexer.js
@@ -5,5 +5,5 @@ const { block, inline } = require('./rules.js');
/**
* smartypants text replacement
- */
+ *
function smartypants(text) {
return text
@@ -26,5 +26,5 @@ function smartypants(text) {
/**
* mangle email addresses
- */
+ *
function mangle(text) {
let out = '',
@@ -439,5 +439,5 @@ module.exports = class Lexer {
// autolink
- if (token = this.tokenizer.autolink(src, mangle)) {
+ if (token = this.tokenizer.autolink(src)) {
src = src.substring(token.raw.length);
tokens.push(token);
@@ -446,5 +446,5 @@ module.exports = class Lexer {
// url (gfm)
- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
+ if (!inLink && (token = this.tokenizer.url(src))) {
src = src.substring(token.raw.length);
tokens.push(token);
@@ -453,5 +453,5 @@ module.exports = class Lexer {
// text
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
src = src.substring(token.raw.length);
tokens.push(token);
diff --git a/src/Renderer.js b/src/Renderer.js
--- a/src/Renderer.js
+++ b/src/Renderer.js
@@ -140,5 +140,5 @@ module.exports = class Renderer {
link(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+ href = cleanUrl(this.options.baseUrl, href);
if (href === null) {
return text;
@@ -153,5 +153,5 @@ module.exports = class Renderer {
image(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+ href = cleanUrl(this.options.baseUrl, href);
if (href === null) {
return text;
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
--- a/src/Tokenizer.js
+++ b/src/Tokenizer.js
@@ -287,11 +287,8 @@ module.exports = class Tokenizer {
if (cap) {
return {
- type: this.options.sanitize
- ? 'paragraph'
- : 'html',
+ type: 'html',
raw: cap[0],
- pre: !this.options.sanitizer
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
+ text: cap[0]
};
}
@@ -421,15 +418,9 @@ module.exports = class Tokenizer {
return {
- type: this.options.sanitize
- ? 'text'
- : 'html',
+ type: 'html',
raw: cap[0],
inLink,
inRawBlock,
- text: this.options.sanitize
- ? (this.options.sanitizer
- ? this.options.sanitizer(cap[0])
- : escape(cap[0]))
- : cap[0]
+ text: cap[0]
};
}
@@ -550,10 +541,10 @@ module.exports = class Tokenizer {
}
- autolink(src, mangle) {
+ autolink(src) {
const cap = this.rules.inline.autolink.exec(src);
if (cap) {
let text, href;
if (cap[2] === '@') {
- text = escape(this.options.mangle ? mangle(cap[1]) : cap[1]);
+ text = escape(cap[1]);
href = 'mailto:' + text;
} else {
@@ -578,10 +569,10 @@ module.exports = class Tokenizer {
}
- url(src, mangle) {
+ url(src) {
let cap;
if (cap = this.rules.inline.url.exec(src)) {
let text, href;
if (cap[2] === '@') {
- text = escape(this.options.mangle ? mangle(cap[0]) : cap[0]);
+ text = escape(cap[0]);
href = 'mailto:' + text;
} else {
@@ -615,12 +606,12 @@ module.exports = class Tokenizer {
}
- inlineText(src, inRawBlock, smartypants) {
+ inlineText(src, inRawBlock) {
const cap = this.rules.inline.text.exec(src);
if (cap) {
let text;
if (inRawBlock) {
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
+ text = cap[0];
} else {
- text = escape(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
+ text = escape(cap[0]);
}
return {
diff --git a/src/defaults.js b/src/defaults.js
--- a/src/defaults.js
+++ b/src/defaults.js
@@ -8,12 +8,8 @@ function getDefaults() {
highlight: null,
langPrefix: 'language-',
- mangle: true,
pedantic: false,
@@ -12,10 +146,12 @@ diff -NarU1 marked-1.0.0-orig/src/defaults.js marked-1.0.0-edit/src/defaults.js
smartLists: false,
- smartypants: false,
tokenizer: null,
diff -NarU1 marked-1.0.0-orig/src/helpers.js marked-1.0.0-edit/src/helpers.js
--- marked-1.0.0-orig/src/helpers.js 2020-04-21 01:03:48.000000000 +0000
+++ marked-1.0.0-edit/src/helpers.js 2020-04-25 18:58:43.001320210 +0000
@@ -65,16 +65,3 @@
walkTokens: null,
diff --git a/src/helpers.js b/src/helpers.js
--- a/src/helpers.js
+++ b/src/helpers.js
@@ -64,18 +64,5 @@ function edit(regex, opt) {
const nonWordAndColonTest = /[^\w:]/g;
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
-function cleanUrl(sanitize, base, href) {
- if (sanitize) {
@@ -33,7 +169,9 @@ diff -NarU1 marked-1.0.0-orig/src/helpers.js marked-1.0.0-edit/src/helpers.js
- }
+function cleanUrl(base, href) {
if (base && !originIndependentUrl.test(href)) {
@@ -224,8 +211,2 @@
href = resolveUrl(base, href);
@@ -223,10 +210,4 @@ function findClosingBracket(str, b) {
}
-function checkSanitizeDeprecation(opt) {
- if (opt && opt.sanitize && !opt.silent) {
@@ -42,228 +180,161 @@ diff -NarU1 marked-1.0.0-orig/src/helpers.js marked-1.0.0-edit/src/helpers.js
-}
-
module.exports = {
@@ -240,4 +221,3 @@
escape,
@@ -239,5 +220,4 @@ module.exports = {
splitCells,
rtrim,
- findClosingBracket,
- checkSanitizeDeprecation
+ findClosingBracket
};
diff -NarU1 marked-1.0.0-orig/src/Lexer.js marked-1.0.0-edit/src/Lexer.js
--- marked-1.0.0-orig/src/Lexer.js 2020-04-21 01:03:48.000000000 +0000
+++ marked-1.0.0-edit/src/Lexer.js 2020-04-25 22:46:54.107584066 +0000
@@ -6,3 +6,3 @@
* smartypants text replacement
- */
+ *
function smartypants(text) {
@@ -27,3 +27,3 @@
* mangle email addresses
- */
+ *
function mangle(text) {
@@ -388,3 +388,3 @@
// autolink
- if (token = this.tokenizer.autolink(src, mangle)) {
+ if (token = this.tokenizer.autolink(src)) {
src = src.substring(token.raw.length);
@@ -395,3 +395,3 @@
// url (gfm)
- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
+ if (!inLink && (token = this.tokenizer.url(src))) {
src = src.substring(token.raw.length);
@@ -402,3 +402,3 @@
// text
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
src = src.substring(token.raw.length);
diff -NarU1 marked-1.0.0-orig/src/marked.js marked-1.0.0-edit/src/marked.js
--- marked-1.0.0-orig/src/marked.js 2020-04-21 01:03:48.000000000 +0000
+++ marked-1.0.0-edit/src/marked.js 2020-04-25 22:42:55.140924439 +0000
@@ -8,3 +8,2 @@
diff --git a/src/marked.js b/src/marked.js
--- a/src/marked.js
+++ b/src/marked.js
@@ -7,5 +7,4 @@ const Slugger = require('./Slugger.js');
const {
merge,
- checkSanitizeDeprecation,
escape
@@ -37,3 +36,2 @@
opt = merge({}, marked.defaults, opt || {});
- checkSanitizeDeprecation(opt);
const highlight = opt.highlight;
@@ -101,6 +99,5 @@
opt = merge({}, marked.defaults, opt || {});
- checkSanitizeDeprecation(opt);
return Parser.parse(Lexer.lex(src, opt), opt);
} = require('./helpers.js');
@@ -35,5 +34,4 @@ function marked(src, opt, callback) {
opt = merge({}, marked.defaults, opt || {});
- checkSanitizeDeprecation(opt);
if (callback) {
@@ -108,5 +106,5 @@ function marked(src, opt, callback) {
return Parser.parse(tokens, opt);
} catch (e) {
- e.message += '\nPlease report this to https://github.com/markedjs/marked.';
+ e.message += '\nmake issue @ https://github.com/9001/copyparty';
if ((opt || marked.defaults).silent) {
diff -NarU1 marked-1.0.0-orig/src/Renderer.js marked-1.0.0-edit/src/Renderer.js
--- marked-1.0.0-orig/src/Renderer.js 2020-04-21 01:03:48.000000000 +0000
+++ marked-1.0.0-edit/src/Renderer.js 2020-04-25 18:59:15.091319265 +0000
@@ -134,3 +134,3 @@
link(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+ href = cleanUrl(this.options.baseUrl, href);
if (href === null) {
@@ -147,3 +147,3 @@
image(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+ href = cleanUrl(this.options.baseUrl, href);
if (href === null) {
diff -NarU1 marked-1.0.0-orig/src/Tokenizer.js marked-1.0.0-edit/src/Tokenizer.js
--- marked-1.0.0-orig/src/Tokenizer.js 2020-04-21 01:03:48.000000000 +0000
+++ marked-1.0.0-edit/src/Tokenizer.js 2020-04-25 22:47:07.610917004 +0000
@@ -256,9 +256,6 @@
return {
- type: this.options.sanitize
- ? 'paragraph'
- : 'html',
- raw: cap[0],
- pre: !this.options.sanitizer
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
+ type: 'html',
+ raw: cap[0],
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
+ text: cap[0]
};
@@ -382,5 +379,3 @@
return {
- type: this.options.sanitize
- ? 'text'
- : 'html',
+ type: 'html',
raw: cap[0],
@@ -388,7 +383,3 @@
inRawBlock,
- text: this.options.sanitize
- ? (this.options.sanitizer
- ? this.options.sanitizer(cap[0])
- : escape(cap[0]))
- : cap[0]
+ text: cap[0]
};
@@ -504,3 +495,3 @@
- autolink(src, mangle) {
+ autolink(src) {
const cap = this.rules.inline.autolink.exec(src);
@@ -509,3 +500,3 @@
if (cap[2] === '@') {
- text = escape(this.options.mangle ? mangle(cap[1]) : cap[1]);
+ text = escape(cap[1]);
href = 'mailto:' + text;
@@ -532,3 +523,3 @@
- url(src, mangle) {
+ url(src) {
let cap;
@@ -537,3 +528,3 @@
if (cap[2] === '@') {
- text = escape(this.options.mangle ? mangle(cap[0]) : cap[0]);
+ text = escape(cap[0]);
href = 'mailto:' + text;
@@ -569,3 +560,3 @@
- inlineText(src, inRawBlock, smartypants) {
+ inlineText(src, inRawBlock) {
const cap = this.rules.inline.text.exec(src);
@@ -574,5 +565,5 @@
if (inRawBlock) {
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
+ text = cap[0];
} else {
- text = escape(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
+ text = escape(cap[0]);
}
diff -NarU1 marked-1.0.0-orig/test/bench.js marked-1.0.0-edit/test/bench.js
--- marked-1.0.0-orig/test/bench.js 2020-04-21 01:03:48.000000000 +0000
+++ marked-1.0.0-edit/test/bench.js 2020-04-25 19:02:27.227980287 +0000
@@ -34,3 +34,2 @@
if (opt.silent) {
return '<p>An error occurred:</p><pre>'
diff --git a/test/bench.js b/test/bench.js
--- a/test/bench.js
+++ b/test/bench.js
@@ -33,5 +33,4 @@ async function runBench(options) {
breaks: false,
pedantic: false,
- sanitize: false,
smartLists: false
@@ -46,3 +45,2 @@
});
@@ -45,5 +44,4 @@ async function runBench(options) {
breaks: false,
pedantic: false,
- sanitize: false,
smartLists: false
@@ -59,3 +57,2 @@
});
@@ -58,5 +56,4 @@ async function runBench(options) {
breaks: false,
pedantic: false,
- sanitize: false,
smartLists: false
@@ -71,3 +68,2 @@
});
@@ -70,5 +67,4 @@ async function runBench(options) {
breaks: false,
pedantic: false,
- sanitize: false,
smartLists: false
@@ -84,3 +80,2 @@
});
@@ -83,5 +79,4 @@ async function runBench(options) {
breaks: false,
pedantic: true,
- sanitize: false,
smartLists: false
@@ -96,3 +91,2 @@
});
@@ -95,5 +90,4 @@ async function runBench(options) {
breaks: false,
pedantic: true,
- sanitize: false,
smartLists: false
diff -NarU1 marked-1.0.0-orig/test/specs/run-spec.js marked-1.0.0-edit/test/specs/run-spec.js
--- marked-1.0.0-orig/test/specs/run-spec.js 2020-04-21 01:03:48.000000000 +0000
+++ marked-1.0.0-edit/test/specs/run-spec.js 2020-04-25 19:05:24.321308408 +0000
@@ -21,6 +21,2 @@
});
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
--- a/test/specs/run-spec.js
+++ b/test/specs/run-spec.js
@@ -22,8 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
}
- if (spec.options.sanitizer) {
- // eslint-disable-next-line no-eval
- spec.options.sanitizer = eval(spec.options.sanitizer);
- }
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
@@ -49,2 +45 @@
@@ -53,3 +49,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
runSpecs('New', './new');
runSpecs('ReDOS', './redos');
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
diff -NarU1 marked-1.0.0-orig/test/unit/Lexer-spec.js marked-1.0.0-edit/test/unit/Lexer-spec.js
--- marked-1.0.0-orig/test/unit/Lexer-spec.js 2020-04-21 01:03:48.000000000 +0000
+++ marked-1.0.0-edit/test/unit/Lexer-spec.js 2020-04-25 22:47:27.170916427 +0000
@@ -464,3 +464,3 @@
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
--- a/test/unit/Lexer-spec.js
+++ b/test/unit/Lexer-spec.js
@@ -465,5 +465,5 @@ a | b
});
- it('sanitize', () => {
+ /*it('sanitize', () => {
expectTokens({
@@ -482,3 +482,3 @@
md: '<div>html</div>',
@@ -483,5 +483,5 @@ a | b
]
});
- });
+ });*/
});
@@ -586,3 +586,3 @@
@@ -587,5 +587,5 @@ a | b
});
- it('html sanitize', () => {
+ /*it('html sanitize', () => {
expectInlineTokens({
@@ -596,3 +596,3 @@
md: '<div>html</div>',
@@ -597,5 +597,5 @@ a | b
]
});
- });
+ });*/
@@ -825,3 +825,3 @@
it('link', () => {
@@ -909,5 +909,5 @@ a | b
});
- it('autolink mangle email', () => {
+ /*it('autolink mangle email', () => {
expectInlineTokens({
@@ -845,3 +845,3 @@
md: '<test@example.com>',
@@ -929,5 +929,5 @@ a | b
]
});
- });
+ });*/
@@ -882,3 +882,3 @@
it('url', () => {
@@ -966,5 +966,5 @@ a | b
});
- it('url mangle email', () => {
+ /*it('url mangle email', () => {
expectInlineTokens({
@@ -902,3 +902,3 @@
md: 'test@example.com',
@@ -986,5 +986,5 @@ a | b
]
});
- });
+ });*/
});
@@ -918,3 +918,3 @@
@@ -1002,5 +1002,5 @@ a | b
});
- describe('smartypants', () => {
+ /*describe('smartypants', () => {
it('single quotes', () => {
@@ -988,3 +988,3 @@
expectInlineTokens({
@@ -1072,5 +1072,5 @@ a | b
});
});
- });
+ });*/
});
});

@@ -26,3 +26,6 @@ awk '/:before .content:"\\/ {sub(/[^"]+"./,""); sub(/".*/,""); print}' </z/dist/
# and finally create a woff with just our icons
pyftsubset "$orig_woff" --unicodes-file=/z/icon.list --no-ignore-missing-unicodes --flavor=woff --with-zopfli --output-file=/z/dist/no-pk/mini-fa.woff --verbose
# scp is easier, just want basic latin
pyftsubset /z/scp.woff2 --unicodes="20-7e,ab,b7,bb,2022" --no-ignore-missing-unicodes --flavor=woff2 --output-file=/z/dist/no-pk/scp.woff2 --verbose

scripts/fusefuzz.py Executable file

@@ -0,0 +1,100 @@
#!/usr/bin/env python3
import os
import time
"""
td=/dev/shm/; [ -e $td ] || td=$HOME; mkdir -p $td/fusefuzz/{r,v}
PYTHONPATH=.. python3 -m copyparty -v $td/fusefuzz/r::r -i 127.0.0.1
../bin/copyparty-fuse.py http://127.0.0.1:3923/ $td/fusefuzz/v -cf 2 -cd 0.5
(d="$PWD"; cd $td/fusefuzz && "$d"/fusefuzz.py)
"""
def chk(fsz, rsz, ofs0, shift, ofs, rf, vf):
if ofs != rf.tell():
rf.seek(ofs)
vf.seek(ofs)
rb = rf.read(rsz)
vb = vf.read(rsz)
print(f"fsz {fsz} rsz {rsz} ofs {ofs0} shift {shift} ofs {ofs} = {len(rb)}")
if rb != vb:
for n, buf in enumerate([rb, vb]):
with open("buf." + str(n), "wb") as f:
f.write(buf)
raise Exception(f"{len(rb)} != {len(vb)}")
return rb, vb
def main():
v = "v"
for n in range(5):
with open(f"r/{n}", "wb") as f:
f.write(b"h" * n)
rand = os.urandom(7919) # prime
for fsz in range(1024 * 1024 * 2 - 3, 1024 * 1024 * 2 + 3):
with open("r/f", "wb", fsz) as f:
f.write((rand * int(fsz / len(rand) + 1))[:fsz])
for rsz in range(64 * 1024 - 2, 64 * 1024 + 2):
ofslist = [0, 1, 2]
for n in range(3):
ofslist.append(fsz - n)
ofslist.append(fsz - (rsz * 1 + n))
ofslist.append(fsz - (rsz * 2 + n))
for ofs0 in ofslist:
for shift in range(-3, 3):
print(f"fsz {fsz} rsz {rsz} ofs {ofs0} shift {shift}")
ofs = ofs0
if ofs < 0 or ofs >= fsz:
continue
for n in range(1, 3):
with open(f"{v}/{n}", "rb") as f:
f.read()
prev_ofs = -99
with open("r/f", "rb", rsz) as rf:
with open(f"{v}/f", "rb", rsz) as vf:
while True:
ofs += shift
if ofs < 0 or ofs > fsz or ofs == prev_ofs:
break
prev_ofs = ofs
rb, vb = chk(fsz, rsz, ofs0, shift, ofs, rf, vf)
if not rb:
break
ofs += len(rb)
for n in range(1, 3):
with open(f"{v}/{n}", "rb") as f:
f.read()
with open("r/f", "rb", rsz) as rf:
with open(f"{v}/f", "rb", rsz) as vf:
for n in range(2):
ofs += shift
if ofs < 0 or ofs > fsz:
break
rb, vb = chk(fsz, rsz, ofs0, shift, ofs, rf, vf)
ofs -= rsz
# bumping fsz, sleep away the dentry cache in cppf
time.sleep(1)
if __name__ == "__main__":
main()

scripts/install-githooks.sh Executable file

@@ -0,0 +1,12 @@
#!/bin/bash
set -ex
[ -e setup.py ] || cd ..
[ -e setup.py ] || {
echo u wot
exit 1
}
cd .git/hooks
rm -f pre-commit
ln -s ../../scripts/run-tests.sh pre-commit

@@ -3,12 +3,15 @@ set -e
echo
# osx support
command -v gtar >/dev/null &&
command -v gfind >/dev/null && {
tar() { gtar "$@"; }
# port install gnutar findutils gsed coreutils
gtar=$(command -v gtar || command -v gnutar) || true
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
tar() { $gtar "$@"; }
sed() { gsed "$@"; }
find() { gfind "$@"; }
sort() { gsort "$@"; }
command -v grealpath >/dev/null &&
realpath() { grealpath "$@"; }
}
which md5sum 2>/dev/null >/dev/null &&
@@ -83,6 +86,8 @@ function have() {
python -c "import $1; $1; $1.__version__"
}
mv copyparty/web/deps/marked.full.js.gz srv/ || true
. buildenv/bin/activate
have setuptools
have wheel

@@ -18,13 +18,23 @@ echo
# (the fancy markdown editor)
command -v gtar >/dev/null &&
command -v gfind >/dev/null && {
tar() { gtar "$@"; }
# port install gnutar findutils gsed coreutils
gtar=$(command -v gtar || command -v gnutar) || true
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
tar() { $gtar "$@"; }
sed() { gsed "$@"; }
find() { gfind "$@"; }
sort() { gsort "$@"; }
unexpand() { gunexpand "$@"; }
command -v grealpath >/dev/null &&
realpath() { grealpath "$@"; }
[ -e /opt/local/bin/bzip2 ] &&
bzip2() { /opt/local/bin/bzip2 "$@"; }
}
pybin=$(command -v python3 || command -v python) || {
echo need python
exit 1
}
[ -e copyparty/__main__.py ] || cd ..
@@ -35,11 +45,17 @@ command -v gfind >/dev/null && {
exit 1
}
use_gz=
do_sh=1
do_py=1
while [ ! -z "$1" ]; do
[ "$1" = clean ] && clean=1 && shift && continue
[ "$1" = re ] && repack=1 && shift && continue
[ "$1" = gz ] && use_gz=1 && shift && continue
[ "$1" = no-ogv ] && no_ogv=1 && shift && continue
[ "$1" = no-cm ] && no_cm=1 && shift && continue
[ "$1" = no-sh ] && do_sh= && shift && continue
[ "$1" = no-py ] && do_py= && shift && continue
break
done
@@ -59,28 +75,32 @@ cd sfx
)/pe-copyparty"
echo "repack of files in $old"
cp -pR "$old/"*{jinja2,copyparty} .
mv {x.,}jinja2 2>/dev/null || true
cp -pR "$old/"*{dep-j2,copyparty} .
}
[ $repack ] || {
echo collecting jinja2
f="../build/Jinja2-2.6.tar.gz"
f="../build/Jinja2-2.11.3.tar.gz"
[ -e "$f" ] ||
(url=https://files.pythonhosted.org/packages/25/c8/212b1c2fd6df9eaf536384b6c6619c4e70a3afd2dffdd00e5296ffbae940/Jinja2-2.6.tar.gz;
(url=https://files.pythonhosted.org/packages/4f/e7/65300e6b32e69768ded990494809106f87da1d436418d5f1367ed3966fd7/Jinja2-2.11.3.tar.gz;
wget -O$f "$url" || curl -L "$url" >$f)
tar -zxf $f
mv Jinja2-*/jinja2 .
rm -rf Jinja2-* jinja2/testsuite jinja2/_markupsafe/tests.py jinja2/_stringdefs.py
mv Jinja2-*/src/jinja2 .
rm -rf Jinja2-*
f=jinja2/lexer.py
sed -r '/.*föö.*/ raise SyntaxError/' <$f >t
tmv $f
f=jinja2/_markupsafe/_constants.py
awk '!/: [0-9]+,?$/ || /(amp|gt|lt|quot|apos|nbsp).:/' <$f >t
tmv $f
echo collecting markupsafe
f="../build/MarkupSafe-1.1.1.tar.gz"
[ -e "$f" ] ||
(url=https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz;
wget -O$f "$url" || curl -L "$url" >$f)
tar -zxf $f
mv MarkupSafe-*/src/markupsafe .
rm -rf MarkupSafe-* markupsafe/_speedups.c
mkdir dep-j2/
mv {markupsafe,jinja2} dep-j2/
# msys2 tar is bad, make the best of it
echo collecting source
@@ -94,8 +114,39 @@ cd sfx
rm -f ../tar
}
ver="$(awk '/^VERSION *= \(/ {
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < ../copyparty/__version__.py)"
ver=
git describe --tags >/dev/null 2>/dev/null && {
git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//')";
t_ver=
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+$' && {
# short format (exact version number)
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g')";
}
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+-[0-9]+-g[0-9a-f]+$' && {
# long format (unreleased commit)
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g; s/(.*) (.*)/\1 "\2"/')"
}
[ -z "$t_ver" ] && {
printf 'unexpected git version format: [%s]\n' "$git_ver"
exit 1
}
dt="$(git log -1 --format=%cd --date=short | sed -E 's/-0?/, /g')"
printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt"
sed -ri '
s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/;
s/^(S_VERSION =)(.*)/#\1\2\n\1 "'"$ver"'"/;
s/^(BUILD_DT =)(.*)/#\1\2\n\1 ('"$dt"')/;
' copyparty/__version__.py
}
[ -z "$ver" ] &&
ver="$(awk '/^VERSION *= \(/ {
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"
ts=$(date -u +%s)
hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)
@@ -112,7 +163,7 @@ find .. -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
find .. -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
echo use smol web deps
rm -f copyparty/web/deps/*.full.*
rm -f copyparty/web/deps/*.full.* copyparty/web/Makefile
# it's fine dw
grep -lE '\.full\.(js|css)' copyparty/web/* |
@@ -131,41 +182,86 @@ done
sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f"
}
[ $repack ] ||
find | grep -E '\.py$' |
grep -vE '__version__' |
tr '\n' '\0' |
xargs -0 $pybin ../scripts/uncomment.py
f=dep-j2/jinja2/constants.py
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
tmv "$f"
# up2k goes from 28k to 22k laff
echo entabbening
find | grep -E '\.(js|css|html|py)$' | while IFS= read -r f; do
find | grep -E '\.(js|css|html)$' | while IFS= read -r f; do
unexpand -t 4 --first-only <"$f" >t
tmv "$f"
done
echo gen tarlist
for d in copyparty dep-j2; do find $d -type f; done |
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1) >list || true
echo creating tar
args=(--owner=1000 --group=1000)
[ "$OSTYPE" = msys ] &&
args=()
tar -cf tar "${args[@]}" --numeric-owner copyparty jinja2
tar -cf tar "${args[@]}" --numeric-owner -T list
pc=bzip2
pe=bz2
[ $use_gz ] && pc=gzip && pe=gz
echo compressing tar
# detect best level; bzip2 -7 is usually better than -9
for n in {2..9}; do cp tar t.$n; bzip2 -$n t.$n & done; wait; mv -v $(ls -1S t.*.bz2 | tail -n 1) tar.bz2
for n in {2..9}; do cp tar t.$n; xz -ze$n t.$n & done; wait; mv -v $(ls -1S t.*.xz | tail -n 1) tar.xz
rm t.*
[ $do_py ] && { for n in {2..9}; do cp tar t.$n; $pc -$n t.$n & done; wait; mv -v $(ls -1S t.*.$pe | tail -n 1) tar.bz2; }
[ $do_sh ] && { for n in {2..9}; do cp tar t.$n; xz -ze$n t.$n & done; wait; mv -v $(ls -1S t.*.xz | tail -n 1) tar.xz; }
rm t.* || true
exts=()
[ $do_sh ] && {
exts+=(.sh)
echo creating unix sfx
(
sed "s/PACK_TS/$ts/; s/PACK_HTS/$hts/; s/CPP_VER/$ver/" <../scripts/sfx.sh |
grep -E '^sfx_eof$' -B 9001;
cat tar.xz
) >$sfx_out.sh
}
echo creating generic sfx
python ../scripts/sfx.py --sfx-make tar.bz2 $ver $ts
mv sfx.out $sfx_out.py
chmod 755 $sfx_out.*
[ $do_py ] && {
echo creating generic sfx
py=../scripts/sfx.py
suf=
[ $use_gz ] && {
sed -r 's/"r:bz2"/"r:gz"/' <$py >$py.t
py=$py.t
suf=-gz
}
$pybin $py --sfx-make tar.bz2 $ver $ts
mv sfx.out $sfx_out$suf.py
exts+=($suf.py)
[ $use_gz ] &&
rm $py
}
chmod 755 $sfx_out*
printf "done:\n"
printf " %s\n" "$(realpath $sfx_out)."{sh,py}
# rm -rf *
for ext in ${exts[@]}; do
printf " %s\n" "$(realpath $sfx_out)"$ext
done
# tar -tvf ../sfx/tar | sed -r 's/(.* ....-..-.. ..:.. )(.*)/\2 `` \1/' | sort | sed -r 's/(.*) `` (.*)/\2 \1/'| less
# for n in {1..9}; do tar -tf tar | grep -vE '/$' | sed -r 's/(.*)\.(.*)/\2.\1/' | sort | sed -r 's/([^\.]+)\.(.*)/\2.\1/' | tar -cT- | bzip2 -c$n | wc -c; done
# apk add bash python3 tar xz bzip2
# while true; do ./make-sfx.sh; for f in ..//dist/copyparty-sfx.{sh,py}; do mv $f $f.$(wc -c <$f | awk '{print$1}'); done; done

@@ -2,12 +2,16 @@
set -e
echo
command -v gtar >/dev/null &&
command -v gfind >/dev/null && {
tar() { gtar "$@"; }
# osx support
# port install gnutar findutils gsed coreutils
gtar=$(command -v gtar || command -v gnutar) || true
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
tar() { $gtar "$@"; }
sed() { gsed "$@"; }
find() { gfind "$@"; }
sort() { gsort "$@"; }
command -v grealpath >/dev/null &&
realpath() { grealpath "$@"; }
}
which md5sum 2>/dev/null >/dev/null &&
@@ -16,27 +20,29 @@ which md5sum 2>/dev/null >/dev/null &&
ver="$1"
[[ "x$ver" == x ]] &&
[ "x$ver" = x ] &&
{
echo "need argument 1: version"
echo
exit 1
}
[[ -e copyparty/__main__.py ]] || cd ..
[[ -e copyparty/__main__.py ]] ||
[ -e copyparty/__main__.py ] || cd ..
[ -e copyparty/__main__.py ] ||
{
echo "run me from within the project root folder"
echo
exit 1
}
mv copyparty/web/deps/marked.full.js.gz srv/ || true
mkdir -p dist
zip_path="$(pwd)/dist/copyparty-$ver.zip"
tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz"
[[ -e "$zip_path" ]] ||
[[ -e "$tgz_path" ]] &&
[ -e "$zip_path" ] ||
[ -e "$tgz_path" ] &&
{
echo "found existing archives for this version"
echo " $zip_path"

scripts/profile.py Normal file

@@ -0,0 +1,34 @@
#!/usr/bin/env python3
import sys
sys.path.insert(0, ".")
cmd = sys.argv[1]
if cmd == "cpp":
from copyparty.__main__ import main
argv = ["__main__", "-v", "srv::r", "-v", "../../yt:yt:r"]
main(argv=argv)
elif cmd == "test":
from unittest import main
argv = ["__main__", "discover", "-s", "tests"]
main(module=None, argv=argv)
else:
raise Exception()
# import dis; print(dis.dis(main))
# macos:
# option1) python3.9 -m pip install --user -U vmprof==0.4.9
# option2) python3.9 -m pip install --user -U https://github.com/vmprof/vmprof-python/archive/refs/heads/master.zip
#
# python -m vmprof -o prof --lines ./scripts/profile.py test
# linux: ~/.local/bin/vmprofshow prof tree | grep -vF '[1m 0.'
# macos: ~/Library/Python/3.9/bin/vmprofshow prof tree | grep -vF '[1m 0.'
# win: %appdata%\..\Roaming\Python\Python39\Scripts\vmprofshow.exe prof tree

scripts/run-tests.sh Executable file

@@ -0,0 +1,12 @@
#!/bin/bash
set -ex
pids=()
for py in python{2,3}; do
$py -m unittest discover -s tests >/dev/null &
pids+=($!)
done
for pid in ${pids[@]}; do
wait $pid
done

@@ -1,8 +1,8 @@
#!/usr/bin/env python
# coding: utf-8
# coding: latin-1
from __future__ import print_function, unicode_literals
import re, os, sys, stat, time, shutil, tarfile, hashlib, platform, tempfile
import re, os, sys, time, shutil, signal, threading, tarfile, hashlib, platform, tempfile, traceback
import subprocess as sp
"""
@@ -27,20 +27,21 @@ CKSUM = None
STAMP = None
PY2 = sys.version_info[0] == 2
WINDOWS = sys.platform in ["win32", "msys"]
sys.dont_write_bytecode = True
me = os.path.abspath(os.path.realpath(__file__))
def eprint(*args, **kwargs):
kwargs["file"] = sys.stderr
print(*args, **kwargs)
def eprint(*a, **ka):
ka["file"] = sys.stderr
print(*a, **ka)
def msg(*args, **kwargs):
if args:
args = ["[SFX]", args[0]] + list(args[1:])
def msg(*a, **ka):
if a:
a = ["[SFX]", a[0]] + list(a[1:])
eprint(*args, **kwargs)
eprint(*a, **ka)
# skip 1
@@ -155,6 +156,9 @@ def encode(data, size, cksum, ver, ts):
skip = True
continue
if ln.strip().startswith("# fmt: "):
continue
unpk += ln + "\n"
for k, v in [
@@ -191,91 +195,14 @@ def makesfx(tar_src, ver, ts):
# skip 0
def get_py_win(ret):
tops = []
p = str(os.getenv("LocalAppdata"))
if p:
tops.append(os.path.join(p, "Programs", "Python"))
progfiles = {}
for p in ["ProgramFiles", "ProgramFiles(x86)"]:
p = str(os.getenv(p))
if p:
progfiles[p] = 1
# 32bit apps get x86 for both
if p.endswith(" (x86)"):
progfiles[p[:-6]] = 1
tops += list(progfiles.keys())
for sysroot in [me, sys.executable]:
sysroot = sysroot[:3].upper()
if sysroot[1] == ":" and sysroot not in tops:
tops.append(sysroot)
# $WIRESHARK_SLOGAN
for top in tops:
try:
for name1 in sorted(os.listdir(top), reverse=True):
if name1.lower().startswith("python"):
path1 = os.path.join(top, name1)
try:
for name2 in os.listdir(path1):
if name2.lower() == "python.exe":
path2 = os.path.join(path1, name2)
ret[path2.lower()] = path2
except:
pass
except:
pass
def get_py_nix(ret):
ptn = re.compile(r"^(python|pypy)[0-9\.-]*$")
for bindir in os.getenv("PATH").split(":"):
if not bindir:
next
try:
for fn in os.listdir(bindir):
if ptn.match(fn):
fn = os.path.join(bindir, fn)
ret[fn.lower()] = fn
except:
pass
def read_py(binp):
cmd = [
binp,
"-c",
"import sys; sys.stdout.write(' '.join(str(x) for x in sys.version_info)); import jinja2",
]
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
ver, _ = p.communicate()
ver = ver.decode("utf-8").split(" ")[:3]
ver = [int(x) if x.isdigit() else 0 for x in ver]
return ver, p.returncode == 0
def get_pys():
ver, chk = read_py(sys.executable)
if chk or PY2:
return [[chk, ver, sys.executable]]
hits = {sys.executable.lower(): sys.executable}
if platform.system() == "Windows":
get_py_win(hits)
else:
get_py_nix(hits)
ret = []
for binp in hits.values():
ver, chk = read_py(binp)
ret.append([chk, ver, binp])
msg("\t".join(str(x) for x in ret[-1]))
return ret
def u8(gen):
try:
for s in gen:
yield s.decode("utf-8", "ignore")
except:
yield s
for s in gen:
yield s
def yieldfile(fn):
@@ -285,55 +212,57 @@ def yieldfile(fn):
def hashfile(fn):
hasher = hashlib.md5()
h = hashlib.md5()
for block in yieldfile(fn):
hasher.update(block)
h.update(block)
return hasher.hexdigest()
return h.hexdigest()
def unpack():
"""unpacks the tar yielded by `data`"""
name = "pe-copyparty"
tag = "v" + str(STAMP)
withpid = "{}.{}".format(name, os.getpid())
top = tempfile.gettempdir()
final = os.path.join(top, name)
mine = os.path.join(top, withpid)
tar = os.path.join(mine, "tar")
tag_mine = os.path.join(mine, "v" + str(STAMP))
tag_final = os.path.join(final, "v" + str(STAMP))
opj = os.path.join
final = opj(top, name)
mine = opj(top, withpid)
tar = opj(mine, "tar")
if os.path.exists(tag_final):
msg("found early")
return final
try:
if tag in os.listdir(final):
msg("found early")
return final
except:
pass
nwrite = 0
sz = 0
os.mkdir(mine)
with open(tar, "wb") as f:
for buf in get_payload():
nwrite += len(buf)
sz += len(buf)
f.write(buf)
if nwrite != SIZE:
t = "\n\n bad file:\n expected {} bytes, got {}\n".format(SIZE, nwrite)
raise Exception(t)
cksum = hashfile(tar)
if cksum != CKSUM:
t = "\n\n bad file:\n {} expected,\n {} obtained\n".format(CKSUM, cksum)
raise Exception(t)
ck = hashfile(tar)
if ck != CKSUM:
t = "\n\nexpected {} ({} byte)\nobtained {} ({} byte)\nsfx corrupt"
raise Exception(t.format(CKSUM, SIZE, ck, sz))
with tarfile.open(tar, "r:bz2") as tf:
tf.extractall(mine)
os.remove(tar)
with open(tag_mine, "wb") as f:
with open(opj(mine, tag), "wb") as f:
f.write(b"h\n")
if os.path.exists(tag_final):
msg("found late")
return final
try:
if tag in os.listdir(final):
msg("found late")
return final
except:
pass
try:
if os.path.islink(final):
@@ -343,25 +272,25 @@ def unpack():
except:
pass
for fn in u8(os.listdir(top)):
if fn.startswith(name) and fn != withpid:
try:
old = opj(top, fn)
if time.time() - os.path.getmtime(old) > 86400:
shutil.rmtree(old)
except:
pass
try:
os.symlink(mine, final)
except:
try:
os.rename(mine, final)
return final
except:
msg("reloc fail,", mine)
return mine
for fn in os.listdir(top):
if fn.startswith(name) and fn not in [name, withpid]:
try:
old = os.path.join(top, fn)
if time.time() - os.path.getmtime(old) > 10:
shutil.rmtree(old)
except:
pass
return final
return mine
def get_payload():
@@ -378,84 +307,124 @@ def get_payload():
if ofs < 0:
raise Exception("could not find archive marker")
# start reading from the final b"\n"
# start at final b"\n"
fpos = ofs + len(ptn) - 3
# msg("tar found at", fpos)
f.seek(fpos)
dpos = 0
leftovers = b""
rem = b""
while True:
rbuf = f.read(1024 * 32)
if rbuf:
buf = leftovers + rbuf
buf = rem + rbuf
ofs = buf.rfind(b"\n")
if len(buf) <= 4:
leftovers = buf
rem = buf
continue
if ofs >= len(buf) - 4:
leftovers = buf[ofs:]
rem = buf[ofs:]
buf = buf[:ofs]
else:
leftovers = b"\n# "
rem = b"\n# "
else:
buf = leftovers
buf = rem
fpos += len(buf) + 1
buf = (
buf.replace(b"\n# ", b"")
.replace(b"\n#r", b"\r")
.replace(b"\n#n", b"\n")
)
dpos += len(buf) - 1
for a, b in [[b"\n# ", b""], [b"\n#r", b"\r"], [b"\n#n", b"\n"]]:
buf = buf.replace(a, b)
dpos += len(buf) - 1
yield buf
if not rbuf:
break
def confirm():
def utime(top):
i = 0
files = [os.path.join(dp, p) for dp, dd, df in os.walk(top) for p in dd + df]
while WINDOWS:
t = int(time.time())
if i:
msg("utime {}, {}".format(i, t))
for f in files:
os.utime(f, (t, t))
i += 1
time.sleep(78123)
def confirm(rv):
msg()
msg("retcode", rv if rv else traceback.format_exc())
msg("*** hit enter to exit ***")
raw_input() if PY2 else input()
def run(tmp, py):
msg("OK")
msg("will use:", py)
msg("bound to:", tmp)
fp_py = os.path.join(tmp, "py")
with open(fp_py, "wb") as f:
f.write(py.encode("utf-8") + b"\n")
# avoid loading ./copyparty.py
cmd = [
py,
"-c",
'import sys, runpy; sys.path.insert(0, r"'
+ tmp
+ '"); runpy.run_module("copyparty", run_name="__main__")',
] + list(sys.argv[1:])
msg("\n", cmd, "\n")
p = sp.Popen(str(x) for x in cmd)
try:
p.wait()
raw_input() if PY2 else input()
except:
p.wait()
pass
if p.returncode != 0:
confirm()
sys.exit(rv)
sys.exit(p.returncode)
def run(tmp, j2):
msg("jinja2:", j2 or "bundled")
msg("sfxdir:", tmp)
msg()
# block systemd-tmpfiles-clean.timer
try:
import fcntl
fd = os.open(tmp, os.O_RDONLY)
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
except Exception as ex:
if not WINDOWS:
msg("\033[31mflock:", repr(ex))
t = threading.Thread(target=utime, args=(tmp,))
t.daemon = True
t.start()
ld = [tmp, os.path.join(tmp, "dep-j2")]
if j2:
del ld[-1]
if any([re.match(r"^-.*j[0-9]", x) for x in sys.argv]):
run_s(ld)
else:
run_i(ld)
def run_i(ld):
for x in ld:
sys.path.insert(0, x)
from copyparty.__main__ import main as p
p()
def run_s(ld):
# fmt: off
c = "import sys,runpy;" + "".join(['sys.path.insert(0,r"' + x + '");' for x in ld]) + 'runpy.run_module("copyparty",run_name="__main__")'
c = [str(x) for x in [sys.executable, "-c", c] + list(sys.argv[1:])]
# fmt: on
msg("\n", c, "\n")
p = sp.Popen(c)
def bye(*a):
p.send_signal(signal.SIGINT)
signal.signal(signal.SIGTERM, bye)
p.wait()
raise SystemExit(p.returncode)
def main():
sysver = str(sys.version).replace("\n", "\n" + " " * 18)
pktime = time.strftime("%Y-%m-%d, %H:%M:%S", time.gmtime(STAMP))
os.system("")
msg()
msg(" this is: copyparty", VER)
msg(" packed at:", pktime, "UTC,", STAMP)
@@ -484,34 +453,23 @@ def main():
# skip 0
tmp = unpack()
fp_py = os.path.join(tmp, "py")
if os.path.exists(fp_py):
with open(fp_py, "rb") as f:
py = f.read().decode("utf-8").rstrip()
tmp = os.path.realpath(unpack())
return run(tmp, py)
try:
from jinja2 import __version__ as j2
except:
j2 = None
pys = get_pys()
pys.sort(reverse=True)
j2, ver, py = pys[0]
if j2:
try:
os.rename(os.path.join(tmp, "jinja2"), os.path.join(tmp, "x.jinja2"))
except:
pass
return run(tmp, py)
msg("\n could not find jinja2; will use py2 + the bundled version\n")
for _, ver, py in pys:
if ver > [2, 7] and ver < [3, 0]:
return run(tmp, py)
m = "\033[1;31m\n\n\ncould not find a python with jinja2 installed; please do one of these:\n\n pip install --user jinja2\n\n install python2\n\n\033[0m"
msg(m)
confirm()
sys.exit(1)
try:
run(tmp, j2)
except SystemExit as ex:
c = ex.code
if c not in [0, -15]:
confirm(ex.code)
except KeyboardInterrupt:
pass
except:
confirm(0)
if __name__ == "__main__":

View File

@@ -32,8 +32,12 @@ dir="$(
# detect available pythons
(IFS=:; for d in $PATH; do
printf '%s\n' "$d"/python* "$d"/pypy* | tac;
done) | grep -E '(python|pypy)[0-9\.-]*$' > $dir/pys || true
printf '%s\n' "$d"/python* "$d"/pypy*;
done) |
(sed -E 's/(.*\/[^/0-9]+)([0-9]?[^/]*)$/\2 \1/' || cat) |
(sort -nr || cat) |
(sed -E 's/([^ ]*) (.*)/\2\1/' || cat) |
grep -E '/(python|pypy)[0-9\.-]*$' >$dir/pys || true
# see if we made a choice before
[ -z "$pybin" ] && pybin="$(cat $dir/py 2>/dev/null || true)"

scripts/speedtest-fs.py Normal file

@@ -0,0 +1,168 @@
#!/usr/bin/env python
import os
import sys
import stat
import time
import signal
import traceback
import threading
from queue import Queue
"""speedtest-fs: filesystem performance estimate"""
__author__ = "ed <copyparty@ocv.me>"
__copyright__ = 2020
__license__ = "MIT"
__url__ = "https://github.com/9001/copyparty/"
def get_spd(nbyte, nfiles, nsec):
if not nsec:
return "0.000 MB 0 files 0.000 sec 0.000 MB/s 0.000 f/s"
mb = nbyte / (1024 * 1024.0)
spd = mb / nsec
nspd = nfiles / nsec
return f"{mb:.3f} MB {nfiles} files {nsec:.3f} sec {spd:.3f} MB/s {nspd:.3f} f/s"
class Inf(object):
def __init__(self, t0):
self.msgs = []
self.errors = []
self.reports = []
self.mtx_msgs = threading.Lock()
self.mtx_reports = threading.Lock()
self.n_byte = 0
self.n_file = 0
self.n_sec = 0
self.n_done = 0
self.t0 = t0
thr = threading.Thread(target=self.print_msgs)
thr.daemon = True
thr.start()
def msg(self, fn, n_read):
with self.mtx_msgs:
self.msgs.append(f"{fn} {n_read}")
def err(self, fn):
with self.mtx_reports:
self.errors.append(f"{fn}\n{traceback.format_exc()}")
def print_msgs(self):
while True:
time.sleep(0.02)
with self.mtx_msgs:
msgs = self.msgs
self.msgs = []
if not msgs:
continue
msgs = msgs[-64:]
spd = get_spd(self.n_byte, len(self.reports), self.n_sec)
msgs = [f"{spd} {x}" for x in msgs]
print("\n".join(msgs))
def report(self, fn, n_byte, n_sec):
with self.mtx_reports:
self.reports.append([n_byte, n_sec, fn])
self.n_byte += n_byte
self.n_sec += n_sec
def done(self):
with self.mtx_reports:
self.n_done += 1
def get_files(dir_path):
for fn in os.listdir(dir_path):
fn = os.path.join(dir_path, fn)
st = os.stat(fn).st_mode
if stat.S_ISDIR(st):
yield from get_files(fn)
if stat.S_ISREG(st):
yield fn
def worker(q, inf, read_sz):
while True:
fn = q.get()
if not fn:
break
n_read = 0
try:
t0 = time.time()
with open(fn, "rb") as f:
while True:
buf = f.read(read_sz)
if not buf:
break
n_read += len(buf)
inf.msg(fn, n_read)
inf.report(fn, n_read, time.time() - t0)
except:
inf.err(fn)
inf.done()
def sighandler(signo, frame):
os._exit(0)
def main():
signal.signal(signal.SIGINT, sighandler)
root = "."
if len(sys.argv) > 1:
root = sys.argv[1]
t0 = time.time()
q = Queue(256)
inf = Inf(t0)
num_threads = 8
read_sz = 32 * 1024
targs = (q, inf, read_sz)
for _ in range(num_threads):
thr = threading.Thread(target=worker, args=targs)
thr.daemon = True
thr.start()
for fn in get_files(root):
q.put(fn)
for _ in range(num_threads):
q.put(None)
while inf.n_done < num_threads:
time.sleep(0.1)
t2 = time.time()
print("\n")
log = inf.reports
log.sort()
for nbyte, nsec, fn in log[-64:]:
spd = get_spd(nbyte, len(log), nsec)
print(f"{spd} {fn}")
print()
print("\n".join(inf.errors))
print(get_spd(inf.n_byte, len(log), t2 - t0))
if __name__ == "__main__":
main()

scripts/uncomment.py Normal file

@@ -0,0 +1,77 @@
#!/usr/bin/env python
# coding: utf-8
from __future__ import print_function, unicode_literals
import io
import sys
import tokenize
def uncomment(fpath):
""" modified https://stackoverflow.com/a/62074206 """
with open(fpath, "rb") as f:
orig = f.read().decode("utf-8")
out = ""
for ln in orig.split("\n"):
if not ln.startswith("#"):
break
out += ln + "\n"
io_obj = io.StringIO(orig)
prev_toktype = tokenize.INDENT
last_lineno = -1
last_col = 0
for tok in tokenize.generate_tokens(io_obj.readline):
# print(repr(tok))
token_type = tok[0]
token_string = tok[1]
start_line, start_col = tok[2]
end_line, end_col = tok[3]
if start_line > last_lineno:
last_col = 0
if start_col > last_col:
out += " " * (start_col - last_col)
is_legalese = (
"copyright" in token_string.lower() or "license" in token_string.lower()
)
if token_type == tokenize.STRING:
if (
prev_toktype != tokenize.INDENT
and prev_toktype != tokenize.NEWLINE
and start_col > 0
or is_legalese
):
out += token_string
else:
out += '"a"'
elif token_type != tokenize.COMMENT or is_legalese:
out += token_string
prev_toktype = token_type
last_lineno = end_line
last_col = end_col
# out = "\n".join(x for x in out.splitlines() if x.strip())
with open(fpath, "wb") as f:
f.write(out.encode("utf-8"))
def main():
print("uncommenting", end="")
for f in sys.argv[1:]:
print(".", end="")
uncomment(f)
print("k")
if __name__ == "__main__":
main()

@@ -2,27 +2,10 @@
# coding: utf-8
from __future__ import print_function
import io
import os
import sys
from glob import glob
from shutil import rmtree
setuptools_available = True
try:
# need setuptools to build wheel
from setuptools import setup, Command, find_packages
except ImportError:
# works in a pinch
setuptools_available = False
from distutils.core import setup, Command
from distutils.spawn import spawn
if "bdist_wheel" in sys.argv and not setuptools_available:
print("cannot build wheel without setuptools")
sys.exit(1)
from setuptools import setup, Command, find_packages
NAME = "copyparty"
@@ -49,7 +32,7 @@ with open(here + "/README.md", "rb") as f:
about = {}
if not VERSION:
with open(os.path.join(here, NAME, "__version__.py"), "rb") as f:
exec(f.read().decode("utf-8").split("\n\n", 1)[1], about)
exec (f.read().decode("utf-8").split("\n\n", 1)[1], about)
else:
about["__version__"] = VERSION
@@ -102,55 +85,35 @@ args = {
"author_email": "copyparty@ocv.me",
"url": "https://github.com/9001/copyparty",
"license": "MIT",
"data_files": data_files,
"classifiers": [
"Development Status :: 3 - Alpha",
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Environment :: Console",
"Environment :: No Input/Output (Daemon)",
"Topic :: Communications :: File Sharing",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers",
],
"include_package_data": True,
"data_files": data_files,
"packages": find_packages(),
"install_requires": ["jinja2"],
"extras_require": {"thumbnails": ["Pillow"], "audiotags": ["mutagen"]},
"entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]},
"scripts": ["bin/copyparty-fuse.py"],
"cmdclass": {"clean2": clean2},
}
if setuptools_available:
args.update(
{
"packages": find_packages(),
"install_requires": ["jinja2"],
"extras_require": {"thumbnails": ["Pillow"]},
"include_package_data": True,
"entry_points": {
"console_scripts": ["copyparty = copyparty.__main__:main"]
},
"scripts": ["bin/copyparty-fuse.py"],
}
)
else:
args.update(
{
"packages": ["copyparty", "copyparty.stolen"],
"scripts": ["bin/copyparty-fuse.py"],
}
)
# import pprint
# pprint.PrettyPrinter().pprint(args)
# sys.exit(0)
setup(**args)

Some files were not shown because too many files have changed in this diff.