Compare commits

...

264 Commits

Author SHA1 Message Date
ed  f9be4c62b1 v0.11.44 2021-07-20 01:03:08 +02:00
ed  027e8c18f1 sfx: option to remove mouse cursor 2021-07-20 01:00:28 +02:00
ed  4a3bb35a95 sfx: option to remove scp.woff2 2021-07-20 00:45:54 +02:00
ed  4bfb0d4494 notes 2021-07-19 23:46:44 +02:00
ed  7e0ef03a1e fix audio player edgecase (continue into next folder with sidebar closed) 2021-07-19 23:10:48 +02:00
ed  f7dbd95a54 v0.11.43 2021-07-19 01:56:19 +02:00
ed  515ee2290b v0.11.42 2021-07-18 23:22:09 +02:00
ed  b0c78910bb fix tabchange triggering tooltips 2021-07-18 23:21:36 +02:00
ed  f4ca62b664 reattach tooltips on column show/hide 2021-07-18 23:14:57 +02:00
ed  8eb8043a3d fix 3rdparty namecase 2021-07-18 22:50:29 +02:00
ed  3e8541362a keep active dir scrolled into view on keybd nav 2021-07-18 22:32:34 +02:00
ed  789724e348 use preferred key notation in search results 2021-07-18 21:50:57 +02:00
ed  5125b9532f fix multiple whitespace in query translator 2021-07-18 21:39:28 +02:00
ed  ebc9de02b0 case-insensitive tag search 2021-07-18 21:34:36 +02:00
ed  ec788fa491 mutagen fixes:
    * extract codec and format info
    * add FFprobe as fallback when mutagen fails
    * add option to blacklist FFprobe for tags
    2021-07-18 19:57:31 +02:00
ed  9b5e264574 systemd: fix name in journalctl 2021-07-17 19:14:15 +02:00
ed  57c297274b v0.11.41 2021-07-17 17:53:34 +02:00
ed  e9bf092317 tweak audio drawer tab 2021-07-17 17:24:48 +02:00
ed  d173887324 explain confusing behavior in journalctl 2021-07-17 16:45:49 +02:00
ed  99820d854c oh that wasnt enough ok then 2021-07-17 16:45:25 +02:00
ed  62df0a0eb2 thx osx 2021-07-17 16:43:22 +02:00
ed  600e9ac947 try to workaround iphones not hiding tooltips 2021-07-17 16:03:21 +02:00
ed  3ca41be2b4 do up2k snapshot on shutdown 2021-07-17 14:48:35 +02:00
ed  5c7debd900 improve signal handling + emit sd-notify on start 2021-07-17 04:15:07 +02:00
ed  7fa5b23ce3 sfx: fix color bleed on flock errors 2021-07-17 04:12:14 +02:00
ed  ff82738aaf vscode: support whitespace in python binary path 2021-07-17 04:11:14 +02:00
ed  bf5ee9d643 colum header tooltips 2021-07-17 02:52:55 +02:00
ed  72a8593ecd gridmode shortcut in the audio drawer 2021-07-17 01:45:05 +02:00
ed  bc3bbe07d4 combine tabs on narrow screens 2021-07-17 01:21:49 +02:00
ed  c7cb64bfef gallery: add hotkey list button 2021-07-17 01:14:14 +02:00
ed  629f537d06 add more hotkey tooltips 2021-07-17 01:05:26 +02:00
ed  9e988041b8 cosmetics 2021-07-16 02:56:21 +02:00
ed  f9a8b5c9d7 update readme 2021-07-16 02:44:06 +02:00
ed  b9c3538253 nope, not doing this 2021-07-15 23:49:30 +02:00
ed  2bc0cdf017 fix md-editor hotkeys on dvorak 2021-07-15 23:24:10 +02:00
ed  02a91f60d4 playing some golf 2021-07-15 23:19:37 +02:00
ed  fae83da197 v0.11.40 2021-07-15 01:13:15 +02:00
ed  0fe4aa6418 ux tweaks 2021-07-15 01:04:38 +02:00
ed  21a51bf0dc make it feel like home 2021-07-15 00:50:43 +02:00
ed  bcb353cc30 allow ctrl-clicking primary tabs 2021-07-15 00:37:14 +02:00
ed  6af4508518 adjust the sfx edit warning 2021-07-15 00:26:33 +02:00
ed  6a559bc28a gallery: dispose videos to stop buffering 2021-07-15 00:22:26 +02:00
ed  0f5026cd20 gallery: option to autoplay next video on end 2021-07-15 00:04:33 +02:00
ed  a91b80a311 gallery: add video loop hotkey R 2021-07-14 09:42:38 +02:00
ed  ec534701c8 gallery: pause/resume audio player on video 2021-07-14 09:40:12 +02:00
ed  af5169f67f gallery: fix hotkeys + focus 2021-07-14 09:35:50 +02:00
ed  18676c5e65 better crash page 2021-07-14 09:34:42 +02:00
ed  e2df6fda7b update hotkeys 2021-07-13 02:20:52 +02:00
ed  e9ae9782fe v0.11.39 2021-07-13 00:54:23 +02:00
ed  016dba4ca9 v0.11.38 2021-07-13 00:35:34 +02:00
ed  39c7ef305f add a link to clear settings on the js crash page 2021-07-13 00:33:46 +02:00
ed  849c1dc848 video-player: add hotkeys m=mute, f=fullscreen 2021-07-13 00:23:48 +02:00
ed  61414014fe gallery: fix link overlapping image 2021-07-13 00:14:06 +02:00
ed  578a915884 stack/thread monitors in mpw + better thread names 2021-07-12 23:03:52 +02:00
ed  eacafb8a63 add option to log summary of running threads 2021-07-12 22:57:37 +02:00
ed  4446760f74 fix link to ?stack on rootless configs 2021-07-12 22:55:38 +02:00
ed  6da2a083f9 v0.11.37 2021-07-12 00:51:59 +02:00
ed  8837c8f822 print zip/tar errors to log 2021-07-12 00:47:22 +02:00
ed  bac301ed66 get rid of iffy default-args 2021-07-12 00:15:13 +02:00
ed  061db3906d v0.11.36 2021-07-11 06:39:58 +02:00
ed  fd7df5c952 v0.11.35 2021-07-11 06:22:56 +02:00
ed  a270019147 easier to tell youre trying to watch a video that firefox cant deal with 2021-07-11 06:21:25 +02:00
ed  55e0209901 add video-player keybinds 2021-07-11 06:12:24 +02:00
ed  2b255fbbed add in-gallery video playback 2021-07-11 03:25:46 +02:00
ed  8a2345a0fb top of the sandwich fell off 2021-07-11 02:06:18 +02:00
ed  bfa9f535aa more context in exceptions 2021-07-11 01:59:07 +02:00
ed  f757623ad8 make bdmv thumbnails 2021-07-09 20:09:32 +02:00
ed  3c7465e268 option to disable thumbcache eviction 2021-07-09 19:55:17 +02:00
ed  108665fc4f v0.11.34 2021-07-09 17:12:21 +02:00
ed  ed519c9138 add performance notes 2021-07-09 17:10:37 +02:00
ed  2dd2e2c57e discard logs in mpw 2021-07-09 17:01:11 +02:00
ed  6c3a976222 scale max-clients to mp-workers 2021-07-09 16:48:02 +02:00
ed  80cc26bd95 fix max-client limit 2021-07-09 16:33:11 +02:00
ed  970fb84fd8 hex looks better 2021-07-09 16:11:33 +02:00
ed  20cbcf6931 logging + shutdown cleanup 2021-07-09 16:07:16 +02:00
ed  8fcde2a579 move tcp accept into mp-worker 2021-07-09 15:49:36 +02:00
ed  b32d1f8ad3 make ?stack work anywhere 2021-07-09 13:46:42 +02:00
ed  03513e0cb1 effectively pointless but cool 2021-07-09 03:41:44 +02:00
ed  e041a2b197 fix centos7 support 2021-07-08 23:35:28 +02:00
ed  d7d625be2a v0.11.33 2021-07-07 10:45:47 +02:00
ed  4121266678 v0.11.32 2021-07-06 21:58:03 +02:00
ed  22971a6be4 up2k-cli: add turbo button 2021-07-06 21:43:07 +02:00
ed  efbf8d7e0d better handling of invalid requests 2021-07-06 01:03:09 +02:00
ed  397396ea4a apply -nw to PUT uploads too 2021-07-06 00:49:39 +02:00
ed  e59b077c21 announce the rotates 2021-07-06 00:43:37 +02:00
ed  4bc39f3084 add logrotate 2021-07-06 00:23:51 +02:00
ed  21c3570786 detect more recursive symlinks 2021-07-05 23:50:03 +02:00
ed  2f85c1fb18 add logging to file 2021-07-05 23:30:33 +02:00
ed  1e27a4c2df make thumb-dir.txt unretrievable 2021-07-05 00:21:33 +02:00
ed  456f575637 v0.11.31 2021-07-04 16:44:29 +02:00
ed  51546c9e64 add missing -nw check 2021-07-04 16:10:20 +02:00
ed  83b4b70ef4 add keepalive handshakes 2021-07-04 16:04:26 +02:00
ed  a5120d4f6f parallelize handshakes 2021-07-04 01:48:01 +02:00
ed  c95941e14f add testimonials, drop bad idea 2021-07-04 00:32:29 +02:00
ed  0dd531149d good 2021-07-03 18:11:52 +02:00
ed  67da1b5219 add ideas 2021-07-03 17:29:49 +02:00
ed  919bd16437 add hls notes 2021-07-03 01:32:36 +02:00
ed  ecead109ab v0.11.30 2021-07-01 22:27:19 +02:00
ed  765294c263 ignore dupe-chunk warnings; handshake takes care of it 2021-07-01 20:22:12 +02:00
ed  d6b5351207 add cachebuster because chrome ignores no-cache 2021-07-01 20:10:02 +02:00
ed  a2009bcc6b up2k-cli: recover from tcp/dns issues on upload 2021-07-01 00:52:09 +02:00
ed  12709a8a0a up2k-cli: recover from antivirus yanking files mid-read 2021-07-01 00:11:40 +02:00
ed  c055baefd2 up2k-client: maybe fix busy-tab (assumed linear progress) 2021-06-30 23:17:07 +02:00
ed  56522599b5 up2k-client: way faster init on large filedrops 2021-06-30 21:26:13 +02:00
ed  664f53b75d chrome gets stuck iterating over aux.h on win10 2021-06-30 19:26:06 +02:00
ed  87200d9f10 make -nw apply to more stuff 2021-06-30 19:23:45 +02:00
ed  5c3d0b6520 catch errors in onloads 2021-06-30 17:09:37 +02:00
ed  bd49979f4a v0.11.29 2021-06-30 01:51:57 +02:00
ed  7e606cdd9f make search rate-control less visually confusing 2021-06-30 01:44:25 +02:00
ed  8b4b7fa794 allow opening tree nodes in a new tab 2021-06-30 01:08:20 +02:00
ed  05345ddf8b add per-connection request counting 2021-06-30 01:00:00 +02:00
ed  66adb470ad optional progressbar tint 2021-06-30 00:55:57 +02:00
ed  e15c8fd146 add upload pause 2021-06-30 00:34:33 +02:00
ed  0f09b98a39 scan for additional folder thumbnails 2021-06-30 00:19:39 +02:00
ed  b4d6f4e24d american-friendly upload limits (allow additional bypass using manual text entry) 2021-06-30 00:11:23 +02:00
ed  3217fa625b more todo 2021-06-29 23:59:15 +02:00
ed  e719ff8a47 make sfx kipu-proof 2021-06-29 23:53:57 +02:00
ed  9fcf528d45 update readme 2021-06-29 23:32:21 +02:00
ed  1ddbf5a158 update todo 2021-06-29 23:00:28 +02:00
ed  64bf4574b0 add todo maybe 2021-06-28 20:38:59 +02:00
ed  5649d26077 v0.11.28 2021-06-28 15:36:13 +02:00
ed  92f923effe hotkey for adjusting tree width 2021-06-28 15:34:10 +02:00
ed  0d46d548b9 fix panic when zero accounts 2021-06-28 15:20:40 +02:00
ed  062df3f0c3 point control-panel link to / 2021-06-27 00:52:15 +02:00
ed  789fb53b8e tweaks 2021-06-27 00:49:28 +02:00
ed  351db5a18f ah yes trailing whitespace as markup my good old friend we meet again 2021-06-27 00:20:42 +02:00
ed  aabbd271c8 add debian howto 2021-06-27 00:19:37 +02:00
ed  aae8e0171e v0.11.27 2021-06-25 22:23:21 +02:00
ed  45827a2458 fix exit-search button in gridview 2021-06-25 22:18:16 +02:00
ed  726030296f apparently the html dom-property is not normalized 2021-06-25 22:07:37 +02:00
ed  6659ab3881 ajax subfolders from gridview 2021-06-25 21:49:09 +02:00
ed  c6a103609e fix gridview selection/baguettebox order 2021-06-25 21:35:45 +02:00
ed  c6b3f035e5 gridview audio playback in search results too 2021-06-25 21:12:49 +02:00
ed  2b0a7e378e persist url-password as cookie 2021-06-25 20:39:55 +02:00
ed  b75ce909c8 audio seek with scrollbar on progressbar 2021-06-25 20:24:30 +02:00
ed  229c3f5dab play audio from grid when widget open 2021-06-25 20:04:19 +02:00
ed  ec73094506 v0.11.26 2021-06-25 03:10:43 +02:00
ed  c7650c9326 v0.11.25 2021-06-25 03:06:15 +02:00
ed  d94c6d4e72 more rice 2021-06-25 03:02:04 +02:00
ed  3cc8760733 clear seekbar when switching folders 2021-06-25 02:56:21 +02:00
ed  a2f6973495 heh 2021-06-25 02:43:47 +02:00
ed  f8648fa651 always set mediasession play/pause state 2021-06-25 02:39:39 +02:00
ed  177aa038df send charset=utf8 for css, js files 2021-06-25 02:10:42 +02:00
ed  e0a14ec881 event hints for ogvjs playback 2021-06-25 02:03:18 +02:00
ed  9366512f2f audio player: add pause-fade + track-restart +
    fix ogvjs paused-seek
    2021-06-25 01:46:30 +02:00
ed  ea38b8041a actually fix autoplay on some chromes 2021-06-25 00:43:58 +02:00
ed  f1870daf0d retry filesearch when rate-limited 2021-06-23 22:01:06 +02:00
ed  9722441aad maybe fix autoplay on some chromes 2021-06-23 20:35:05 +02:00
ed  9d014087f4 censor passwords in logs 2021-06-23 00:04:11 +02:00
ed  83b4038b85 ok they actually served a purpose 2021-06-22 21:33:11 +00:00
ed  1e0a448feb audio-key: truncate at 5min + mojibake support 2021-06-22 22:21:39 +02:00
ed  fb81de3b36 v0.11.24 2021-06-22 17:28:09 +02:00
ed  aa4f352301 prefer audio tags in audio files 2021-06-22 17:21:24 +02:00
ed  f1a1c2ea45 recover from opening a corrupt database 2021-06-22 17:19:56 +02:00
ed  6249bd4163 add pebkac hints 2021-06-22 17:18:34 +02:00
ed  2579dc64ce update notes 2021-06-21 22:49:28 +00:00
ed  356512270a file extensions dont contain whitespace 2021-06-21 23:50:35 +02:00
ed  bed27f2b43 mention fix for the OSD popup on windows 2021-06-21 23:43:07 +02:00
ed  54013d861b v0.11.23 2021-06-21 21:15:56 +02:00
ed  ec100210dc support showing album-cover on windows lockscreen 2021-06-21 19:15:22 +00:00
ed  3ab1acf32c v0.11.22 2021-06-21 20:30:29 +02:00
ed  8c28266418 subscribe to media-keys globally as a media player 2021-06-21 20:26:11 +02:00
ed  7f8b8dcb92 scandir is not withable before py3.6 2021-06-21 20:23:35 +02:00
ed  6dd39811d4 disable u2idx if sqlite3 is unavailable 2021-06-21 20:22:54 +02:00
ed  35e2138e3e doc: macos support 2021-06-21 18:42:15 +02:00
ed  239b4e9fe6 v0.11.21 2021-06-20 21:25:18 +02:00
ed  2fcd0e7e72 abandon listing tags in browser when db busy 2021-06-20 21:19:47 +02:00
ed  357347ce3a lower timeout on db reads 2021-06-20 21:03:35 +02:00
ed  36dc1107fb update dbtool desc 2021-06-20 20:05:43 +02:00
ed  0a3bbc4b4a v0.11.20 for real 2021-06-20 19:32:17 +02:00
ed  855b93dcf6 v0.11.20 2021-06-20 18:53:58 +02:00
ed  89b79ba267 fix histpath getting indexed on windows 2021-06-20 17:59:27 +02:00
ed  f5651b7d94 dont include hidden colums in /np clips 2021-06-20 17:45:59 +02:00
ed  1881019ede support cygpaths for mtag binaries 2021-06-20 17:45:23 +02:00
ed  caba4e974c upgrade dbtool for v4 2021-06-20 17:44:24 +02:00
ed  bc3c9613bc cosmetic macos fix on shutdown 2021-06-20 15:50:37 +02:00
ed  15a3ee252e support backslash in filenames 2021-06-20 15:50:06 +02:00
ed  be055961ae adjust up2k hashlen to match base64 window 2021-06-20 15:32:36 +02:00
ed  e3031bdeec fix up2k folder-upload 2021-06-20 00:00:50 +00:00
ed  75917b9f7c better fallback 2021-06-19 16:21:39 +02:00
ed  910732e02c update build notes 2021-06-19 16:20:35 +02:00
ed  264b497681 v0.11.19 2021-06-19 01:32:17 +02:00
ed  372b949622 fix tooltip indicator 2021-06-19 01:25:07 +02:00
ed  789a602914 save some more bytes on the wire 2021-06-19 01:18:48 +02:00
ed  093e955100 move stuff that needs javascript out of the html 2021-06-19 01:10:40 +02:00
ed  c32a89bebf minor lightmode tweaks 2021-06-19 00:17:39 +02:00
ed  c0bebe9f9f eq-param error-hilight in lightmode 2021-06-18 23:51:26 +02:00
ed  57579b2fe5 fix android-chrome layout glitch in up2k 2021-06-18 23:38:43 +02:00
ed  51d14a6b4d fix toolbar tooltips on android 2021-06-18 22:11:01 +02:00
ed  c50f1b64e5 dodge android-chrome bug: canvas aspect ratio 2021-06-18 21:46:15 +02:00
ed  98aaab02c5 block scroll events, hilight selected radios 2021-06-18 20:49:38 +02:00
ed  0fc7973d8b add shadow to playback times 2021-06-18 20:24:36 +02:00
ed  10362aa02e v0.11.18 2021-06-18 00:30:37 +02:00
ed  0a8e759fe6 v0.11.17 2021-06-17 00:31:38 +02:00
ed  d70981cdd1 fix eq param input 2021-06-17 00:29:14 +02:00
ed  e08c03b886 audio-filters: expose gain control 2021-06-16 22:25:29 +02:00
ed  56086e8984 ux: contrast tweaks + fix anchor-scroll 2021-06-16 21:38:30 +02:00
ed  1aa9033022 add play/pause hotkey 2021-06-16 19:19:29 +02:00
ed  076e103d53 ux: responsive settings layout 2021-06-16 19:10:32 +02:00
ed  38c00ea8fc print thumbnail cleanup summary 2021-06-16 18:57:10 +02:00
ed  415757af43 mention the symlink-scanner too 2021-06-16 18:37:23 +02:00
ed  e72ed8c0ed mention some essentials 2021-06-16 18:29:29 +02:00
ed  32f9c6b5bb v0.11.16 2021-06-16 01:51:18 +02:00
ed  6251584ef6 fix .13dB clipping with all-zero eq 2021-06-15 23:37:44 +00:00
ed  f3e413bc28 icons 2021-06-16 00:01:07 +02:00
ed  6f6cc8f3f8 move eq to the player settings tab 2021-06-15 22:26:39 +02:00
ed  8b081e9e69 media player: continue to next folder 2021-06-15 22:19:53 +02:00
ed  c8a510d10e fully hide columns when minimized 2021-06-15 21:43:37 +02:00
ed  6f834f6679 sticky tree header 2021-06-15 21:07:27 +02:00
ed  cf2d6650ac audio-eq: flatten frequency response 2021-06-15 21:06:00 +02:00
ed  cd52dea488 v0.11.15 2021-06-15 00:01:11 +02:00
ed  6ea75df05d add audio equalizer 2021-06-14 23:58:56 +02:00
ed  4846e1e8d6 mention num.clients for rproxy 2021-06-14 19:27:34 +02:00
ed  fc024f789d v0.11.14 2021-06-14 03:05:50 +02:00
ed  473e773aea fix deadlock 2021-06-14 00:55:11 +00:00
ed  48a2e1a353 add threadwatcher 2021-06-14 01:57:18 +02:00
ed  6da63fbd79 up2k-cli: recover from lost handshakes 2021-06-14 01:01:06 +02:00
ed  5bec37fcee fix cosmetic login glitch 2021-06-14 00:28:08 +02:00
ed  3fd0ba0a31 oh right its the other way around 2021-06-13 22:49:55 +02:00
ed  241a143366 add --rproxy for explicit proxy level 2021-06-13 22:22:31 +02:00
ed  a537064da7 custom-css example to add filetype icons 2021-06-13 00:49:28 +02:00
ed  f3dfd24c92 v0.11.13 2021-06-12 20:37:05 +02:00
ed  fa0a7f50bb add image gallery 2021-06-12 20:25:08 +02:00
ed  44a78a7e21 v0.11.12 2021-06-12 04:28:21 +02:00
ed  6b75cbf747 add readme 2021-06-12 04:26:53 +02:00
ed  e7b18ab9fe custom css 2021-06-12 04:22:07 +02:00
ed  aa12830015 keep transparency in thumbnails 2021-06-12 03:32:06 +02:00
ed  f156e00064 s/cover/folder/g 2021-06-12 03:06:56 +02:00
ed  d53c212516 add mtp queue to status page 2021-06-12 02:23:48 +02:00
ed  ca27f8587c add cygpath support for volume src too 2021-06-12 01:55:45 +02:00
ed  88ce008e16 more status on admin panel 2021-06-12 01:39:14 +02:00
ed  081d2cc5d7 add folder thumbnails (cover.jpg or png) 2021-06-11 23:54:54 +02:00
ed  60ac68d000 single authsrv instance per process 2021-06-11 23:01:13 +02:00
ed  fbe656957d fix race 2021-06-11 18:12:06 +02:00
ed  5534c78c17 tests pass 2021-06-11 03:10:33 +02:00
ed  a45a53fdce support macos ffmpeg 2021-06-11 03:05:42 +02:00
ed  972a56e738 fix stuff 2021-06-11 01:45:28 +02:00
ed  5e03b3ca38 use parent db/thumbs in jump-volumes 2021-06-10 20:43:19 +02:00
ed  1078d933b4 adding --no-hash 2021-06-10 18:08:30 +02:00
ed  d6bf300d80 option to store state out-of-volume (mostly untested) 2021-06-10 01:27:04 +02:00
ed  a359d64d44 v0.11.11 2021-06-08 23:43:00 +02:00
ed  22396e8c33 zopfli js/css 2021-06-08 23:19:35 +02:00
ed  5ded5a4516 alphabetical up2k indexing 2021-06-08 21:42:08 +02:00
ed  79c7639aaf haha memes 2021-06-08 21:10:25 +02:00
ed  5bbf875385 fuse-client: print python version 2021-06-08 20:19:51 +02:00
ed  5e159432af vscode: support running with -jN 2021-06-08 20:18:24 +02:00
ed  1d6ae409f6 count expenses when sending files 2021-06-08 20:17:53 +02:00
ed  9d729d3d1a add thread names 2021-06-08 20:14:23 +02:00
ed  4dd5d4e1b7 when rootless, blank instead of block rootdir 2021-06-08 18:35:55 +02:00
ed  acd8149479 dont track workloads unless multiprocessing 2021-06-08 18:01:59 +02:00
ed  b97a1088fa v0.11.10 2021-06-08 09:41:31 +02:00
ed  b77bed3324 fix terminating tls connections wow 2021-06-08 09:40:49 +02:00
ed  a2b7c85a1f forgot what version was running on a box 2021-06-08 00:01:08 +02:00
ed  b28533f850 v0.11.9 2021-06-07 20:22:10 +02:00
ed  bd8c7e538a sfx.sh: use system jinja2 when available 2021-06-07 20:09:45 +02:00
ed  89e48cff24 detect recursive symlinks 2021-06-07 20:09:18 +02:00
ed  ae90a7b7b6 mention firefox funny 2021-06-07 02:10:54 +02:00
ed  6fc1be04da support windows-py3.5 2021-06-06 21:10:53 +02:00
ed  0061d29534 v0.11.8 2021-06-06 19:09:55 +02:00
ed  a891f34a93 update sharex example 2021-06-06 19:06:33 +02:00
ed  d6a1e62a95 append file-ext when avoiding name collisions 2021-06-06 18:53:32 +02:00
ed  cda36ea8b4 support json replies from bput 2021-06-06 18:47:21 +02:00
ed  909a76434a a 2021-06-06 03:07:11 +02:00
ed  39348ef659 add sharex example 2021-06-06 02:53:01 +02:00
74 changed files with 6420 additions and 1941 deletions

17
.vscode/launch.json

@@ -16,12 +16,9 @@
 "-e2ts",
 "-mtp",
 ".bpm=f,bin/mtag/audio-bpm.py",
-"-a",
-"ed:wark",
-"-v",
-"srv::r:aed:cnodupe",
-"-v",
-"dist:dist:r"
+"-aed:wark",
+"-vsrv::r:aed:cnodupe",
+"-vdist:dist:r"
 ]
 },
 {
@@ -43,5 +40,13 @@
 "${file}"
 ]
 },
+{
+"name": "Python: Current File",
+"type": "python",
+"request": "launch",
+"program": "${file}",
+"console": "integratedTerminal",
+"justMyCode": false
+},
 ]
 }

18
.vscode/launch.py

@@ -3,14 +3,16 @@
 # launches 10x faster than mspython debugpy
 # and is stoppable with ^C
+import re
 import os
 import sys
+print(sys.executable)
 import shlex
-sys.path.insert(0, os.getcwd())
 import jstyleson
-from copyparty.__main__ import main as copyparty
+import subprocess as sp
 with open(".vscode/launch.json", "r", encoding="utf-8") as f:
 tj = f.read()
@@ -25,6 +27,14 @@ except:
 pass
 argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv]
+if re.search(" -j ?[0-9]", " ".join(argv)):
+argv = [sys.executable, "-m", "copyparty"] + argv
+sp.check_call(argv)
+else:
+sys.path.insert(0, os.getcwd())
+from copyparty.__main__ import main as copyparty
 try:
 copyparty(["a"] + argv)
 except SystemExit as ex:
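
The launch.py change above boils down to: if the launch arguments ask for multiprocessing (`-j N`), re-launch copyparty as a child process with `python -m copyparty` instead of importing it in-process. A minimal standalone sketch of that pattern (the argument list here is made up for illustration):

```python
import re
import subprocess as sp
import sys

argv = ["-e2dsa", "-j", "4"]  # hypothetical args, normally read from launch.json

if re.search(" -j ?[0-9]", " ".join(argv)):
    # multiprocessing requested: run copyparty as a separate process
    sp.check_call([sys.executable, "-m", "copyparty"] + argv)
else:
    # single process: import and call main() directly (starts much faster)
    from copyparty.__main__ import main as copyparty
    copyparty(["a"] + argv)
```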

5
.vscode/tasks.json

@@ -9,7 +9,10 @@
 {
 "label": "no_dbg",
 "type": "shell",
-"command": "${config:python.pythonPath} .vscode/launch.py"
+"command": "${config:python.pythonPath}",
+"args": [
+".vscode/launch.py"
+]
 }
 ]
 }

266
README.md

@@ -20,8 +20,10 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* top * top
* [quickstart](#quickstart) * [quickstart](#quickstart)
* [on debian](#on-debian)
* [notes](#notes) * [notes](#notes)
* [status](#status) * [status](#status)
* [testimonials](#testimonials)
* [bugs](#bugs) * [bugs](#bugs)
* [general bugs](#general-bugs) * [general bugs](#general-bugs)
* [not my bugs](#not-my-bugs) * [not my bugs](#not-my-bugs)
@@ -37,12 +39,14 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [other tricks](#other-tricks) * [other tricks](#other-tricks)
* [searching](#searching) * [searching](#searching)
* [search configuration](#search-configuration) * [search configuration](#search-configuration)
* [database location](#database-location)
* [metadata from audio files](#metadata-from-audio-files) * [metadata from audio files](#metadata-from-audio-files)
* [file parser plugins](#file-parser-plugins) * [file parser plugins](#file-parser-plugins)
* [complete examples](#complete-examples) * [complete examples](#complete-examples)
* [browser support](#browser-support) * [browser support](#browser-support)
* [client examples](#client-examples) * [client examples](#client-examples)
* [up2k](#up2k) * [up2k](#up2k)
* [performance](#performance)
* [dependencies](#dependencies) * [dependencies](#dependencies)
* [optional dependencies](#optional-dependencies) * [optional dependencies](#optional-dependencies)
* [install recommended deps](#install-recommended-deps) * [install recommended deps](#install-recommended-deps)
@@ -50,9 +54,12 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [sfx](#sfx) * [sfx](#sfx)
* [sfx repack](#sfx-repack) * [sfx repack](#sfx-repack)
* [install on android](#install-on-android) * [install on android](#install-on-android)
* [building](#building)
* [dev env setup](#dev-env-setup) * [dev env setup](#dev-env-setup)
* [how to release](#how-to-release) * [just the sfx](#just-the-sfx)
* [complete release](#complete-release)
* [todo](#todo) * [todo](#todo)
* [discarded ideas](#discarded-ideas)
## quickstart ## quickstart
@@ -61,19 +68,45 @@ download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/do
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone full access to the current folder; see `-h` for help if you want accounts and volumes etc running the sfx without arguments (for example doubleclicking it on Windows) will give everyone full access to the current folder; see `-h` for help if you want accounts and volumes etc
some recommended options:
* `-e2dsa` enables general file indexing, see [search configuration](#search-configuration)
* `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen), see [optional dependencies](#optional-dependencies)
* `-v /mnt/music:/music:r:afoo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, with user `foo` as `a`dmin (read/write), password `bar`
* the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set
* replace `:r:afoo` with `:rfoo` to only make the folder readable by `foo` and nobody else
* in addition to `r`ead and `a`dmin, `w`rite makes a folder write-only, so cannot list/access files in it
* `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access
you may also want these, especially on servers: you may also want these, especially on servers:
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service * [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https) * [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)
### on debian
recommended steps to enable audio metadata and thumbnails (from images and videos):
* as root, run the following:
`apt install python3 python3-pip python3-dev ffmpeg`
* then, as the user which will be running copyparty (so hopefully not root), run this:
`python3 -m pip install --user -U Pillow pillow-avif-plugin`
(skipped `pyheif-pillow-opener` because apparently debian is too old to build it)
## notes ## notes
general:
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
* because no browsers currently implement the media-query to do this properly orz
browser-specific:
* iPhone/iPad: use Firefox to download files * iPhone/iPad: use Firefox to download files
* Android-Chrome: increase "parallel uploads" for higher speed (android bug) * Android-Chrome: increase "parallel uploads" for higher speed (android bug)
* Android-Firefox: takes a while to select files (their fix for ☝️) * Android-Firefox: takes a while to select files (their fix for ☝️)
* Desktop-Firefox: ~~may use gigabytes of RAM if your files are massive~~ *seems to be OK now* * Desktop-Firefox: ~~may use gigabytes of RAM if your files are massive~~ *seems to be OK now*
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale * Desktop-Firefox: may stop you from deleting folders you've uploaded until you visit `about:memory` and click `Minimize memory usage`
* because no browsers currently implement the media-query to do this properly orz
## status ## status
@@ -82,7 +115,7 @@ summary: all planned features work! now please enjoy the bloatening
* backend stuff * backend stuff
* ☑ sanic multipart parser * ☑ sanic multipart parser
*load balancer (multiprocessing) *multiprocessing (actual multithreading)
* ☑ volumes (mountpoints) * ☑ volumes (mountpoints)
* ☑ accounts * ☑ accounts
* upload * upload
@@ -96,11 +129,12 @@ summary: all planned features work! now please enjoy the bloatening
* ☑ FUSE client (read-only) * ☑ FUSE client (read-only)
* browser * browser
* ☑ tree-view * ☑ tree-view
*media player *audio player (with OS media controls)
* ☑ thumbnails * ☑ thumbnails
* ☑ images using Pillow * ...of images using Pillow
* ☑ videos using FFmpeg * ...of videos using FFmpeg
* ☑ cache eviction (max-age; maybe max-size eventually) * ☑ cache eviction (max-age; maybe max-size eventually)
* ☑ image gallery with webm player
* ☑ SPA (browse while uploading) * ☑ SPA (browse while uploading)
* if you use the file-tree on the left only, not folders in the file list * if you use the file-tree on the left only, not folders in the file list
* server indexing * server indexing
@@ -112,24 +146,39 @@ summary: all planned features work! now please enjoy the bloatening
* ☑ editor (sure why not) * ☑ editor (sure why not)
## testimonials
small collection of user feedback
`good enough`, `surprisingly correct`, `certified good software`, `just works`, `why`
# bugs # bugs
* Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade * Windows: python 3.7 and older cannot read tags with FFprobe, so use Mutagen or upgrade
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d` * Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* Windows: python 2.7 cannot handle filenames with mojibake * Windows: python 2.7 cannot handle filenames with mojibake
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions
## general bugs ## general bugs
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise * all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
* cannot mount something at `/d1/d2/d3` unless `d2` exists inside `d1` * cannot mount something at `/d1/d2/d3` unless `d2` exists inside `d1`
* hiding the contents at url `/d1/d2/d3` using `-v :d1/d2/d3:cd2d` has the side-effect of creating databases (for files/tags) inside folders d1 and d2, and those databases take precedence over the main db at the top of the vfs - this means all files in d2 and below will be reindexed unless you already had a vfs entry at or below d2 * dupe files will not have metadata (audio tags etc) displayed in the file listing
* because they don't get `up` entries in the db (probably best fix) and `tx_browser` does not `lstat`
* probably more, pls let me know * probably more, pls let me know
## not my bugs ## not my bugs
* Windows: msys2-python 3.8.6 occasionally throws "RuntimeError: release unlocked lock" when leaving a scoped mutex in up2k * Windows: folders cannot be accessed if the name ends with `.`
* python or windows bug
* Windows: msys2-python 3.8.6 occasionally throws `RuntimeError: release unlocked lock` when leaving a scoped mutex in up2k
* this is an msys2 bug, the regular windows edition of python is fine * this is an msys2 bug, the regular windows edition of python is fine
* VirtualBox: sqlite throws `Disk I/O Error` when running in a VM and the up2k database is in a vboxsf
* use `--hist` or the `hist` volflag (`-v [...]:chist=/tmp/foo`) to place the db inside the vm instead
# the browser # the browser
@@ -143,38 +192,63 @@ summary: all planned features work! now please enjoy the bloatening
* `[📂]` mkdir, create directories * `[📂]` mkdir, create directories
* `[📝]` new-md, create a new markdown document * `[📝]` new-md, create a new markdown document
* `[📟]` send-msg, either to server-log or into textfiles if `--urlform save` * `[📟]` send-msg, either to server-log or into textfiles if `--urlform save`
* `[⚙️]` client configuration options * `[🎺]` audio-player config options
* `[⚙️]` general client config options
## hotkeys ## hotkeys
the browser has the following hotkeys the browser has the following hotkeys (assumes qwerty, ignores actual layout)
* `B` toggle breadcrumbs / directory tree
* `I/K` prev/next folder * `I/K` prev/next folder
* `P` parent folder * `M` parent folder (or unexpand current)
* `G` toggle list / grid view * `G` toggle list / grid view
* `T` toggle thumbnails / icons * `T` toggle thumbnails / icons
* when playing audio: * when playing audio:
* `0..9` jump to 10%..90%
* `U/O` skip 10sec back/forward
* `J/L` prev/next song * `J/L` prev/next song
* `J` also starts playing the folder * `U/O` skip 10sec back/forward
* `0..9` jump to 0%..90%
* `P` play/pause (also starts playing the folder)
* when viewing images / playing videos:
* `J/L, Left/Right` prev/next file
* `Home/End` first/last file
* `Esc` close viewer
* videos:
* `U/O` skip 10sec back/forward
* `P/K/Space` play/pause
* `F` fullscreen
* `C` continue playing next video
* `R` loop
* `M` mute
* when tree-sidebar is open:
* `A/D` adjust tree width
* in the grid view: * in the grid view:
* `S` toggle multiselect * `S` toggle multiselect
* `A/D` zoom * shift+`A/D` zoom
* in the markdown editor:
* `^s` save
* `^h` header
* `^k` autoformat table
* `^u` jump to next unicode character
* `^e` toggle editor / preview
* `^up, ^down` jump paragraphs
## tree-mode ## tree-mode
by default there's a breadcrumbs path; you can replace this with a tree-browser sidebar thing by clicking the 🌲 by default there's a breadcrumbs path; you can replace this with a tree-browser sidebar thing by clicking the `🌲` or pressing the `B` hotkey
click `[-]` and `[+]` to adjust the size, and the `[a]` toggles if the tree should widen dynamically as you go deeper or stay fixed-size click `[-]` and `[+]` (or hotkeys `A`/`D`) to adjust the size, and the `[a]` toggles if the tree should widen dynamically as you go deeper or stay fixed-size
## thumbnails ## thumbnails
![copyparty-thumbs-fs8](https://user-images.githubusercontent.com/241032/120070302-10836b00-c08a-11eb-8eb4-82004a34c342.png) ![copyparty-thumbs-fs8](https://user-images.githubusercontent.com/241032/120070302-10836b00-c08a-11eb-8eb4-82004a34c342.png)
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how destructive your users are it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
## zip downloads ## zip downloads
@@ -189,9 +263,10 @@ the `zip` link next to folders can produce various types of zip/tar files using
| `zip_crc` | `?zip=crc` | cp437 with crc32 computed early for truly ancient software | | `zip_crc` | `?zip=crc` | cp437 with crc32 computed early for truly ancient software |
* hidden files (dotfiles) are excluded unless `-ed` * hidden files (dotfiles) are excluded unless `-ed`
* the up2k.db is always excluded * `up2k.db` and `dir.txt` is always excluded
* `zip_crc` will take longer to download since the server has to read each file twice * `zip_crc` will take longer to download since the server has to read each file twice
* please let me know if you find a program old enough to actually need this * this is only to support MS-DOS PKZIP v2.04g (october 1993) and older
* how are you accessing copyparty actually
you can also zip a selection of files or folders by clicking them in the browser, that brings up a selection editor and zip button in the bottom right you can also zip a selection of files or folders by clicking them in the browser, that brings up a selection editor and zip button in the bottom right
@@ -206,9 +281,11 @@ two upload methods are available in the html client:
up2k has several advantages: up2k has several advantages:
* you can drop folders into the browser (files are added recursively) * you can drop folders into the browser (files are added recursively)
* files are processed in chunks, and each chunk is checksummed * files are processed in chunks, and each chunk is checksummed
* uploads resume if they are interrupted (for example by a reboot) * uploads autoresume if they are interrupted by network issues
* uploads resume if you reboot your browser or pc, just upload the same files again
* server detects any corruption; the client reuploads affected chunks * server detects any corruption; the client reuploads affected chunks
* the client doesn't upload anything that already exists on the server * the client doesn't upload anything that already exists on the server
* much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections
* the last-modified timestamp of the file is preserved * the last-modified timestamp of the file is preserved
see [up2k](#up2k) for details on how it works see [up2k](#up2k) for details on how it works
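
The hunk above credits up2k's resumability and corruption-detection to files being processed in chunks, each chunk checksummed. Below is a generic sketch of per-chunk hashing to illustrate the idea; the 1 MiB chunk size and hex digests are placeholders, not copyparty's actual up2k parameters:

```python
import hashlib

def chunk_hashes(path, chunk_size=1024 * 1024):
    """Return (offset, sha512-hex) for each fixed-size chunk of a file."""
    out = []
    with open(path, "rb") as f:
        offset = 0
        while True:
            buf = f.read(chunk_size)
            if not buf:
                break
            out.append((offset, hashlib.sha512(buf).hexdigest()))
            offset += len(buf)
    return out

# a client only needs to resend the chunks the server reports as missing or corrupt
```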
@@ -241,11 +318,11 @@ in the `[🚀 up2k]` tab, after toggling the `[🔎]` switch green, any files/fo
files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]` files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much * the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much
adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files (or just refresh the page)
note that since up2k has to read the file twice, `[🎈 bup]` can be up to 2x faster in extreme cases (if your internet connection is faster than the read-speed of your HDD) note that since up2k has to read the file twice, `[🎈 bup]` can be up to 2x faster in extreme cases (if your internet connection is faster than the read-speed of your HDD)
up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well thanks to tls also functioning as an integrity check up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well (thanks to tls also functioning as an integrity check)
## markdown viewer ## markdown viewer
@@ -259,6 +336,8 @@ up2k has saved a few uploads from becoming corrupted in-transfer already; caught
* you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab` * you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`
* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
# searching # searching
@@ -281,20 +360,40 @@ searching relies on two databases, the up2k filetree (`-e2d`) and the metadata t
through arguments: through arguments:
* `-e2d` enables file indexing on upload * `-e2d` enables file indexing on upload
* `-e2ds` scans writable folders on startup * `-e2ds` scans writable folders for new files on startup
* `-e2dsa` scans all mounted volumes (including readonly ones) * `-e2dsa` scans all mounted volumes (including readonly ones)
* `-e2t` enables metadata indexing on upload * `-e2t` enables metadata indexing on upload
* `-e2ts` scans for tags in all files that don't have tags yet * `-e2ts` scans for tags in all files that don't have tags yet
* `-e2tsr` deletes all existing tags, so a full reindex * `-e2tsr` deletes all existing tags, does a full reindex
the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling: the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
* `-v ~/music::r:ce2dsa:ce2tsr` does a full reindex of everything on startup * `-v ~/music::r:ce2dsa:ce2tsr` does a full reindex of everything on startup
* `-v ~/music::r:cd2d` disables **all** indexing, even if any `-e2*` are on * `-v ~/music::r:cd2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::r:cd2t` disables all `-e2t*` (tags), does not affect `-e2d*` * `-v ~/music::r:cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`
`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those note:
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher you can choose to only index filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash` or the volume-flag `cdhash`, this has the following consequences:
* initial indexing is way faster, especially when the volume is on a network disk
* makes it impossible to [file-search](#file-search)
* if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected
if you set `--no-hash`, you can enable hashing for specific volumes using flag `cehash`
## database location
copyparty creates a subfolder named `.hist` inside each volume where it stores the database, thumbnails, and some other stuff
this can instead be kept in a single place using the `--hist` argument, or the `hist=` volume flag, or a mix of both:
* `--hist ~/.cache/copyparty -v ~/music::r:chist=-` sets `~/.cache/copyparty` as the default place to put volume info, but `~/music` gets the regular `.hist` subfolder (`-` restores default behavior)
note:
* markdown edits are always stored in a local `.hist` subdirectory
* on windows the volflag path is cyglike, so `/c/temp` means `C:\temp` but use regular paths for `--hist`
* you can use cygpaths for volumes too, `-v C:\Users::r` and `-v /c/users::r` both work
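
Since the notes above describe where the index lives (a `.hist/up2k.db` inside the volume by default, or wherever `--hist` points), here is a hedged sketch of inspecting it directly with sqlite3; the `up` table and its `w`/`rd`/`fn` columns are taken from the `dbtool.py` diff further down, and the path is only an example:

```python
import sqlite3

db_path = "/mnt/music/.hist/up2k.db"  # example: <volume>/.hist/up2k.db by default

con = sqlite3.connect(db_path)
# w = file hash (wark), rd = relative directory, fn = filename
nfiles = con.execute("select count(w) from up").fetchone()[0]
print(f"{nfiles} files indexed")
for w, rd, fn in con.execute("select w, rd, fn from up limit 5"):
    print(w[:16], rd, fn)
con.close()
```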
## metadata from audio files ## metadata from audio files
@@ -310,17 +409,17 @@ tags that start with a `.` such as `.bpm` and `.dur`(ation) indicate numeric val
see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,) see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)
`--no-mutagen` disables mutagen and uses ffprobe instead, which... `--no-mutagen` disables Mutagen and uses FFprobe instead, which...
* is about 20x slower than mutagen * is about 20x slower than Mutagen
* catches a few tags that mutagen doesn't * catches a few tags that Mutagen doesn't
* melodic key, video resolution, framerate, pixfmt * melodic key, video resolution, framerate, pixfmt
* avoids pulling any GPL code into copyparty * avoids pulling any GPL code into copyparty
* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve * more importantly runs FFprobe on incoming files which is bad if your FFmpeg has a cve
## file parser plugins ## file parser plugins
copyparty can invoke external programs to collect additional metadata for files using `mtp` (as argument or volume flag), there is a default timeout of 30sec copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volume flag), there is a default timeout of 30sec
* `-mtp .bpm=~/bin/audio-bpm.py` will execute `~/bin/audio-bpm.py` with the audio file as argument 1 to provide the `.bpm` tag, if that does not exist in the audio metadata * `-mtp .bpm=~/bin/audio-bpm.py` will execute `~/bin/audio-bpm.py` with the audio file as argument 1 to provide the `.bpm` tag, if that does not exist in the audio metadata
* `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`) * `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
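
For the `-mtp` plugins described above, the file to analyze arrives as the first command-line argument; a hypothetical parser could look like the sketch below (emitting the tag value on stdout is how the bundled `bin/mtag/` scripts report results, but treat that as an assumption since this section only documents the argument):

```python
#!/usr/bin/env python3
# hypothetical parser, wired up as:  -mtp .wordcount=~/bin/wordcount.py
import sys

def main():
    path = sys.argv[1]  # copyparty passes the file path as argument 1
    with open(path, "rb") as f:
        words = len(f.read().split())
    print(words)  # the printed value becomes the ".wordcount" tag

if __name__ == "__main__":
    main()
```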
@@ -354,11 +453,13 @@ copyparty can invoke external programs to collect additional metadata for files
| zip selection | - | yep | yep | yep | yep | yep | yep | yep | | zip selection | - | yep | yep | yep | yep | yep | yep | yep |
| directory tree | - | - | `*1` | yep | yep | yep | yep | yep | | directory tree | - | - | `*1` | yep | yep | yep | yep | yep |
| up2k | - | - | yep | yep | yep | yep | yep | yep | | up2k | - | - | yep | yep | yep | yep | yep | yep |
| icons work | - | - | yep | yep | yep | yep | yep | yep |
| markdown editor | - | - | yep | yep | yep | yep | yep | yep | | markdown editor | - | - | yep | yep | yep | yep | yep | yep |
| markdown viewer | - | - | yep | yep | yep | yep | yep | yep | | markdown viewer | - | - | yep | yep | yep | yep | yep | yep |
| play mp3/m4a | - | yep | yep | yep | yep | yep | yep | yep | | play mp3/m4a | - | yep | yep | yep | yep | yep | yep | yep |
| play ogg/opus | - | - | - | - | yep | yep | `*2` | yep | | play ogg/opus | - | - | - | - | yep | yep | `*2` | yep |
| thumbnail view | - | - | - | - | yep | yep | yep | yep |
| image viewer | - | - | - | - | yep | yep | yep | yep |
| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
* internet explorer 6 to 8 behave the same * internet explorer 6 to 8 behave the same
* firefox 52 and chrome 49 are the last winxp versions * firefox 52 and chrome 49 are the last winxp versions
@@ -376,7 +477,7 @@ quick summary of more eccentric web-browsers trying to view a directory index:
| **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg | | **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
| **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) | | **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
| **ie4** and **netscape** 4.0 | can browse (text is yellow on white), upload with `?b=u` | | **ie4** and **netscape** 4.0 | can browse (text is yellow on white), upload with `?b=u` |
| **SerenityOS** (22d13d8) | hits a page fault, works with `?b=u`, file input not-impl, url params are multiplying | | **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |
# client examples # client examples
@@ -397,9 +498,11 @@ quick summary of more eccentric web-browsers trying to view a directory index:
* cross-platform python client available in [./bin/](bin/) * cross-platform python client available in [./bin/](bin/)
* [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md) * [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
* sharex (screenshot utility): see [./contrib/sharex.sxcu](contrib/#sharexsxcu)
copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uplaods: copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uplaods:
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;} b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
b512 <movie.mkv b512 <movie.mkv
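
The updated `b512` one-liner above base64-encodes the raw sha512 digest with URL-safe characters and keeps the first 44 of them; a Python version of the same truncation (assuming it matches what the server returns for your upload) would be:

```python
import base64
import hashlib

def b512(path):
    """URL-safe base64 of a file's sha512, truncated like the shell one-liner."""
    h = hashlib.sha512()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(1 << 20), b""):
            h.update(block)
    return base64.urlsafe_b64encode(h.digest())[:44].decode()

# print(b512("movie.mkv"))  # compare against the checksum copyparty returned
```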
@@ -419,6 +522,23 @@ quick outline of the up2k protocol, see [uploading](#uploading) for the web-clie
* client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload * client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload
# performance
defaults are good for most cases, don't mind the `cannot efficiently use multiple CPU cores` message, it's very unlikely to be a problem
below are some tweaks roughly ordered by usefulness:
* `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
* `--http-only` or `--https-only` (unless you want to support both protocols) will reduce the delay before a new connection is established
* `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
* `--no-hash` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
* `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example:
* huge amount of short-lived connections
* really heavy traffic (downloads/uploads)
...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u
# dependencies # dependencies
* `jinja2` (is built into the SFX) * `jinja2` (is built into the SFX)
@@ -428,18 +548,18 @@ quick outline of the up2k protocol, see [uploading](#uploading) for the web-clie
enable music tags: enable music tags:
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk) * either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
* or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users) * or `ffprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
enable image thumbnails: enable thumbnails of images:
* `Pillow` (requires py2.7 or py3.5+) * `Pillow` (requires py2.7 or py3.5+)
enable video thumbnails: enable thumbnails of videos:
* `ffmpeg` and `ffprobe` somewhere in `$PATH` * `ffmpeg` and `ffprobe` somewhere in `$PATH`
enable reading HEIF pictures: enable thumbnails of HEIF pictures:
* `pyheif-pillow-opener` (requires Linux or a C compiler) * `pyheif-pillow-opener` (requires Linux or a C compiler)
enable reading AVIF pictures: enable thumbnails of AVIF pictures:
* `pillow-avif-plugin` * `pillow-avif-plugin`
@@ -453,7 +573,7 @@ python -m pip install --user -U jinja2 mutagen Pillow
some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag) some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag)
these are standalone programs and will never be imported / evaluated by copyparty these are standalone programs and will never be imported / evaluated by copyparty, and must be enabled through `-mtp` configs
# sfx # sfx
@@ -469,10 +589,10 @@ pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `
## sfx repack ## sfx repack
if you don't need all the features you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except for either msys2 or WSL if you're on windows) if you don't need all the features, you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except if you're on windows then you need msys2 or WSL)
* `724K` original size as of v0.4.0 * `525k` size of original sfx.py as of v0.11.30
* `256K` after `./scripts/make-sfx.sh re no-ogv` * `315k` after `./scripts/make-sfx.sh re no-ogv`
* `164K` after `./scripts/make-sfx.sh re no-ogv no-cm` * `223k` after `./scripts/make-sfx.sh re no-ogv no-cm`
the features you can opt to drop are the features you can opt to drop are
* `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files * `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files
@@ -494,18 +614,45 @@ echo $?
after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux
# dev env setup # building
## dev env setup
mostly optional; if you need a working env for vscode or similar
```sh ```sh
python3 -m venv .venv python3 -m venv .venv
. .venv/bin/activate . .venv/bin/activate
pip install jinja2 # mandatory deps pip install jinja2 # mandatory
pip install Pillow # thumbnail deps pip install mutagen # audio metadata
pip install Pillow pyheif-pillow-opener pillow-avif-plugin # thumbnails
pip install black bandit pylint flake8 # vscode tooling pip install black bandit pylint flake8 # vscode tooling
``` ```
# how to release ## just the sfx
unless you need to modify something in the web-dependencies, it's faster to grab those from a previous release:
```sh
rm -rf copyparty/web/deps
curl -L https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py >x.py
python3 x.py -h
rm x.py
mv /tmp/pe-copyparty/copyparty/web/deps/ copyparty/web/deps/
```
then build the sfx using any of the following examples:
```sh
./scripts/make-sfx.sh # both python and sh editions
./scripts/make-sfx.sh no-sh gz # just python with gzip
```
## complete release
also builds the sfx so disregard the sfx section above
in the `scripts` folder: in the `scripts` folder:
@@ -520,14 +667,18 @@ in the `scripts` folder:
roughly sorted by priority roughly sorted by priority
* hls framework for Someone Else to drop code into :^)
* readme.md as epilogue * readme.md as epilogue
* single sha512 across all up2k chunks? maybe
## discarded ideas
* reduce up2k roundtrips * reduce up2k roundtrips
* start from a chunk index and just go * start from a chunk index and just go
* terminate client on bad data * terminate client on bad data
* not worth the effort, just throw enough conncetions at it
discarded ideas * single sha512 across all up2k chunks?
* crypto.subtle cannot into streaming, would have to use hashwasm, expensive
* separate sqlite table per tag * separate sqlite table per tag
* performance fixed by skipping some indexes (`+mt.k`) * performance fixed by skipping some indexes (`+mt.k`)
* audio fingerprinting * audio fingerprinting
@@ -542,3 +693,6 @@ discarded ideas
* nah * nah
* look into android thumbnail cache file format * look into android thumbnail cache file format
* absolutely not * absolutely not
* indexedDB for hashes, cfg enable/clear/sz, 2gb avail, ~9k for 1g, ~4k for 100m, 500k items before autoeviction
* blank hashlist when up-ok to skip handshake
* too many confusing side-effects


@@ -48,15 +48,16 @@
 # [`dbtool.py`](dbtool.py)
-upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty recommends to wipe the DB and reindex because it now collects additional metadata during analysis, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
+upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty is incompatible with the old DB and automatically rebuilds the DB from scratch, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
-for that example (upgrading to v0.11.0), first move the old db aside, launch copyparty, let it rebuild the db until the point where it starts running mtp (colored messages as it adds the mtp tags), then CTRL-C and patch in the old mtp tags from the old db instead
+for that example (upgrading to v0.11.20), first launch the new version of copyparty like usual, let it make a backup of the old db and rebuild the new db until the point where it starts running mtp (colored messages as it adds the mtp tags), that's when you hit CTRL-C and patch in the old mtp tags from the old db instead
 so assuming you have `-mtp` parsers to provide the tags `key` and `.bpm`:
 ```
-~/bin/dbtool.py -ls up2k.db
-~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -cmp
-~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy key
-~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy .bpm -vac
+cd /mnt/nas/music/.hist
+~/src/copyparty/bin/dbtool.py -ls up2k.db
+~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -cmp
+~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy key
+~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy .bpm -vac
 ```
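
Conceptually, the `-copy key` / `-copy .bpm` steps above carry one tag's rows from the old database into the new one. A very rough sqlite sketch of that idea follows; the `mt` table name and `(w, k, v)` layout are assumptions for illustration only, and the real tool also handles backups and the `-rm-mtp-flag` bookkeeping:

```python
import sqlite3

def copy_tag(src_path, dst_path, tag):
    # assumed layout: mt(w, k, v) = (file hash, tag name, tag value)
    src = sqlite3.connect(src_path)
    dst = sqlite3.connect(dst_path)
    rows = src.execute("select w, k, v from mt where k = ?", (tag,)).fetchall()
    dst.executemany("insert into mt (w, k, v) values (?, ?, ?)", rows)
    dst.commit()
    src.close()
    dst.close()

# copy_tag("up2k.db.v3", "up2k.db", "key")
```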


@@ -345,7 +345,7 @@ class Gateway(object):
 except:
 pass
-def sendreq(self, *args, headers={}, **kwargs):
+def sendreq(self, meth, path, headers, **kwargs):
 if self.password:
 headers["Cookie"] = "=".join(["cppwd", self.password])
@@ -354,21 +354,21 @@
 if c.rx_path:
 raise Exception()
-c.request(*list(args), headers=headers, **kwargs)
+c.request(meth, path, headers=headers, **kwargs)
 c.rx = c.getresponse()
 return c
 except:
 tid = threading.current_thread().ident
 dbg(
-"\033[1;37;44mbad conn {:x}\n {}\n {}\033[0m".format(
-tid, " ".join(str(x) for x in args), c.rx_path if c else "(null)"
+"\033[1;37;44mbad conn {:x}\n {} {}\n {}\033[0m".format(
+tid, meth, path, c.rx_path if c else "(null)"
 )
 )
 self.closeconn(c)
 c = self.getconn()
 try:
-c.request(*list(args), headers=headers, **kwargs)
+c.request(meth, path, headers=headers, **kwargs)
 c.rx = c.getresponse()
 return c
 except:
@@ -386,7 +386,7 @@
 path = dewin(path)
 web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
-c = self.sendreq("GET", web_path)
+c = self.sendreq("GET", web_path, {})
 if c.rx.status != 200:
 self.closeconn(c)
 log(
@@ -440,7 +440,7 @@
 )
 )
-c = self.sendreq("GET", web_path, headers={"Range": hdr_range})
+c = self.sendreq("GET", web_path, {"Range": hdr_range})
 if c.rx.status != http.client.PARTIAL_CONTENT:
 self.closeconn(c)
 raise Exception(


@@ -54,6 +54,15 @@ MACOS = platform.system() == "Darwin"
info = log = dbg = None info = log = dbg = None
print(
"{} v{} @ {}".format(
platform.python_implementation(),
".".join([str(x) for x in sys.version_info]),
sys.executable,
)
)
try: try:
from fuse import FUSE, FuseOSError, Operations from fuse import FUSE, FuseOSError, Operations
except: except:
@@ -293,14 +302,14 @@ class Gateway(object):
except: except:
pass pass
def sendreq(self, *args, headers={}, **kwargs): def sendreq(self, meth, path, headers, **kwargs):
tid = get_tid() tid = get_tid()
if self.password: if self.password:
headers["Cookie"] = "=".join(["cppwd", self.password]) headers["Cookie"] = "=".join(["cppwd", self.password])
try: try:
c = self.getconn(tid) c = self.getconn(tid)
c.request(*list(args), headers=headers, **kwargs) c.request(meth, path, headers=headers, **kwargs)
return c.getresponse() return c.getresponse()
except: except:
dbg("bad conn") dbg("bad conn")
@@ -308,7 +317,7 @@ class Gateway(object):
self.closeconn(tid) self.closeconn(tid)
try: try:
c = self.getconn(tid) c = self.getconn(tid)
c.request(*list(args), headers=headers, **kwargs) c.request(meth, path, headers=headers, **kwargs)
return c.getresponse() return c.getresponse()
except: except:
info("http connection failed:\n" + traceback.format_exc()) info("http connection failed:\n" + traceback.format_exc())
@@ -325,7 +334,7 @@ class Gateway(object):
path = dewin(path) path = dewin(path)
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots&ls" web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots&ls"
r = self.sendreq("GET", web_path) r = self.sendreq("GET", web_path, {})
if r.status != 200: if r.status != 200:
self.closeconn() self.closeconn()
log( log(
@@ -362,7 +371,7 @@ class Gateway(object):
) )
) )
r = self.sendreq("GET", web_path, headers={"Range": hdr_range}) r = self.sendreq("GET", web_path, {"Range": hdr_range})
if r.status != http.client.PARTIAL_CONTENT: if r.status != http.client.PARTIAL_CONTENT:
self.closeconn() self.closeconn()
raise Exception( raise Exception(


@@ -2,10 +2,13 @@
import os import os
import sys import sys
import time
import shutil
import sqlite3 import sqlite3
import argparse import argparse
DB_VER = 3 DB_VER1 = 3
DB_VER2 = 4
def die(msg): def die(msg):
@@ -45,18 +48,21 @@ def compare(n1, d1, n2, d2, verbose):
nt = next(d1.execute("select count(w) from up"))[0] nt = next(d1.execute("select count(w) from up"))[0]
n = 0 n = 0
miss = 0 miss = 0
for w, rd, fn in d1.execute("select w, rd, fn from up"): for w1, rd, fn in d1.execute("select w, rd, fn from up"):
n += 1 n += 1
if n % 25_000 == 0: if n % 25_000 == 0:
m = f"\033[36mchecked {n:,} of {nt:,} files in {n1} against {n2}\033[0m" m = f"\033[36mchecked {n:,} of {nt:,} files in {n1} against {n2}\033[0m"
print(m) print(m)
q = "select w from up where substr(w,1,16) = ?" if rd.split("/", 1)[0] == ".hist":
hit = d2.execute(q, (w[:16],)).fetchone() continue
q = "select w from up where rd = ? and fn = ?"
hit = d2.execute(q, (rd, fn)).fetchone()
if not hit: if not hit:
miss += 1 miss += 1
if verbose: if verbose:
print(f"file in {n1} missing in {n2}: [{w}] {rd}/{fn}") print(f"file in {n1} missing in {n2}: [{w1}] {rd}/{fn}")
print(f" {miss} files in {n1} missing in {n2}\n") print(f" {miss} files in {n1} missing in {n2}\n")
@@ -64,13 +70,28 @@ def compare(n1, d1, n2, d2, verbose):
n = 0 n = 0
miss = {} miss = {}
nmiss = 0 nmiss = 0
for w, k, v in d1.execute("select * from mt"): for w1, k, v in d1.execute("select * from mt"):
n += 1 n += 1
if n % 100_000 == 0: if n % 100_000 == 0:
m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m" m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m"
print(m) print(m)
v2 = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone() q = "select rd, fn from up where substr(w,1,16) = ?"
rd, fn = d1.execute(q, (w1,)).fetchone()
if rd.split("/", 1)[0] == ".hist":
continue
q = "select substr(w,1,16) from up where rd = ? and fn = ?"
w2 = d2.execute(q, (rd, fn)).fetchone()
if w2:
w2 = w2[0]
v2 = None
if w2:
v2 = d2.execute(
"select v from mt where w = ? and +k = ?", (w2, k)
).fetchone()
if v2: if v2:
v2 = v2[0] v2 = v2[0]
@@ -99,9 +120,7 @@ def compare(n1, d1, n2, d2, verbose):
miss[k] = 1 miss[k] = 1
if verbose: if verbose:
q = "select rd, fn from up where substr(w,1,16) = ?" print(f"missing in {n2}: [{w1}] [{rd}/{fn}] {k} = {v}")
rd, fn = d1.execute(q, (w,)).fetchone()
print(f"missing in {n2}: [{w}] [{rd}/{fn}] {k} = {v}")
for k, v in sorted(miss.items()): for k, v in sorted(miss.items()):
if v: if v:
@@ -114,24 +133,35 @@ def copy_mtp(d1, d2, tag, rm):
nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0] nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0]
n = 0 n = 0
ndone = 0 ndone = 0
for w, k, v in d1.execute("select * from mt where k = ?", (tag,)): for w1, k, v in d1.execute("select * from mt where k = ?", (tag,)):
n += 1 n += 1
if n % 25_000 == 0: if n % 25_000 == 0:
m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ndone} copied\033[0m" m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ndone} copied\033[0m"
print(m) print(m)
hit = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone() q = "select rd, fn from up where substr(w,1,16) = ?"
rd, fn = d1.execute(q, (w1,)).fetchone()
if rd.split("/", 1)[0] == ".hist":
continue
q = "select substr(w,1,16) from up where rd = ? and fn = ?"
w2 = d2.execute(q, (rd, fn)).fetchone()
if not w2:
continue
w2 = w2[0]
hit = d2.execute("select v from mt where w = ? and +k = ?", (w2, k)).fetchone()
if hit: if hit:
hit = hit[0] hit = hit[0]
if hit != v: if hit != v:
ndone += 1 ndone += 1
if hit is not None: if hit is not None:
d2.execute("delete from mt where w = ? and +k = ?", (w, k)) d2.execute("delete from mt where w = ? and +k = ?", (w2, k))
d2.execute("insert into mt values (?,?,?)", (w, k, v)) d2.execute("insert into mt values (?,?,?)", (w2, k, v))
if rm: if rm:
d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w,)) d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w2,))
d2.commit() d2.commit()
print(f"copied {ndone} {tag} tags over") print(f"copied {ndone} {tag} tags over")
@@ -168,6 +198,23 @@ def main():
db = sqlite3.connect(ar.db) db = sqlite3.connect(ar.db)
ds = sqlite3.connect(ar.src) if ar.src else None ds = sqlite3.connect(ar.src) if ar.src else None
# revert journals
for d, p in [[db, ar.db], [ds, ar.src]]:
if not d:
continue
pj = "{}-journal".format(p)
if not os.path.exists(pj):
continue
d.execute("create table foo (bar int)")
d.execute("drop table foo")
if ar.copy:
db.close()
shutil.copy2(ar.db, "{}.bak.dbtool.{:x}".format(ar.db, int(time.time())))
db = sqlite3.connect(ar.db)
for d, n in [[ds, "src"], [db, "dst"]]: for d, n in [[ds, "src"], [db, "dst"]]:
if not d: if not d:
continue continue
@@ -176,8 +223,8 @@ def main():
if ver == "corrupt": if ver == "corrupt":
die("{} database appears to be corrupt, sorry") die("{} database appears to be corrupt, sorry")
if ver != DB_VER: if ver < DB_VER1 or ver > DB_VER2:
m = f"{n} db is version {ver}, this tool only supports version {DB_VER}, please upgrade it with copyparty first" m = f"{n} db is version {ver}, this tool only supports versions between {DB_VER1} and {DB_VER2}, please upgrade it with copyparty first"
die(m) die(m)
if ar.ls: if ar.ls:


@@ -60,7 +60,7 @@ def main():
try: try:
det(tf) det(tf)
except: except:
pass pass # mute
finally: finally:
os.unlink(tf) os.unlink(tf)

bin/mtag/audio-key-slicing.py Executable file

@@ -0,0 +1,123 @@
#!/usr/bin/env python
import re
import os
import sys
import tempfile
import subprocess as sp
import keyfinder
from copyparty.util import fsenc
"""
dep: github/mixxxdj/libkeyfinder
dep: pypi/keyfinder
dep: ffmpeg
note: this is a janky edition of the regular audio-key.py,
slicing the files at 20sec intervals and keeping 5sec from each,
surprisingly accurate but still garbage (446 ok, 69 bad, 13% miss)
it is fast tho
"""
def get_duration():
# TODO provide ffprobe tags to mtp as json
# fmt: off
dur = sp.check_output([
"ffprobe",
"-hide_banner",
"-v", "fatal",
"-show_streams",
"-show_format",
fsenc(sys.argv[1])
])
# fmt: on
dur = dur.decode("ascii", "replace").split("\n")
dur = [x.split("=")[1] for x in dur if x.startswith("duration=")]
dur = [float(x) for x in dur if re.match(r"^[0-9\.,]+$", x)]
return list(sorted(dur))[-1] if dur else None
def get_segs(dur):
# keep first 5s of each 20s,
# keep entire last segment
ofs = 0
segs = []
while True:
seg = [ofs, 5]
segs.append(seg)
if dur - ofs < 20:
seg[-1] = int(dur - seg[0])
break
ofs += 20
return segs
def slice(tf):
dur = get_duration()
dur = min(dur, 600) # max 10min
segs = get_segs(dur)
# fmt: off
cmd = [
"ffmpeg",
"-nostdin",
"-hide_banner",
"-v", "fatal",
"-y"
]
for seg in segs:
cmd.extend([
"-ss", str(seg[0]),
"-i", fsenc(sys.argv[1])
])
filt = ""
for n, seg in enumerate(segs):
filt += "[{}:a:0]atrim=duration={}[a{}]; ".format(n, seg[1], n)
prev = "a0"
for n in range(1, len(segs)):
nxt = "b{}".format(n)
filt += "[{}][a{}]acrossfade=d=0.5[{}]; ".format(prev, n, nxt)
prev = nxt
cmd.extend([
"-filter_complex", filt[:-2],
"-map", "[{}]".format(nxt),
"-sample_fmt", "s16",
tf
])
# fmt: on
# print(cmd)
sp.check_call(cmd)
def det(tf):
slice(tf)
print(keyfinder.key(tf).camelot())
def main():
with tempfile.NamedTemporaryFile(suffix=".flac", delete=False) as f:
f.write(b"h")
tf = f.name
try:
det(tf)
finally:
os.unlink(tf)
pass
if __name__ == "__main__":
main()


@@ -1,18 +1,54 @@
#!/usr/bin/env python #!/usr/bin/env python
import os
import sys import sys
import tempfile
import subprocess as sp
import keyfinder import keyfinder
from copyparty.util import fsenc
""" """
dep: github/mixxxdj/libkeyfinder dep: github/mixxxdj/libkeyfinder
dep: pypi/keyfinder dep: pypi/keyfinder
dep: ffmpeg dep: ffmpeg
note: cannot fsenc
""" """
# tried trimming the first/last 5th, bad idea,
# misdetects 9a law field (Sphere Caliber) as 10b,
# obvious when mixing 9a ghostly parapara ship
def det(tf):
# fmt: off
sp.check_call([
"ffmpeg",
"-nostdin",
"-hide_banner",
"-v", "fatal",
"-y", "-i", fsenc(sys.argv[1]),
"-t", "300",
"-sample_fmt", "s16",
tf
])
# fmt: on
print(keyfinder.key(tf).camelot())
def main():
with tempfile.NamedTemporaryFile(suffix=".flac", delete=False) as f:
f.write(b"h")
tf = f.name
try: try:
print(keyfinder.key(sys.argv[1]).camelot()) det(tf)
except: except:
pass pass # mute
finally:
os.unlink(tf)
if __name__ == "__main__":
main()


@@ -9,6 +9,16 @@
* assumes the webserver and copyparty is running on the same server/IP * assumes the webserver and copyparty is running on the same server/IP
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript * modify `10.13.1.1` as necessary if you wish to support browsers without javascript
### [`sharex.sxcu`](sharex.sxcu)
* sharex config file to upload screenshots and grab the URL
* `RequestURL`: full URL to the target folder
* `pw`: password (remove the `pw` line if anon-write)
however if your copyparty is behind a reverse-proxy, you may want to use [`sharex-html.sxcu`](sharex-html.sxcu) instead:
* `RequestURL`: full URL to the target folder
* `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$`
* `pw`: password (remove `Parameters` if anon-write)
### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg) ### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
* disables thumbnails and folder-type detection in windows explorer * disables thumbnails and folder-type detection in windows explorer
* makes it way faster (especially for slow/networked locations (such as copyparty-fuse)) * makes it way faster (especially for slow/networked locations (such as copyparty-fuse))


@@ -1,3 +1,16 @@
# when running copyparty behind a reverse proxy,
# the following arguments are recommended:
#
# -nc 512 important, see next paragraph
# --http-only lower latency on initial connection
# -i 127.0.0.1 only accept connections from nginx
#
# -nc must match or exceed the webserver's max number of concurrent clients;
# nginx default is 512 (worker_processes 1, worker_connections 512)
#
# you may also consider adding -j0 for CPU-intensive configurations
# (not that i can really think of any good examples)
upstream cpp { upstream cpp {
server 127.0.0.1:3923; server 127.0.0.1:3923;
keepalive 120; keepalive 120;

contrib/sharex-html.sxcu Normal file

@@ -0,0 +1,19 @@
{
"Version": "13.5.0",
"Name": "copyparty-html",
"DestinationType": "ImageUploader",
"RequestMethod": "POST",
"RequestURL": "http://127.0.0.1:3923/sharex",
"Parameters": {
"pw": "wark"
},
"Body": "MultipartFormData",
"Arguments": {
"act": "bput"
},
"FileFormName": "f",
"RegexList": [
"bytes // <a href=\"/([^\"]+)\""
],
"URL": "http://127.0.0.1:3923/$regex:1|1$"
}

contrib/sharex.sxcu Normal file

@@ -0,0 +1,17 @@
{
"Version": "13.5.0",
"Name": "copyparty",
"DestinationType": "ImageUploader",
"RequestMethod": "POST",
"RequestURL": "http://127.0.0.1:3923/sharex",
"Parameters": {
"pw": "wark",
"j": null
},
"Body": "MultipartFormData",
"Arguments": {
"act": "bput"
},
"FileFormName": "f",
"URL": "$json:files[0].url$"
}
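
to test the endpoint without ShareX, the same upload can be reproduced in a few lines of python; a minimal sketch assuming the placeholder URL and password from the config above, and that `requests` is installed:

```
import requests

# mirrors sharex.sxcu above: pw/j as query parameters, a multipart body with
# act=bput plus the file under field name "f"; URL and password are the
# placeholder values from the config
with open("screenshot.png", "rb") as f:
    r = requests.post(
        "http://127.0.0.1:3923/sharex",
        params={"pw": "wark", "j": ""},
        data={"act": "bput"},
        files={"f": ("screenshot.png", f)},
    )
r.raise_for_status()
print(r.json()["files"][0]["url"])  # the field ShareX reads via $json:files[0].url$
```

the `-html` variant above does the same upload but scrapes the file URL out of the html response with a regex instead of reading json.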


@@ -7,11 +7,19 @@
# you may want to: # you may want to:
# change '/usr/bin/python' to another interpreter # change '/usr/bin/python' to another interpreter
# change '/mnt::a' to another location or permission-set # change '/mnt::a' to another location or permission-set
#
# with `Type=notify`, copyparty will signal systemd when it is ready to
# accept connections; correctly delaying units depending on copyparty.
# But note that journalctl will get the timestamps wrong due to
# python disabling line-buffering, so messages are out-of-order:
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
[Unit] [Unit]
Description=copyparty file server Description=copyparty file server
[Service] [Service]
Type=notify
SyslogIdentifier=copyparty
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::a ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
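
`Type=notify` means copyparty tells systemd when it is ready to accept connections, so units that depend on it are delayed correctly; as a rough illustration of the mechanism (not copyparty's actual code), an sd-notify readiness message is just a datagram sent to the socket named in `NOTIFY_SOCKET`:

```
import os
import socket

def sd_notify(msg=b"READY=1"):
    # systemd exports NOTIFY_SOCKET for Type=notify units; a leading "@"
    # means the socket lives in the abstract namespace
    addr = os.getenv("NOTIFY_SOCKET")
    if not addr:
        return  # not started by systemd; silently do nothing
    if addr.startswith("@"):
        addr = "\0" + addr[1:]
    s = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
    try:
        s.connect(addr)
        s.send(msg)
    finally:
        s.close()
```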


@@ -9,6 +9,9 @@ import os
PY2 = sys.version_info[0] == 2 PY2 = sys.version_info[0] == 2
if PY2: if PY2:
sys.dont_write_bytecode = True sys.dont_write_bytecode = True
unicode = unicode
else:
unicode = str
WINDOWS = False WINDOWS = False
if platform.system() == "Windows": if platform.system() == "Windows":


@@ -20,7 +20,7 @@ import threading
import traceback import traceback
from textwrap import dedent from textwrap import dedent
from .__init__ import E, WINDOWS, VT100, PY2 from .__init__ import E, WINDOWS, VT100, PY2, unicode
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
from .svchub import SvcHub from .svchub import SvcHub
from .util import py_desc, align_tab, IMPLICATIONS from .util import py_desc, align_tab, IMPLICATIONS
@@ -31,6 +31,8 @@ try:
except: except:
HAVE_SSL = False HAVE_SSL = False
printed = ""
class RiceFormatter(argparse.HelpFormatter): class RiceFormatter(argparse.HelpFormatter):
def _get_help_string(self, action): def _get_help_string(self, action):
@@ -61,8 +63,15 @@ class Dodge11874(RiceFormatter):
super(Dodge11874, self).__init__(*args, **kwargs) super(Dodge11874, self).__init__(*args, **kwargs)
def lprint(*a, **ka):
global printed
printed += " ".join(unicode(x) for x in a) + ka.get("end", "\n")
print(*a, **ka)
def warn(msg): def warn(msg):
print("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg)) lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
def ensure_locale(): def ensure_locale():
@@ -73,7 +82,7 @@ def ensure_locale():
]: ]:
try: try:
locale.setlocale(locale.LC_ALL, x) locale.setlocale(locale.LC_ALL, x)
print("Locale:", x) lprint("Locale:", x)
break break
except: except:
continue continue
@@ -94,7 +103,7 @@ def ensure_cert():
try: try:
if filecmp.cmp(cert_cfg, cert_insec): if filecmp.cmp(cert_cfg, cert_insec):
print( lprint(
"\033[33m using default TLS certificate; https will be insecure." "\033[33m using default TLS certificate; https will be insecure."
+ "\033[36m\n certificate location: {}\033[0m\n".format(cert_cfg) + "\033[36m\n certificate location: {}\033[0m\n".format(cert_cfg)
) )
@@ -123,7 +132,7 @@ def configure_ssl_ver(al):
if "help" in sslver: if "help" in sslver:
avail = [terse_sslver(x[6:]) for x in flags] avail = [terse_sslver(x[6:]) for x in flags]
avail = " ".join(sorted(avail) + ["all"]) avail = " ".join(sorted(avail) + ["all"])
print("\navailable ssl/tls versions:\n " + avail) lprint("\navailable ssl/tls versions:\n " + avail)
sys.exit(0) sys.exit(0)
al.ssl_flags_en = 0 al.ssl_flags_en = 0
@@ -143,7 +152,7 @@ def configure_ssl_ver(al):
for k in ["ssl_flags_en", "ssl_flags_de"]: for k in ["ssl_flags_en", "ssl_flags_de"]:
num = getattr(al, k) num = getattr(al, k)
print("{}: {:8x} ({})".format(k, num, num)) lprint("{}: {:8x} ({})".format(k, num, num))
# think i need that beer now # think i need that beer now
@@ -160,13 +169,13 @@ def configure_ssl_ciphers(al):
try: try:
ctx.set_ciphers(al.ciphers) ctx.set_ciphers(al.ciphers)
except: except:
print("\n\033[1;31mfailed to set ciphers\033[0m\n") lprint("\n\033[1;31mfailed to set ciphers\033[0m\n")
if not hasattr(ctx, "get_ciphers"): if not hasattr(ctx, "get_ciphers"):
print("cannot read cipher list: openssl or python too old") lprint("cannot read cipher list: openssl or python too old")
else: else:
ciphers = [x["description"] for x in ctx.get_ciphers()] ciphers = [x["description"] for x in ctx.get_ciphers()]
print("\n ".join(["\nenabled ciphers:"] + align_tab(ciphers) + [""])) lprint("\n ".join(["\nenabled ciphers:"] + align_tab(ciphers) + [""]))
if is_help: if is_help:
sys.exit(0) sys.exit(0)
@@ -222,10 +231,6 @@ def run_argparse(argv, formatter):
"print,get" prints the data in the log and returns GET "print,get" prints the data in the log and returns GET
(leave out the ",get" to return an error instead) (leave out the ",get" to return an error instead)
--ciphers help = available ssl/tls ciphers,
--ssl-ver help = available ssl/tls versions,
default is what python considers safe, usually >= TLS1
values for --ls: values for --ls:
"USR" is a user to browse as; * is anonymous, ** is all users "USR" is a user to browse as; * is anonymous, ** is all users
"VOL" is a single volume to scan, default is * (all vols) "VOL" is a single volume to scan, default is * (all vols)
@@ -238,29 +243,55 @@ def run_argparse(argv, formatter):
--ls '**' # list all files which are possible to read --ls '**' # list all files which are possible to read
--ls '**,*,ln' # check for dangerous symlinks --ls '**,*,ln' # check for dangerous symlinks
--ls '**,*,ln,p,r' # check, then start normally if safe --ls '**,*,ln,p,r' # check, then start normally if safe
\033[0m
""" """
), ),
) )
# fmt: off # fmt: off
ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file") u = unicode
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)") ap2 = ap.add_argument_group('general options')
ap.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)") ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file")
ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients") ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores") ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account") ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark")
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume") ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
ap.add_argument("-q", action="store_true", help="quiet") ap2.add_argument("-ed", action="store_true", help="enable ?dots")
ap.add_argument("-ed", action="store_true", help="enable ?dots") ap2.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap.add_argument("-emp", action="store_true", help="enable markdown plugins") ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate") ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)") ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
ap.add_argument("-nih", action="store_true", help="no info hostname") ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
ap.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads") ap2 = ap.add_argument_group('network options')
ap.add_argument("--no-zip", action="store_true", help="disable download as zip/tar") ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
ap.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)") ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms") ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")
ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
ap2.add_argument("--ssl-ver", metavar="LIST", type=u, help="set allowed ssl/tls versions; [help] shows available versions; default is what your python version considers safe")
ap2.add_argument("--ciphers", metavar="LIST", type=u, help="set allowed ssl/tls ciphers; [help] shows available ciphers")
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets")
ap2 = ap.add_argument_group('opt-outs')
ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap2.add_argument("-nih", action="store_true", help="no info hostname")
ap2.add_argument("-nid", action="store_true", help="no info disk-usage")
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
ap2 = ap.add_argument_group('safety options')
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt")
ap2 = ap.add_argument_group('logging options')
ap2.add_argument("-q", action="store_true", help="quiet")
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
ap2.add_argument("--log-htp", action="store_true", help="print http-server threadpool scaling")
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
ap2 = ap.add_argument_group('admin panel options') ap2 = ap.add_argument_group('admin panel options')
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)") ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
@@ -273,9 +304,11 @@ def run_argparse(argv, formatter):
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image") ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output") ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output") ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs")
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown") ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval") ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age") ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for")
ap2 = ap.add_argument_group('database options') ap2 = ap.add_argument_group('database options')
ap2.add_argument("-e2d", action="store_true", help="enable up2k database") ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
@@ -284,30 +317,27 @@ def run_argparse(argv, formatter):
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing") ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t") ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts") ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead") ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume state")
ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead")
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism") ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping") ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader")
ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)", ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,ac,vc,res,.fps") default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,ac,vc,res,.fps")
ap2.add_argument("-mtp", metavar="M=[f,]bin", action="append", type=str, help="read tag M using bin") ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline") ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
ap2 = ap.add_argument_group('SSL/TLS options') ap2 = ap.add_argument_group('appearance options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls") ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
ap2.add_argument("--ssl-ver", metavar="LIST", type=str, help="ssl/tls versions to allow")
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
ap2 = ap.add_argument_group('debug options') ap2 = ap.add_argument_group('debug options')
ap2.add_argument("--ls", metavar="U[,V[,F]]", help="scan all volumes")
ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile") ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir") ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing") ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing")
ap2.add_argument("--ihead", metavar="HEADER", action='append', help="dump incoming header") ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching") ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second")
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")
return ap.parse_args(args=argv[1:]) return ap.parse_args(args=argv[1:])
# fmt: on # fmt: on
@@ -324,7 +354,7 @@ def main(argv=None):
desc = py_desc().replace("[", "\033[1;30m[") desc = py_desc().replace("[", "\033[1;30m[")
f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0m\n' f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0m\n'
print(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc)) lprint(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc))
ensure_locale() ensure_locale()
if HAVE_SSL: if HAVE_SSL:
@@ -338,7 +368,7 @@ def main(argv=None):
continue continue
msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m" msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
print(msg.format(dk, nk)) lprint(msg.format(dk, nk))
argv[idx] = nk argv[idx] = nk
time.sleep(2) time.sleep(2)
@@ -377,9 +407,12 @@ def main(argv=None):
+ " (if you crash with codec errors then that is why)" + " (if you crash with codec errors then that is why)"
) )
if sys.version_info < (3, 6):
al.no_scandir = True
# signal.signal(signal.SIGINT, sighandler) # signal.signal(signal.SIGINT, sighandler)
SvcHub(al).run() SvcHub(al, argv, printed).run()
if __name__ == "__main__": if __name__ == "__main__":


@@ -1,8 +1,8 @@
# coding: utf-8 # coding: utf-8
VERSION = (0, 11, 7) VERSION = (0, 11, 44)
CODENAME = "the grid" CODENAME = "the grid"
BUILD_DT = (2021, 6, 5) BUILD_DT = (2021, 7, 20)
S_VERSION = ".".join(map(str, VERSION)) S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT) S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)


@@ -5,16 +5,19 @@ import re
import os import os
import sys import sys
import stat import stat
import base64
import hashlib
import threading import threading
from .__init__ import PY2, WINDOWS from .__init__ import WINDOWS
from .util import IMPLICATIONS, undot, Pebkac, fsdec, fsenc, statdir, nuprint from .util import IMPLICATIONS, uncyg, undot, Pebkac, fsdec, fsenc, statdir
class VFS(object): class VFS(object):
"""single level in the virtual fs""" """single level in the virtual fs"""
def __init__(self, realpath, vpath, uread=[], uwrite=[], uadm=[], flags={}): def __init__(self, log, realpath, vpath, uread, uwrite, uadm, flags):
self.log = log
self.realpath = realpath # absolute path on host filesystem self.realpath = realpath # absolute path on host filesystem
self.vpath = vpath # absolute path in the virtual filesystem self.vpath = vpath # absolute path in the virtual filesystem
self.uread = uread # users who can read this self.uread = uread # users who can read this
@@ -22,7 +25,15 @@ class VFS(object):
self.uadm = uadm # users who are regular admins self.uadm = uadm # users who are regular admins
self.flags = flags # config switches self.flags = flags # config switches
self.nodes = {} # child nodes self.nodes = {} # child nodes
self.histtab = None # all realpath->histpath
self.dbv = None # closest full/non-jump parent
if realpath:
self.histpath = os.path.join(realpath, ".hist") # db / thumbcache
self.all_vols = {vpath: self} # flattened recursive self.all_vols = {vpath: self} # flattened recursive
else:
self.histpath = None
self.all_vols = None
def __repr__(self): def __repr__(self):
return "VFS({})".format( return "VFS({})".format(
@@ -32,9 +43,12 @@ class VFS(object):
) )
) )
def _trk(self, vol): def get_all_vols(self, outdict):
self.all_vols[vol.vpath] = vol if self.realpath:
return vol outdict[self.vpath] = self
for v in self.nodes.values():
v.get_all_vols(outdict)
def add(self, src, dst): def add(self, src, dst):
"""get existing, or add new path to the vfs""" """get existing, or add new path to the vfs"""
@@ -46,19 +60,20 @@ class VFS(object):
name, dst = dst.split("/", 1) name, dst = dst.split("/", 1)
if name in self.nodes: if name in self.nodes:
# exists; do not manipulate permissions # exists; do not manipulate permissions
return self._trk(self.nodes[name].add(src, dst)) return self.nodes[name].add(src, dst)
vn = VFS( vn = VFS(
"{}/{}".format(self.realpath, name), self.log,
os.path.join(self.realpath, name) if self.realpath else None,
"{}/{}".format(self.vpath, name).lstrip("/"), "{}/{}".format(self.vpath, name).lstrip("/"),
self.uread, self.uread,
self.uwrite, self.uwrite,
self.uadm, self.uadm,
self.flags, self._copy_flags(name),
) )
self._trk(vn) vn.dbv = self.dbv or self
self.nodes[name] = vn self.nodes[name] = vn
return self._trk(vn.add(src, dst)) return vn.add(src, dst)
if dst in self.nodes: if dst in self.nodes:
# leaf exists; return as-is # leaf exists; return as-is
@@ -66,9 +81,27 @@ class VFS(object):
# leaf does not exist; create and keep permissions blank # leaf does not exist; create and keep permissions blank
vp = "{}/{}".format(self.vpath, dst).lstrip("/") vp = "{}/{}".format(self.vpath, dst).lstrip("/")
vn = VFS(src, vp) vn = VFS(self.log, src, vp, [], [], [], {})
vn.dbv = self.dbv or self
self.nodes[dst] = vn self.nodes[dst] = vn
return self._trk(vn) return vn
def _copy_flags(self, name):
flags = {k: v for k, v in self.flags.items()}
hist = flags.get("hist")
if hist and hist != "-":
flags["hist"] = "{}/{}".format(hist.rstrip("/"), name)
return flags
def bubble_flags(self):
if self.dbv:
for k, v in self.dbv.flags.items():
if k not in ["hist"]:
self.flags[k] = v
for v in self.nodes.values():
v.bubble_flags()
def _find(self, vpath): def _find(self, vpath):
"""return [vfs,remainder]""" """return [vfs,remainder]"""
@@ -96,6 +129,7 @@ class VFS(object):
] ]
def get(self, vpath, uname, will_read, will_write): def get(self, vpath, uname, will_read, will_write):
# type: (str, str, bool, bool) -> tuple[VFS, str]
"""returns [vfsnode,fs_remainder] if user has the requested permissions""" """returns [vfsnode,fs_remainder] if user has the requested permissions"""
vn, rem = self._find(vpath) vn, rem = self._find(vpath)
@@ -107,6 +141,15 @@ class VFS(object):
return vn, rem return vn, rem
def get_dbv(self, vrem):
dbv = self.dbv
if not dbv:
return self, vrem
vrem = [self.vpath[len(dbv.vpath) + 1 :], vrem]
vrem = "/".join([x for x in vrem if x])
return dbv, vrem
def canonical(self, rem): def canonical(self, rem):
"""returns the canonical path (fully-resolved absolute fs path)""" """returns the canonical path (fully-resolved absolute fs path)"""
rp = self.realpath rp = self.realpath
@@ -136,10 +179,11 @@ class VFS(object):
return os.path.realpath(rp) return os.path.realpath(rp)
def ls(self, rem, uname, scandir, incl_wo=False, lstat=False): def ls(self, rem, uname, scandir, incl_wo=False, lstat=False):
# type: (str, str, bool, bool, bool) -> tuple[str, str, dict[str, VFS]]
"""return user-readable [fsdir,real,virt] items at vpath""" """return user-readable [fsdir,real,virt] items at vpath"""
virt_vis = {} # nodes readable by user virt_vis = {} # nodes readable by user
abspath = self.canonical(rem) abspath = self.canonical(rem)
real = list(statdir(nuprint, scandir, lstat, abspath)) real = list(statdir(self.log, scandir, lstat, abspath))
real.sort() real.sort()
if not rem: if not rem:
for name, vn2 in sorted(self.nodes.items()): for name, vn2 in sorted(self.nodes.items()):
@@ -156,13 +200,26 @@ class VFS(object):
return [abspath, real, virt_vis] return [abspath, real, virt_vis]
def walk(self, rel, rem, uname, dots, scandir, lstat=False): def walk(self, rel, rem, seen, uname, dots, scandir, lstat):
""" """
recursively yields from ./rem; recursively yields from ./rem;
rel is a unix-style user-defined vpath (not vfs-related) rel is a unix-style user-defined vpath (not vfs-related)
""" """
fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, False, lstat) fsroot, vfs_ls, vfs_virt = self.ls(
rem, uname, scandir, incl_wo=False, lstat=lstat
)
if (
seen
and (not fsroot.startswith(seen[-1]) or fsroot == seen[-1])
and fsroot in seen
):
m = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}/{}"
self.log("vfs.walk", m.format(seen[-1], fsroot, self.vpath, rem), 3)
return
seen = seen[:] + [fsroot]
rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)] rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)] rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
@@ -177,7 +234,7 @@ class VFS(object):
wrel = (rel + "/" + rdir).lstrip("/") wrel = (rel + "/" + rdir).lstrip("/")
wrem = (rem + "/" + rdir).lstrip("/") wrem = (rem + "/" + rdir).lstrip("/")
for x in self.walk(wrel, wrem, uname, scandir, lstat): for x in self.walk(wrel, wrem, seen, uname, dots, scandir, lstat):
yield x yield x
for n, vfs in sorted(vfs_virt.items()): for n, vfs in sorted(vfs_virt.items()):
@@ -185,14 +242,20 @@ class VFS(object):
continue continue
wrel = (rel + "/" + n).lstrip("/") wrel = (rel + "/" + n).lstrip("/")
for x in vfs.walk(wrel, "", uname, scandir, lstat): for x in vfs.walk(wrel, "", seen, uname, dots, scandir, lstat):
yield x yield x
def zipgen(self, vrem, flt, uname, dots, scandir): def zipgen(self, vrem, flt, uname, dots, scandir):
if flt: if flt:
flt = {k: True for k in flt} flt = {k: True for k in flt}
for vpath, apath, files, rd, vd in self.walk("", vrem, uname, dots, scandir): f1 = "{0}.hist{0}up2k.".format(os.sep)
f2a = os.sep + "dir.txt"
f2b = "{0}.hist{0}".format(os.sep)
for vpath, apath, files, rd, vd in self.walk(
"", vrem, [], uname, dots, scandir, False
):
if flt: if flt:
files = [x for x in files if x[0] in flt] files = [x for x in files if x[0] in flt]
@@ -223,7 +286,11 @@ class VFS(object):
del vd[x] del vd[x]
# up2k filetring based on actual abspath # up2k filetring based on actual abspath
files = [x for x in files if "{0}.hist{0}up2k.".format(os.sep) not in x[1]] files = [
x
for x in files
if f1 not in x[1] and (not x[1].endswith(f2a) or f2b not in x[1])
]
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]: for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
yield f yield f
@@ -261,6 +328,7 @@ class AuthSrv(object):
self.reload() self.reload()
def log(self, msg, c=0): def log(self, msg, c=0):
if self.log_func:
self.log_func("auth", msg, c) self.log_func("auth", msg, c)
def laggy_iter(self, iterable): def laggy_iter(self, iterable):
@@ -386,6 +454,9 @@ class AuthSrv(object):
raise Exception("invalid -v argument: [{}]".format(v_str)) raise Exception("invalid -v argument: [{}]".format(v_str))
src, dst, perms = m.groups() src, dst, perms = m.groups()
if WINDOWS:
src = uncyg(src)
# print("\n".join([src, dst, perms])) # print("\n".join([src, dst, perms]))
src = fsdec(os.path.abspath(fsenc(src))) src = fsdec(os.path.abspath(fsenc(src)))
dst = dst.strip("/") dst = dst.strip("/")
@@ -410,15 +481,26 @@ class AuthSrv(object):
) )
except: except:
m = "\n\033[1;31m\nerror in config file {} on line {}:\n\033[0m" m = "\n\033[1;31m\nerror in config file {} on line {}:\n\033[0m"
print(m.format(cfg_fn, self.line_ctr)) self.log(m.format(cfg_fn, self.line_ctr), 1)
raise raise
# case-insensitive; normalize
if WINDOWS:
cased = {}
for k, v in mount.items():
try:
cased[k] = fsdec(os.path.realpath(fsenc(v)))
except:
cased[k] = v
mount = cased
if not mount: if not mount:
# -h says our defaults are CWD at root and read/write for everyone # -h says our defaults are CWD at root and read/write for everyone
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"]) vfs = VFS(self.log_func, os.path.abspath("."), "", ["*"], ["*"], ["*"], {})
elif "" not in mount: elif "" not in mount:
# there's volumes but no root; make root inaccessible # there's volumes but no root; make root inaccessible
vfs = VFS(os.path.abspath("."), "") vfs = VFS(self.log_func, None, "", [], [], [], {})
vfs.flags["d2d"] = True vfs.flags["d2d"] = True
maxdepth = 0 maxdepth = 0
@@ -430,7 +512,13 @@ class AuthSrv(object):
if dst == "": if dst == "":
# rootfs was mapped; fully replaces the default CWD vfs # rootfs was mapped; fully replaces the default CWD vfs
vfs = VFS( vfs = VFS(
mount[dst], dst, mread[dst], mwrite[dst], madm[dst], mflags[dst] self.log_func,
mount[dst],
dst,
mread[dst],
mwrite[dst],
madm[dst],
mflags[dst],
) )
continue continue
@@ -439,6 +527,10 @@ class AuthSrv(object):
v.uwrite = mwrite[dst] v.uwrite = mwrite[dst]
v.uadm = madm[dst] v.uadm = madm[dst]
v.flags = mflags[dst] v.flags = mflags[dst]
v.dbv = None
vfs.all_vols = {}
vfs.get_all_vols(vfs.all_vols)
missing_users = {} missing_users = {}
for d in [mread, mwrite]: for d in [mread, mwrite]:
@@ -455,6 +547,67 @@ class AuthSrv(object):
) )
raise Exception("invalid config") raise Exception("invalid config")
promote = []
demote = []
for vol in vfs.all_vols.values():
hid = hashlib.sha512(fsenc(vol.realpath)).digest()
hid = base64.b32encode(hid).decode("ascii").lower()
vflag = vol.flags.get("hist")
if vflag == "-":
pass
elif vflag:
vol.histpath = uncyg(vflag) if WINDOWS else vflag
elif self.args.hist:
for nch in range(len(hid)):
hpath = os.path.join(self.args.hist, hid[: nch + 1])
try:
os.makedirs(hpath)
except:
pass
powner = os.path.join(hpath, "owner.txt")
try:
with open(powner, "rb") as f:
owner = f.read().rstrip()
except:
owner = None
me = fsenc(vol.realpath).rstrip()
if owner not in [None, me]:
continue
if owner is None:
with open(powner, "wb") as f:
f.write(me)
vol.histpath = hpath
break
vol.histpath = os.path.realpath(vol.histpath)
if vol.dbv:
if os.path.exists(os.path.join(vol.histpath, "up2k.db")):
promote.append(vol)
vol.dbv = None
else:
demote.append(vol)
# discard jump-vols
for v in demote:
vfs.all_vols.pop(v.vpath)
if promote:
msg = [
"\n the following jump-volumes were generated to assist the vfs.\n As they contain a database (probably from v0.11.11 or older),\n they are promoted to full volumes:"
]
for vol in promote:
msg.append(
" /{} ({}) ({})".format(vol.vpath, vol.realpath, vol.histpath)
)
self.log("\n\n".join(msg) + "\n", c=3)
vfs.histtab = {v.realpath: v.histpath for v in vfs.all_vols.values()}
all_mte = {} all_mte = {}
errors = False errors = False
for vol in vfs.all_vols.values(): for vol in vfs.all_vols.values():
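
for reference, the volume-id used by the `--hist` logic above is a base32-encoded sha512 of the volume's realpath, and progressively longer prefixes of it are tried as subdirectories of the `--hist` folder until one is free or already owned by this volume (tracked via `owner.txt`); a quick illustration with a hypothetical path:

```
import base64
import hashlib

# illustration only, not part of copyparty; the realpath is hypothetical
realpath = b"/mnt/nas/music"
hid = base64.b32encode(hashlib.sha512(realpath).digest()).decode("ascii").lower()

# candidate history dirs are <--hist>/<prefix of hid>, shortest first
print([hid[: n + 1] for n in range(3)])
```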
@@ -464,6 +617,10 @@ class AuthSrv(object):
if self.args.e2d or "e2ds" in vol.flags: if self.args.e2d or "e2ds" in vol.flags:
vol.flags["e2d"] = True vol.flags["e2d"] = True
if self.args.no_hash:
if "ehash" not in vol.flags:
vol.flags["dhash"] = True
for k in ["e2t", "e2ts", "e2tsr"]: for k in ["e2t", "e2ts", "e2tsr"]:
if getattr(self.args, k): if getattr(self.args, k):
vol.flags[k] = True vol.flags[k] = True
@@ -541,6 +698,8 @@ class AuthSrv(object):
if errors: if errors:
sys.exit(1) sys.exit(1)
vfs.bubble_flags()
try: try:
v, _ = vfs.get("/", "*", False, True) v, _ = vfs.get("/", "*", False, True)
if self.warn_anonwrite and os.getcwd() == v.realpath: if self.warn_anonwrite and os.getcwd() == v.realpath:
@@ -555,6 +714,11 @@ class AuthSrv(object):
self.user = user self.user = user
self.iuser = {v: k for k, v in user.items()} self.iuser = {v: k for k, v in user.items()}
self.re_pwd = None
pwds = [re.escape(x) for x in self.iuser.keys()]
if pwds:
self.re_pwd = re.compile("=(" + "|".join(pwds) + ")([]&; ]|$)")
# import pprint # import pprint
# pprint.pprint({"usr": user, "rd": mread, "wr": mwrite, "mnt": mount}) # pprint.pprint({"usr": user, "rd": mread, "wr": mwrite, "mnt": mount})
@@ -616,13 +780,13 @@ class AuthSrv(object):
continue continue
atop = vn.realpath atop = vn.realpath
g = vn.walk("", "", u, True, not self.args.no_scandir, lstat=False) g = vn.walk("", "", [], u, True, not self.args.no_scandir, False)
for vpath, apath, files, _, _ in g: for vpath, apath, files, _, _ in g:
fnames = [n[0] for n in files] fnames = [n[0] for n in files]
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
vpaths = [vtop + x for x in vpaths] vpaths = [vtop + x for x in vpaths]
apaths = [os.path.join(apath, n) for n in fnames] apaths = [os.path.join(apath, n) for n in fnames]
files = list(zip(vpaths, apaths)) files = [[vpath + "/", apath + os.sep]] + list(zip(vpaths, apaths))
if flag_ln: if flag_ln:
files = [x for x in files if not x[1].startswith(atop + os.sep)] files = [x for x in files if not x[1].startswith(atop + os.sep)]
@@ -637,7 +801,7 @@ class AuthSrv(object):
msg = [x[1] for x in files] msg = [x[1] for x in files]
if msg: if msg:
nuprint("\n".join(msg)) self.log("\n" + "\n".join(msg))
if n_bads and flag_p: if n_bads and flag_p:
raise Exception("found symlink leaving volume, and strict is set") raise Exception("found symlink leaving volume, and strict is set")


@@ -4,17 +4,11 @@ from __future__ import print_function, unicode_literals
import time import time
import threading import threading
from .__init__ import PY2, WINDOWS, VT100
from .broker_util import try_exec from .broker_util import try_exec
from .broker_mpw import MpWorker from .broker_mpw import MpWorker
from .util import mp from .util import mp
if PY2 and not WINDOWS:
from multiprocessing.reduction import ForkingPickler
from StringIO import StringIO as MemesIO # pylint: disable=import-error
class BrokerMp(object): class BrokerMp(object):
"""external api; manages MpWorkers""" """external api; manages MpWorkers"""
@@ -33,33 +27,31 @@ class BrokerMp(object):
cores = mp.cpu_count() cores = mp.cpu_count()
self.log("broker", "booting {} subprocesses".format(cores)) self.log("broker", "booting {} subprocesses".format(cores))
for n in range(cores): for n in range(1, cores + 1):
q_pend = mp.Queue(1) q_pend = mp.Queue(1)
q_yield = mp.Queue(64) q_yield = mp.Queue(64)
proc = mp.Process(target=MpWorker, args=(q_pend, q_yield, self.args, n)) proc = mp.Process(target=MpWorker, args=(q_pend, q_yield, self.args, n))
proc.q_pend = q_pend proc.q_pend = q_pend
proc.q_yield = q_yield proc.q_yield = q_yield
proc.nid = n
proc.clients = {} proc.clients = {}
proc.workload = 0
thr = threading.Thread(target=self.collector, args=(proc,)) thr = threading.Thread(
target=self.collector, args=(proc,), name="mp-sink-{}".format(n)
)
thr.daemon = True thr.daemon = True
thr.start() thr.start()
self.procs.append(proc) self.procs.append(proc)
proc.start() proc.start()
if not self.args.q:
thr = threading.Thread(target=self.debug_load_balancer)
thr.daemon = True
thr.start()
def shutdown(self): def shutdown(self):
self.log("broker", "shutting down") self.log("broker", "shutting down")
for proc in self.procs: for n, proc in enumerate(self.procs):
thr = threading.Thread(target=proc.q_pend.put([0, "shutdown", []])) thr = threading.Thread(
target=proc.q_pend.put([0, "shutdown", []]),
name="mp-shutdown-{}-{}".format(n, len(self.procs)),
)
thr.start() thr.start()
with self.mutex: with self.mutex:
@@ -82,20 +74,6 @@ class BrokerMp(object):
if dest == "log": if dest == "log":
self.log(*args) self.log(*args)
elif dest == "workload":
with self.mutex:
proc.workload = args[0]
elif dest == "httpdrop":
addr = args[0]
with self.mutex:
del proc.clients[addr]
if not proc.clients:
proc.workload = 0
self.hub.tcpsrv.num_clients.add(-1)
elif dest == "retq": elif dest == "retq":
# response from previous ipc call # response from previous ipc call
with self.retpend_mutex: with self.retpend_mutex:
@@ -121,38 +99,9 @@ class BrokerMp(object):
returns a Queue object which eventually contains the response if want_retval returns a Queue object which eventually contains the response if want_retval
(not-impl here since nothing uses it yet) (not-impl here since nothing uses it yet)
""" """
if dest == "httpconn": if dest == "listen":
sck, addr = args for p in self.procs:
sck2 = sck p.q_pend.put([0, dest, [args[0], len(self.procs)]])
if PY2:
buf = MemesIO()
ForkingPickler(buf).dump(sck)
sck2 = buf.getvalue()
proc = sorted(self.procs, key=lambda x: x.workload)[0]
proc.q_pend.put([0, dest, [sck2, addr]])
with self.mutex:
proc.clients[addr] = 50
proc.workload += 50
else: else:
raise Exception("what is " + str(dest)) raise Exception("what is " + str(dest))
def debug_load_balancer(self):
fmt = "\033[1m{}\033[0;36m{:4}\033[0m "
if not VT100:
fmt = "({}{:4})"
last = ""
while self.procs:
msg = ""
for proc in self.procs:
msg += fmt.format(len(proc.clients), proc.workload)
if msg != last:
last = msg
with self.hub.log_mutex:
print(msg)
time.sleep(0.1)


@@ -1,19 +1,15 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
from copyparty.authsrv import AuthSrv
import sys import sys
import time
import signal import signal
import threading import threading
from .__init__ import PY2, WINDOWS
from .broker_util import ExceptionalQueue from .broker_util import ExceptionalQueue
from .httpsrv import HttpSrv from .httpsrv import HttpSrv
from .util import FAKE_MP from .util import FAKE_MP
if PY2 and not WINDOWS:
import pickle # nosec
class MpWorker(object): class MpWorker(object):
"""one single mp instance""" """one single mp instance"""
@@ -24,66 +20,57 @@ class MpWorker(object):
self.args = args self.args = args
self.n = n self.n = n
self.log = self._log_disabled if args.q and not args.lo else self._log_enabled
self.retpend = {} self.retpend = {}
self.retpend_mutex = threading.Lock() self.retpend_mutex = threading.Lock()
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.workload_thr_active = False
# we inherited signal_handler from parent, # we inherited signal_handler from parent,
# replace it with something harmless # replace it with something harmless
if not FAKE_MP: if not FAKE_MP:
signal.signal(signal.SIGINT, self.signal_handler) for sig in [signal.SIGINT, signal.SIGTERM]:
signal.signal(sig, self.signal_handler)
# starting to look like a good idea
self.asrv = AuthSrv(args, None, False)
# instantiate all services here (TODO: inheritance?) # instantiate all services here (TODO: inheritance?)
self.httpsrv = HttpSrv(self) self.httpsrv = HttpSrv(self, n)
self.httpsrv.disconnect_func = self.httpdrop
# on winxp and some other platforms, # on winxp and some other platforms,
# use thr.join() to block all signals # use thr.join() to block all signals
thr = threading.Thread(target=self.main) thr = threading.Thread(target=self.main, name="mpw-main")
thr.daemon = True thr.daemon = True
thr.start() thr.start()
thr.join() thr.join()
def signal_handler(self, signal, frame): def signal_handler(self, sig, frame):
# print('k') # print('k')
pass pass
def log(self, src, msg, c=0): def _log_enabled(self, src, msg, c=0):
self.q_yield.put([0, "log", [src, msg, c]]) self.q_yield.put([0, "log", [src, msg, c]])
def _log_disabled(self, src, msg, c=0):
pass
def logw(self, msg, c=0): def logw(self, msg, c=0):
self.log("mp{}".format(self.n), msg, c) self.log("mp{}".format(self.n), msg, c)
def httpdrop(self, addr):
self.q_yield.put([0, "httpdrop", [addr]])
def main(self): def main(self):
while True: while True:
retq_id, dest, args = self.q_pend.get() retq_id, dest, args = self.q_pend.get()
# self.logw("work: [{}]".format(d[0])) # self.logw("work: [{}]".format(d[0]))
if dest == "shutdown": if dest == "shutdown":
self.httpsrv.shutdown()
self.logw("ok bye") self.logw("ok bye")
sys.exit(0) sys.exit(0)
return return
elif dest == "httpconn": elif dest == "listen":
sck, addr = args self.httpsrv.listen(args[0], args[1])
if PY2:
sck = pickle.loads(sck) # nosec
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
self.httpsrv.accept(sck, addr)
with self.mutex:
if not self.workload_thr_active:
self.workload_thr_alive = True
thr = threading.Thread(target=self.thr_workload)
thr.daemon = True
thr.start()
elif dest == "retq": elif dest == "retq":
# response from previous ipc call # response from previous ipc call
@@ -107,16 +94,3 @@ class MpWorker(object):
self.q_yield.put([retq_id, dest, args]) self.q_yield.put([retq_id, dest, args])
return retq return retq
def thr_workload(self):
"""announce workloads to MpSrv (the mp controller / loadbalancer)"""
# avoid locking in extract_filedata by tracking difference here
while True:
time.sleep(0.2)
with self.mutex:
if self.httpsrv.num_clients() == 0:
# no clients rn, termiante thread
self.workload_thr_alive = False
return
self.q_yield.put([0, "workload", [self.httpsrv.workload]])


@@ -14,24 +14,21 @@ class BrokerThr(object):
self.hub = hub self.hub = hub
self.log = hub.log self.log = hub.log
self.args = hub.args self.args = hub.args
self.asrv = hub.asrv
self.mutex = threading.Lock() self.mutex = threading.Lock()
# instantiate all services here (TODO: inheritance?) # instantiate all services here (TODO: inheritance?)
self.httpsrv = HttpSrv(self) self.httpsrv = HttpSrv(self, None)
self.httpsrv.disconnect_func = self.httpdrop
def shutdown(self): def shutdown(self):
# self.log("broker", "shutting down") # self.log("broker", "shutting down")
self.httpsrv.shutdown()
pass pass
def put(self, want_retval, dest, *args): def put(self, want_retval, dest, *args):
if dest == "httpconn": if dest == "listen":
sck, addr = args self.httpsrv.listen(args[0], 1)
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
self.httpsrv.accept(sck, addr)
else: else:
# new ipc invoking managed service in hub # new ipc invoking managed service in hub
@@ -48,6 +45,3 @@ class BrokerThr(object):
retq = ExceptionalQueue(1) retq = ExceptionalQueue(1)
retq.put(rv) retq.put(rv)
return retq return retq
def httpdrop(self, addr):
self.hub.tcpsrv.num_clients.add(-1)


@@ -10,18 +10,15 @@ import json
import string import string
import socket import socket
import ctypes import ctypes
import traceback
from datetime import datetime from datetime import datetime
import calendar import calendar
from .__init__ import E, PY2, WINDOWS, ANYWIN from .__init__ import E, PY2, WINDOWS, ANYWIN, unicode
from .util import * # noqa # pylint: disable=unused-wildcard-import from .util import * # noqa # pylint: disable=unused-wildcard-import
from .authsrv import AuthSrv
from .szip import StreamZip from .szip import StreamZip
from .star import StreamTar from .star import StreamTar
if not PY2:
unicode = str
NO_CACHE = {"Cache-Control": "no-cache"} NO_CACHE = {"Cache-Control": "no-cache"}
NO_STORE = {"Cache-Control": "no-store; max-age=0"} NO_STORE = {"Cache-Control": "no-store; max-age=0"}
@@ -35,12 +32,12 @@ class HttpCli(object):
    def __init__(self, conn):
        self.t0 = time.time()
        self.conn = conn
-        self.s = conn.s
-        self.sr = conn.sr
+        self.s = conn.s  # type: socket
+        self.sr = conn.sr  # type: Unrecv
        self.ip = conn.addr[0]
-        self.addr = conn.addr
+        self.addr = conn.addr  # type: tuple[str, int]
        self.args = conn.args
-        self.auth = conn.auth
+        self.asrv = conn.asrv  # type: AuthSrv
        self.ico = conn.ico
        self.thumbcli = conn.thumbcli
        self.log_func = conn.log_func
@@ -48,12 +45,21 @@ class HttpCli(object):
        self.tls = hasattr(self.s, "cipher")
        self.bufsz = 1024 * 32
-        self.hint = None
        self.absolute_urls = False
        self.out_headers = {"Access-Control-Allow-Origin": "*"}
    def log(self, msg, c=0):
+        ptn = self.asrv.re_pwd
+        if ptn and ptn.search(msg):
+            msg = ptn.sub(self.unpwd, msg)
        self.log_func(self.log_src, msg, c)
+    def unpwd(self, m):
+        a, b = m.groups()
+        return "=\033[7m {} \033[27m{}".format(self.asrv.iuser[a], b)
    def _check_nonfatal(self, ex):
        return ex.code < 400 or ex.code in [404, 429]
@@ -62,14 +68,19 @@ class HttpCli(object):
        if rem.startswith("/") or rem.startswith("../") or "/../" in rem:
            raise Exception("that was close")
-    def j2(self, name, **kwargs):
+    def j2(self, name, **ka):
        tpl = self.conn.hsrv.j2[name]
-        return tpl.render(**kwargs) if kwargs else tpl
+        if ka:
+            ka["ts"] = self.conn.hsrv.cachebuster()
+            return tpl.render(**ka)
+        return tpl
    def run(self):
        """returns true if connection can be reused"""
        self.keepalive = False
        self.headers = {}
+        self.hint = None
        try:
            headerlines = read_header(self.sr)
            if not headerlines:
@@ -83,9 +94,13 @@ class HttpCli(object):
            try:
                self.mode, self.req, self.http_ver = headerlines[0].split(" ")
            except:
-                raise Pebkac(400, "bad headers:\n" + "\n".join(headerlines))
+                msg = " ]\n#[ ".join(headerlines)
+                raise Pebkac(400, "bad headers:\n#[ " + msg + " ]")
        except Pebkac as ex:
+            self.mode = "GET"
+            self.req = "[junk]"
+            self.http_ver = "HTTP/1.1"
            # self.log("pebkac at httpcli.run #1: " + repr(ex))
            self.keepalive = self._check_nonfatal(ex)
            self.loud_reply(unicode(ex), status=ex.code)
@@ -102,9 +117,20 @@ class HttpCli(object):
        v = self.headers.get("connection", "").lower()
        self.keepalive = not v.startswith("close") and self.http_ver != "HTTP/1.0"
-        v = self.headers.get("x-forwarded-for", None)
-        if v is not None and self.conn.addr[0] in ["127.0.0.1", "::1"]:
-            self.ip = v.split(",")[0]
+        n = self.args.rproxy
+        if n:
+            v = self.headers.get("x-forwarded-for")
+            if v and self.conn.addr[0] in ["127.0.0.1", "::1"]:
+                if n > 0:
+                    n -= 1
+                vs = v.split(",")
+                try:
+                    self.ip = vs[n].strip()
+                except:
+                    self.ip = vs[0].strip()
+                    self.log("rproxy={} oob x-fwd {}".format(self.args.rproxy, v), c=3)
        self.log_src = self.conn.set_rproxy(self.ip)
        if self.args.ihead:
@@ -117,6 +143,9 @@ class HttpCli(object):
            if v is not None:
                self.log("[H] {}: \033[33m[{}]".format(k, v), 6)
+        if "&" in self.req and "?" not in self.req:
+            self.hint = "did you mean '?' instead of '&'"
        # split req into vpath + uparam
        uparam = {}
        if "?" not in self.req:
@@ -152,9 +181,12 @@ class HttpCli(object):
        self.vpath = unquotep(vpath)
        pwd = uparam.get("pw")
-        self.uname = self.auth.iuser.get(pwd, "*")
+        self.uname = self.asrv.iuser.get(pwd, "*")
        self.rvol, self.wvol, self.avol = [[], [], []]
-        self.auth.vfs.user_tree(self.uname, self.rvol, self.wvol, self.avol)
+        self.asrv.vfs.user_tree(self.uname, self.rvol, self.wvol, self.avol)
+        if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"):
+            self.out_headers["Set-Cookie"] = self.get_pwd_cookie(pwd)[0]
        ua = self.headers.get("user-agent", "")
        self.is_rclone = ua.startswith("rclone/")
@@ -186,12 +218,15 @@ class HttpCli(object):
self.log("{}\033[0m, {}".format(str(ex), self.vpath), 3) self.log("{}\033[0m, {}".format(str(ex), self.vpath), 3)
msg = "<pre>{}\r\nURL: {}\r\n".format(str(ex), self.vpath) msg = "<pre>{}\r\nURL: {}\r\n".format(str(ex), self.vpath)
if self.hint:
msg += "hint: {}\r\n".format(self.hint)
self.reply(msg.encode("utf-8", "replace"), status=ex.code) self.reply(msg.encode("utf-8", "replace"), status=ex.code)
return self.keepalive return self.keepalive
except Pebkac: except Pebkac:
return False return False
def send_headers(self, length, status=200, mime=None, headers={}): def send_headers(self, length, status=200, mime=None, headers=None):
response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])] response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])]
if length is not None: if length is not None:
@@ -201,6 +236,7 @@ class HttpCli(object):
response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close")) response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close"))
# headers{} overrides anything set previously # headers{} overrides anything set previously
if headers:
self.out_headers.update(headers) self.out_headers.update(headers)
# default to utf8 html if no content-type is set # default to utf8 html if no content-type is set
@@ -218,7 +254,7 @@ class HttpCli(object):
except: except:
raise Pebkac(400, "client d/c while replying headers") raise Pebkac(400, "client d/c while replying headers")
def reply(self, body, status=200, mime=None, headers={}): def reply(self, body, status=200, mime=None, headers=None):
# TODO something to reply with user-supplied values safely # TODO something to reply with user-supplied values safely
self.send_headers(len(body), status, mime, headers) self.send_headers(len(body), status, mime, headers)
@@ -234,7 +270,7 @@ class HttpCli(object):
self.log(body.rstrip()) self.log(body.rstrip())
self.reply(b"<pre>" + body.encode("utf-8") + b"\r\n", *list(args), **kwargs) self.reply(b"<pre>" + body.encode("utf-8") + b"\r\n", *list(args), **kwargs)
def urlq(self, add={}, rm=[]): def urlq(self, add, rm):
""" """
generates url query based on uparam (b, pw, all others) generates url query based on uparam (b, pw, all others)
removing anything in rm, adding pairs in add removing anything in rm, adding pairs in add
@@ -243,10 +279,11 @@ class HttpCli(object):
if self.is_rclone: if self.is_rclone:
return "" return ""
cmap = {"pw": "cppwd"}
kv = { kv = {
k: v k: v
for k, v in self.uparam.items() for k, v in self.uparam.items()
if k not in rm and self.cookies.get(k) != v if k not in rm and self.cookies.get(cmap.get(k, k)) != v
} }
kv.update(add) kv.update(add)
if not kv: if not kv:
@@ -256,7 +293,14 @@ class HttpCli(object):
return "?" + "&amp;".join(r) return "?" + "&amp;".join(r)
def redirect( def redirect(
self, vpath, suf="", msg="aight", flavor="go to", click=True, use302=False self,
vpath,
suf="",
msg="aight",
flavor="go to",
click=True,
status=200,
use302=False,
): ):
html = self.j2( html = self.j2(
"msg", "msg",
@@ -271,7 +315,7 @@ class HttpCli(object):
h = {"Location": "/" + vpath, "Cache-Control": "no-cache"} h = {"Location": "/" + vpath, "Cache-Control": "no-cache"}
self.reply(html, status=302, headers=h) self.reply(html, status=302, headers=h)
else: else:
self.reply(html) self.reply(html, status=status)
def handle_get(self): def handle_get(self):
if self.do_log: if self.do_log:
@@ -298,6 +342,9 @@ class HttpCli(object):
if "tree" in self.uparam: if "tree" in self.uparam:
return self.tx_tree() return self.tx_tree()
if "stack" in self.uparam:
return self.tx_stack()
# conditional redirect to single volumes # conditional redirect to single volumes
if self.vpath == "" and not self.ouparam: if self.vpath == "" and not self.ouparam:
nread = len(self.rvol) nread = len(self.rvol)
@@ -312,9 +359,7 @@ class HttpCli(object):
self.redirect(vpath, flavor="redirecting to", use302=True) self.redirect(vpath, flavor="redirecting to", use302=True)
return True return True
-        self.readable, self.writable = self.conn.auth.vfs.can_access(
-            self.vpath, self.uname
-        )
+        self.readable, self.writable = self.asrv.vfs.can_access(self.vpath, self.uname)
if not self.readable and not self.writable: if not self.readable and not self.writable:
if self.vpath: if self.vpath:
self.log("inaccessible: [{}]".format(self.vpath)) self.log("inaccessible: [{}]".format(self.vpath))
@@ -329,9 +374,6 @@ class HttpCli(object):
if "scan" in self.uparam: if "scan" in self.uparam:
return self.scanvol() return self.scanvol()
if "stack" in self.uparam:
return self.tx_stack()
return self.tx_browser() return self.tx_browser()
def handle_options(self): def handle_options(self):
@@ -431,18 +473,22 @@ class HttpCli(object):
    def dump_to_file(self):
        reader, remains = self.get_body_reader()
-        vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
+        vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
        fdir = os.path.join(vfs.realpath, rem)
        addr = self.ip.replace(":", ".")
        fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
        path = os.path.join(fdir, fn)
+        if self.args.nw:
+            path = os.devnull
        with open(fsenc(path), "wb", 512 * 1024) as f:
-            post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
+            post_sz, _, sha_b64 = hashcopy(reader, f)
+        if not self.args.nw:
+            vfs, vrem = vfs.get_dbv(rem)
            self.conn.hsrv.broker.put(
-                False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fn
+                False, "up2k.hash_file", vfs.realpath, vfs.flags, vrem, fn
            )
        return post_sz, sha_b64, remains, path
@@ -460,7 +506,7 @@ class HttpCli(object):
spd1 = get_spd(nbytes, self.t0) spd1 = get_spd(nbytes, self.t0)
spd2 = get_spd(self.conn.nbyte, self.conn.t0) spd2 = get_spd(self.conn.nbyte, self.conn.t0)
return spd1 + " " + spd2 return "{} {} n{}".format(spd1, spd2, self.conn.nreq)
def handle_post_multipart(self): def handle_post_multipart(self):
self.parser = MultipartParser(self.log, self.sr, self.headers) self.parser = MultipartParser(self.log, self.sr, self.headers)
@@ -498,7 +544,7 @@ class HttpCli(object):
if v is None: if v is None:
raise Pebkac(422, "need zip or tar keyword") raise Pebkac(422, "need zip or tar keyword")
vn, rem = self.auth.vfs.get(self.vpath, self.uname, True, False) vn, rem = self.asrv.vfs.get(self.vpath, self.uname, True, False)
items = self.parser.require("files", 1024 * 1024) items = self.parser.require("files", 1024 * 1024)
if not items: if not items:
raise Pebkac(422, "need files list") raise Pebkac(422, "need files list")
@@ -506,6 +552,7 @@ class HttpCli(object):
items = items.replace("\r", "").split("\n") items = items.replace("\r", "").split("\n")
items = [unquotep(x) for x in items if items] items = [unquotep(x) for x in items if items]
self.parser.drop()
return self.tx_zip(k, v, vn, rem, items, self.args.ed) return self.tx_zip(k, v, vn, rem, items, self.args.ed)
def handle_post_json(self): def handle_post_json(self):
@@ -547,22 +594,33 @@ class HttpCli(object):
self.vpath = "/".join([self.vpath, sub]).strip("/") self.vpath = "/".join([self.vpath, sub]).strip("/")
body["name"] = name body["name"] = name
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True) vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
dbv, vrem = vfs.get_dbv(rem)
body["vtop"] = vfs.vpath body["vtop"] = dbv.vpath
body["ptop"] = vfs.realpath body["ptop"] = dbv.realpath
body["prel"] = rem body["prel"] = vrem
body["addr"] = self.ip body["addr"] = self.ip
body["vcfg"] = vfs.flags body["vcfg"] = dbv.flags
if sub: if sub:
try: try:
dst = os.path.join(vfs.realpath, rem) dst = os.path.join(vfs.realpath, rem)
os.makedirs(fsenc(dst))
except:
if not os.path.isdir(fsenc(dst)): if not os.path.isdir(fsenc(dst)):
os.makedirs(fsenc(dst))
except OSError as ex:
self.log("makedirs failed [{}]".format(dst))
if not os.path.isdir(fsenc(dst)):
if ex.errno == 13:
raise Pebkac(500, "the server OS denied write-access")
if ex.errno == 17:
raise Pebkac(400, "some file got your folder name") raise Pebkac(400, "some file got your folder name")
raise Pebkac(500, min_ex())
except:
raise Pebkac(500, min_ex())
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body) x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
ret = x.get() ret = x.get()
if sub: if sub:
@@ -575,8 +633,14 @@ class HttpCli(object):
    def handle_search(self, body):
        vols = []
+        seen = {}
        for vtop in self.rvol:
-            vfs, _ = self.conn.auth.vfs.get(vtop, self.uname, True, False)
+            vfs, _ = self.asrv.vfs.get(vtop, self.uname, True, False)
+            vfs = vfs.dbv or vfs
+            if vfs in seen:
+                continue
+            seen[vfs] = True
            vols.append([vfs.vpath, vfs.realpath, vfs.flags])
idx = self.conn.get_u2idx() idx = self.conn.get_u2idx()
@@ -585,7 +649,7 @@ class HttpCli(object):
penalty = 0.7 penalty = 0.7
t_idle = t0 - idx.p_end t_idle = t0 - idx.p_end
if idx.p_dur > 0.7 and t_idle < penalty: if idx.p_dur > 0.7 and t_idle < penalty:
m = "rate-limit ({:.1f} sec), cost {:.2f}, idle {:.2f}" m = "rate-limit {:.1f} sec, cost {:.2f}, idle {:.2f}"
raise Pebkac(429, m.format(penalty, idx.p_dur, t_idle)) raise Pebkac(429, m.format(penalty, idx.p_dur, t_idle))
if "srch" in body: if "srch" in body:
@@ -632,8 +696,8 @@ class HttpCli(object):
except KeyError: except KeyError:
raise Pebkac(400, "need hash and wark headers for binary POST") raise Pebkac(400, "need hash and wark headers for binary POST")
vfs, _ = self.conn.auth.vfs.get(self.vpath, self.uname, False, True) vfs, _ = self.asrv.vfs.get(self.vpath, self.uname, False, True)
ptop = vfs.realpath ptop = (vfs.dbv or vfs).realpath
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", ptop, wark, chash) x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", ptop, wark, chash)
response = x.get() response = x.get()
@@ -651,7 +715,7 @@ class HttpCli(object):
with open(fsenc(path), "rb+", 512 * 1024) as f: with open(fsenc(path), "rb+", 512 * 1024) as f:
f.seek(cstart[0]) f.seek(cstart[0])
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f) post_sz, _, sha_b64 = hashcopy(reader, f)
if sha_b64 != chash: if sha_b64 != chash:
raise Pebkac( raise Pebkac(
@@ -705,7 +769,13 @@ class HttpCli(object):
        pwd = self.parser.require("cppwd", 64)
        self.parser.drop()
-        if pwd in self.auth.iuser:
+        ck, msg = self.get_pwd_cookie(pwd)
+        html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
+        self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
+        return True
+    def get_pwd_cookie(self, pwd):
+        if pwd in self.asrv.iuser:
            msg = "login ok"
            dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365)
            exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
@@ -715,19 +785,17 @@ class HttpCli(object):
exp = "Fri, 15 Aug 1997 01:00:00 GMT" exp = "Fri, 15 Aug 1997 01:00:00 GMT"
ck = "cppwd={}; Path=/; Expires={}; SameSite=Lax".format(pwd, exp) ck = "cppwd={}; Path=/; Expires={}; SameSite=Lax".format(pwd, exp)
html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/") return [ck, msg]
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
return True
def handle_mkdir(self): def handle_mkdir(self):
new_dir = self.parser.require("name", 512) new_dir = self.parser.require("name", 512)
self.parser.drop() self.parser.drop()
nullwrite = self.args.nw nullwrite = self.args.nw
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True) vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem) self._assert_safe_rem(rem)
sanitized = sanitize_fn(new_dir) sanitized = sanitize_fn(new_dir, "", [])
if not nullwrite: if not nullwrite:
fdir = os.path.join(vfs.realpath, rem) fdir = os.path.join(vfs.realpath, rem)
@@ -741,8 +809,13 @@ class HttpCli(object):
try: try:
os.mkdir(fsenc(fn)) os.mkdir(fsenc(fn))
except OSError as ex:
if ex.errno == 13:
raise Pebkac(500, "the server OS denied write-access")
raise Pebkac(500, "mkdir failed:\n" + min_ex())
except: except:
raise Pebkac(500, "mkdir failed, check the logs") raise Pebkac(500, min_ex())
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/") vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
self.redirect(vpath) self.redirect(vpath)
@@ -753,13 +826,13 @@ class HttpCli(object):
self.parser.drop() self.parser.drop()
nullwrite = self.args.nw nullwrite = self.args.nw
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True) vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem) self._assert_safe_rem(rem)
if not new_file.endswith(".md"): if not new_file.endswith(".md"):
new_file += ".md" new_file += ".md"
sanitized = sanitize_fn(new_file) sanitized = sanitize_fn(new_file, "", [])
if not nullwrite: if not nullwrite:
fdir = os.path.join(vfs.realpath, rem) fdir = os.path.join(vfs.realpath, rem)
@@ -777,7 +850,7 @@ class HttpCli(object):
def handle_plain_upload(self): def handle_plain_upload(self):
nullwrite = self.args.nw nullwrite = self.args.nw
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True) vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem) self._assert_safe_rem(rem)
files = [] files = []
@@ -792,7 +865,7 @@ class HttpCli(object):
if p_file and not nullwrite: if p_file and not nullwrite:
fdir = os.path.join(vfs.realpath, rem) fdir = os.path.join(vfs.realpath, rem)
fname = sanitize_fn( fname = sanitize_fn(
p_file, bad=[".prologue.html", ".epilogue.html"] p_file, "", [".prologue.html", ".epilogue.html"]
) )
if not os.path.isdir(fsenc(fdir)): if not os.path.isdir(fsenc(fdir)):
@@ -809,13 +882,19 @@ class HttpCli(object):
with ren_open(fname, "wb", 512 * 1024, **open_args) as f: with ren_open(fname, "wb", 512 * 1024, **open_args) as f:
f, fname = f["orz"] f, fname = f["orz"]
self.log("writing to {}/{}".format(fdir, fname)) self.log("writing to {}/{}".format(fdir, fname))
sz, sha512_hex, _ = hashcopy(self.conn, p_data, f) sz, sha512_hex, _ = hashcopy(p_data, f)
if sz == 0: if sz == 0:
raise Pebkac(400, "empty files in post") raise Pebkac(400, "empty files in post")
files.append([sz, sha512_hex, p_file, fname]) files.append([sz, sha512_hex, p_file, fname])
dbv, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put( self.conn.hsrv.broker.put(
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fname False,
"up2k.hash_file",
dbv.realpath,
dbv.flags,
vrem,
fname,
) )
self.conn.nbyte += sz self.conn.nbyte += sz
@@ -845,18 +924,36 @@ class HttpCli(object):
status = "OK" status = "OK"
if errmsg: if errmsg:
self.log(errmsg) self.log(errmsg)
errmsg = "ERROR: " + errmsg
status = "ERROR" status = "ERROR"
msg = "{} // {} bytes // {:.3f} MiB/s\n".format(status, sz_total, spd) msg = "{} // {} bytes // {:.3f} MiB/s\n".format(status, sz_total, spd)
jmsg = {"status": status, "sz": sz_total, "mbps": round(spd, 3), "files": []}
if errmsg:
msg += errmsg + "\n"
jmsg["error"] = errmsg
errmsg = "ERROR: " + errmsg
for sz, sha512, ofn, lfn in files: for sz, sha512, ofn, lfn in files:
vpath = self.vpath + "/" + lfn vpath = (self.vpath + "/" if self.vpath else "") + lfn
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format( msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format(
sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True) sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True)
) )
# truncated SHA-512 prevents length extension attacks; # truncated SHA-512 prevents length extension attacks;
# using SHA-512/224, optionally SHA-512/256 = :64 # using SHA-512/224, optionally SHA-512/256 = :64
jpart = {
"url": "{}://{}/{}".format(
"https" if self.tls else "http",
self.headers.get("host", "copyparty"),
vpath,
),
"sha512": sha512[:56],
"sz": sz,
"fn": lfn,
"fn_orig": ofn,
"path": vpath,
}
jmsg["files"].append(jpart)
vspd = self._spd(sz_total, False) vspd = self._spd(sz_total, False)
self.log("{} {}".format(vspd, msg)) self.log("{} {}".format(vspd, msg))
@@ -868,7 +965,22 @@ class HttpCli(object):
ft = "{}\n{}\n{}\n".format(ft, msg.rstrip(), errmsg) ft = "{}\n{}\n{}\n".format(ft, msg.rstrip(), errmsg)
f.write(ft.encode("utf-8")) f.write(ft.encode("utf-8"))
self.redirect(self.vpath, msg=msg, flavor="return to", click=False) status = 400 if errmsg else 200
if "j" in self.uparam:
jtxt = json.dumps(jmsg, indent=2, sort_keys=True).encode("utf-8", "replace")
self.reply(jtxt, mime="application/json", status=status)
else:
self.redirect(
self.vpath,
msg=msg,
flavor="return to",
click=False,
status=status,
)
if errmsg:
return False
self.parser.drop() self.parser.drop()
return True return True
@@ -879,7 +991,7 @@ class HttpCli(object):
raise Pebkac(400, "could not read lastmod from request") raise Pebkac(400, "could not read lastmod from request")
nullwrite = self.args.nw nullwrite = self.args.nw
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True) vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem) self._assert_safe_rem(rem)
# TODO: # TODO:
@@ -953,7 +1065,7 @@ class HttpCli(object):
raise Pebkac(400, "expected body, got {}".format(p_field)) raise Pebkac(400, "expected body, got {}".format(p_field))
with open(fsenc(fp), "wb", 512 * 1024) as f: with open(fsenc(fp), "wb", 512 * 1024) as f:
sz, sha512, _ = hashcopy(self.conn, p_data, f) sz, sha512, _ = hashcopy(p_data, f)
new_lastmod = os.stat(fsenc(fp)).st_mtime new_lastmod = os.stat(fsenc(fp)).st_mtime
new_lastmod3 = int(new_lastmod * 1000) new_lastmod3 = int(new_lastmod * 1000)
@@ -972,6 +1084,8 @@ class HttpCli(object):
cli_lastmod = self.headers.get("if-modified-since") cli_lastmod = self.headers.get("if-modified-since")
if cli_lastmod: if cli_lastmod:
try: try:
# some browsers append "; length=573"
cli_lastmod = cli_lastmod.split(";")[0].strip()
cli_dt = time.strptime(cli_lastmod, HTTP_TS_FMT) cli_dt = time.strptime(cli_lastmod, HTTP_TS_FMT)
cli_ts = calendar.timegm(cli_dt) cli_ts = calendar.timegm(cli_dt)
return file_lastmod, int(file_ts) > int(cli_ts) return file_lastmod, int(file_ts) > int(cli_ts)
@@ -1118,7 +1232,7 @@ class HttpCli(object):
# #
# send reply # send reply
if not is_compressed: if not is_compressed and "cache" not in self.uparam:
self.out_headers.update(NO_CACHE) self.out_headers.update(NO_CACHE)
self.out_headers["Accept-Ranges"] = "bytes" self.out_headers["Accept-Ranges"] = "bytes"
@@ -1198,7 +1312,7 @@ class HttpCli(object):
fgen = vn.zipgen(rem, items, self.uname, dots, not self.args.no_scandir) fgen = vn.zipgen(rem, items, self.uname, dots, not self.args.no_scandir)
# for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]})) # for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]}))
bgen = packer(fgen, utf8="utf" in uarg, pre_crc="crc" in uarg) bgen = packer(self.log, fgen, utf8="utf" in uarg, pre_crc="crc" in uarg)
bsent = 0 bsent = 0
for buf in bgen.gen(): for buf in bgen.gen():
if not buf: if not buf:
@@ -1262,7 +1376,7 @@ class HttpCli(object):
for c, v in [[b"&", 4], [b"<", 3], [b">", 3]]: for c, v in [[b"&", 4], [b"<", 3], [b">", 3]]:
sz_md += (len(buf) - len(buf.replace(c, b""))) * v sz_md += (len(buf) - len(buf.replace(c, b""))) * v
file_ts = max(ts_md, ts_html) file_ts = max(ts_md, ts_html, E.t0)
file_lastmod, do_send = self._chk_lastmod(file_ts) file_lastmod, do_send = self._chk_lastmod(file_ts)
self.out_headers["Last-Modified"] = file_lastmod self.out_headers["Last-Modified"] = file_lastmod
self.out_headers.update(NO_CACHE) self.out_headers.update(NO_CACHE)
@@ -1276,6 +1390,7 @@ class HttpCli(object):
"md_plug": "true" if self.args.emp else "false", "md_plug": "true" if self.args.emp else "false",
"md_chk_rate": self.args.mcr, "md_chk_rate": self.args.mcr,
"md": boundary, "md": boundary,
"ts": self.conn.hsrv.cachebuster(),
} }
html = template.render(**targs).encode("utf-8", "replace") html = template.render(**targs).encode("utf-8", "replace")
html = html.split(boundary.encode("utf-8")) html = html.split(boundary.encode("utf-8"))
@@ -1308,17 +1423,19 @@ class HttpCli(object):
return True return True
    def tx_mounts(self):
-        suf = self.urlq(rm=["h"])
+        suf = self.urlq({}, ["h"])
        rvol, wvol, avol = [
            [("/" + x).rstrip("/") + "/" for x in y]
            for y in [self.rvol, self.wvol, self.avol]
        ]
-        vstate = {}
        if self.avol and not self.args.no_rescan:
-            x = self.conn.hsrv.broker.put(True, "up2k.get_volstate")
-            vstate = json.loads(x.get())
-            vstate = {("/" + k).rstrip("/") + "/": v for k, v in vstate.items()}
+            x = self.conn.hsrv.broker.put(True, "up2k.get_state")
+            vs = json.loads(x.get())
+            vstate = {("/" + k).rstrip("/") + "/": v for k, v in vs["volstate"].items()}
+        else:
+            vstate = {}
+            vs = {"scanning": None, "hashq": None, "tagq": None, "mtpq": None}
        html = self.j2(
            "splash",
@@ -1327,6 +1444,10 @@ class HttpCli(object):
wvol=wvol, wvol=wvol,
avol=avol, avol=avol,
vstate=vstate, vstate=vstate,
scanning=vs["scanning"],
hashq=vs["hashq"],
tagq=vs["tagq"],
mtpq=vs["mtpq"],
url_suf=suf, url_suf=suf,
) )
self.reply(html.encode("utf-8"), headers=NO_STORE) self.reply(html.encode("utf-8"), headers=NO_STORE)
@@ -1339,9 +1460,10 @@ class HttpCli(object):
if self.args.no_rescan: if self.args.no_rescan:
raise Pebkac(403, "disabled by argv") raise Pebkac(403, "disabled by argv")
vn, _ = self.auth.vfs.get(self.vpath, self.uname, True, True) vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True)
args = [self.asrv.vfs.all_vols, [vn.vpath]]
args = [self.auth.vfs.all_vols, [vn.vpath]]
x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args) x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args)
x = x.get() x = x.get()
if not x: if not x:
@@ -1351,23 +1473,14 @@ class HttpCli(object):
raise Pebkac(500, x) raise Pebkac(500, x)
    def tx_stack(self):
-        if not self.readable or not self.writable:
+        if not self.avol:
            raise Pebkac(403, "not admin")
        if self.args.no_stack:
            raise Pebkac(403, "disabled by argv")
-        ret = []
-        names = dict([(t.ident, t.name) for t in threading.enumerate()])
-        for tid, stack in sys._current_frames().items():
-            ret.append("\n\n# {} ({:x})".format(names.get(tid), tid))
-            for fn, lno, name, line in traceback.extract_stack(stack):
-                ret.append('File: "{}", line {}, in {}'.format(fn, lno, name))
-                if line:
-                    ret.append("  " + str(line.strip()))
-        ret = ("<pre>" + "\n".join(ret)).encode("utf-8")
-        self.reply(ret)
+        ret = "<pre>{}\n{}".format(time.time(), alltrace())
+        self.reply(ret.encode("utf-8"))
    def tx_tree(self):
        top = self.uparam["tree"] or ""
@@ -1397,9 +1510,9 @@ class HttpCli(object):
ret["k" + quotep(excl)] = sub ret["k" + quotep(excl)] = sub
try: try:
vn, rem = self.auth.vfs.get(top, self.uname, True, False) vn, rem = self.asrv.vfs.get(top, self.uname, True, False)
fsroot, vfs_ls, vfs_virt = vn.ls( fsroot, vfs_ls, vfs_virt = vn.ls(
rem, self.uname, not self.args.no_scandir, True rem, self.uname, not self.args.no_scandir, incl_wo=True
) )
except: except:
vfs_ls = [] vfs_ls = []
@@ -1438,31 +1551,49 @@ class HttpCli(object):
vpnodes.append([quotep(vpath) + "/", html_escape(node, crlf=True)]) vpnodes.append([quotep(vpath) + "/", html_escape(node, crlf=True)])
vn, rem = self.auth.vfs.get( vn, rem = self.asrv.vfs.get(
self.vpath, self.uname, self.readable, self.writable self.vpath, self.uname, self.readable, self.writable
) )
abspath = vn.canonical(rem) abspath = vn.canonical(rem)
dbv, vrem = vn.get_dbv(rem)
try: try:
st = os.stat(fsenc(abspath)) st = os.stat(fsenc(abspath))
except: except:
raise Pebkac(404) raise Pebkac(404)
if self.readable and not stat.S_ISDIR(st.st_mode): if self.readable:
if rem.startswith(".hist/up2k."): if rem.startswith(".hist/up2k.") or (
rem.endswith("/dir.txt") and rem.startswith(".hist/th/")
):
raise Pebkac(403) raise Pebkac(403)
is_dir = stat.S_ISDIR(st.st_mode)
th_fmt = self.uparam.get("th") th_fmt = self.uparam.get("th")
if th_fmt is not None: if th_fmt is not None:
if is_dir:
for fn in self.args.th_covers.split(","):
fp = os.path.join(abspath, fn)
if os.path.exists(fp):
vrem = "{}/{}".format(vrem.rstrip("/"), fn)
is_dir = False
break
if is_dir:
return self.tx_ico("a.folder")
thp = None thp = None
if self.thumbcli: if self.thumbcli:
thp = self.thumbcli.get(vn.realpath, rem, int(st.st_mtime), th_fmt) thp = self.thumbcli.get(
dbv.realpath, vrem, int(st.st_mtime), th_fmt
)
if thp: if thp:
return self.tx_file(thp) return self.tx_file(thp)
return self.tx_ico(rem) return self.tx_ico(rem)
if not is_dir:
if abspath.endswith(".md") and "raw" not in self.uparam: if abspath.endswith(".md") and "raw" not in self.uparam:
return self.tx_md(abspath) return self.tx_md(abspath)
@@ -1503,9 +1634,8 @@ class HttpCli(object):
if self.writable: if self.writable:
perms.append("write") perms.append("write")
url_suf = self.urlq() url_suf = self.urlq({}, [])
is_ls = "ls" in self.uparam is_ls = "ls" in self.uparam
ts = "" # "?{}".format(time.time())
tpl = "browser" tpl = "browser"
if "b" in self.uparam: if "b" in self.uparam:
@@ -1530,7 +1660,6 @@ class HttpCli(object):
"vdir": quotep(self.vpath), "vdir": quotep(self.vpath),
"vpnodes": vpnodes, "vpnodes": vpnodes,
"files": [], "files": [],
"ts": ts,
"perms": json.dumps(perms), "perms": json.dumps(perms),
"taglist": [], "taglist": [],
"tag_order": [], "tag_order": [],
@@ -1566,7 +1695,7 @@ class HttpCli(object):
return self.tx_zip(k, v, vn, rem, [], self.args.ed) return self.tx_zip(k, v, vn, rem, [], self.args.ed)
fsroot, vfs_ls, vfs_virt = vn.ls( fsroot, vfs_ls, vfs_virt = vn.ls(
rem, self.uname, not self.args.no_scandir, True rem, self.uname, not self.args.no_scandir, incl_wo=True
) )
stats = {k: v for k, v in vfs_ls} stats = {k: v for k, v in vfs_ls}
vfs_ls = [x[0] for x in vfs_ls] vfs_ls = [x[0] for x in vfs_ls]
@@ -1599,7 +1728,7 @@ class HttpCli(object):
icur = None icur = None
if "e2t" in vn.flags: if "e2t" in vn.flags:
idx = self.conn.get_u2idx() idx = self.conn.get_u2idx()
icur = idx.get_cur(vn.realpath) icur = idx.get_cur(dbv.realpath)
dirs = [] dirs = []
files = [] files = []
@@ -1666,13 +1795,27 @@ class HttpCli(object):
fn = f["name"] fn = f["name"]
rd = f["rd"] rd = f["rd"]
del f["rd"] del f["rd"]
if icur: if not icur:
break
if vn != dbv:
_, rd = vn.get_dbv(rd)
q = "select w from up where rd = ? and fn = ?" q = "select w from up where rd = ? and fn = ?"
r = None
try: try:
r = icur.execute(q, (rd, fn)).fetchone() r = icur.execute(q, (rd, fn)).fetchone()
except: except Exception as ex:
if "database is locked" in str(ex):
break
try:
args = s3enc(idx.mem_cur, rd, fn) args = s3enc(idx.mem_cur, rd, fn)
r = icur.execute(q, args).fetchone() r = icur.execute(q, args).fetchone()
except:
m = "tag list error, {}/{}\n{}"
self.log(m.format(rd, fn, min_ex()))
break
tags = {} tags = {}
f["tags"] = tags f["tags"] = tags
@@ -1682,9 +1825,14 @@ class HttpCli(object):
w = r[0][:16] w = r[0][:16]
q = "select k, v from mt where w = ? and k != 'x'" q = "select k, v from mt where w = ? and k != 'x'"
try:
for k, v in icur.execute(q, (w,)): for k, v in icur.execute(q, (w,)):
taglist[k] = True taglist[k] = True
tags[k] = v tags[k] = v
except:
m = "tag read error, {}/{} [{}]:\n{}"
self.log(m.format(rd, fn, w, min_ex()))
break
if icur: if icur:
taglist = [k for k in vn.flags.get("mte", "").split(",") if k in taglist] taglist = [k for k in vn.flags.get("mte", "").split(",") if k in taglist]
@@ -1707,9 +1855,13 @@ class HttpCli(object):
j2a["files"] = dirs + files j2a["files"] = dirs + files
j2a["logues"] = logues j2a["logues"] = logues
j2a["taglist"] = taglist j2a["taglist"] = taglist
if "mte" in vn.flags: if "mte" in vn.flags:
j2a["tag_order"] = json.dumps(vn.flags["mte"].split(",")) j2a["tag_order"] = json.dumps(vn.flags["mte"].split(","))
if self.args.css_browser:
j2a["css"] = self.args.css_browser
html = self.j2(tpl, **j2a) html = self.j2(tpl, **j2a)
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE) self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
return True return True

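On the x-forwarded-for change above: the client IP is now chosen by the --rproxy index instead of always taking the first entry, so any number of trusted reverse proxies can be skipped. A standalone sketch of that selection, assuming the same semantics as the diff (positive values count entries from the left starting at 1, negative values count from the right, and out-of-range falls back to the first entry):

    def pick_client_ip(xff, rproxy):
        # xff: raw x-forwarded-for header, e.g. "1.2.3.4, 10.0.0.2, 10.0.0.1"
        # rproxy: the --rproxy argument; 0 disables the header entirely
        if not rproxy or not xff:
            return None

        n = rproxy
        if n > 0:
            n -= 1  # 1 means "first entry", so shift to a 0-based index

        ips = xff.split(",")
        try:
            return ips[n].strip()
        except IndexError:
            return ips[0].strip()  # out of bounds; keep the leftmost entry

    print(pick_client_ip("1.2.3.4, 10.0.0.2, 10.0.0.1", -2))  # "10.0.0.2"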

@@ -3,7 +3,6 @@ from __future__ import print_function, unicode_literals
import re import re
import os import os
import sys
import time import time
import socket import socket
@@ -34,7 +33,7 @@ class HttpConn(object):
self.hsrv = hsrv self.hsrv = hsrv
self.args = hsrv.args self.args = hsrv.args
self.auth = hsrv.auth self.asrv = hsrv.asrv
self.cert_path = hsrv.cert_path self.cert_path = hsrv.cert_path
enth = HAVE_PIL and not self.args.no_thumb enth = HAVE_PIL and not self.args.no_thumb
@@ -42,13 +41,22 @@ class HttpConn(object):
self.ico = Ico(self.args) self.ico = Ico(self.args)
self.t0 = time.time() self.t0 = time.time()
self.stopping = False
self.nreq = 0
self.nbyte = 0 self.nbyte = 0
self.workload = 0
self.u2idx = None self.u2idx = None
self.log_func = hsrv.log self.log_func = hsrv.log
self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None
self.set_rproxy() self.set_rproxy()
def shutdown(self):
self.stopping = True
try:
self.s.shutdown(socket.SHUT_RDWR)
self.s.close()
except:
pass
def set_rproxy(self, ip=None): def set_rproxy(self, ip=None):
if ip is None: if ip is None:
color = 36 color = 36
@@ -70,7 +78,7 @@ class HttpConn(object):
def get_u2idx(self): def get_u2idx(self):
if not self.u2idx: if not self.u2idx:
self.u2idx = U2idx(self.args, self.log_func) self.u2idx = U2idx(self)
return self.u2idx return self.u2idx
@@ -162,7 +170,7 @@ class HttpConn(object):
self.log("client rejected our certificate (nice)") self.log("client rejected our certificate (nice)")
elif "ALERT_CERTIFICATE_UNKNOWN" in em: elif "ALERT_CERTIFICATE_UNKNOWN" in em:
# chrome-android keeps doing this # android-chrome keeps doing this
pass pass
else: else:
@@ -173,7 +181,8 @@ class HttpConn(object):
if not self.sr: if not self.sr:
self.sr = Unrecv(self.s) self.sr = Unrecv(self.s)
while True: while not self.stopping:
self.nreq += 1
cli = HttpCli(self) cli = HttpCli(self)
if not cli.run(): if not cli.run():
return return

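The per-connection shutdown() and stopping flag added above follow the usual pattern for unblocking a thread that sits in recv(): flip a flag, then shut the socket down from another thread. A generic sketch of the pattern (illustrative, not copyparty code):

    import socket

    class Conn(object):
        def __init__(self, sck):
            self.s = sck
            self.stopping = False

        def serve(self):
            while not self.stopping:
                buf = self.s.recv(4096)  # returns b"" or raises once shutdown() runs
                if not buf:
                    return

        def shutdown(self):
            self.stopping = True
            try:
                # SHUT_RDWR unblocks the recv() in serve()
                self.s.shutdown(socket.SHUT_RDWR)
                self.s.close()
            except OSError:
                pass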

@@ -4,6 +4,8 @@ from __future__ import print_function, unicode_literals
import os import os
import sys import sys
import time import time
import math
import base64
import socket import socket
import threading import threading
@@ -24,9 +26,14 @@ except ImportError:
) )
sys.exit(1) sys.exit(1)
from .__init__ import E, MACOS from .__init__ import E, PY2, MACOS
from .util import spack, min_ex, start_stackmon, start_log_thrs
from .httpconn import HttpConn from .httpconn import HttpConn
from .authsrv import AuthSrv
if PY2:
import Queue as queue
else:
import queue
class HttpSrv(object): class HttpSrv(object):
@@ -35,18 +42,28 @@ class HttpSrv(object):
relying on MpSrv for performance (HttpSrv is just plain threads) relying on MpSrv for performance (HttpSrv is just plain threads)
""" """
def __init__(self, broker): def __init__(self, broker, nid):
self.broker = broker self.broker = broker
self.nid = nid
self.args = broker.args self.args = broker.args
self.log = broker.log self.log = broker.log
self.asrv = broker.asrv
self.disconnect_func = None self.name = "httpsrv" + ("-n{}-i{:x}".format(nid, os.getpid()) if nid else "")
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.stopping = False
self.clients = {} self.tp_nthr = 0 # actual
self.workload = 0 self.tp_ncli = 0 # fading
self.workload_thr_alive = False self.tp_time = None # latest worker collect
self.auth = AuthSrv(self.args, self.log) self.tp_q = None if self.args.no_htp else queue.LifoQueue()
self.srvs = []
self.ncli = 0 # exact
self.clients = {} # laggy
self.nclimax = 0
self.cb_ts = 0
self.cb_v = 0
env = jinja2.Environment() env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web")) env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
@@ -61,21 +78,170 @@ class HttpSrv(object):
else: else:
self.cert_path = None self.cert_path = None
def accept(self, sck, addr): if self.tp_q:
"""takes an incoming tcp connection and creates a thread to handle it""" self.start_threads(4)
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30")
thr = threading.Thread(target=self.thr_client, args=(sck, addr)) name = "httpsrv-scaler" + ("-{}".format(nid) if nid else "")
t = threading.Thread(target=self.thr_scaler, name=name)
t.daemon = True
t.start()
if nid:
if self.args.stackmon:
start_stackmon(self.args.stackmon, nid)
if self.args.log_thrs:
start_log_thrs(self.log, self.args.log_thrs, nid)
def start_threads(self, n):
self.tp_nthr += n
if self.args.log_htp:
self.log(self.name, "workers += {} = {}".format(n, self.tp_nthr), 6)
for _ in range(n):
thr = threading.Thread(
target=self.thr_poolw,
name=self.name + "-poolw",
)
thr.daemon = True thr.daemon = True
thr.start() thr.start()
def num_clients(self): def stop_threads(self, n):
self.tp_nthr -= n
if self.args.log_htp:
self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6)
for _ in range(n):
self.tp_q.put(None)
def thr_scaler(self):
while True:
time.sleep(2 if self.tp_ncli else 30)
with self.mutex: with self.mutex:
return len(self.clients) self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
if self.tp_nthr > self.tp_ncli + 8:
self.stop_threads(4)
def listen(self, sck, nlisteners):
ip, port = sck.getsockname()
self.srvs.append(sck)
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
t = threading.Thread(
target=self.thr_listen,
args=(sck,),
name="httpsrv-n{}-listen-{}-{}".format(self.nid or "0", ip, port),
)
t.daemon = True
t.start()
def thr_listen(self, srv_sck):
"""listens on a shared tcp server"""
ip, port = srv_sck.getsockname()
fno = srv_sck.fileno()
msg = "subscribed @ {}:{} f{}".format(ip, port, fno)
self.log(self.name, msg)
while not self.stopping:
if self.args.log_conn:
self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30")
if self.ncli >= self.nclimax:
self.log(self.name, "at connection limit; waiting", 3)
while self.ncli >= self.nclimax:
time.sleep(0.1)
if self.args.log_conn:
self.log(self.name, "|%sC-acc1" % ("-" * 2,), c="1;30")
try:
sck, addr = srv_sck.accept()
except (OSError, socket.error) as ex:
self.log(self.name, "accept({}): {}".format(fno, ex), c=6)
time.sleep(0.02)
continue
if self.args.log_conn:
m = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
"-" * 3, ip, port % 8, port
)
self.log("%s %s" % addr, m, c="1;30")
self.accept(sck, addr)
def accept(self, sck, addr):
"""takes an incoming tcp connection and creates a thread to handle it"""
now = time.time()
if now - (self.tp_time or now) > 300:
self.tp_q = None
if self.tp_q:
self.tp_q.put((sck, addr))
with self.mutex:
self.ncli += 1
self.tp_time = self.tp_time or now
self.tp_ncli = max(self.tp_ncli, self.ncli + 1)
if self.tp_nthr < self.ncli + 4:
self.start_threads(8)
return
if not self.args.no_htp:
m = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n"
self.log(self.name, m, 1)
with self.mutex:
self.ncli += 1
thr = threading.Thread(
target=self.thr_client,
args=(sck, addr),
name="httpconn-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]),
)
thr.daemon = True
thr.start()
def thr_poolw(self):
while True:
task = self.tp_q.get()
if not task:
break
with self.mutex:
self.tp_time = None
try:
sck, addr = task
me = threading.current_thread()
me.name = "httpconn-{}-{}".format(
addr[0].split(".", 2)[-1][-6:], addr[1]
)
self.thr_client(sck, addr)
me.name = self.name + "-poolw"
except:
self.log(self.name, "thr_client: " + min_ex(), 3)
def shutdown(self): def shutdown(self):
self.log("ok bye") self.stopping = True
for srv in self.srvs:
try:
srv.close()
except:
pass
clients = list(self.clients.keys())
for cli in clients:
try:
cli.shutdown()
except:
pass
if self.tp_q:
self.stop_threads(self.tp_nthr)
for _ in range(10):
time.sleep(0.05)
if self.tp_q.empty():
break
self.log(self.name, "ok bye")
def thr_client(self, sck, addr): def thr_client(self, sck, addr):
"""thread managing one tcp client""" """thread managing one tcp client"""
@@ -84,70 +250,69 @@ class HttpSrv(object):
cli = HttpConn(sck, addr, self) cli = HttpConn(sck, addr, self)
with self.mutex: with self.mutex:
self.clients[cli] = 0 self.clients[cli] = 0
self.workload += 50
if not self.workload_thr_alive:
self.workload_thr_alive = True
thr = threading.Thread(target=self.thr_workload)
thr.daemon = True
thr.start()
fno = sck.fileno()
try: try:
if self.args.log_conn: if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-crun" % ("-" * 6,), c="1;30") self.log("%s %s" % addr, "|%sC-crun" % ("-" * 4,), c="1;30")
cli.run() cli.run()
except (OSError, socket.error) as ex:
if ex.errno not in [10038, 10054, 107, 57, 49, 9]:
self.log(
"%s %s" % addr,
"run({}): {}".format(fno, ex),
c=6,
)
finally: finally:
sck = cli.s
if self.args.log_conn: if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30") self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 5,), c="1;30")
try: try:
fno = sck.fileno()
sck.shutdown(socket.SHUT_RDWR) sck.shutdown(socket.SHUT_RDWR)
sck.close() sck.close()
except (OSError, socket.error) as ex: except (OSError, socket.error) as ex:
if not MACOS: if not MACOS:
self.log( self.log(
"%s %s" % addr, "%s %s" % addr,
"shut({}): {}".format(sck.fileno(), ex), "shut({}): {}".format(fno, ex),
c="1;30", c="1;30",
) )
-                if ex.errno not in [10038, 10054, 107, 57, 9]:
+                if ex.errno not in [10038, 10054, 107, 57, 49, 9]:
                    # 10038 No longer considered a socket
                    # 10054 Forcibly closed by remote
                    # 107 Transport endpoint not connected
                    # 57 Socket is not connected
+                    # 49 Can't assign requested address (wifi down)
                    # 9 Bad file descriptor
                    raise
        finally:
            with self.mutex:
                del self.clients[cli]
+                self.ncli -= 1
-        if self.disconnect_func:
-            self.disconnect_func(addr)  # pylint: disable=not-callable
-    def thr_workload(self):
-        """indicates the python interpreter workload caused by this HttpSrv"""
-        # avoid locking in extract_filedata by tracking difference here
-        while True:
-            time.sleep(0.2)
-            with self.mutex:
-                if not self.clients:
-                    # no clients rn, termiante thread
-                    self.workload_thr_alive = False
-                    self.workload = 0
-                    return
-            total = 0
-            with self.mutex:
-                for cli in self.clients.keys():
-                    now = cli.workload
-                    delta = now - self.clients[cli]
-                    if delta < 0:
-                        # was reset in HttpCli to prevent overflow
-                        delta = now
-                    total += delta
-                    self.clients[cli] = now
-            self.workload = total
+    def cachebuster(self):
+        if time.time() - self.cb_ts < 1:
+            return self.cb_v
+        with self.mutex:
+            if time.time() - self.cb_ts < 1:
+                return self.cb_v
+            v = E.t0
+            try:
+                with os.scandir(os.path.join(E.mod, "web")) as dh:
+                    for fh in dh:
+                        inf = fh.stat(follow_symlinks=False)
+                        v = max(v, inf.st_mtime)
+            except:
+                pass
+            v = base64.urlsafe_b64encode(spack(b">xxL", int(v)))
+            self.cb_v = v.decode("ascii")[-4:]
+            self.cb_ts = time.time()
+            return self.cb_v

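The cachebuster() introduced above derives a short token from the newest file mtime under the bundled web/ directory and caches it for one second; templates append it to asset URLs so browsers drop stale caches after an upgrade. A standalone sketch of the same idea (the directory path is illustrative, and struct.pack stands in for the spack helper):

    import os
    import time
    import base64
    import struct

    _cb = {"ts": 0.0, "v": ""}

    def cachebuster(webroot="web"):
        if time.time() - _cb["ts"] < 1:
            return _cb["v"]  # reuse the cached token for up to one second

        newest = 0.0
        try:
            for fh in os.scandir(webroot):
                newest = max(newest, fh.stat(follow_symlinks=False).st_mtime)
        except OSError:
            pass

        # pack the newest mtime into 4 bytes; the last 4 chars of its
        # urlsafe base64 form become the ?ts= suffix on asset URLs
        tok = base64.urlsafe_b64encode(struct.pack(">xxL", int(newest)))
        _cb["v"] = tok.decode("ascii")[-4:]
        _cb["ts"] = time.time()
        return _cb["v"]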

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import hashlib import hashlib
import colorsys import colorsys


@@ -1,22 +1,19 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import re
import os import os
import sys import sys
import json import json
import shutil import shutil
import subprocess as sp import subprocess as sp
from .__init__ import PY2, WINDOWS from .__init__ import PY2, WINDOWS, unicode
from .util import fsenc, fsdec, REKOBO_LKEY from .util import fsenc, fsdec, uncyg, REKOBO_LKEY
if not PY2:
unicode = str
def have_ff(cmd): def have_ff(cmd):
if PY2: if PY2:
print("# checking {}".format(cmd))
cmd = (cmd + " -version").encode("ascii").split(b" ") cmd = (cmd + " -version").encode("ascii").split(b" ")
try: try:
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE).communicate() sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE).communicate()
@@ -44,6 +41,9 @@ class MParser(object):
while True: while True:
try: try:
bp = os.path.expanduser(args) bp = os.path.expanduser(args)
if WINDOWS:
bp = uncyg(bp)
if os.path.exists(bp): if os.path.exists(bp):
self.bin = bp self.bin = bp
return return
@@ -112,6 +112,19 @@ def parse_ffprobe(txt):
ret = {} # processed ret = {} # processed
md = {} # raw tags md = {} # raw tags
is_audio = fmt.get("format_name") in ["mp3", "ogg", "flac", "wav"]
if fmt.get("filename", "").split(".")[-1].lower() in ["m4a", "aac"]:
is_audio = True
# if audio file, ensure audio stream appears first
if (
is_audio
and len(streams) > 2
and streams[1].get("codec_type") != "audio"
and streams[2].get("codec_type") == "audio"
):
streams = [fmt, streams[2], streams[1]] + streams[3:]
have = {} have = {}
for strm in streams: for strm in streams:
typ = strm.get("codec_type") typ = strm.get("codec_type")
@@ -131,9 +144,7 @@ def parse_ffprobe(txt):
] ]
if typ == "video": if typ == "video":
if strm.get("DISPOSITION:attached_pic") == "1" or fmt.get( if strm.get("DISPOSITION:attached_pic") == "1" or is_audio:
"format_name"
) in ["mp3", "ogg", "flac"]:
continue continue
kvm = [ kvm = [
@@ -177,7 +188,7 @@ def parse_ffprobe(txt):
k = k[4:].strip() k = k[4:].strip()
v = v.strip() v = v.strip()
if k and v: if k and v and k not in md:
md[k] = [v] md[k] = [v]
for k in [".q", ".vq", ".aq"]: for k in [".q", ".vq", ".aq"]:
@@ -216,37 +227,47 @@ def parse_ffprobe(txt):
class MTag(object): class MTag(object):
def __init__(self, log_func, args): def __init__(self, log_func, args):
self.log_func = log_func self.log_func = log_func
self.args = args
self.usable = True self.usable = True
self.prefer_mt = False self.prefer_mt = not args.no_mtag_ff
mappings = args.mtm
self.backend = "ffprobe" if args.no_mutagen else "mutagen" self.backend = "ffprobe" if args.no_mutagen else "mutagen"
or_ffprobe = " or ffprobe" self.can_ffprobe = (
HAVE_FFPROBE
and not args.no_mtag_ff
and (not WINDOWS or sys.version_info >= (3, 8))
)
mappings = args.mtm
or_ffprobe = " or FFprobe"
if self.backend == "mutagen": if self.backend == "mutagen":
self.get = self.get_mutagen self.get = self.get_mutagen
try: try:
import mutagen import mutagen
except: except:
self.log("could not load mutagen, trying ffprobe instead", c=3) self.log("could not load Mutagen, trying FFprobe instead", c=3)
self.backend = "ffprobe" self.backend = "ffprobe"
if self.backend == "ffprobe": if self.backend == "ffprobe":
self.usable = self.can_ffprobe
self.get = self.get_ffprobe self.get = self.get_ffprobe
self.prefer_mt = True self.prefer_mt = True
# about 20x slower
self.usable = HAVE_FFPROBE
if self.usable and WINDOWS and sys.version_info < (3, 8): if not HAVE_FFPROBE:
self.usable = False pass
elif args.no_mtag_ff:
msg = "found FFprobe but it was disabled by --no-mtag-ff"
self.log(msg, c=3)
elif WINDOWS and sys.version_info < (3, 8):
or_ffprobe = " or python >= 3.8" or_ffprobe = " or python >= 3.8"
msg = "found ffprobe but your python is too old; need 3.8 or newer" msg = "found FFprobe but your python is too old; need 3.8 or newer"
self.log(msg, c=1) self.log(msg, c=1)
if not self.usable: if not self.usable:
msg = "need mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n" msg = "need Mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
self.log( pybin = os.path.basename(sys.executable)
msg.format(or_ffprobe, " " * 37, os.path.basename(sys.executable)), c=1 self.log(msg.format(or_ffprobe, " " * 37, pybin), c=1)
)
return return
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html # https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
@@ -376,7 +397,7 @@ class MTag(object):
v2 = r2.get(k) v2 = r2.get(k)
if v1 == v2: if v1 == v2:
print(" ", k, v1) print(" ", k, v1)
elif v1 != "0000": # ffprobe date=0 elif v1 != "0000": # FFprobe date=0
diffs.append(k) diffs.append(k)
print(" 1", k, v1) print(" 1", k, v1)
print(" 2", k, v2) print(" 2", k, v2)
@@ -397,20 +418,33 @@ class MTag(object):
            md = mutagen.File(fsenc(abspath), easy=True)
            x = md.info.length
        except Exception as ex:
-            return {}
+            return self.get_ffprobe(abspath) if self.can_ffprobe else {}
-        ret = {}
-        try:
-            dur = int(md.info.length)
-            try:
-                q = int(md.info.bitrate / 1024)
-            except:
-                q = int((os.path.getsize(fsenc(abspath)) / dur) / 128)
-            ret[".dur"] = [0, dur]
-            ret[".q"] = [0, q]
-        except:
-            pass
+        sz = os.path.getsize(fsenc(abspath))
+        ret = {".q": [0, int((sz / md.info.length) / 128)]}
+        for attr, k, norm in [
+            ["codec", "ac", unicode],
+            ["channels", "chs", int],
+            ["sample_rate", ".hz", int],
+            ["bitrate", ".aq", int],
+            ["length", ".dur", int],
+        ]:
+            try:
+                v = getattr(md.info, attr)
+            except:
+                continue
+            if not v:
+                continue
+            if k == ".aq":
+                v /= 1000
+            if k == "ac" and v.startswith("mp4a.40."):
+                v = "aac"
+            ret[k] = [0, norm(v)]
        return self.normalize_tags(ret, md)

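Summarizing the tag-reader changes above: Mutagen remains the default backend, FFprobe is now a per-file fallback when Mutagen fails, and --no-mtag-ff blacklists FFprobe entirely. A rough sketch of that decision, ignoring the extra Windows/Python-3.8 constraint; only the flag names follow the diff, the function itself is illustrative:

    def pick_tag_backend(have_mutagen, have_ffprobe, no_mutagen, no_mtag_ff):
        # returns (primary, fallback); (None, None) means tags are disabled
        can_ffprobe = have_ffprobe and not no_mtag_ff

        if not no_mutagen and have_mutagen:
            # mutagen first; FFprobe only when mutagen fails on a file
            return "mutagen", ("ffprobe" if can_ffprobe else None)

        if can_ffprobe:
            return "ffprobe", None

        return None, None

    print(pick_tag_backend(True, True, False, True))    # ('mutagen', None)
    print(pick_tag_backend(False, True, False, False))  # ('ffprobe', None)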

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os import os
import tarfile import tarfile
import threading import threading
@@ -30,10 +33,11 @@ class QFile(object):
class StreamTar(object): class StreamTar(object):
"""construct in-memory tar file from the given path""" """construct in-memory tar file from the given path"""
def __init__(self, fgen, **kwargs): def __init__(self, log, fgen, **kwargs):
self.ci = 0 self.ci = 0
self.co = 0 self.co = 0
self.qfile = QFile() self.qfile = QFile()
self.log = log
self.fgen = fgen self.fgen = fgen
self.errf = None self.errf = None
@@ -42,7 +46,7 @@ class StreamTar(object):
fmt = tarfile.GNU_FORMAT fmt = tarfile.GNU_FORMAT
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt) self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt)
w = threading.Thread(target=self._gen) w = threading.Thread(target=self._gen, name="star-gen")
w.daemon = True w.daemon = True
w.start() w.start()
@@ -88,7 +92,8 @@ class StreamTar(object):
errors.append([f["vp"], repr(ex)]) errors.append([f["vp"], repr(ex)])
if errors: if errors:
self.errf = errdesc(errors) self.errf, txt = errdesc(errors)
self.log("\n".join(([repr(self.errf)] + txt[1:])))
self.ser(self.errf) self.ser(self.errf)
self.tar.close() self.tar.close()


@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os import os
import time import time
import tempfile import tempfile
@@ -22,4 +25,4 @@ def errdesc(errors):
"vp": "archive-errors-{}.txt".format(dt), "vp": "archive-errors-{}.txt".format(dt),
"ap": tf_path, "ap": tf_path,
"st": os.stat(tf_path), "st": os.stat(tf_path),
} }, report


@@ -5,12 +5,16 @@ import re
import os import os
import sys import sys
import time import time
import shlex
import string
import signal
import socket
import threading import threading
from datetime import datetime, timedelta from datetime import datetime, timedelta
import calendar import calendar
from .__init__ import PY2, WINDOWS, MACOS, VT100 from .__init__ import E, PY2, WINDOWS, MACOS, VT100, unicode
from .util import mp from .util import mp, start_log_thrs, start_stackmon, min_ex
from .authsrv import AuthSrv from .authsrv import AuthSrv
from .tcpsrv import TcpSrv from .tcpsrv import TcpSrv
from .up2k import Up2k from .up2k import Up2k
@@ -28,23 +32,35 @@ class SvcHub(object):
put() can return a queue (if want_reply=True) which has a blocking get() with the response. put() can return a queue (if want_reply=True) which has a blocking get() with the response.
""" """
def __init__(self, args): def __init__(self, args, argv, printed):
self.args = args self.args = args
self.argv = argv
self.logf = None
self.stop_req = False
self.stopping = False
self.stop_cond = threading.Condition()
self.ansi_re = re.compile("\033\\[[^m]*m") self.ansi_re = re.compile("\033\\[[^m]*m")
self.log_mutex = threading.Lock() self.log_mutex = threading.Lock()
self.next_day = 0 self.next_day = 0
self.log = self._log_disabled if args.q else self._log_enabled self.log = self._log_disabled if args.q else self._log_enabled
if args.lo:
self._setup_logfile(printed)
# jank goes here if args.stackmon:
auth = AuthSrv(self.args, self.log, False) start_stackmon(args.stackmon, 0)
if args.ls:
auth.dbg_ls() if args.log_thrs:
start_log_thrs(self.log, args.log_thrs, 0)
# initiate all services to manage # initiate all services to manage
self.asrv = AuthSrv(self.args, self.log, False)
if args.ls:
self.asrv.dbg_ls()
self.tcpsrv = TcpSrv(self) self.tcpsrv = TcpSrv(self)
self.up2k = Up2k(self, auth.vfs.all_vols) self.up2k = Up2k(self)
self.thumbsrv = None self.thumbsrv = None
if not args.no_thumb: if not args.no_thumb:
@@ -54,7 +70,7 @@ class SvcHub(object):
msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old" msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old"
self.log("thumb", msg, c=3) self.log("thumb", msg, c=3)
self.thumbsrv = ThumbSrv(self, auth.vfs.all_vols) self.thumbsrv = ThumbSrv(self)
else: else:
msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n" msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n"
self.log( self.log(
@@ -70,22 +86,102 @@ class SvcHub(object):
self.broker = Broker(self) self.broker = Broker(self)
def _logname(self):
dt = datetime.utcfromtimestamp(time.time())
fn = self.args.lo
for fs in "YmdHMS":
fs = "%" + fs
if fs in fn:
fn = fn.replace(fs, dt.strftime(fs))
return fn
def _setup_logfile(self, printed):
base_fn = fn = sel_fn = self._logname()
if fn != self.args.lo:
ctr = 0
# yup this is a race; if started sufficiently concurrently, two
# copyparties can grab the same logfile (considered and ignored)
while os.path.exists(sel_fn):
ctr += 1
sel_fn = "{}.{}".format(fn, ctr)
fn = sel_fn
try:
import lzma
lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
except:
import codecs
lh = codecs.open(fn, "w", encoding="utf-8", errors="replace")
lh.base_fn = base_fn
argv = [sys.executable] + self.argv
if hasattr(shlex, "quote"):
argv = [shlex.quote(x) for x in argv]
else:
argv = ['"{}"'.format(x) for x in argv]
msg = "[+] opened logfile [{}]\n".format(fn)
printed += msg
lh.write("t0: {:.3f}\nargv: {}\n\n{}".format(E.t0, " ".join(argv), printed))
self.logf = lh
print(msg, end="")
    def run(self):
-        thr = threading.Thread(target=self.tcpsrv.run)
+        self.tcpsrv.run()
+        thr = threading.Thread(target=self.sd_notify, name="sd-notify")
        thr.daemon = True
        thr.start()
-        # winxp/py2.7 support: thr.join() kills signals
-        try:
-            while True:
-                time.sleep(9001)
-        except KeyboardInterrupt:
+        thr = threading.Thread(target=self.stop_thr, name="svchub-sig")
+        thr.daemon = True
+        thr.start()
+        for sig in [signal.SIGINT, signal.SIGTERM]:
+            signal.signal(sig, self.signal_handler)
+        try:
+            while not self.stop_req:
+                time.sleep(9001)
+        except:
+            pass
+        self.shutdown()
+    def stop_thr(self):
+        while not self.stop_req:
+            with self.stop_cond:
+                self.stop_cond.wait(9001)
+        self.shutdown()
+    def signal_handler(self):
+        if self.stopping:
+            return
+        self.stop_req = True
+        with self.stop_cond:
+            self.stop_cond.notify_all()
+    def shutdown(self):
+        if self.stopping:
+            return
+        self.stopping = True
+        self.stop_req = True
+        try:
with self.log_mutex: with self.log_mutex:
print("OPYTHAT") print("OPYTHAT")
self.tcpsrv.shutdown() self.tcpsrv.shutdown()
self.broker.shutdown() self.broker.shutdown()
self.up2k.shutdown()
if self.thumbsrv: if self.thumbsrv:
self.thumbsrv.shutdown() self.thumbsrv.shutdown()
@@ -95,20 +191,33 @@ class SvcHub(object):
                    break

                if n == 3:
-                    print("waiting for thumbsrv...")
+                    print("waiting for thumbsrv (10sec)...")

-            print("nailed it")
+            print("nailed it", end="")
+        finally:
+            print("\033[0m")
+            if self.logf:
+                self.logf.close()
    def _log_disabled(self, src, msg, c=0):
-        pass
-
-    def _log_enabled(self, src, msg, c=0):
-        """handles logging from all components"""
+        if not self.logf:
+            return
+
        with self.log_mutex:
+            ts = datetime.utcfromtimestamp(time.time())
+            ts = ts.strftime("%Y-%m%d-%H%M%S.%f")[:-3]
+            self.logf.write("@{} [{}] {}\n".format(ts, src, msg))
+
            now = time.time()
            if now >= self.next_day:
-                dt = datetime.utcfromtimestamp(now)
-                print("\033[36m{}\033[0m\n".format(dt.strftime("%Y-%m-%d")), end="")
+                self._set_next_day()
+
+    def _set_next_day(self):
+        if self.next_day and self.logf and self.logf.base_fn != self._logname():
+            self.logf.close()
+            self._setup_logfile("")
+
+        dt = datetime.utcfromtimestamp(time.time())

        # unix timestamp of next 00:00:00 (leap-seconds safe)
        day_now = dt.day
@@ -118,6 +227,15 @@ class SvcHub(object):
        dt = dt.replace(hour=0, minute=0, second=0)
        self.next_day = calendar.timegm(dt.utctimetuple())

+    def _log_enabled(self, src, msg, c=0):
+        """handles logging from all components"""
+        with self.log_mutex:
+            now = time.time()
+            if now >= self.next_day:
+                dt = datetime.utcfromtimestamp(now)
+                print("\033[36m{}\033[0m\n".format(dt.strftime("%Y-%m-%d")), end="")
+                self._set_next_day()
+
            fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n"
            if not VT100:
                fmt = "{} {:21} {}\n"
@@ -143,20 +261,20 @@ class SvcHub(object):
except: except:
print(msg.encode("ascii", "replace").decode(), end="") print(msg.encode("ascii", "replace").decode(), end="")
if self.logf:
self.logf.write(msg)
    def check_mp_support(self):
        vmin = sys.version_info[1]
        if WINDOWS:
            msg = "need python 3.3 or newer for multiprocessing;"
-            if PY2:
-                # py2 pickler doesn't support winsock
-                return msg
-            elif vmin < 3:
+            if PY2 or vmin < 3:
                return msg
        elif MACOS:
            return "multiprocessing is wonky on mac osx;"
        else:
-            msg = "need python 2.7 or 3.3+ for multiprocessing;"
-            if not PY2 and vmin < 3:
+            msg = "need python 3.3+ for multiprocessing;"
+            if PY2 or vmin < 3:
                return msg
try: try:
@@ -188,5 +306,24 @@ class SvcHub(object):
        if not err:
            return True
        else:
-            self.log("root", err)
+            self.log("svchub", err)
            return False
def sd_notify(self):
try:
addr = os.getenv("NOTIFY_SOCKET")
if not addr:
return
addr = unicode(addr)
if addr.startswith("@"):
addr = "\0" + addr[1:]
m = "".join(x for x in addr if x in string.printable)
self.log("sd_notify", m)
sck = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
sck.connect(addr)
sck.sendall(b"READY=1")
except:
self.log("sd_notify", min_ex())


@@ -1,15 +1,17 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os import os
import time import time
import zlib import zlib
import struct
from datetime import datetime from datetime import datetime
from .sutil import errdesc from .sutil import errdesc
from .util import yieldfile, sanitize_fn from .util import yieldfile, sanitize_fn, spack, sunpack
def dostime2unix(buf): def dostime2unix(buf):
t, d = struct.unpack("<HH", buf) t, d = sunpack(b"<HH", buf)
ts = (t & 0x1F) * 2 ts = (t & 0x1F) * 2
tm = (t >> 5) & 0x3F tm = (t >> 5) & 0x3F
@@ -33,13 +35,13 @@ def unixtime2dos(ts):
bd = ((dy - 1980) << 9) + (dm << 5) + dd bd = ((dy - 1980) << 9) + (dm << 5) + dd
bt = (th << 11) + (tm << 5) + ts // 2 bt = (th << 11) + (tm << 5) + ts // 2
return struct.pack("<HH", bt, bd) return spack(b"<HH", bt, bd)
def gen_fdesc(sz, crc32, z64): def gen_fdesc(sz, crc32, z64):
ret = b"\x50\x4b\x07\x08" ret = b"\x50\x4b\x07\x08"
fmt = "<LQQ" if z64 else "<LLL" fmt = b"<LQQ" if z64 else b"<LLL"
ret += struct.pack(fmt, crc32, sz, sz) ret += spack(fmt, crc32, sz, sz)
return ret return ret
@@ -63,7 +65,7 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
req_ver = b"\x2d\x00" if z64 else b"\x0a\x00" req_ver = b"\x2d\x00" if z64 else b"\x0a\x00"
if crc32: if crc32:
crc32 = struct.pack("<L", crc32) crc32 = spack(b"<L", crc32)
else: else:
crc32 = b"\x00" * 4 crc32 = b"\x00" * 4
@@ -84,14 +86,14 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
# however infozip does actual sz and it even works on winxp # however infozip does actual sz and it even works on winxp
# (same reasning for z64 extradata later) # (same reasning for z64 extradata later)
vsz = 0xFFFFFFFF if z64 else sz vsz = 0xFFFFFFFF if z64 else sz
ret += struct.pack("<LL", vsz, vsz) ret += spack(b"<LL", vsz, vsz)
# windows support (the "?" replace below too) # windows support (the "?" replace below too)
fn = sanitize_fn(fn, ok="/") fn = sanitize_fn(fn, "/", [])
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_") bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
z64_len = len(z64v) * 8 + 4 if z64v else 0 z64_len = len(z64v) * 8 + 4 if z64v else 0
ret += struct.pack("<HH", len(bfn), z64_len) ret += spack(b"<HH", len(bfn), z64_len)
if h_pos is not None: if h_pos is not None:
# 2b comment, 2b diskno # 2b comment, 2b diskno
@@ -103,12 +105,12 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
ret += b"\x01\x00\x00\x00\xa4\x81" ret += b"\x01\x00\x00\x00\xa4\x81"
# 4b local-header-ofs # 4b local-header-ofs
ret += struct.pack("<L", min(h_pos, 0xFFFFFFFF)) ret += spack(b"<L", min(h_pos, 0xFFFFFFFF))
ret += bfn ret += bfn
if z64v: if z64v:
ret += struct.pack("<HH" + "Q" * len(z64v), 1, len(z64v) * 8, *z64v) ret += spack(b"<HH" + b"Q" * len(z64v), 1, len(z64v) * 8, *z64v)
return ret return ret
@@ -133,7 +135,7 @@ def gen_ecdr(items, cdir_pos, cdir_end):
need_64 = nitems == 0xFFFF or 0xFFFFFFFF in [csz, cpos] need_64 = nitems == 0xFFFF or 0xFFFFFFFF in [csz, cpos]
# 2b tnfiles, 2b dnfiles, 4b dir sz, 4b dir pos # 2b tnfiles, 2b dnfiles, 4b dir sz, 4b dir pos
ret += struct.pack("<HHLL", nitems, nitems, csz, cpos) ret += spack(b"<HHLL", nitems, nitems, csz, cpos)
# 2b comment length # 2b comment length
ret += b"\x00\x00" ret += b"\x00\x00"
@@ -160,7 +162,7 @@ def gen_ecdr64(items, cdir_pos, cdir_end):
# 8b tnfiles, 8b dnfiles, 8b dir sz, 8b dir pos # 8b tnfiles, 8b dnfiles, 8b dir sz, 8b dir pos
cdir_sz = cdir_end - cdir_pos cdir_sz = cdir_end - cdir_pos
ret += struct.pack("<QQQQ", len(items), len(items), cdir_sz, cdir_pos) ret += spack(b"<QQQQ", len(items), len(items), cdir_sz, cdir_pos)
return ret return ret
@@ -175,13 +177,14 @@ def gen_ecdr64_loc(ecdr64_pos):
ret = b"\x50\x4b\x06\x07" ret = b"\x50\x4b\x06\x07"
# 4b cdisk, 8b start of ecdr64, 4b ndisks # 4b cdisk, 8b start of ecdr64, 4b ndisks
ret += struct.pack("<LQL", 0, ecdr64_pos, 1) ret += spack(b"<LQL", 0, ecdr64_pos, 1)
return ret return ret
class StreamZip(object): class StreamZip(object):
def __init__(self, fgen, utf8=False, pre_crc=False): def __init__(self, log, fgen, utf8=False, pre_crc=False):
self.log = log
self.fgen = fgen self.fgen = fgen
self.utf8 = utf8 self.utf8 = utf8
self.pre_crc = pre_crc self.pre_crc = pre_crc
@@ -244,8 +247,8 @@ class StreamZip(object):
errors.append([f["vp"], repr(ex)]) errors.append([f["vp"], repr(ex)])
if errors: if errors:
-            errf = errdesc(errors)
-            print(repr(errf))
+            errf, txt = errdesc(errors)
+            self.log("\n".join(([repr(errf)] + txt[1:])))
for x in self.ser(errf): for x in self.ser(errf):
yield x yield x
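The zip helpers above now go through the spack/sunpack wrappers; the DOS timestamp they encode is 16 bits of time (hours:5, minutes:6, seconds/2:5) plus 16 bits of date (year-1980:7, month:4, day:5). A rough standalone sketch of unixtime2dos under that layout; the gmtime seeding and any pre-1980 clamping are assumptions, since this hunk only shows the bit packing:

# rough sketch of the 4-byte DOS date/time encoding used in zip headers
import struct
import time

def unixtime2dos_sketch(ts):
    dy, dm, dd, th, tm, ss = time.gmtime(ts)[:6]
    bd = ((dy - 1980) << 9) + (dm << 5) + dd   # date word
    bt = (th << 11) + (tm << 5) + ss // 2      # time word (2-second resolution)
    return struct.pack("<HH", bt, bd)

print(unixtime2dos_sketch(1626300000).hex())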


@@ -2,11 +2,9 @@
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import re import re
import time
import socket import socket
import select
from .util import chkcmd, Counter from .util import chkcmd
class TcpSrv(object): class TcpSrv(object):
@@ -20,7 +18,7 @@ class TcpSrv(object):
self.args = hub.args self.args = hub.args
self.log = hub.log self.log = hub.log
self.num_clients = Counter() self.stopping = False
ip = "127.0.0.1" ip = "127.0.0.1"
eps = {ip: "local only"} eps = {ip: "local only"}
@@ -65,36 +63,22 @@ class TcpSrv(object):
for srv in self.srv: for srv in self.srv:
srv.listen(self.args.nc) srv.listen(self.args.nc)
ip, port = srv.getsockname() ip, port = srv.getsockname()
self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port)) fno = srv.fileno()
msg = "listening @ {}:{} f{}".format(ip, port, fno)
self.log("tcpsrv", msg)
if self.args.q:
print(msg)
while True: self.hub.broker.put(False, "listen", srv)
if self.args.log_conn:
self.log("tcpsrv", "|%sC-ncli" % ("-" * 1,), c="1;30")
if self.num_clients.v >= self.args.nc:
time.sleep(0.1)
continue
if self.args.log_conn:
self.log("tcpsrv", "|%sC-acc1" % ("-" * 2,), c="1;30")
ready, _, _ = select.select(self.srv, [], [])
for srv in ready:
sck, addr = srv.accept()
sip, sport = srv.getsockname()
if self.args.log_conn:
self.log(
"%s %s" % addr,
"|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
"-" * 3, sip, sport % 8, sport
),
c="1;30",
)
self.num_clients.add()
self.hub.broker.put(False, "httpconn", sck, addr)
def shutdown(self): def shutdown(self):
self.stopping = True
try:
for srv in self.srv:
srv.close()
except:
pass
self.log("tcpsrv", "ok bye") self.log("tcpsrv", "ok bye")
def detect_interfaces(self, listen_ips): def detect_interfaces(self, listen_ips):


@@ -1,5 +1,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os import os
import time
from .util import Cooldown from .util import Cooldown
from .th_srv import thumb_path, THUMBABLE, FMT_FF from .th_srv import thumb_path, THUMBABLE, FMT_FF
@@ -9,6 +11,7 @@ class ThumbCli(object):
def __init__(self, broker): def __init__(self, broker):
self.broker = broker self.broker = broker
self.args = broker.args self.args = broker.args
self.asrv = broker.asrv
# cache on both sides for less broker spam # cache on both sides for less broker spam
self.cooldown = Cooldown(self.args.th_poke) self.cooldown = Cooldown(self.args.th_poke)
@@ -18,16 +21,19 @@ class ThumbCli(object):
if ext not in THUMBABLE: if ext not in THUMBABLE:
return None return None
-        if self.args.no_vthumb and ext in FMT_FF:
+        is_vid = ext in FMT_FF
+        if is_vid and self.args.no_vthumb:
            return None

        if fmt == "j" and self.args.th_no_jpg:
            fmt = "w"

-        if fmt == "w" and self.args.th_no_webp:
+        if fmt == "w":
+            if self.args.th_no_webp or (is_vid and self.args.th_ff_jpg):
                fmt = "j"

-        tpath = thumb_path(ptop, rem, mtime, fmt)
+        histpath = self.asrv.vfs.histtab[ptop]
+        tpath = thumb_path(histpath, rem, mtime, fmt)
ret = None ret = None
try: try:
st = os.stat(tpath) st = os.stat(tpath)
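The reworked ThumbCli.get above decides the thumbnail format before hashing the path: jpeg requests fall back to webp when --th-no-jpg is set, and webp falls back to jpeg when --th-no-webp is set or when the file is a video and --th-ff-jpg is set. A standalone sketch of just that decision (args stands in for the parsed copyparty options; illustrative only):

# rough sketch of the thumbnail-format fallback shown above
from types import SimpleNamespace

def pick_thumb_fmt(fmt, is_vid, args):
    if fmt == "j" and args.th_no_jpg:
        fmt = "w"  # jpeg disabled; try webp instead
    if fmt == "w" and (args.th_no_webp or (is_vid and args.th_ff_jpg)):
        fmt = "j"  # webp unavailable (or ffmpeg jpeg forced for videos)
    return fmt

opts = SimpleNamespace(th_no_jpg=False, th_no_webp=True, th_ff_jpg=False)
print(pick_thumb_fmt("w", False, opts))  # "j"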


@@ -1,5 +1,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os import os
import sys
import time import time
import shutil import shutil
import base64 import base64
@@ -7,15 +9,11 @@ import hashlib
import threading import threading
import subprocess as sp import subprocess as sp
from .__init__ import PY2 from .__init__ import PY2, unicode
from .util import fsenc, mchkcmd, Queue, Cooldown, BytesIO from .util import fsenc, runcmd, Queue, Cooldown, BytesIO, min_ex
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
if not PY2:
unicode = str
HAVE_PIL = False HAVE_PIL = False
HAVE_HEIF = False HAVE_HEIF = False
HAVE_AVIF = False HAVE_AVIF = False
@@ -51,7 +49,7 @@ except:
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html # https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# ffmpeg -formats # ffmpeg -formats
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm" FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv" FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
if HAVE_HEIF: if HAVE_HEIF:
FMT_PIL += " heif heifs heic heics" FMT_PIL += " heif heifs heic heics"
@@ -71,7 +69,7 @@ if HAVE_FFMPEG and HAVE_FFPROBE:
THUMBABLE.update(FMT_FF) THUMBABLE.update(FMT_FF)
-def thumb_path(ptop, rem, mtime, fmt):
+def thumb_path(histpath, rem, mtime, fmt):
    # base16 = 16 = 256
    # b64-lc = 38 = 1444
    # base64 = 64 = 4096
@@ -82,26 +80,25 @@ def thumb_path(ptop, rem, mtime, fmt):
    fn = rem
    if rd:
-        h = hashlib.sha512(fsenc(rd)).digest()[:24]
+        h = hashlib.sha512(fsenc(rd)).digest()
        b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
        rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
    else:
        rd = "top"

    # could keep original filenames but this is safer re pathlen
-    h = hashlib.sha512(fsenc(fn)).digest()[:24]
+    h = hashlib.sha512(fsenc(fn)).digest()
    fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]

-    return "{}/.hist/th/{}/{}.{:x}.{}".format(
-        ptop, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
+    return "{}/th/{}/{}.{:x}.{}".format(
+        histpath, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
    )
class ThumbSrv(object): class ThumbSrv(object):
def __init__(self, hub, vols): def __init__(self, hub):
self.hub = hub self.hub = hub
self.vols = [v.realpath for v in vols.values()] self.asrv = hub.asrv
self.args = hub.args self.args = hub.args
self.log_func = hub.log self.log_func = hub.log
@@ -114,24 +111,27 @@ class ThumbSrv(object):
self.stopping = False self.stopping = False
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4 self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
self.q = Queue(self.nthr * 4) self.q = Queue(self.nthr * 4)
for _ in range(self.nthr): for n in range(self.nthr):
t = threading.Thread(target=self.worker) t = threading.Thread(
target=self.worker, name="thumb-{}-{}".format(n, self.nthr)
)
t.daemon = True t.daemon = True
t.start() t.start()
if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE): if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE):
missing = [] missing = []
if not HAVE_FFMPEG: if not HAVE_FFMPEG:
missing.append("ffmpeg") missing.append("FFmpeg")
if not HAVE_FFPROBE: if not HAVE_FFPROBE:
missing.append("ffprobe") missing.append("FFprobe")
msg = "cannot create video thumbnails because some of the required programs are not available: " msg = "cannot create video thumbnails because some of the required programs are not available: "
msg += ", ".join(missing) msg += ", ".join(missing)
self.log(msg, c=1) self.log(msg, c=3)
t = threading.Thread(target=self.cleaner) if self.args.th_clean:
t = threading.Thread(target=self.cleaner, name="thumb-cleaner")
t.daemon = True t.daemon = True
t.start() t.start()
@@ -148,9 +148,11 @@ class ThumbSrv(object):
return not self.nthr return not self.nthr
    def get(self, ptop, rem, mtime, fmt):
-        tpath = thumb_path(ptop, rem, mtime, fmt)
+        histpath = self.asrv.vfs.histtab[ptop]
+        tpath = thumb_path(histpath, rem, mtime, fmt)
        abspath = os.path.join(ptop, rem)

-        cond = threading.Condition()
+        cond = threading.Condition(self.mutex)
+        do_conv = False
with self.mutex: with self.mutex:
try: try:
self.busy[tpath].append(cond) self.busy[tpath].append(cond)
@@ -168,6 +170,9 @@ class ThumbSrv(object):
f.write(fsenc(os.path.dirname(abspath))) f.write(fsenc(os.path.dirname(abspath)))
self.busy[tpath] = [cond] self.busy[tpath] = [cond]
do_conv = True
if do_conv:
self.q.put([abspath, tpath]) self.q.put([abspath, tpath])
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6) self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
@@ -177,7 +182,7 @@ class ThumbSrv(object):
break break
with cond: with cond:
cond.wait() cond.wait(3)
try: try:
st = os.stat(tpath) st = os.stat(tpath)
@@ -206,9 +211,9 @@ class ThumbSrv(object):
if fun: if fun:
try: try:
fun(abspath, tpath) fun(abspath, tpath)
except Exception as ex: except:
msg = "{} failed on {}\n {!r}" msg = "{} failed on {}\n{}"
self.log(msg.format(fun.__name__, abspath, ex), 3) self.log(msg.format(fun.__name__, abspath, min_ex()), 3)
with open(tpath, "wb") as _: with open(tpath, "wb") as _:
pass pass
@@ -240,8 +245,8 @@ class ThumbSrv(object):
except: except:
im.thumbnail(self.res) im.thumbnail(self.res)
if im.mode not in ("RGB", "L"): fmts = ["RGB", "L"]
im = im.convert("RGB") args = {"quality": 40}
if tpath.endswith(".webp"): if tpath.endswith(".webp"):
# quality 80 = pillow-default # quality 80 = pillow-default
@@ -249,15 +254,27 @@ class ThumbSrv(object):
# method 0 = pillow-default, fast # method 0 = pillow-default, fast
# method 4 = ffmpeg-default # method 4 = ffmpeg-default
# method 6 = max, slow # method 6 = max, slow
im.save(tpath, quality=40, method=6) fmts += ["RGBA", "LA"]
args["method"] = 6
else: else:
im.save(tpath, quality=40) # default=75 pass # default q = 75
if im.mode not in fmts:
# print("conv {}".format(im.mode))
im = im.convert("RGB")
im.save(tpath, quality=40, method=6)
def conv_ffmpeg(self, abspath, tpath): def conv_ffmpeg(self, abspath, tpath):
ret, _ = ffprobe(abspath) ret, _ = ffprobe(abspath)
ext = abspath.rsplit(".")[-1]
if ext in ["h264", "h265"]:
seek = []
else:
dur = ret[".dur"][1] if ".dur" in ret else 4 dur = ret[".dur"][1] if ".dur" in ret else 4
seek = "{:.0f}".format(dur / 3) seek = "{:.0f}".format(dur / 3)
seek = [b"-ss", seek.encode("utf-8")]
scale = "scale={0}:{1}:force_original_aspect_ratio=" scale = "scale={0}:{1}:force_original_aspect_ratio="
if self.args.th_no_crop: if self.args.th_no_crop:
@@ -266,19 +283,20 @@ class ThumbSrv(object):
scale += "increase,crop={0}:{1},setsar=1:1" scale += "increase,crop={0}:{1},setsar=1:1"
scale = scale.format(*list(self.res)).encode("utf-8") scale = scale.format(*list(self.res)).encode("utf-8")
# fmt: off
cmd = [ cmd = [
b"ffmpeg", b"ffmpeg",
b"-nostdin", b"-nostdin",
b"-hide_banner", b"-v", b"error",
b"-ss", b"-hide_banner"
seek,
b"-i",
fsenc(abspath),
b"-vf",
scale,
b"-vframes",
b"1",
] ]
cmd += seek
cmd += [
b"-i", fsenc(abspath),
b"-vf", scale,
b"-vframes", b"1",
]
# fmt: on
if tpath.endswith(".jpg"): if tpath.endswith(".jpg"):
cmd += [ cmd += [
@@ -295,7 +313,11 @@ class ThumbSrv(object):
cmd += [fsenc(tpath)] cmd += [fsenc(tpath)]
mchkcmd(cmd) ret, sout, serr = runcmd(*cmd)
if ret != 0:
msg = ["ff: {}".format(x) for x in serr.split("\n")]
self.log("FFmpeg failed:\n" + "\n".join(msg), c="1;30")
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
def poke(self, tdir): def poke(self, tdir):
if not self.poke_cd.poke(tdir): if not self.poke_cd.poke(tdir):
@@ -314,26 +336,32 @@ class ThumbSrv(object):
interval = self.args.th_clean interval = self.args.th_clean
while True: while True:
time.sleep(interval) time.sleep(interval)
for vol in self.vols: ndirs = 0
vol += "/.hist/th" for vol, histpath in self.asrv.vfs.histtab.items():
self.log("\033[Jcln {}/\033[A".format(vol)) if histpath.startswith(vol):
self.clean(vol) self.log("\033[Jcln {}/\033[A".format(histpath))
else:
self.log("\033[Jcln {} ({})/\033[A".format(histpath, vol))
self.log("\033[Jcln ok") ndirs += self.clean(histpath)
def clean(self, vol): self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
# self.log("cln {}".format(vol))
def clean(self, histpath):
thumbpath = os.path.join(histpath, "th")
# self.log("cln {}".format(thumbpath))
maxage = self.args.th_maxage maxage = self.args.th_maxage
now = time.time() now = time.time()
prev_b64 = None prev_b64 = None
prev_fp = None prev_fp = None
try: try:
ents = os.listdir(vol) ents = os.listdir(thumbpath)
except: except:
return return 0
ndirs = 0
for f in sorted(ents): for f in sorted(ents):
fp = os.path.join(vol, f) fp = os.path.join(thumbpath, f)
cmp = fp.lower().replace("\\", "/") cmp = fp.lower().replace("\\", "/")
# "top" or b64 prefix/full (a folder) # "top" or b64 prefix/full (a folder)
@@ -348,10 +376,11 @@ class ThumbSrv(object):
break break
if safe: if safe:
ndirs += 1
self.log("rm -rf [{}]".format(fp)) self.log("rm -rf [{}]".format(fp))
shutil.rmtree(fp, ignore_errors=True) shutil.rmtree(fp, ignore_errors=True)
else: else:
self.clean(fp) ndirs += self.clean(fp)
continue continue
# thumb file # thumb file
@@ -373,3 +402,5 @@ class ThumbSrv(object):
prev_b64 = b64 prev_b64 = b64
prev_fp = fp prev_fp = fp
return ndirs
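thumb_path above now keys everything off the per-volume histpath and replaces both the directory and the filename with truncated urlsafe-base64 sha512 digests, so path length stays bounded regardless of the original names. A rough sketch of the resulting layout; fsenc is simplified to a plain utf-8 encode, and "/hist/music" is an assumed histpath, not real copyparty config:

# sketch of the hashed thumbnail layout produced by thumb_path
import base64
import hashlib

def thumb_path_sketch(histpath, rd, fn, mtime, fmt):
    # hash directory and filename separately, keep 24 b64 chars of each
    def h24(s):
        dig = hashlib.sha512(s.encode("utf-8")).digest()
        return base64.urlsafe_b64encode(dig).decode("ascii")[:24]

    if rd:
        b64 = h24(rd)
        rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
    else:
        rd = "top"

    fn = h24(fn)
    ext = "webp" if fmt == "w" else "jpg"
    return "{}/th/{}/{}.{:x}.{}".format(histpath, rd, fn, int(mtime), ext)

print(thumb_path_sketch("/hist/music", "albums/2021", "track.flac", 1626300000, "w"))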


@@ -7,7 +7,8 @@ import time
import threading import threading
from datetime import datetime from datetime import datetime
from .util import u8safe, s3dec, html_escape, Pebkac from .__init__ import unicode
from .util import s3dec, Pebkac, min_ex
from .up2k import up2k_wark_from_hashlist from .up2k import up2k_wark_from_hashlist
@@ -19,13 +20,14 @@ except:
class U2idx(object): class U2idx(object):
-    def __init__(self, args, log_func):
-        self.args = args
-        self.log_func = log_func
-        self.timeout = args.srch_time
+    def __init__(self, conn):
+        self.log_func = conn.log_func
+        self.asrv = conn.asrv
+        self.args = conn.args
+        self.timeout = self.args.srch_time

        if not HAVE_SQLITE3:
-            self.log("could not load sqlite3; searchign wqill be disabled")
+            self.log("your python does not have sqlite3; searching will be disabled")
return return
self.cur = {} self.cur = {}
@@ -52,18 +54,23 @@ class U2idx(object):
try: try:
return self.run_query(vols, uq, uv)[0] return self.run_query(vols, uq, uv)[0]
except Exception as ex: except:
raise Pebkac(500, repr(ex)) raise Pebkac(500, min_ex())
    def get_cur(self, ptop):
+        if not HAVE_SQLITE3:
+            return None
+
        cur = self.cur.get(ptop)
        if cur:
            return cur

-        cur = _open(ptop)
-        if not cur:
+        histpath = self.asrv.vfs.histtab[ptop]
+        db_path = os.path.join(histpath, "up2k.db")
+        if not os.path.exists(db_path):
            return None

+        cur = sqlite3.connect(db_path, 2).cursor()
        self.cur[ptop] = cur
        return cur
@@ -84,6 +91,8 @@ class U2idx(object):
mt_ctr = 0 mt_ctr = 0
mt_keycmp = "substr(up.w,1,16)" mt_keycmp = "substr(up.w,1,16)"
mt_keycmp2 = None mt_keycmp2 = None
ptn_lc = re.compile(r" (mt[0-9]+\.v) ([=<!>]+) \? $")
ptn_lcv = re.compile(r"[a-zA-Z]")
while True: while True:
uq = uq.strip() uq = uq.strip()
@@ -176,6 +185,21 @@ class U2idx(object):
va.append(v) va.append(v)
is_key = True is_key = True
# lowercase tag searches
m = ptn_lc.search(q)
if not m or not ptn_lcv.search(unicode(v)):
continue
va.pop()
va.append(v.lower())
q = q[: m.start()]
field, oper = m.groups()
if oper in ["=", "=="]:
q += " {} like ? ".format(field)
else:
q += " lower({}) {} ? ".format(field, oper)
try: try:
return self.run_query(vols, joins + "where " + q, va) return self.run_query(vols, joins + "where " + q, va)
except Exception as ex: except Exception as ex:
@@ -192,6 +216,7 @@ class U2idx(object):
self.active_id, self.active_id,
done_flag, done_flag,
), ),
name="u2idx-terminator",
) )
thr.daemon = True thr.daemon = True
thr.start() thr.start()
@@ -241,6 +266,7 @@ class U2idx(object):
hit["tags"] = tags hit["tags"] = tags
ret.extend(sret) ret.extend(sret)
# print("[{}] {}".format(ptop, sret))
done_flag.append(True) done_flag.append(True)
self.active_id = None self.active_id = None
@@ -261,9 +287,3 @@ class U2idx(object):
if identifier == self.active_id: if identifier == self.active_id:
self.active_cur.connection.interrupt() self.active_cur.connection.interrupt()
def _open(ptop):
db_path = os.path.join(ptop, ".hist", "up2k.db")
if os.path.exists(db_path):
return sqlite3.connect(db_path).cursor()
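The case-insensitive tag search above rewrites the tail of the generated SQL: a trailing mtN.v = ? compared against a textual value becomes LIKE (case-insensitive for ASCII in sqlite), other operators are wrapped in lower(), and the bound value is lowercased to match. A standalone sketch of that rewrite, reusing the regexes from the diff (str() stands in for the py2 unicode shim):

# standalone sketch of the lowercase-tag-search rewrite; q is the SQL
# fragment built so far and va the list of bound values, as in the diff
import re

ptn_lc = re.compile(r" (mt[0-9]+\.v) ([=<!>]+) \? $")
ptn_lcv = re.compile(r"[a-zA-Z]")

def lowercase_rewrite(q, va):
    v = va[-1]
    m = ptn_lc.search(q)
    if not m or not ptn_lcv.search(str(v)):
        return q, va  # numeric value or no trailing tag comparison; keep as-is

    va = va[:-1] + [v.lower()]
    q = q[: m.start()]
    field, oper = m.groups()
    if oper in ["=", "=="]:
        q += " {} like ? ".format(field)
    else:
        q += " lower({}) {} ? ".format(field, oper)

    return q, va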


@@ -16,7 +16,7 @@ import traceback
import subprocess as sp import subprocess as sp
from copy import deepcopy from copy import deepcopy
from .__init__ import WINDOWS, ANYWIN from .__init__ import WINDOWS, ANYWIN, PY2
from .util import ( from .util import (
Pebkac, Pebkac,
Queue, Queue,
@@ -30,6 +30,7 @@ from .util import (
s3dec, s3dec,
statdir, statdir,
s2hms, s2hms,
min_ex,
) )
from .mtag import MTag, MParser from .mtag import MTag, MParser
@@ -39,6 +40,8 @@ try:
except: except:
HAVE_SQLITE3 = False HAVE_SQLITE3 = False
DB_VER = 4
class Up2k(object): class Up2k(object):
""" """
@@ -48,8 +51,9 @@ class Up2k(object):
* ~/.config flatfiles for active jobs * ~/.config flatfiles for active jobs
""" """
def __init__(self, hub, all_vols): def __init__(self, hub):
self.hub = hub self.hub = hub
self.asrv = hub.asrv
self.args = hub.args self.args = hub.args
self.log_func = hub.log self.log_func = hub.log
@@ -60,6 +64,8 @@ class Up2k(object):
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.hashq = Queue() self.hashq = Queue()
self.tagq = Queue() self.tagq = Queue()
self.n_hashq = 0
self.n_tagq = 0
self.volstate = {} self.volstate = {}
self.registry = {} self.registry = {}
self.entags = {} self.entags = {}
@@ -83,56 +89,86 @@ class Up2k(object):
if ANYWIN: if ANYWIN:
# usually fails to set lastmod too quickly # usually fails to set lastmod too quickly
self.lastmod_q = Queue() self.lastmod_q = Queue()
thr = threading.Thread(target=self._lastmodder) thr = threading.Thread(target=self._lastmodder, name="up2k-lastmod")
thr.daemon = True thr.daemon = True
thr.start() thr.start()
# static # static
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$") self.r_hash = re.compile("^[0-9a-zA-Z_-]{44}$")
if not HAVE_SQLITE3: if not HAVE_SQLITE3:
self.log("could not initialize sqlite3, will use in-memory registry only") self.log("could not initialize sqlite3, will use in-memory registry only")
if self.args.no_fastboot: if self.args.no_fastboot:
self.deferred_init(all_vols) self.deferred_init()
else: else:
t = threading.Thread(target=self.deferred_init, args=(all_vols,)) t = threading.Thread(
target=self.deferred_init, name="up2k-deferred-init", args=(0.5,)
)
t.daemon = True t.daemon = True
t.start() t.start()
def deferred_init(self, all_vols): def deferred_init(self, wait=0):
if wait:
time.sleep(wait)
all_vols = self.asrv.vfs.all_vols
have_e2d = self.init_indexes(all_vols) have_e2d = self.init_indexes(all_vols)
if have_e2d: if have_e2d:
thr = threading.Thread(target=self._snapshot) thr = threading.Thread(target=self._snapshot, name="up2k-snapshot")
thr.daemon = True thr.daemon = True
thr.start() thr.start()
thr = threading.Thread(target=self._hasher) thr = threading.Thread(target=self._hasher, name="up2k-hasher")
thr.daemon = True thr.daemon = True
thr.start() thr.start()
if self.mtag: if self.mtag:
thr = threading.Thread(target=self._tagger) thr = threading.Thread(target=self._tagger, name="up2k-tagger")
thr.daemon = True thr.daemon = True
thr.start() thr.start()
thr = threading.Thread(target=self._run_all_mtp) thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-init")
thr.daemon = True thr.daemon = True
thr.start() thr.start()
def log(self, msg, c=0): def log(self, msg, c=0):
self.log_func("up2k", msg + "\033[K", c) self.log_func("up2k", msg + "\033[K", c)
def get_volstate(self): def get_state(self):
return json.dumps(self.volstate, indent=4) mtpq = 0
q = "select count(w) from mt where k = 't:mtp'"
got_lock = False if PY2 else self.mutex.acquire(timeout=0.5)
if got_lock:
for cur in self.cur.values():
try:
mtpq += cur.execute(q).fetchone()[0]
except:
pass
self.mutex.release()
else:
mtpq = "?"
ret = {
"volstate": self.volstate,
"scanning": hasattr(self, "pp"),
"hashq": self.n_hashq,
"tagq": self.n_tagq,
"mtpq": mtpq,
}
return json.dumps(ret, indent=4)
def rescan(self, all_vols, scan_vols): def rescan(self, all_vols, scan_vols):
if hasattr(self, "pp"): if hasattr(self, "pp"):
return "cannot initiate; scan is already in progress" return "cannot initiate; scan is already in progress"
args = (all_vols, scan_vols) args = (all_vols, scan_vols)
t = threading.Thread(target=self.init_indexes, args=args) t = threading.Thread(
target=self.init_indexes,
args=args,
name="up2k-rescan-{}".format(scan_vols[0]),
)
t.daemon = True t.daemon = True
t.start() t.start()
return None return None
@@ -159,7 +195,7 @@ class Up2k(object):
return True, ret return True, ret
def init_indexes(self, all_vols, scan_vols=[]): def init_indexes(self, all_vols, scan_vols=None):
self.pp = ProgressPrinter() self.pp = ProgressPrinter()
vols = all_vols.values() vols = all_vols.values()
t0 = time.time() t0 = time.time()
@@ -178,6 +214,8 @@ class Up2k(object):
self.log(msg, c=3) self.log(msg, c=3)
live_vols = [] live_vols = []
with self.mutex:
# only need to protect register_vpath but all in one go feels right
for vol in vols: for vol in vols:
try: try:
os.listdir(vol.realpath) os.listdir(vol.realpath)
@@ -186,11 +224,13 @@ class Up2k(object):
self.log("cannot access " + vol.realpath, c=1) self.log("cannot access " + vol.realpath, c=1)
continue continue
if not self.register_vpath(vol.realpath, vol.flags): if scan_vols and vol.vpath not in scan_vols:
# self.log("db not enabled for {}".format(m, vol.realpath)) continue
if not self.register_vpath(vol.realpath, vol.flags):
# self.log("db not enable for {}".format(m, vol.realpath))
continue continue
if vol.vpath in scan_vols or not scan_vols:
live_vols.append(vol) live_vols.append(vol)
if vol.vpath not in self.volstate: if vol.vpath not in self.volstate:
@@ -264,14 +304,14 @@ class Up2k(object):
self.log(msg.format(len(vols), time.time() - t0)) self.log(msg.format(len(vols), time.time() - t0))
if needed_mutagen: if needed_mutagen:
msg = "could not read tags because no backends are available (mutagen or ffprobe)" msg = "could not read tags because no backends are available (Mutagen or FFprobe)"
self.log(msg, c=1) self.log(msg, c=1)
thr = None thr = None
if self.mtag: if self.mtag:
m = "online (running mtp)" m = "online (running mtp)"
if scan_vols: if scan_vols:
thr = threading.Thread(target=self._run_all_mtp) thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-scan")
thr.daemon = True thr.daemon = True
else: else:
del self.pp del self.pp
@@ -286,9 +326,13 @@ class Up2k(object):
return have_e2d return have_e2d
def register_vpath(self, ptop, flags): def register_vpath(self, ptop, flags):
db_path = os.path.join(ptop, ".hist", "up2k.db") histpath = self.asrv.vfs.histtab[ptop]
db_path = os.path.join(histpath, "up2k.db")
if ptop in self.registry: if ptop in self.registry:
try:
return [self.cur[ptop], db_path] return [self.cur[ptop], db_path]
except:
return None
_, flags = self._expr_idx_filter(flags) _, flags = self._expr_idx_filter(flags)
@@ -300,10 +344,18 @@ class Up2k(object):
for k, v in flags.items() for k, v in flags.items()
] ]
if a: if a:
self.log(" ".join(sorted(a)) + "\033[0m") vpath = "?"
for k, v in self.asrv.vfs.all_vols.items():
if v.realpath == ptop:
vpath = k
if vpath:
vpath += "/"
self.log("/{} {}".format(vpath, " ".join(sorted(a))), "35")
reg = {} reg = {}
path = os.path.join(ptop, ".hist", "up2k.snap") path = os.path.join(histpath, "up2k.snap")
if "e2d" in flags and os.path.exists(path): if "e2d" in flags and os.path.exists(path):
with gzip.GzipFile(path, "rb") as f: with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8") j = f.read().decode("utf-8")
@@ -327,7 +379,7 @@ class Up2k(object):
return None return None
try: try:
os.mkdir(os.path.join(ptop, ".hist")) os.makedirs(histpath)
except: except:
pass pass
@@ -344,6 +396,7 @@ class Up2k(object):
def _build_file_index(self, vol, all_vols): def _build_file_index(self, vol, all_vols):
do_vac = False do_vac = False
top = vol.realpath top = vol.realpath
nohash = "dhash" in vol.flags
with self.mutex: with self.mutex:
cur, _ = self.register_vpath(top, vol.flags) cur, _ = self.register_vpath(top, vol.flags)
@@ -358,7 +411,7 @@ class Up2k(object):
if WINDOWS: if WINDOWS:
excl = [x.replace("/", "\\") for x in excl] excl = [x.replace("/", "\\") for x in excl]
n_add = self._build_dir(dbw, top, set(excl), top) n_add = self._build_dir(dbw, top, set(excl), top, nohash, [])
n_rm = self._drop_lost(dbw[0], top) n_rm = self._drop_lost(dbw[0], top)
if dbw[1]: if dbw[1]:
self.log("commit {} new files".format(dbw[1])) self.log("commit {} new files".format(dbw[1]))
@@ -366,23 +419,42 @@ class Up2k(object):
return True, n_add or n_rm or do_vac return True, n_add or n_rm or do_vac
def _build_dir(self, dbw, top, excl, cdir): def _build_dir(self, dbw, top, excl, cdir, nohash, seen):
rcdir = cdir
if not ANYWIN:
try:
# a bit expensive but worth
rcdir = os.path.realpath(cdir)
except:
pass
if rcdir in seen:
m = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}"
self.log(m.format(seen[-1], rcdir, cdir), 3)
return 0
seen = seen + [cdir]
self.pp.msg = "a{} {}".format(self.pp.n, cdir) self.pp.msg = "a{} {}".format(self.pp.n, cdir)
histdir = os.path.join(top, ".hist") histpath = self.asrv.vfs.histtab[top]
ret = 0 ret = 0
for iname, inf in statdir(self.log, not self.args.no_scandir, False, cdir): g = statdir(self.log_func, not self.args.no_scandir, False, cdir)
for iname, inf in sorted(g):
abspath = os.path.join(cdir, iname) abspath = os.path.join(cdir, iname)
lmod = int(inf.st_mtime) lmod = int(inf.st_mtime)
sz = inf.st_size
if stat.S_ISDIR(inf.st_mode): if stat.S_ISDIR(inf.st_mode):
if abspath in excl or abspath == histdir: if abspath in excl or abspath == histpath:
continue continue
# self.log(" dir: {}".format(abspath)) # self.log(" dir: {}".format(abspath))
ret += self._build_dir(dbw, top, excl, abspath) ret += self._build_dir(dbw, top, excl, abspath, nohash, seen)
else: else:
# self.log("file: {}".format(abspath)) # self.log("file: {}".format(abspath))
rp = abspath[len(top) :].replace("\\", "/").strip("/") rp = abspath[len(top) + 1 :]
if WINDOWS:
rp = rp.replace("\\", "/").strip("/")
rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp] rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp]
sql = "select * from up where rd = ? and fn = ?" sql = "select w, mt, sz from up where rd = ? and fn = ?"
try: try:
c = dbw[0].execute(sql, (rd, fn)) c = dbw[0].execute(sql, (rd, fn))
except: except:
@@ -391,18 +463,18 @@ class Up2k(object):
in_db = list(c.fetchall()) in_db = list(c.fetchall())
if in_db: if in_db:
self.pp.n -= 1 self.pp.n -= 1
_, dts, dsz, _, _ = in_db[0] dw, dts, dsz = in_db[0]
if len(in_db) > 1: if len(in_db) > 1:
m = "WARN: multiple entries: [{}] => [{}] |{}|\n{}" m = "WARN: multiple entries: [{}] => [{}] |{}|\n{}"
rep_db = "\n".join([repr(x) for x in in_db]) rep_db = "\n".join([repr(x) for x in in_db])
self.log(m.format(top, rp, len(in_db), rep_db)) self.log(m.format(top, rp, len(in_db), rep_db))
dts = -1 dts = -1
if dts == lmod and dsz == inf.st_size: if dts == lmod and dsz == sz and (nohash or dw[0] != "#"):
continue continue
m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format( m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
top, rp, dts, lmod, dsz, inf.st_size top, rp, dts, lmod, dsz, sz
) )
self.log(m) self.log(m)
self.db_rm(dbw[0], rd, fn) self.db_rm(dbw[0], rd, fn)
@@ -411,7 +483,11 @@ class Up2k(object):
in_db = None in_db = None
self.pp.msg = "a{} {}".format(self.pp.n, abspath) self.pp.msg = "a{} {}".format(self.pp.n, abspath)
if inf.st_size > 1024 * 1024:
if nohash:
wark = up2k_wark_from_metadata(self.salt, sz, lmod, rd, fn)
else:
if sz > 1024 * 1024:
self.log("file: {}".format(abspath)) self.log("file: {}".format(abspath))
try: try:
@@ -420,8 +496,9 @@ class Up2k(object):
self.log("hash: {} @ [{}]".format(repr(ex), abspath)) self.log("hash: {} @ [{}]".format(repr(ex), abspath))
continue continue
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes) wark = up2k_wark_from_hashlist(self.salt, sz, hashes)
self.db_add(dbw[0], wark, rd, fn, lmod, inf.st_size)
self.db_add(dbw[0], wark, rd, fn, lmod, sz)
dbw[1] += 1 dbw[1] += 1
ret += 1 ret += 1
td = time.time() - dbw[2] td = time.time() - dbw[2]
@@ -519,7 +596,7 @@ class Up2k(object):
c2 = conn.cursor() c2 = conn.cursor()
c3 = conn.cursor() c3 = conn.cursor()
n_left = cur.execute("select count(w) from up").fetchone()[0] n_left = cur.execute("select count(w) from up").fetchone()[0]
for w, rd, fn in cur.execute("select w, rd, fn from up"): for w, rd, fn in cur.execute("select w, rd, fn from up order by rd, fn"):
n_left -= 1 n_left -= 1
q = "select w from mt where w = ?" q = "select w from mt where w = ?"
if c2.execute(q, (w[:16],)).fetchone(): if c2.execute(q, (w[:16],)).fetchone():
@@ -600,7 +677,7 @@ class Up2k(object):
try: try:
parser = MParser(parser) parser = MParser(parser)
except: except:
self.log("invalid argument: " + parser, 1) self.log("invalid argument (could not find program): " + parser, 1)
return return
for tag in entags: for tag in entags:
@@ -753,7 +830,9 @@ class Up2k(object):
mpool = Queue(nw) mpool = Queue(nw)
for _ in range(nw): for _ in range(nw):
thr = threading.Thread(target=self._tag_thr, args=(mpool,)) thr = threading.Thread(
target=self._tag_thr, args=(mpool,), name="up2k-mpool"
)
thr.daemon = True thr.daemon = True
thr.start() thr.start()
@@ -838,59 +917,31 @@ class Up2k(object):
if not existed and ver is None: if not existed and ver is None:
return self._create_db(db_path, cur) return self._create_db(db_path, cur)
-        orig_ver = ver
-        if not ver or ver < 3:
+        if ver == DB_VER:
+            try:
+                nfiles = next(cur.execute("select count(w) from up"))[0]
+                self.log("OK: {} |{}|".format(db_path, nfiles))
+                return cur
+            except:
+                self.log("WARN: could not list files; DB corrupt?\n" + min_ex())
+
+        if (ver or 0) > DB_VER:
+            m = "database is version {}, this copyparty only supports versions <= {}"
+            raise Exception(m.format(ver, DB_VER))
+
        bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
        db = cur.connection
        cur.close()
        db.close()
        msg = "creating new DB (old is bad); backup: {}"
        if ver:
-            msg = "creating backup before upgrade: {}"
+            msg = "creating new DB (too old to upgrade); backup: {}"

        self.log(msg.format(bak))
-        shutil.copy2(db_path, bak)
+        os.rename(fsenc(db_path), fsenc(bak))
cur = self._orz(db_path)
if ver == 1:
cur = self._upgrade_v1(cur, db_path)
if cur:
ver = 2
if ver == 2:
cur = self._create_v3(cur)
ver = self._read_ver(cur) if cur else None
if ver == 3:
if orig_ver != ver:
cur.connection.commit()
cur.execute("vacuum")
cur.connection.commit()
try:
nfiles = next(cur.execute("select count(w) from up"))[0]
self.log("OK: {} |{}|".format(db_path, nfiles))
return cur
except Exception as ex:
self.log("WARN: could not list files, DB corrupt?\n " + repr(ex))
if cur:
db = cur.connection
cur.close()
db.close()
return self._create_db(db_path, None) return self._create_db(db_path, None)
def _create_db(self, db_path, cur):
if not cur:
cur = self._orz(db_path)
self._create_v2(cur)
self._create_v3(cur)
cur.connection.commit()
self.log("created DB at {}".format(db_path))
return cur
def _read_ver(self, cur): def _read_ver(self, cur):
for tab in ["ki", "kv"]: for tab in ["ki", "kv"]:
try: try:
@@ -902,77 +953,51 @@ class Up2k(object):
if rows: if rows:
return int(rows[0][0]) return int(rows[0][0])
def _create_v2(self, cur): def _create_db(self, db_path, cur):
for cmd in [
r"create table up (w text, mt int, sz int, rd text, fn text)",
r"create index up_rd on up(rd)",
r"create index up_fn on up(fn)",
]:
cur.execute(cmd)
return cur
def _create_v3(self, cur):
""" """
collision in 2^(n/2) files where n = bits (6 bits/ch) collision in 2^(n/2) files where n = bits (6 bits/ch)
10*6/2 = 2^30 = 1'073'741'824, 24.1mb idx 10*6/2 = 2^30 = 1'073'741'824, 24.1mb idx 1<<(3*10)
12*6/2 = 2^36 = 68'719'476'736, 24.8mb idx 12*6/2 = 2^36 = 68'719'476'736, 24.8mb idx
16*6/2 = 2^48 = 281'474'976'710'656, 26.1mb idx 16*6/2 = 2^48 = 281'474'976'710'656, 26.1mb idx
""" """
for c, ks in [["drop table k", "isv"], ["drop index up_", "w"]]: if not cur:
for k in ks: cur = self._orz(db_path)
try:
cur.execute(c + k)
except:
pass
idx = r"create index up_w on up(substr(w,1,16))" idx = r"create index up_w on up(substr(w,1,16))"
if self.no_expr_idx: if self.no_expr_idx:
idx = r"create index up_w on up(w)" idx = r"create index up_w on up(w)"
for cmd in [ for cmd in [
r"create table up (w text, mt int, sz int, rd text, fn text)",
r"create index up_rd on up(rd)",
r"create index up_fn on up(fn)",
idx, idx,
r"create table mt (w text, k text, v int)", r"create table mt (w text, k text, v int)",
r"create index mt_w on mt(w)", r"create index mt_w on mt(w)",
r"create index mt_k on mt(k)", r"create index mt_k on mt(k)",
r"create index mt_v on mt(v)", r"create index mt_v on mt(v)",
r"create table kv (k text, v int)", r"create table kv (k text, v int)",
r"insert into kv values ('sver', 3)", r"insert into kv values ('sver', {})".format(DB_VER),
]: ]:
cur.execute(cmd) cur.execute(cmd)
cur.connection.commit()
self.log("created DB at {}".format(db_path))
return cur return cur
def _upgrade_v1(self, odb, db_path):
npath = db_path + ".next"
if os.path.exists(npath):
os.unlink(npath)
ndb = self._orz(npath)
self._create_v2(ndb)
c = odb.execute("select * from up")
for wark, ts, sz, rp in c:
rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp]
v = (wark, ts, sz, rd, fn)
ndb.execute("insert into up values (?,?,?,?,?)", v)
ndb.connection.commit()
ndb.connection.close()
odb.connection.close()
atomic_move(npath, db_path)
return self._orz(db_path)
def handle_json(self, cj): def handle_json(self, cj):
with self.mutex:
if not self.register_vpath(cj["ptop"], cj["vcfg"]): if not self.register_vpath(cj["ptop"], cj["vcfg"]):
if cj["ptop"] not in self.registry: if cj["ptop"] not in self.registry:
raise Pebkac(410, "location unavailable") raise Pebkac(410, "location unavailable")
cj["name"] = sanitize_fn(cj["name"], bad=[".prologue.html", ".epilogue.html"]) cj["name"] = sanitize_fn(cj["name"], "", [".prologue.html", ".epilogue.html"])
cj["poke"] = time.time() cj["poke"] = time.time()
wark = self._get_wark(cj) wark = self._get_wark(cj)
now = time.time() now = time.time()
job = None job = None
with self.mutex: with self.mutex:
cur = self.cur.get(cj["ptop"], None) cur = self.cur.get(cj["ptop"])
reg = self.registry[cj["ptop"]] reg = self.registry[cj["ptop"]]
if cur: if cur:
if self.no_expr_idx: if self.no_expr_idx:
@@ -1018,6 +1043,7 @@ class Up2k(object):
break break
except: except:
# missing; restart # missing; restart
if not self.args.nw:
job = None job = None
break break
else: else:
@@ -1045,6 +1071,7 @@ class Up2k(object):
pdir = os.path.join(cj["ptop"], cj["prel"]) pdir = os.path.join(cj["ptop"], cj["prel"])
job["name"] = self._untaken(pdir, cj["name"], now, cj["addr"]) job["name"] = self._untaken(pdir, cj["name"], now, cj["addr"])
dst = os.path.join(job["ptop"], job["prel"], job["name"]) dst = os.path.join(job["ptop"], job["prel"], job["name"])
if not self.args.nw:
os.unlink(fsenc(dst)) # TODO ed pls os.unlink(fsenc(dst)) # TODO ed pls
self._symlink(src, dst) self._symlink(src, dst)
@@ -1088,6 +1115,9 @@ class Up2k(object):
} }
def _untaken(self, fdir, fname, ts, ip): def _untaken(self, fdir, fname, ts, ip):
if self.args.nw:
return fname
# TODO broker which avoid this race and # TODO broker which avoid this race and
# provides a new filename if taken (same as bup) # provides a new filename if taken (same as bup)
suffix = ".{:.6f}-{}".format(ts, ip) suffix = ".{:.6f}-{}".format(ts, ip)
@@ -1097,6 +1127,9 @@ class Up2k(object):
def _symlink(self, src, dst): def _symlink(self, src, dst):
# TODO store this in linktab so we never delete src if there are links to it # TODO store this in linktab so we never delete src if there are links to it
self.log("linking dupe:\n {0}\n {1}".format(src, dst)) self.log("linking dupe:\n {0}\n {1}".format(src, dst))
if self.args.nw:
return
try: try:
lsrc = src lsrc = src
ldst = dst ldst = dst
@@ -1130,7 +1163,7 @@ class Up2k(object):
def handle_chunk(self, ptop, wark, chash): def handle_chunk(self, ptop, wark, chash):
with self.mutex: with self.mutex:
job = self.registry[ptop].get(wark, None) job = self.registry[ptop].get(wark)
if not job: if not job:
known = " ".join([x for x in self.registry[ptop].keys()]) known = " ".join([x for x in self.registry[ptop].keys()])
self.log("unknown wark [{}], known: {}".format(wark, known)) self.log("unknown wark [{}], known: {}".format(wark, known))
@@ -1174,6 +1207,10 @@ class Up2k(object):
if ret > 0: if ret > 0:
return ret, src return ret, src
if self.args.nw:
# del self.registry[ptop][wark]
return ret, dst
atomic_move(src, dst) atomic_move(src, dst)
if ANYWIN: if ANYWIN:
@@ -1195,7 +1232,7 @@ class Up2k(object):
return ret, dst return ret, dst
def idx_wark(self, ptop, wark, rd, fn, lmod, sz): def idx_wark(self, ptop, wark, rd, fn, lmod, sz):
cur = self.cur.get(ptop, None) cur = self.cur.get(ptop)
if not cur: if not cur:
return False return False
@@ -1205,6 +1242,7 @@ class Up2k(object):
if "e2t" in self.flags[ptop]: if "e2t" in self.flags[ptop]:
self.tagq.put([ptop, wark, rd, fn]) self.tagq.put([ptop, wark, rd, fn])
self.n_tagq += 1
return True return True
@@ -1265,9 +1303,9 @@ class Up2k(object):
hashobj.update(buf) hashobj.update(buf)
rem -= len(buf) rem -= len(buf)
digest = hashobj.digest()[:32] digest = hashobj.digest()[:33]
digest = base64.urlsafe_b64encode(digest) digest = base64.urlsafe_b64encode(digest)
ret.append(digest.decode("utf-8").rstrip("=")) ret.append(digest.decode("utf-8"))
return ret return ret
@@ -1282,6 +1320,10 @@ class Up2k(object):
if self.args.dotpart: if self.args.dotpart:
tnam = "." + tnam tnam = "." + tnam
if self.args.nw:
job["tnam"] = tnam
return
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"]) suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f: with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
f, job["tnam"] = f["orz"] f, job["tnam"] = f["orz"]
@@ -1321,20 +1363,24 @@ class Up2k(object):
self.log("could not unsparse [{}]".format(path), 3) self.log("could not unsparse [{}]".format(path), 3)
def _snapshot(self): def _snapshot(self):
persist_interval = 30 # persist unfinished uploads index every 30 sec self.snap_persist_interval = 300 # persist unfinished index every 5 min
discard_interval = 21600 # drop unfinished uploads after 6 hours inactivity self.snap_discard_interval = 21600 # drop unfinished after 6 hours inactivity
prev = {} self.snap_prev = {}
while True: while True:
time.sleep(persist_interval) time.sleep(self.snap_persist_interval)
self.do_snapshot()
def do_snapshot(self):
with self.mutex: with self.mutex:
for k, reg in self.registry.items(): for k, reg in self.registry.items():
self._snap_reg(prev, k, reg, discard_interval) self._snap_reg(k, reg)
def _snap_reg(self, prev, k, reg, discard_interval): def _snap_reg(self, ptop, reg):
now = time.time() now = time.time()
rm = [x for x in reg.values() if now - x["poke"] > discard_interval] histpath = self.asrv.vfs.histtab[ptop]
rm = [x for x in reg.values() if now - x["poke"] > self.snap_discard_interval]
if rm: if rm:
m = "dropping {} abandoned uploads in {}".format(len(rm), k) m = "dropping {} abandoned uploads in {}".format(len(rm), ptop)
vis = [self._vis_job_progress(x) for x in rm] vis = [self._vis_job_progress(x) for x in rm]
self.log("\n".join([m] + vis)) self.log("\n".join([m] + vis))
for job in rm: for job in rm:
@@ -1352,21 +1398,21 @@ class Up2k(object):
except: except:
pass pass
path = os.path.join(k, ".hist", "up2k.snap") path = os.path.join(histpath, "up2k.snap")
if not reg: if not reg:
if k not in prev or prev[k] is not None: if ptop not in self.snap_prev or self.snap_prev[ptop] is not None:
prev[k] = None self.snap_prev[ptop] = None
if os.path.exists(fsenc(path)): if os.path.exists(fsenc(path)):
os.unlink(fsenc(path)) os.unlink(fsenc(path))
return return
newest = max(x["poke"] for _, x in reg.items()) if reg else 0 newest = max(x["poke"] for _, x in reg.items()) if reg else 0
etag = [len(reg), newest] etag = [len(reg), newest]
if etag == prev.get(k, None): if etag == self.snap_prev.get(ptop):
return return
try: try:
os.mkdir(os.path.join(k, ".hist")) os.makedirs(histpath)
except: except:
pass pass
@@ -1378,14 +1424,21 @@ class Up2k(object):
atomic_move(path2, path) atomic_move(path2, path)
self.log("snap: {} |{}|".format(path, len(reg.keys()))) self.log("snap: {} |{}|".format(path, len(reg.keys())))
prev[k] = etag self.snap_prev[ptop] = etag
def _tagger(self): def _tagger(self):
with self.mutex:
self.n_tagq += 1
while True: while True:
with self.mutex:
self.n_tagq -= 1
ptop, wark, rd, fn = self.tagq.get() ptop, wark, rd, fn = self.tagq.get()
if "e2t" not in self.flags[ptop]: if "e2t" not in self.flags[ptop]:
continue continue
# self.log("\n " + repr([ptop, rd, fn]))
abspath = os.path.join(ptop, rd, fn) abspath = os.path.join(ptop, rd, fn)
tags = self.mtag.get(abspath) tags = self.mtag.get(abspath)
ntags1 = len(tags) ntags1 = len(tags)
@@ -1411,8 +1464,16 @@ class Up2k(object):
self.log("tagged {} ({}+{})".format(abspath, ntags1, len(tags) - ntags1)) self.log("tagged {} ({}+{})".format(abspath, ntags1, len(tags) - ntags1))
def _hasher(self): def _hasher(self):
with self.mutex:
self.n_hashq += 1
while True: while True:
with self.mutex:
self.n_hashq -= 1
# self.log("hashq {}".format(self.n_hashq))
ptop, rd, fn = self.hashq.get() ptop, rd, fn = self.hashq.get()
# self.log("hashq {} pop {}/{}/{}".format(self.n_hashq, ptop, rd, fn))
if "e2d" not in self.flags[ptop]: if "e2d" not in self.flags[ptop]:
continue continue
@@ -1425,8 +1486,16 @@ class Up2k(object):
self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size) self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size)
def hash_file(self, ptop, flags, rd, fn): def hash_file(self, ptop, flags, rd, fn):
with self.mutex:
self.register_vpath(ptop, flags) self.register_vpath(ptop, flags)
self.hashq.put([ptop, rd, fn]) self.hashq.put([ptop, rd, fn])
self.n_hashq += 1
# self.log("hashq {} push {}/{}/{}".format(self.n_hashq, ptop, rd, fn))
def shutdown(self):
if hasattr(self, "snap_prev"):
self.log("writing snapshot")
self.do_snapshot()
def up2k_chunksize(filesize): def up2k_chunksize(filesize):
@@ -1448,9 +1517,12 @@ def up2k_wark_from_hashlist(salt, filesize, hashes):
ident.extend(hashes) ident.extend(hashes)
ident = "\n".join(ident) ident = "\n".join(ident)
-    hasher = hashlib.sha512()
-    hasher.update(ident.encode("utf-8"))
-
-    digest = hasher.digest()[:32]
-    wark = base64.urlsafe_b64encode(digest)
-    return wark.decode("utf-8").rstrip("=")
+    wark = hashlib.sha512(ident.encode("utf-8")).digest()[:33]
+    wark = base64.urlsafe_b64encode(wark)
+    return wark.decode("ascii")
+
+
+def up2k_wark_from_metadata(salt, sz, lastmod, rd, fn):
+    ret = fsenc("{}\n{}\n{}\n{}\n{}".format(salt, lastmod, sz, rd, fn))
+    ret = base64.urlsafe_b64encode(hashlib.sha512(ret).digest())
+    return "#{}".format(ret.decode("ascii"))[:44]


@@ -16,6 +16,7 @@ import mimetypes
import contextlib import contextlib
import subprocess as sp # nosec import subprocess as sp # nosec
from datetime import datetime from datetime import datetime
from collections import Counter
from .__init__ import PY2, WINDOWS, ANYWIN from .__init__ import PY2, WINDOWS, ANYWIN
from .stolen import surrogateescape from .stolen import surrogateescape
@@ -42,6 +43,20 @@ else:
from Queue import Queue # pylint: disable=import-error,no-name-in-module from Queue import Queue # pylint: disable=import-error,no-name-in-module
from StringIO import StringIO as BytesIO from StringIO import StringIO as BytesIO
try:
struct.unpack(b">i", b"idgi")
spack = struct.pack
sunpack = struct.unpack
except:
def spack(f, *a, **ka):
return struct.pack(f.decode("ascii"), *a, **ka)
def sunpack(f, *a, **ka):
return struct.unpack(f.decode("ascii"), *a, **ka)
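The spack/sunpack wrappers above probe once at import time whether struct accepts bytes format strings and, if not, decode the format to str before delegating; call sites can then always pass b"..." formats. Usage is the same on either path (import path assumed):

# usage sketch: identical call on py2 and py3 through the wrappers
from copyparty.util import spack, sunpack  # assumed import path

buf = spack(b"<HH", 1, 2)    # b'\x01\x00\x02\x00'
print(sunpack(b"<HH", buf))  # (1, 2)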
surrogateescape.register_surrogateescape() surrogateescape.register_surrogateescape()
FS_ENCODING = sys.getfilesystemencoding() FS_ENCODING = sys.getfilesystemencoding()
if WINDOWS and PY2: if WINDOWS and PY2:
@@ -123,20 +138,6 @@ REKOBO_KEY = {
REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()} REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}
class Counter(object):
def __init__(self, v=0):
self.v = v
self.mutex = threading.Lock()
def add(self, delta=1):
with self.mutex:
self.v += delta
def set(self, absval):
with self.mutex:
self.v = absval
class Cooldown(object): class Cooldown(object):
def __init__(self, maxage): def __init__(self, maxage):
self.maxage = maxage self.maxage = maxage
@@ -193,7 +194,7 @@ class ProgressPrinter(threading.Thread):
""" """
def __init__(self): def __init__(self):
threading.Thread.__init__(self) threading.Thread.__init__(self, name="pp")
self.daemon = True self.daemon = True
self.msg = None self.msg = None
self.end = False self.end = False
@@ -208,6 +209,8 @@ class ProgressPrinter(threading.Thread):
msg = self.msg msg = self.msg
uprint(" {}\033[K\r".format(msg)) uprint(" {}\033[K\r".format(msg))
if PY2:
sys.stdout.flush()
print("\033[K", end="") print("\033[K", end="")
sys.stdout.flush() # necessary on win10 even w/ stderr btw sys.stdout.flush() # necessary on win10 even w/ stderr btw
@@ -229,7 +232,7 @@ def nuprint(msg):
def rice_tid(): def rice_tid():
tid = threading.current_thread().ident tid = threading.current_thread().ident
c = struct.unpack(b"B" * 5, struct.pack(b">Q", tid)[-5:]) c = sunpack(b"B" * 5, spack(b">Q", tid)[-5:])
return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m" return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m"
@@ -252,6 +255,99 @@ def trace(*args, **kwargs):
nuprint(msg) nuprint(msg)
def alltrace():
threads = {}
names = dict([(t.ident, t.name) for t in threading.enumerate()])
for tid, stack in sys._current_frames().items():
name = "{} ({:x})".format(names.get(tid), tid)
threads[name] = stack
rret = []
bret = []
for name, stack in sorted(threads.items()):
ret = ["\n\n# {}".format(name)]
pad = None
for fn, lno, name, line in traceback.extract_stack(stack):
fn = os.sep.join(fn.split(os.sep)[-3:])
ret.append('File: "{}", line {}, in {}'.format(fn, lno, name))
if line:
ret.append(" " + str(line.strip()))
if "self.not_empty.wait()" in line:
pad = " " * 4
if pad:
bret += [ret[0]] + [pad + x for x in ret[1:]]
else:
rret += ret
return "\n".join(rret + bret)
def start_stackmon(arg_str, nid):
suffix = "-{}".format(nid) if nid else ""
fp, f = arg_str.rsplit(",", 1)
f = int(f)
t = threading.Thread(
target=stackmon,
args=(fp, f, suffix),
name="stackmon" + suffix,
)
t.daemon = True
t.start()
def stackmon(fp, ival, suffix):
ctr = 0
while True:
ctr += 1
time.sleep(ival)
st = "{}, {}\n{}".format(ctr, time.time(), alltrace())
with open(fp + suffix, "wb") as f:
f.write(st.encode("utf-8", "replace"))
def start_log_thrs(logger, ival, nid):
ival = int(ival)
tname = lname = "log-thrs"
if nid:
tname = "logthr-n{}-i{:x}".format(nid, os.getpid())
lname = tname[3:]
t = threading.Thread(
target=log_thrs,
args=(logger, ival, lname),
name=tname,
)
t.daemon = True
t.start()
def log_thrs(log, ival, name):
while True:
time.sleep(ival)
tv = [x.name for x in threading.enumerate()]
tv = [
x.split("-")[0]
if x.startswith("httpconn-") or x.startswith("thumb-")
else "listen"
if "-listen-" in x
else x
for x in tv
if not x.startswith("pydevd.")
]
tv = ["{}\033[36m{}".format(v, k) for k, v in sorted(Counter(tv).items())]
log(name, "\033[0m \033[33m".join(tv), 3)
def min_ex():
et, ev, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
fmt = "{} @ {} <{}>: {}"
ex = [fmt.format(fp.split(os.sep)[-1], ln, fun, txt) for fp, ln, fun, txt in tb]
ex.append("[{}] {}".format(et.__name__, ev))
return "\n".join(ex[-8:])
@contextlib.contextmanager @contextlib.contextmanager
def ren_open(fname, *args, **kwargs): def ren_open(fname, *args, **kwargs):
fdir = kwargs.pop("fdir", None) fdir = kwargs.pop("fdir", None)
@@ -262,6 +358,11 @@ def ren_open(fname, *args, **kwargs):
yield {"orz": [f, fname]} yield {"orz": [f, fname]}
return return
if suffix:
ext = fname.split(".")[-1]
if len(ext) < 7:
suffix += "." + ext
orig_name = fname orig_name = fname
bname = fname bname = fname
ext = "" ext = ""
@@ -305,7 +406,7 @@ def ren_open(fname, *args, **kwargs):
if not b64: if not b64:
b64 = (bname + ext).encode("utf-8", "replace") b64 = (bname + ext).encode("utf-8", "replace")
b64 = hashlib.sha512(b64).digest()[:12] b64 = hashlib.sha512(b64).digest()[:12]
b64 = base64.urlsafe_b64encode(b64).decode("utf-8").rstrip("=") b64 = base64.urlsafe_b64encode(b64).decode("utf-8")
badlen = len(fname) badlen = len(fname)
while len(fname) >= badlen: while len(fname) >= badlen:
@@ -561,8 +662,10 @@ def read_header(sr):
else: else:
continue continue
if len(ret) > ofs + 4:
sr.unrecv(ret[ofs + 4 :]) sr.unrecv(ret[ofs + 4 :])
return ret[:ofs].decode("utf-8", "surrogateescape").split("\r\n")
return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")
def humansize(sz, terse=False): def humansize(sz, terse=False):
@@ -600,6 +703,16 @@ def s2hms(s, optional_h=False):
return "{}:{:02}:{:02}".format(h, m, s) return "{}:{:02}:{:02}".format(h, m, s)
def uncyg(path):
if len(path) < 2 or not path.startswith("/"):
return path
if len(path) > 2 and path[2] != "/":
return path
return "{}:\\{}".format(path[1], path[3:])
def undot(path): def undot(path):
ret = [] ret = []
for node in path.split("/"): for node in path.split("/"):
@@ -616,7 +729,7 @@ def undot(path):
return "/".join(ret) return "/".join(ret)
def sanitize_fn(fn, ok="", bad=[]): def sanitize_fn(fn, ok, bad):
if "/" not in ok: if "/" not in ok:
fn = fn.replace("\\", "/").split("/")[-1] fn = fn.replace("\\", "/").split("/")[-1]
@@ -846,21 +959,16 @@ def yieldfile(fn):
yield buf yield buf
def hashcopy(actor, fin, fout): def hashcopy(fin, fout):
u32_lim = int((2 ** 31) * 0.9)
hashobj = hashlib.sha512() hashobj = hashlib.sha512()
tlen = 0 tlen = 0
for buf in fin: for buf in fin:
actor.workload += 1
if actor.workload > u32_lim:
actor.workload = 100 # prevent overflow
tlen += len(buf) tlen += len(buf)
hashobj.update(buf) hashobj.update(buf)
fout.write(buf) fout.write(buf)
digest32 = hashobj.digest()[:32] digest = hashobj.digest()[:33]
digest_b64 = base64.urlsafe_b64encode(digest32).decode("utf-8").rstrip("=") digest_b64 = base64.urlsafe_b64encode(digest).decode("utf-8")
return tlen, hashobj.hexdigest(), digest_b64 return tlen, hashobj.hexdigest(), digest_b64
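# sketch of why the rstrip("=") could go away: 33 bytes of sha512 encode to
# exactly 44 urlsafe-base64 chars with no padding (33 is divisible by 3)
import base64, hashlib
d = hashlib.sha512(b"example").digest()[:33]
b64 = base64.urlsafe_b64encode(d).decode("utf-8")
assert len(b64) == 44 and "=" not in b64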
@@ -870,7 +978,7 @@ def sendfile_py(lower, upper, f, s):
f.seek(lower) f.seek(lower)
while remains > 0: while remains > 0:
# time.sleep(0.01) # time.sleep(0.01)
buf = f.read(min(4096, remains)) buf = f.read(min(1024 * 32, remains))
if not buf: if not buf:
return remains return remains
@@ -915,8 +1023,7 @@ def statdir(logger, scandir, lstat, top):
try: try:
yield [fsdec(fh.name), fh.stat(follow_symlinks=not lstat)] yield [fsdec(fh.name), fh.stat(follow_symlinks=not lstat)]
except Exception as ex: except Exception as ex:
msg = "scan-stat: \033[36m{} @ {}" logger(src, "[s] {} @ {}".format(repr(ex), fsdec(fh.path)), 6)
logger(msg.format(repr(ex), fsdec(fh.path)))
else: else:
src = "listdir" src = "listdir"
fun = os.lstat if lstat else os.stat fun = os.lstat if lstat else os.stat
@@ -925,11 +1032,10 @@ def statdir(logger, scandir, lstat, top):
try: try:
yield [fsdec(name), fun(abspath)] yield [fsdec(name), fun(abspath)]
except Exception as ex: except Exception as ex:
msg = "list-stat: \033[36m{} @ {}" logger(src, "[s] {} @ {}".format(repr(ex), fsdec(abspath)), 6)
logger(msg.format(repr(ex), fsdec(abspath)))
except Exception as ex: except Exception as ex:
logger("{}: \033[31m{} @ {}".format(src, repr(ex), top)) logger(src, "{} @ {}".format(repr(ex), top), 1)
def unescape_cookie(orig): def unescape_cookie(orig):
@@ -966,14 +1072,20 @@ def guess_mime(url, fallback="application/octet-stream"):
except: except:
return fallback return fallback
return MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback ret = MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback
if ";" not in ret:
if ret.startswith("text/") or ret.endswith("/javascript"):
ret += "; charset=UTF-8"
return ret
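# behavior sketch (hypothetical filenames; exact values depend on the MIMES
# table and the local mimetypes registry): text and javascript responses now
# get an explicit utf-8 charset appended
print(guess_mime("notes.txt"))   # e.g. "text/plain; charset=UTF-8"
print(guess_mime("photo.jpg"))   # e.g. "image/jpeg"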
def runcmd(*argv): def runcmd(*argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE) p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
stdout, stderr = p.communicate() stdout, stderr = p.communicate()
stdout = stdout.decode("utf-8") stdout = stdout.decode("utf-8", "replace")
stderr = stderr.decode("utf-8") stderr = stderr.decode("utf-8", "replace")
return [p.returncode, stdout, stderr] return [p.returncode, stdout, stderr]
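# usage sketch (assumes ffprobe is installed and on PATH):
rc, out, err = runcmd("ffprobe", "-version")
if rc == 0:
    print(out.split("\n")[0])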
@@ -1000,10 +1112,7 @@ def gzip_orig_sz(fn):
with open(fsenc(fn), "rb") as f: with open(fsenc(fn), "rb") as f:
f.seek(-4, 2) f.seek(-4, 2)
rv = f.read(4) rv = f.read(4)
try: return sunpack(b"I", rv)[0]
return struct.unpack(b"I", rv)[0]
except:
return struct.unpack("I", rv)[0]
def py_desc(): def py_desc():

View File

@@ -0,0 +1,733 @@
/*!
* baguetteBox.js
* @author feimosi
* @version 1.11.1-mod
* @url https://github.com/feimosi/baguetteBox.js
*/
window.baguetteBox = (function () {
'use strict';
var options = {},
defaults = {
captions: true,
buttons: 'auto',
noScrollbars: false,
bodyClass: 'bbox-open',
titleTag: false,
async: false,
preload: 2,
animation: 'slideIn',
afterShow: null,
afterHide: null,
onChange: null,
},
overlay, slider, btnPrev, btnNext, btnHelp, btnVmode, btnClose,
currentGallery = [],
currentIndex = 0,
isOverlayVisible = false,
touch = {}, // start-pos
touchFlag = false, // busy
re_i = /.+\.(gif|jpe?g|png|webp)(\?|$)/i,
re_v = /.+\.(webm|mp4)(\?|$)/i,
data = {}, // all galleries
imagesElements = [],
documentLastFocus = null,
isFullscreen = false,
vmute = false,
vloop = false,
vnext = false,
resume_mp = false;
var onFSC = function (e) {
isFullscreen = !!document.fullscreenElement;
};
var overlayClickHandler = function (e) {
if (e.target.id.indexOf('baguette-img') !== -1)
hideOverlay();
};
var touchstartHandler = function (e) {
touch.count++;
if (touch.count > 1)
touch.multitouch = true;
touch.startX = e.changedTouches[0].pageX;
touch.startY = e.changedTouches[0].pageY;
};
var touchmoveHandler = function (e) {
if (touchFlag || touch.multitouch)
return;
e.preventDefault ? e.preventDefault() : e.returnValue = false;
var touchEvent = e.touches[0] || e.changedTouches[0];
if (touchEvent.pageX - touch.startX > 40) {
touchFlag = true;
showPreviousImage();
} else if (touchEvent.pageX - touch.startX < -40) {
touchFlag = true;
showNextImage();
} else if (touch.startY - touchEvent.pageY > 100) {
hideOverlay();
}
};
var touchendHandler = function () {
touch.count--;
if (touch.count <= 0)
touch.multitouch = false;
touchFlag = false;
};
var contextmenuHandler = function () {
touchendHandler();
};
var trapFocusInsideOverlay = function (e) {
if (overlay.style.display === 'block' && (overlay.contains && !overlay.contains(e.target))) {
e.stopPropagation();
btnClose.focus();
}
};
function run(selector, userOptions) {
buildOverlay();
removeFromCache(selector);
return bindImageClickListeners(selector, userOptions);
}
function bindImageClickListeners(selector, userOptions) {
var galleryNodeList = QSA(selector);
var selectorData = {
galleries: [],
nodeList: galleryNodeList
};
data[selector] = selectorData;
[].forEach.call(galleryNodeList, function (galleryElement) {
var tagsNodeList = [];
if (galleryElement.tagName === 'A')
tagsNodeList = [galleryElement];
else
tagsNodeList = galleryElement.getElementsByTagName('a');
tagsNodeList = [].filter.call(tagsNodeList, function (element) {
if (element.className.indexOf(userOptions && userOptions.ignoreClass) === -1)
return re_i.test(element.href) || re_v.test(element.href);
});
if (!tagsNodeList.length)
return;
var gallery = [];
[].forEach.call(tagsNodeList, function (imageElement, imageIndex) {
var imageElementClickHandler = function (e) {
if (ctrl(e))
return true;
e.preventDefault ? e.preventDefault() : e.returnValue = false;
prepareOverlay(gallery, userOptions);
showOverlay(imageIndex);
};
var imageItem = {
eventHandler: imageElementClickHandler,
imageElement: imageElement
};
bind(imageElement, 'click', imageElementClickHandler);
gallery.push(imageItem);
});
selectorData.galleries.push(gallery);
});
return selectorData.galleries;
}
function clearCachedData() {
for (var selector in data)
if (data.hasOwnProperty(selector))
removeFromCache(selector);
}
function removeFromCache(selector) {
if (!data.hasOwnProperty(selector))
return;
var galleries = data[selector].galleries;
[].forEach.call(galleries, function (gallery) {
[].forEach.call(gallery, function (imageItem) {
unbind(imageItem.imageElement, 'click', imageItem.eventHandler);
});
if (currentGallery === gallery)
currentGallery = [];
});
delete data[selector];
}
function buildOverlay() {
overlay = ebi('bbox-overlay');
if (!overlay) {
var ctr = mknod('div');
ctr.innerHTML = (
'<div id="bbox-overlay" role="dialog">' +
'<div id="bbox-slider"></div>' +
'<button id="bbox-prev" class="bbox-btn" type="button" aria-label="Previous">&lt;</button>' +
'<button id="bbox-next" class="bbox-btn" type="button" aria-label="Next">&gt;</button>' +
'<div id="bbox-btns">' +
'<button id="bbox-help" type="button">?</button>' +
'<button id="bbox-vmode" type="button" tt="a"></button>' +
'<button id="bbox-close" type="button" aria-label="Close">X</button>' +
'</div></div>'
);
overlay = ctr.firstChild;
QS('body').appendChild(overlay);
tt.att(overlay);
}
slider = ebi('bbox-slider');
btnPrev = ebi('bbox-prev');
btnNext = ebi('bbox-next');
btnHelp = ebi('bbox-help');
btnVmode = ebi('bbox-vmode');
btnClose = ebi('bbox-close');
bindEvents();
}
function halp() {
if (ebi('bbox-halp'))
return;
var list = [
['<b># hotkey</b>', '<b># operation</b>'],
['escape', 'close'],
['left, J', 'previous file'],
['right, L', 'next file'],
['home', 'first file'],
['end', 'last file'],
['space, P, K', 'video: play / pause'],
['U', 'video: seek 10sec back'],
['O', 'video: seek 10sec ahead'],
['M', 'video: toggle mute'],
['R', 'video: toggle loop'],
['C', 'video: toggle auto-next'],
['F', 'video: toggle fullscreen'],
],
d = mknod('table'),
html = ['<tbody>'];
for (var a = 0; a < list.length; a++)
html.push('<tr><td>' + list[a][0] + '</td><td>' + list[a][1] + '</td></tr>');
d.innerHTML = html.join('\n') + '</tbody>';
d.setAttribute('id', 'bbox-halp');
d.onclick = function () {
overlay.removeChild(d);
};
overlay.appendChild(d);
}
function keyDownHandler(e) {
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
return;
var k = e.code + '', v = vid();
if (k == "ArrowLeft" || k == "KeyJ")
showPreviousImage();
else if (k == "ArrowRight" || k == "KeyL")
showNextImage();
else if (k == "Escape")
hideOverlay();
else if (k == "Home")
showFirstImage(e);
else if (k == "End")
showLastImage(e);
else if (k == "Space" || k == "KeyP" || k == "KeyK")
playpause();
else if (k == "KeyU" || k == "KeyO")
relseek(k == "KeyU" ? -10 : 10);
else if (k == "KeyM" && v) {
v.muted = vmute = !vmute;
mp_ctl();
}
else if (k == "KeyR" && v) {
vloop = !vloop;
vnext = vnext && !vloop;
setVmode();
}
else if (k == "KeyC" && v) {
vnext = !vnext;
vloop = vloop && !vnext;
setVmode();
}
else if (k == "KeyF")
try {
if (isFullscreen)
document.exitFullscreen();
else
v.requestFullscreen();
}
catch (ex) { }
}
function setVmode() {
var v = vid();
ebi('bbox-vmode').style.display = v ? '' : 'none';
if (!v)
return;
var msg = 'When video ends, ', tts = '', lbl;
if (vloop) {
lbl = 'Loop';
msg += 'repeat it';
tts = '$NHotkey: R';
}
else if (vnext) {
lbl = 'Cont';
msg += 'continue to next';
tts = '$NHotkey: C';
}
else {
lbl = 'Stop';
msg += 'just stop'
}
btnVmode.setAttribute('aria-label', msg);
btnVmode.setAttribute('tt', msg + tts);
btnVmode.textContent = lbl;
v.loop = vloop
if (vloop && v.paused)
v.play();
}
function tglVmode() {
if (vloop) {
vnext = true;
vloop = false;
}
else if (vnext)
vnext = false;
else
vloop = true;
setVmode();
if (tt.en)
tt.show.bind(this)();
}
function keyUpHandler(e) {
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
return;
var k = e.code + '';
if (k == "Space")
ev(e);
}
var passiveSupp = false;
try {
var opts = {
get passive() {
passiveSupp = true;
return false;
}
};
window.addEventListener('test', null, opts);
window.removeEventListener('test', null, opts);
}
catch (ex) {
passiveSupp = false;
}
var passiveEvent = passiveSupp ? { passive: false } : null;
var nonPassiveEvent = passiveSupp ? { passive: true } : null;
function bindEvents() {
bind(overlay, 'click', overlayClickHandler);
bind(btnPrev, 'click', showPreviousImage);
bind(btnNext, 'click', showNextImage);
bind(btnClose, 'click', hideOverlay);
bind(btnVmode, 'click', tglVmode);
bind(btnHelp, 'click', halp);
bind(slider, 'contextmenu', contextmenuHandler);
bind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
bind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
bind(overlay, 'touchend', touchendHandler);
bind(document, 'focus', trapFocusInsideOverlay, true);
}
function unbindEvents() {
unbind(overlay, 'click', overlayClickHandler);
unbind(btnPrev, 'click', showPreviousImage);
unbind(btnNext, 'click', showNextImage);
unbind(btnClose, 'click', hideOverlay);
unbind(btnVmode, 'click', tglVmode);
unbind(btnHelp, 'click', halp);
unbind(slider, 'contextmenu', contextmenuHandler);
unbind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
unbind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
unbind(overlay, 'touchend', touchendHandler);
unbind(document, 'focus', trapFocusInsideOverlay, true);
}
function prepareOverlay(gallery, userOptions) {
if (currentGallery === gallery)
return;
currentGallery = gallery;
setOptions(userOptions);
slider.innerHTML = '';
imagesElements.length = 0;
var imagesFiguresIds = [];
var imagesCaptionsIds = [];
for (var i = 0, fullImage; i < gallery.length; i++) {
fullImage = mknod('div');
fullImage.className = 'full-image';
fullImage.id = 'baguette-img-' + i;
imagesElements.push(fullImage);
imagesFiguresIds.push('bbox-figure-' + i);
imagesCaptionsIds.push('bbox-figcaption-' + i);
slider.appendChild(imagesElements[i]);
}
overlay.setAttribute('aria-labelledby', imagesFiguresIds.join(' '));
overlay.setAttribute('aria-describedby', imagesCaptionsIds.join(' '));
}
function setOptions(newOptions) {
if (!newOptions)
newOptions = {};
for (var item in defaults) {
options[item] = defaults[item];
if (typeof newOptions[item] !== 'undefined')
options[item] = newOptions[item];
}
slider.style.transition = (options.animation === 'fadeIn' ? 'opacity .4s ease' :
options.animation === 'slideIn' ? '' : 'none');
if (options.buttons === 'auto' && ('ontouchstart' in window || currentGallery.length === 1))
options.buttons = false;
btnPrev.style.display = btnNext.style.display = (options.buttons ? '' : 'none');
}
function showOverlay(chosenImageIndex) {
if (options.noScrollbars) {
document.documentElement.style.overflowY = 'hidden';
document.body.style.overflowY = 'scroll';
}
if (overlay.style.display === 'block')
return;
bind(document, 'keydown', keyDownHandler);
bind(document, 'keyup', keyUpHandler);
bind(document, 'fullscreenchange', onFSC);
currentIndex = chosenImageIndex;
touch = {
count: 0,
startX: null,
startY: null
};
loadImage(currentIndex, function () {
preloadNext(currentIndex);
preloadPrev(currentIndex);
});
updateOffset();
overlay.style.display = 'block';
// Fade in overlay
setTimeout(function () {
overlay.className = 'visible';
if (options.bodyClass && document.body.classList)
document.body.classList.add(options.bodyClass);
if (options.afterShow)
options.afterShow();
}, 50);
if (options.onChange)
options.onChange(currentIndex, imagesElements.length);
documentLastFocus = document.activeElement;
btnClose.focus();
isOverlayVisible = true;
}
function hideOverlay(e) {
ev(e);
playvid(false);
if (options.noScrollbars) {
document.documentElement.style.overflowY = 'auto';
document.body.style.overflowY = 'auto';
}
if (overlay.style.display === 'none')
return;
unbind(document, 'keydown', keyDownHandler);
unbind(document, 'keyup', keyUpHandler);
unbind(document, 'fullscreenchange', onFSC);
// Fade out and hide the overlay
overlay.className = '';
setTimeout(function () {
overlay.style.display = 'none';
if (options.bodyClass && document.body.classList)
document.body.classList.remove(options.bodyClass);
var h = ebi('bbox-halp');
if (h)
h.parentNode.removeChild(h);
if (options.afterHide)
options.afterHide();
documentLastFocus && documentLastFocus.focus();
isOverlayVisible = false;
}, 500);
}
function loadImage(index, callback) {
var imageContainer = imagesElements[index];
var galleryItem = currentGallery[index];
if (typeof imageContainer === 'undefined' || typeof galleryItem === 'undefined')
return; // out-of-bounds or gallery dirty
if (imageContainer.querySelector('img, video'))
// was loaded, cb and bail
return callback ? callback() : null;
// maybe unloaded video
while (imageContainer.firstChild)
imageContainer.removeChild(imageContainer.firstChild);
var imageElement = galleryItem.imageElement,
imageSrc = imageElement.href,
is_vid = re_v.test(imageSrc),
thumbnailElement = imageElement.querySelector('img, video'),
imageCaption = typeof options.captions === 'function' ?
options.captions.call(currentGallery, imageElement) :
imageElement.getAttribute('data-caption') || imageElement.title;
imageSrc += imageSrc.indexOf('?') < 0 ? '?cache' : '&cache';
if (is_vid && index != currentIndex)
return; // no preload
var figure = mknod('figure');
figure.id = 'bbox-figure-' + index;
figure.innerHTML = '<div class="bbox-spinner">' +
'<div class="bbox-double-bounce1"></div>' +
'<div class="bbox-double-bounce2"></div>' +
'</div>';
if (options.captions && imageCaption) {
var figcaption = mknod('figcaption');
figcaption.id = 'bbox-figcaption-' + index;
figcaption.innerHTML = imageCaption;
figure.appendChild(figcaption);
}
imageContainer.appendChild(figure);
var image = mknod(is_vid ? 'video' : 'img');
clmod(imageContainer, 'vid', is_vid);
image.addEventListener(is_vid ? 'loadedmetadata' : 'load', function () {
// Remove loader element
var spinner = QS('#baguette-img-' + index + ' .bbox-spinner');
figure.removeChild(spinner);
if (!options.async && callback)
callback();
});
image.setAttribute('src', imageSrc);
if (is_vid) {
image.setAttribute('controls', 'controls');
image.onended = vidEnd;
}
image.alt = thumbnailElement ? thumbnailElement.alt || '' : '';
if (options.titleTag && imageCaption)
image.title = imageCaption;
figure.appendChild(image);
if (options.async && callback)
callback();
}
function showNextImage(e) {
ev(e);
return show(currentIndex + 1);
}
function showPreviousImage(e) {
ev(e);
return show(currentIndex - 1);
}
function showFirstImage(e) {
if (e)
e.preventDefault();
return show(0);
}
function showLastImage(e) {
if (e)
e.preventDefault();
return show(currentGallery.length - 1);
}
function show(index, gallery) {
if (!isOverlayVisible && index >= 0 && index < gallery.length) {
prepareOverlay(gallery, options);
showOverlay(index);
return true;
}
if (index < 0) {
if (options.animation)
bounceAnimation('left');
return false;
}
if (index >= imagesElements.length) {
if (options.animation)
bounceAnimation('right');
return false;
}
var v = vid();
if (v) {
v.src = '';
v.load();
v.parentNode.removeChild(v);
}
currentIndex = index;
loadImage(currentIndex, function () {
preloadNext(currentIndex);
preloadPrev(currentIndex);
});
updateOffset();
if (options.onChange)
options.onChange(currentIndex, imagesElements.length);
return true;
}
function vid() {
return imagesElements[currentIndex].querySelector('video');
}
function playvid(play) {
if (vid())
vid()[play ? 'play' : 'pause']();
}
function playpause() {
var v = vid();
if (v)
v[v.paused ? "play" : "pause"]();
}
function relseek(sec) {
if (vid())
vid().currentTime += sec;
}
function vidEnd() {
if (this == vid() && vnext)
showNextImage();
}
function mp_ctl() {
var v = vid();
if (!vmute && v && mp.au && !mp.au.paused) {
mp.fade_out();
resume_mp = true;
}
else if (resume_mp && (vmute || !v) && mp.au && mp.au.paused) {
mp.fade_in();
resume_mp = false;
}
}
function bounceAnimation(direction) {
slider.className = 'bounce-from-' + direction;
setTimeout(function () {
slider.className = '';
}, 400);
}
function updateOffset() {
var offset = -currentIndex * 100 + '%';
if (options.animation === 'fadeIn') {
slider.style.opacity = 0;
setTimeout(function () {
slider.style.transform = 'translate3d(' + offset + ',0,0)';
slider.style.opacity = 1;
}, 400);
} else {
slider.style.transform = 'translate3d(' + offset + ',0,0)';
}
playvid(false);
var v = vid();
if (v) {
playvid(true);
v.muted = vmute;
v.loop = vloop;
}
mp_ctl();
setVmode();
}
function preloadNext(index) {
if (index - currentIndex >= options.preload)
return;
loadImage(index + 1, function () {
preloadNext(index + 1);
});
}
function preloadPrev(index) {
if (currentIndex - index >= options.preload)
return;
loadImage(index - 1, function () {
preloadPrev(index - 1);
});
}
function bind(element, event, callback, options) {
element.addEventListener(event, callback, options);
}
function unbind(element, event, callback, options) {
element.removeEventListener(event, callback, options);
}
function destroyPlugin() {
unbindEvents();
clearCachedData();
unbind(document, 'keydown', keyDownHandler);
unbind(document, 'keyup', keyUpHandler);
document.getElementsByTagName('body')[0].removeChild(ebi('bbox-overlay'));
data = {};
currentGallery = [];
currentIndex = 0;
}
return {
run: run,
show: show,
showNext: showNextImage,
showPrevious: showPreviousImage,
relseek: relseek,
playpause: playpause,
hide: hideOverlay,
destroy: destroyPlugin
};
})();

View File

@@ -25,6 +25,47 @@ html, body {
body { body {
padding-bottom: 5em; padding-bottom: 5em;
} }
#tt {
position: fixed;
max-width: 34em;
background: #222;
border: 0 solid #777;
overflow: hidden;
margin-top: 1em;
padding: 0 1.3em;
height: 0;
opacity: .1;
transition: opacity 0.14s, height 0.14s, padding 0.14s;
box-shadow: 0 .2em .5em #222;
border-radius: .4em;
z-index: 9001;
}
#tt.b {
padding: 0 2em;
border-radius: .5em;
box-shadow: 0 .2em 1em #000;
}
#tt.show {
padding: 1em 1.3em;
border-width: .4em 0;
height: auto;
opacity: 1;
}
#tt.show.b {
padding: 1.5em 2em;
border-width: .5em 0;
}
#tt code {
background: #3c3c3c;
padding: .1em .3em;
border-top: 1px solid #777;
border-radius: .3em;
font-family: monospace, monospace;
line-height: 1.7em;
}
#tt em {
color: #f6a;
}
#path, #path,
#path * { #path * {
font-size: 1em; font-size: 1em;
@@ -53,6 +94,7 @@ body {
#files tbody a { #files tbody a {
display: block; display: block;
padding: .3em 0; padding: .3em 0;
scroll-margin-top: 45vh;
} }
#files tbody div a { #files tbody div a {
color: #f5a; color: #f5a;
@@ -68,7 +110,6 @@ a, #files tbody div a:last-child {
text-decoration: underline; text-decoration: underline;
} }
#files thead { #files thead {
background: #333;
position: sticky; position: sticky;
top: 0; top: 0;
} }
@@ -76,29 +117,30 @@ a, #files tbody div a:last-child {
color: #999; color: #999;
font-weight: normal; font-weight: normal;
} }
#files tr:hover { #files tr:hover td {
background: #1c1c1c; background: #1c1c1c;
} }
#files thead th { #files thead th {
padding: .5em 1.3em .3em 1.3em; padding: .5em .3em .3em .3em;
border-right: 2px solid #3c3c3c;
border-bottom: 2px solid #444;
background: #333;
cursor: pointer; cursor: pointer;
} }
#files thead th+th {
border-left: 2px solid #2a2a2a;
}
#files thead th:last-child { #files thead th:last-child {
background: #444; border-right: none;
border-radius: .7em .7em 0 0;
} }
#files thead th:first-child { #files tbody {
background: #222; background: #222;
} }
#files tbody,
#files thead th:nth-child(2) {
background: #222;
border-radius: 0 .7em 0 0;
}
#files td { #files td {
margin: 0; margin: 0;
padding: 0 .5em; padding: 0 .5em;
border-bottom: 1px solid #111; border-bottom: 1px solid #111;
border-left: 1px solid #2c2c2c;
} }
#files td+td+td { #files td+td+td {
max-width: 30em; max-width: 30em;
@@ -185,9 +227,17 @@ a, #files tbody div a:last-child {
margin: -.2em; margin: -.2em;
} }
#files tbody a.play.act { #files tbody a.play.act {
color: #840; color: #720;
text-shadow: 0 0 .3em #b80; text-shadow: 0 0 .3em #b80;
} }
#ggrid a.play,
html.light #ggrid a.play {
color: #fff;
background: #750;
border-color: #c90;
border-top: 1px solid #da4;
box-shadow: 0 .1em 1.2em #b83;
}
#files tbody tr.sel td, #files tbody tr.sel td,
#ggrid a.sel, #ggrid a.sel,
html.light #ggrid a.sel { html.light #ggrid a.sel {
@@ -209,11 +259,17 @@ html.light #ggrid a.sel {
box-shadow: 0 .1em 1.2em #b36; box-shadow: 0 .1em 1.2em #b36;
transition: all 0.2s cubic-bezier(.2, 2.2, .5, 1); /* https://cubic-bezier.com/#.4,2,.7,1 */ transition: all 0.2s cubic-bezier(.2, 2.2, .5, 1); /* https://cubic-bezier.com/#.4,2,.7,1 */
} }
#ggrid a.sel img { #ggrid a.sel img,
#ggrid a.play img {
opacity: .7; opacity: .7;
box-shadow: 0 0 1em #b36;
filter: contrast(130%) brightness(107%); filter: contrast(130%) brightness(107%);
} }
#ggrid a.sel img {
box-shadow: 0 0 1em #b36;
}
#ggrid a.play img {
box-shadow: 0 0 1em #b83;
}
#files tr.sel a { #files tr.sel a {
color: #fff; color: #fff;
} }
@@ -267,6 +323,7 @@ html.light #ggrid a.sel {
height: 6em; height: 6em;
width: 100%; width: 100%;
z-index: 3; z-index: 3;
touch-action: none;
transition: bottom 0.15s; transition: bottom 0.15s;
} }
#widget.open { #widget.open {
@@ -280,10 +337,18 @@ html.light #ggrid a.sel {
height: 100%; height: 100%;
background: #3c3c3c; background: #3c3c3c;
} }
#wtgrid,
#wtico { #wtico {
cursor: url(/.cpr/dd/4.png), pointer; cursor: url(/.cpr/dd/4.png), pointer;
animation: cursor 500ms; animation: cursor 500ms;
position: relative;
top: -.06em;
} }
#wtgrid {
font-size: .8em;
top: -.12em;
}
#wtgrid:hover,
#wtico:hover { #wtico:hover {
animation: cursor 500ms infinite; animation: cursor 500ms infinite;
} }
@@ -299,9 +364,10 @@ html.light #ggrid a.sel {
} }
#wtoggle { #wtoggle {
position: absolute; position: absolute;
white-space: nowrap;
top: -1.2em; top: -1.2em;
right: 0; right: 0;
width: 1.2em; width: 2.5em;
height: 1em; height: 1em;
font-size: 2em; font-size: 2em;
line-height: 1em; line-height: 1em;
@@ -333,10 +399,10 @@ html.light #ggrid a.sel {
line-height: 1em; line-height: 1em;
} }
#wtoggle.np { #wtoggle.np {
width: 5.5em; width: 6.63em;
} }
#wtoggle.sel { #wtoggle.sel {
width: 6.4em; width: 7.57em;
} }
#wtoggle.sel #wzip, #wtoggle.sel #wzip,
#wtoggle.np #wnp { #wtoggle.np #wnp {
@@ -419,6 +485,17 @@ html.light #ggrid a.sel {
max-width: 9em; max-width: 9em;
} }
} }
@media (max-width: 35em) {
#ops>a[data-dest="new_md"],
#ops>a[data-dest="msg"] {
display: none;
}
#op_mkdir.act+div,
#op_mkdir.act+div+div {
display: block;
margin-top: 1em;
}
}
@@ -483,20 +560,56 @@ html.light #ggrid a.sel {
margin: .5em; margin: .5em;
} }
.opview input[type=text] { .opview input[type=text] {
color: #fff;
background: #383838; background: #383838;
color: #fff;
border: none; border: none;
box-shadow: 0 0 .3em #222; box-shadow: 0 0 .3em #222;
border-bottom: 1px solid #fc5; border-bottom: 1px solid #fc5;
border-radius: .2em; border-radius: .2em;
padding: .2em .3em; padding: .2em .3em;
} }
.opview input.err,
html.light .opview input[type="text"].err {
color: #fff;
background: #a20;
border-color: #f00;
box-shadow: 0 0 .7em #f00;
text-shadow: 1px 1px 0 #500;
outline: none;
}
input[type="checkbox"]+label { input[type="checkbox"]+label {
color: #f5a; color: #f5a;
} }
input[type="checkbox"]:checked+label { input[type="checkbox"]:checked+label {
color: #fc5; color: #fc5;
} }
input[type="radio"]:checked+label {
color: #fc0;
}
html.light input[type="radio"]:checked+label {
color: #07c;
}
input.eq_gain {
width: 3em;
text-align: center;
margin: 0 .6em;
}
#audio_eq table {
border-collapse: collapse;
}
#audio_eq td {
text-align: center;
}
#audio_eq a.eq_step {
font-size: 1.5em;
display: block;
padding: 0;
}
#au_eq {
display: block;
margin-top: .5em;
padding: 1.3em .3em;
}
@@ -526,7 +639,7 @@ input[type="checkbox"]:checked+label {
#srch_q { #srch_q {
white-space: pre; white-space: pre;
color: #f80; color: #f80;
height: 1em; min-height: 1em;
margin: .2em 0 -1em 1.6em; margin: .2em 0 -1em 1.6em;
} }
#tq_raw { #tq_raw {
@@ -563,6 +676,7 @@ input[type="checkbox"]:checked+label {
} }
#wrap { #wrap {
margin-top: 2em; margin-top: 2em;
min-height: 90vh;
} }
#tree { #tree {
display: none; display: none;
@@ -575,8 +689,15 @@ input[type="checkbox"]:checked+label {
overscroll-behavior-y: none; overscroll-behavior-y: none;
scrollbar-color: #eb0 #333; scrollbar-color: #eb0 #333;
} }
#treeh {
background: #333;
position: sticky;
z-index: 1;
top: 0;
}
#thx_ff { #thx_ff {
padding: 5em 0; padding: 5em 0;
/* widget */
} }
#tree::-webkit-scrollbar-track, #tree::-webkit-scrollbar-track,
#tree::-webkit-scrollbar { #tree::-webkit-scrollbar {
@@ -600,6 +721,7 @@ input[type="checkbox"]:checked+label {
box-shadow: 0 .1em .2em #222 inset; box-shadow: 0 .1em .2em #222 inset;
border-radius: .3em; border-radius: .3em;
margin: .2em; margin: .2em;
white-space: pre;
position: relative; position: relative;
top: -.2em; top: -.2em;
} }
@@ -636,15 +758,14 @@ input[type="checkbox"]:checked+label {
#treeul a.hl { #treeul a.hl {
color: #400; color: #400;
background: #fc4; background: #fc4;
border-radius: .3em;
text-shadow: none; text-shadow: none;
} }
#treeul a { #treeul a {
border-radius: .3em;
display: inline-block; display: inline-block;
} }
#treeul a+a { #treeul a+a {
width: calc(100% - 2em); width: calc(100% - 2em);
background: #333;
line-height: 1em; line-height: 1em;
} }
#treeul a+a:hover { #treeul a+a:hover {
@@ -668,34 +789,25 @@ input[type="checkbox"]:checked+label {
font-size: 2em; font-size: 2em;
white-space: nowrap; white-space: nowrap;
} }
#files th:hover .cfg, #files th:hover .cfg {
#files th.min .cfg {
display: block; display: block;
width: 1em; width: 1em;
border-radius: .2em; border-radius: .2em;
margin: -1.3em auto 0 auto; margin: -1.2em auto 0 auto;
top: 2em;
position: relative;
background: #444; background: #444;
} }
#files th.min .cfg { #files th span {
margin: -.6em; position: relative;
} }
#files>thead>tr>th.min span { #files>thead>tr>th.min,
position: absolute; #files td.min {
transform: rotate(270deg); display: none;
background: linear-gradient(90deg, rgba(68,68,68,0), rgba(68,68,68,0.5) 70%, #444);
margin-left: -4.6em;
padding: .4em;
top: 5.4em;
width: 8em;
text-align: right;
letter-spacing: .04em;
} }
#files td:nth-child(2n) { #files td:nth-child(2n) {
color: #f5a; color: #f5a;
} }
#files td.min a {
display: none;
}
#files tr.play td, #files tr.play td,
#files tr.play div a { #files tr.play div a {
background: #fc4; background: #fc4;
@@ -710,50 +822,42 @@ input[type="checkbox"]:checked+label {
color: #300; color: #300;
background: #fea; background: #fea;
} }
#op_cfg { .opwide {
max-width: none; max-width: none;
margin-right: 1.5em; margin-right: 1.5em;
} }
#op_cfg>div>a { .opwide>div {
display: inline-block;
vertical-align: top;
border-left: .2em solid #4c4c4c;
margin-left: .5em;
padding-left: .5em;
}
.opwide>div.fill {
display: block;
}
.opwide>div>div>a {
line-height: 2em; line-height: 2em;
} }
#op_cfg>div>span { #op_cfg>div>div>span {
display: inline-block; display: inline-block;
padding: .2em .4em; padding: .2em .4em;
} }
#op_cfg h3 { .opbox h3 {
margin: .8em 0 0 .6em; margin: .8em 0 0 .6em;
padding: 0; padding: 0;
border-bottom: 1px solid #555; border-bottom: 1px solid #555;
} }
#opdesc { #thumbs,
display: none; #au_osd_cv,
} #u2tdate {
#ops:hover #opdesc {
display: block;
background: linear-gradient(0deg,#555, #4c4c4c 80%, #444);
box-shadow: 0 .3em 1em #222;
padding: 1em;
border-radius: .3em;
position: absolute;
z-index: 3;
top: 6em;
right: 1.5em;
}
#ops:hover #opdesc.off {
display: none;
}
#opdesc code {
background: #3c3c3c;
padding: .2em .3em;
border-top: 1px solid #777;
border-radius: .3em;
font-family: monospace, monospace;
line-height: 2em;
}
#griden.on+#thumbs {
opacity: .3; opacity: .3;
} }
#griden.on+#thumbs,
#au_os_ctl.on+#au_osd_cv,
#u2turbo.on+#u2tdate {
opacity: 1;
}
#ghead { #ghead {
background: #3c3c3c; background: #3c3c3c;
border: 1px solid #444; border: 1px solid #444;
@@ -798,6 +902,12 @@ html.light #ghead {
padding: .2em .3em; padding: .2em .3em;
display: block; display: block;
} }
#ggrid span.dir:before {
content: '📂';
line-height: 0;
font-size: 2em;
margin: -.7em .1em -.5em -.3em;
}
#ggrid a:hover { #ggrid a:hover {
background: #444; background: #444;
border-color: #555; border-color: #555;
@@ -817,7 +927,8 @@ html.light #ggrid a:hover {
#pvol, #pvol,
#barbuf, #barbuf,
#barpos, #barpos,
#u2conf label { #u2conf label,
#ops {
-webkit-user-select: none; -webkit-user-select: none;
-moz-user-select: none; -moz-user-select: none;
-ms-user-select: none; -ms-user-select: none;
@@ -848,6 +959,18 @@ html.light {
background: #eee; background: #eee;
text-shadow: none; text-shadow: none;
} }
html.light #tt {
background: #fff;
border-color: #888 #000 #777 #000;
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
}
html.light #tt code {
background: #060;
color: #fff;
}
html.light #tt em {
color: #d38;
}
html.light #ops, html.light #ops,
html.light .opbox, html.light .opbox,
html.light #srch_form { html.light #srch_form {
@@ -891,8 +1014,14 @@ html.light #treeul a.hl {
background: #07a; background: #07a;
color: #fff; color: #fff;
} }
html.light #treeul a.hl:hover {
background: #059;
}
html.light #tree li { html.light #tree li {
border-color: #ddd #fff #f7f7f7 #fff; border-color: #f7f7f7 #fff #ddd #fff;
}
html.light #tree a:hover {
background: #fff;
} }
html.light #tree ul { html.light #tree ul {
border-color: #ccc; border-color: #ccc;
@@ -910,12 +1039,14 @@ html.light #files {
} }
html.light #files thead th { html.light #files thead th {
background: #eee; background: #eee;
border: 1px solid #ccc;
border-top: none;
} }
html.light #files tr td { html.light #files thead th+th {
border-top: 1px solid #ddd; border-left: 1px solid #f7f7f7;
} }
html.light #files td { html.light #files td {
border-bottom: 1px solid #f7f7f7; border-color: #fff #fff #ddd #ddd;
} }
html.light #files tbody tr:last-child td { html.light #files tbody tr:last-child td {
border-bottom: .2em solid #ccc; border-bottom: .2em solid #ccc;
@@ -923,25 +1054,28 @@ html.light #files tbody tr:last-child td {
html.light #files td:nth-child(2n) { html.light #files td:nth-child(2n) {
color: #d38; color: #d38;
} }
html.light #files tr:hover td { html.light #files tr.play td:nth-child(2n) {
background: #fff; color: #c16;
} }
html.light #files tbody a.play { html.light #files tbody a.play {
color: #c0f; color: #c0f;
} }
html.light tr.play td { html.light #files tbody a.play.act {
color: #90c;
}
html.light #files tr.play td {
background: #fc5; background: #fc5;
border-color: #eb1;
}
html.light #files tr:hover td {
background: #fff;
} }
html.light tr.play a { html.light tr.play a {
color: #406; color: #406;
} }
html.light #files th:hover .cfg, html.light #files th:hover .cfg {
html.light #files th.min .cfg {
background: #ccc; background: #ccc;
} }
html.light #files > thead > tr > th.min span {
background: linear-gradient(90deg, rgba(204,204,204,0), rgba(204,204,204,0.5) 70%, #ccc);
}
html.light #blocked { html.light #blocked {
background: #eee; background: #eee;
} }
@@ -951,7 +1085,24 @@ html.light #blk_abrt a {
box-shadow: 0 .2em .4em #ddd; box-shadow: 0 .2em .4em #ddd;
} }
html.light #widget a { html.light #widget a {
color: #fc5; color: #06a;
}
html.light #wtoggle,
html.light #widgeti {
background: #eee;
}
html.light #wtoggle {
box-shadow: 0 0 .5em #bbb;
}
html.light #widget.open {
border-top: .2em solid #f7f7f7;
}
html.light #wzip,
html.light #wnp {
border-color: #ccc;
}
html.light #barbuf {
background: none;
} }
html.light #files tr.sel:hover td { html.light #files tr.sel:hover td {
background: #c37; background: #c37;
@@ -968,20 +1119,15 @@ html.light #files tr.sel a.play.act {
html.light input[type="checkbox"] + label { html.light input[type="checkbox"] + label {
color: #333; color: #333;
} }
html.light .opwide>div {
border-color: #ccc;
}
html.light .opview input[type="text"] { html.light .opview input[type="text"] {
background: #fff; background: #fff;
color: #333; color: #333;
box-shadow: 0 0 2px #888; box-shadow: 0 0 2px #888;
border-color: #38d; border-color: #38d;
} }
html.light #ops:hover #opdesc {
background: #fff;
box-shadow: 0 .3em 1em #ccc;
}
html.light #opdesc code {
background: #060;
color: #fff;
}
html.light #u2tab a>span, html.light #u2tab a>span,
html.light #files td div span { html.light #files td div span {
color: #000; color: #000;
@@ -991,9 +1137,6 @@ html.light #path {
text-shadow: none; text-shadow: none;
box-shadow: 0 0 .3em #bbb; box-shadow: 0 0 .3em #bbb;
} }
html.light #path a {
color: #333;
}
html.light #path a:not(:last-child)::after { html.light #path a:not(:last-child)::after {
border-color: #ccc; border-color: #ccc;
background: none; background: none;
@@ -1002,7 +1145,7 @@ html.light #path a:not(:last-child)::after {
} }
html.light #path a:hover { html.light #path a:hover {
background: none; background: none;
color: #60a; color: #90d;
} }
html.light #files tbody div a { html.light #files tbody div a {
color: #d38; color: #d38;
@@ -1012,6 +1155,9 @@ html.light #files tr.sel a:hover {
color: #000; color: #000;
background: #fff; background: #fff;
} }
html.light #treeh {
background: #eee;
}
html.light #tree { html.light #tree {
scrollbar-color: #a70 #ddd; scrollbar-color: #a70 #ddd;
} }
@@ -1022,3 +1168,184 @@ html.light #tree::-webkit-scrollbar {
#tree::-webkit-scrollbar-thumb { #tree::-webkit-scrollbar-thumb {
background: #da0; background: #da0;
} }
#bbox-overlay {
display: none;
opacity: 0;
position: fixed;
overflow: hidden;
touch-action: none;
top: 0;
left: 0;
width: 100%;
height: 100%;
z-index: 10;
background: rgba(0, 0, 0, 0.8);
transition: opacity .3s ease;
}
#bbox-overlay.visible {
opacity: 1;
}
.full-image {
display: inline-block;
position: relative;
width: 100%;
height: 100%;
text-align: center;
}
.full-image figure {
display: inline;
margin: 0;
height: 100%;
}
.full-image img,
.full-image video {
display: inline-block;
width: auto;
height: auto;
max-width: 100%;
max-height: 100%;
max-height: calc(100% - 1.4em);
margin-bottom: 1.4em;
vertical-align: middle;
box-shadow: 0 0 8px rgba(0, 0, 0, 0.6);
}
.full-image video {
background: #333;
}
.full-image figcaption {
display: block;
position: fixed;
bottom: .1em;
width: 100%;
text-align: center;
white-space: normal;
color: #ccc;
}
#bbox-overlay figcaption a {
background: rgba(0, 0, 0, 0.6);
border-radius: .4em;
padding: .3em .6em;
}
html.light #bbox-overlay figcaption a {
color: #0bf;
}
.full-image:before {
content: "";
display: inline-block;
height: 50%;
width: 1px;
margin-right: -1px;
}
#bbox-slider {
position: fixed;
left: 0;
top: 0;
height: 100%;
width: 100%;
white-space: nowrap;
transition: left .2s ease, transform .2s ease;
}
.bounce-from-right {
animation: bounceFromRight .4s ease-out;
}
.bounce-from-left {
animation: bounceFromLeft .4s ease-out;
}
@keyframes bounceFromRight {
0% {margin-left: 0}
50% {margin-left: -30px}
100% {margin-left: 0}
}
@keyframes bounceFromLeft {
0% {margin-left: 0}
50% {margin-left: 30px}
100% {margin-left: 0}
}
#bbox-next,
#bbox-prev {
top: 50%;
top: calc(50% - 30px);
width: 44px;
height: 60px;
}
.bbox-btn {
position: fixed;
}
#bbox-overlay button {
cursor: pointer;
outline: none;
padding: 0 .3em;
margin: 0 .4em;
border: 0;
border-radius: 15%;
background: rgba(50, 50, 50, 0.5);
color: rgba(255,255,255,0.7);
transition: background-color .3s ease;
transition: color .3s ease;
font-size: 1.4em;
line-height: 1.4em;
vertical-align: top;
}
#bbox-overlay button:focus,
#bbox-overlay button:hover {
color: rgba(255,255,255,0.9);
background: rgba(50, 50, 50, 0.9);
}
#bbox-next {
right: 1%;
}
#bbox-prev {
left: 1%;
}
#bbox-btns {
top: .5em;
right: 2%;
position: fixed;
}
#bbox-halp {
color: #fff;
background: #333;
position: absolute;
top: 0;
left: 0;
z-index: 20;
padding: .4em;
}
#bbox-halp td {
padding: .2em .5em;
}
#bbox-halp td:first-child {
text-align: right;
}
.bbox-spinner {
width: 40px;
height: 40px;
display: inline-block;
position: absolute;
top: 50%;
left: 50%;
margin-top: -20px;
margin-left: -20px;
}
.bbox-double-bounce1,
.bbox-double-bounce2 {
width: 100%;
height: 100%;
border-radius: 50%;
background-color: #fff;
opacity: .6;
position: absolute;
top: 0;
left: 0;
animation: bounce 2s infinite ease-in-out;
}
.bbox-double-bounce2 {
animation-delay: -1s;
}
@keyframes bounce {
0%, 100% {transform: scale(0)}
50% {transform: scale(1)}
}

View File

@@ -6,26 +6,15 @@
<title>⇆🎉 {{ title }}</title> <title>⇆🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8"> <meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css{{ ts }}"> <link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css?_={{ ts }}">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css{{ ts }}"> <link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css?_={{ ts }}">
{%- if css %}
<link rel="stylesheet" type="text/css" media="screen" href="{{ css }}?_={{ ts }}">
{%- endif %}
</head> </head>
<body> <body>
<div id="ops"> <div id="ops"></div>
<a href="#" data-dest="" data-desc="close submenu">---</a>
{%- if have_up2k_idx %}
<a href="#" data-perm="read" data-dest="search" data-desc="search for files by attributes, path/name, music tags, or any combination of those.&lt;br /&gt;&lt;br /&gt;&lt;code&gt;foo bar&lt;/code&gt; = must contain both foo and bar,&lt;br /&gt;&lt;code&gt;foo -bar&lt;/code&gt; = must contain foo but not bar,&lt;br /&gt;&lt;code&gt;^yana .opus$&lt;/code&gt; = must start with yana and have the opus extension">🔎</a>
<a href="#" data-dest="up2k" data-desc="up2k: upload files (if you have write-access) or toggle into the search-mode and drag files onto the search button to see if they exist somewhere on the server">🚀</a>
{%- else %}
<a href="#" data-perm="write" data-dest="up2k" data-desc="up2k: upload files with resume support (close your browser and drop the same files in later)">🚀</a>
{%- endif %}
<a href="#" data-perm="write" data-dest="bup" data-desc="bup: basic uploader, even supports netscape 4.0">🎈</a>
<a href="#" data-perm="write" data-dest="mkdir" data-desc="mkdir: create a new directory">📂</a>
<a href="#" data-perm="read write" data-dest="new_md" data-desc="new-md: create a new markdown document">📝</a>
<a href="#" data-perm="write" data-dest="msg" data-desc="msg: send a message to the server log">📟</a>
<a href="#" data-dest="cfg" data-desc="configuration options">⚙️</a>
<div id="opdesc"></div>
</div>
<div id="op_search" class="opview"> <div id="op_search" class="opview">
{%- if have_tags_idx %} {%- if have_tags_idx %}
@@ -36,39 +25,52 @@
<div id="srch_q"></div> <div id="srch_q"></div>
</div> </div>
{%- include 'upload.html' %} <div id="op_player" class="opview opbox opwide"></div>
<div id="op_cfg" class="opview opbox"> <div id="op_bup" class="opview opbox act">
<h3>switches</h3> <div id="u2err"></div>
<div> <form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<a id="tooltips" class="tgl btn" href="#">tooltips</a> <input type="hidden" name="act" value="bput" />
<a id="lightmode" class="tgl btn" href="#">lightmode</a> <input type="file" name="f" multiple><br />
<a id="griden" class="tgl btn" href="#">the grid</a> <input type="submit" value="start upload">
<a id="thumbs" class="tgl btn" href="#">thumbs</a> </form>
</div> </div>
{%- if have_zip %}
<h3>folder download</h3> <div id="op_mkdir" class="opview opbox act">
<div id="arc_fmt"></div> <form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
{%- endif %} <input type="hidden" name="act" value="mkdir" />
<h3>key notation</h3> 📂<input type="text" name="name" size="30">
<div id="key_notation"></div> <input type="submit" value="make directory">
</form>
</div> </div>
<div id="op_new_md" class="opview opbox">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="new_md" />
📝<input type="text" name="name" size="30">
<input type="submit" value="new markdown doc">
</form>
</div>
<div id="op_msg" class="opview opbox act">
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
📟<input type="text" name="msg" size="30">
<input type="submit" value="send msg to server log">
</form>
</div>
<div id="op_up2k" class="opview"></div>
<div id="op_cfg" class="opview opbox opwide"></div>
<h1 id="path"> <h1 id="path">
<a href="#" id="entree">🌲</a> <a href="#" id="entree" tt="show directory tree$NHotkey: B">🌲</a>
{%- for n in vpnodes %} {%- for n in vpnodes %}
<a href="/{{ n[0] }}">{{ n[1] }}</a> <a href="/{{ n[0] }}">{{ n[1] }}</a>
{%- endfor %} {%- endfor %}
</h1> </h1>
<div id="tree"> <div id="tree"></div>
<a href="#" id="detree">🍞...</a>
<a href="#" class="btn" step="2" id="twobytwo">+</a>
<a href="#" class="btn" step="-2" id="twig">&ndash;</a>
<a href="#" class="tgl btn" id="dyntree">a</a>
<ul id="treeul"></ul>
<div id="thx_ff">&nbsp;</div>
</div>
<div id="wrap"> <div id="wrap">
@@ -108,7 +110,7 @@
<div id="epi" class="logue">{{ logues[1] }}</div> <div id="epi" class="logue">{{ logues[1] }}</div>
<h2><a href="?h">control-panel</a></h2> <h2><a href="/?h">control-panel</a></h2>
</div> </div>
@@ -119,14 +121,15 @@
<div id="widget"></div> <div id="widget"></div>
<script> <script>
var tag_order_cfg = {{ tag_order }}; var perms = {{ perms }},
</script> tag_order_cfg = {{ tag_order }},
<script src="/.cpr/util.js{{ ts }}"></script> have_up2k_idx = {{ have_up2k_idx|tojson }},
<script src="/.cpr/browser.js{{ ts }}"></script> have_tags_idx = {{ have_tags_idx|tojson }},
<script src="/.cpr/up2k.js{{ ts }}"></script> have_zip = {{ have_zip|tojson }};
<script>
apply_perms({{ perms }});
</script> </script>
<script src="/.cpr/util.js?_={{ ts }}"></script>
<script src="/.cpr/browser.js?_={{ ts }}"></script>
<script src="/.cpr/up2k.js?_={{ ts }}"></script>
</body> </body>
</html> </html>

File diff suppressed because it is too large

View File

@@ -54,7 +54,7 @@
<div>{{ logues[1] }}</div><br /> <div>{{ logues[1] }}</div><br />
{%- endif %} {%- endif %}
<h2><a href="{{ url_suf }}{{ url_suf and '&amp;' or '?' }}h">control-panel</a></h2> <h2><a href="/{{ url_suf }}{{ url_suf and '&amp;' or '?' }}h">control-panel</a></h2>
</body> </body>
</html> </html>

View File

@@ -0,0 +1,61 @@
var ofun = audio_eq.apply.bind(audio_eq);
audio_eq.apply = function () {
var ac1 = mp.ac;
ofun();
var ac = mp.ac,
w = 2048,
h = 256;
if (!audio_eq.filters.length) {
audio_eq.ana = null;
return;
}
var can = ebi('fft_can');
if (!can) {
can = mknod('canvas');
can.setAttribute('id', 'fft_can');
can.style.cssText = 'position:absolute;left:0;bottom:5em;width:' + w + 'px;height:' + h + 'px;z-index:9001';
document.body.appendChild(can);
can.width = w;
can.height = h;
}
var cc = can.getContext('2d');
if (!ac)
return;
var ana = ac.createAnalyser();
ana.smoothingTimeConstant = 0;
ana.fftSize = 8192;
audio_eq.filters[0].connect(ana);
audio_eq.ana = ana;
var buf = new Uint8Array(ana.frequencyBinCount),
colw = can.width / buf.length;
cc.fillStyle = '#fc0';
function draw() {
if (ana == audio_eq.ana)
requestAnimationFrame(draw);
ana.getByteFrequencyData(buf);
cc.clearRect(0, 0, can.width, can.height);
/*var x = 0, w = 1;
for (var a = 0; a < buf.length; a++) {
cc.fillRect(x, h - buf[a], w, h);
x += w;
}*/
var mul = Math.pow(w, 4) / buf.length;
for (var x = 0; x < w; x++) {
var a = Math.floor(Math.pow(x, 4) / mul),
v = buf[a];
cc.fillRect(x, h - v, 1, v);
}
}
draw();
};
audio_eq.apply();

View File

@@ -8,6 +8,47 @@ html, body {
font-family: sans-serif; font-family: sans-serif;
line-height: 1.5em; line-height: 1.5em;
} }
#tt {
position: fixed;
max-width: 34em;
background: #222;
border: 0 solid #777;
overflow: hidden;
margin-top: 1em;
padding: 0 1.3em;
height: 0;
opacity: .1;
transition: opacity 0.14s, height 0.14s, padding 0.14s;
box-shadow: 0 .2em .5em #222;
border-radius: .4em;
z-index: 9001;
}
#tt.b {
padding: 0 2em;
border-radius: .5em;
box-shadow: 0 .2em 1em #000;
}
#tt.show {
padding: 1em 1.3em;
border-width: .4em 0;
height: auto;
opacity: 1;
}
#tt.show.b {
padding: 1.5em 2em;
border-width: .5em 0;
}
#tt code {
background: #3c3c3c;
padding: .1em .3em;
border-top: 1px solid #777;
border-radius: .3em;
font-family: monospace, monospace;
line-height: 1.7em;
}
#tt em {
color: #f6a;
}
#mtw { #mtw {
display: none; display: none;
} }

View File

@@ -3,9 +3,9 @@
<title>📝🎉 {{ title }}</title> <!-- 📜 --> <title>📝🎉 {{ title }}</title> <!-- 📜 -->
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7"> <meta name="viewport" content="width=device-width, initial-scale=0.7">
<link href="/.cpr/md.css" rel="stylesheet"> <link href="/.cpr/md.css?_={{ ts }}" rel="stylesheet">
{%- if edit %} {%- if edit %}
<link href="/.cpr/md2.css" rel="stylesheet"> <link href="/.cpr/md2.css?_={{ ts }}" rel="stylesheet">
{%- endif %} {%- endif %}
</head> </head>
<body> <body>
@@ -14,9 +14,9 @@
<a id="lightswitch" href="#">go dark</a> <a id="lightswitch" href="#">go dark</a>
<a id="navtoggle" href="#">hide nav</a> <a id="navtoggle" href="#">hide nav</a>
{%- if edit %} {%- if edit %}
<a id="save" href="?edit">save</a> <a id="save" href="?edit" tt="Hotkey: ctrl-s">save</a>
<a id="sbs" href="#">sbs</a> <a id="sbs" href="#" tt="editor and preview side by side">sbs</a>
<a id="nsbs" href="#">editor</a> <a id="nsbs" href="#" tt="switch between editor and preview$NHotkey: ctrl-e">editor</a>
<div id="toolsbox"> <div id="toolsbox">
<a id="tools" href="#">tools</a> <a id="tools" href="#">tools</a>
<a id="fmt_table" href="#">prettify table (ctrl-k)</a> <a id="fmt_table" href="#">prettify table (ctrl-k)</a>
@@ -26,8 +26,8 @@
<a id="help" href="#">help</a> <a id="help" href="#">help</a>
</div> </div>
{%- else %} {%- else %}
<a href="?edit">edit (basic)</a> <a href="?edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
<a href="?edit2">edit (fancy)</a> <a href="?edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
<a href="?raw">view raw</a> <a href="?raw">view raw</a>
{%- endif %} {%- endif %}
</div> </div>
@@ -146,10 +146,10 @@ var md_opt = {
})(); })();
</script> </script>
<script src="/.cpr/util.js"></script> <script src="/.cpr/util.js?_={{ ts }}"></script>
<script src="/.cpr/deps/marked.js"></script> <script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
<script src="/.cpr/md.js"></script> <script src="/.cpr/md.js?_={{ ts }}"></script>
{%- if edit %} {%- if edit %}
<script src="/.cpr/md2.js"></script> <script src="/.cpr/md2.js?_={{ ts }}"></script>
{%- endif %} {%- endif %}
</body></html> </body></html>

View File

@@ -176,7 +176,7 @@ function md_plug_err(ex, js) {
var lns = js.split('\n'); var lns = js.split('\n');
if (ln < lns.length) { if (ln < lns.length) {
o = mknod('span'); o = mknod('span');
o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block'; o.style.cssText = "color:#ac2;font-size:.9em;font-family:'scp',monospace,monospace;display:block";
o.textContent = lns[ln - 1]; o.textContent = lns[ln - 1];
} }
} }
@@ -530,3 +530,6 @@ dom_navtgl.onclick = function () {
if (sread('hidenav') == 1) if (sread('hidenav') == 1)
dom_navtgl.onclick(); dom_navtgl.onclick();
if (window['tt'])
tt.init();

View File

@@ -924,10 +924,9 @@ function cfg_uni(e) {
(function () { (function () {
function keydown(ev) { function keydown(ev) {
ev = ev || window.event; ev = ev || window.event;
var kc = ev.keyCode || ev.which; var kc = ev.code || ev.keyCode || ev.which;
var ctrl = ev.ctrlKey || ev.metaKey; //console.log(ev.key, ev.code, ev.keyCode, ev.which);
//console.log(ev.code, kc); if (ctrl(ev) && (ev.code == "KeyS" || kc == 83)) {
if (ctrl && (ev.code == "KeyS" || kc == 83)) {
save(); save();
return false; return false;
} }
@@ -936,23 +935,15 @@ function cfg_uni(e) {
if (d) if (d)
d.click(); d.click();
} }
if (document.activeElement == dom_src) { if (document.activeElement != dom_src)
if (ev.code == "Tab" || kc == 9) { return true;
md_indent(ev.shiftKey);
return false; if (ctrl(ev)) {
} if (ev.code == "KeyH" || kc == 72) {
if (ctrl && (ev.code == "KeyH" || kc == 72)) {
md_header(ev.shiftKey); md_header(ev.shiftKey);
return false; return false;
} }
if (!ctrl && (ev.code == "Home" || kc == 36)) { if (ev.code == "KeyZ" || kc == 90) {
md_home(ev.shiftKey);
return false;
}
if (!ctrl && !ev.shiftKey && (ev.code == "Enter" || kc == 13)) {
return md_newline();
}
if (ctrl && (ev.code == "KeyZ" || kc == 90)) {
if (ev.shiftKey) if (ev.shiftKey)
action_stack.redo(); action_stack.redo();
else else
@@ -960,33 +951,45 @@ function cfg_uni(e) {
return false; return false;
} }
if (ctrl && (ev.code == "KeyY" || kc == 89)) { if (ev.code == "KeyY" || kc == 89) {
action_stack.redo(); action_stack.redo();
return false; return false;
} }
if (!ctrl && !ev.shiftKey && kc == 8) { if (ev.code == "KeyK") {
return md_backspace();
}
if (ctrl && (ev.code == "KeyK")) {
fmt_table(); fmt_table();
return false; return false;
} }
if (ctrl && (ev.code == "KeyU")) { if (ev.code == "KeyU") {
iter_uni(); iter_uni();
return false; return false;
} }
if (ctrl && (ev.code == "KeyE")) { if (ev.code == "KeyE") {
dom_nsbs.click(); dom_nsbs.click();
//fmt_table();
return false; return false;
} }
var up = ev.code == "ArrowUp" || kc == 38; var up = ev.code == "ArrowUp" || kc == 38;
var dn = ev.code == "ArrowDown" || kc == 40; var dn = ev.code == "ArrowDown" || kc == 40;
if (ctrl && (up || dn)) { if (up || dn) {
md_p_jump(dn); md_p_jump(dn);
return false; return false;
} }
} }
else {
if (ev.code == "Tab" || kc == 9) {
md_indent(ev.shiftKey);
return false;
}
if (ev.code == "Home" || kc == 36) {
md_home(ev.shiftKey);
return false;
}
if (!ev.shiftKey && (ev.code == "Enter" || kc == 13)) {
return md_newline();
}
if (!ev.shiftKey && kc == 8) {
return md_backspace();
}
}
} }
document.onkeydown = keydown; document.onkeydown = keydown;
ebi('save').onclick = save; ebi('save').onclick = save;

View File

@@ -3,9 +3,9 @@
<title>📝🎉 {{ title }}</title> <title>📝🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7"> <meta name="viewport" content="width=device-width, initial-scale=0.7">
<link href="/.cpr/mde.css" rel="stylesheet"> <link href="/.cpr/mde.css?_={{ ts }}" rel="stylesheet">
<link href="/.cpr/deps/mini-fa.css" rel="stylesheet"> <link href="/.cpr/deps/mini-fa.css?_={{ ts }}" rel="stylesheet">
<link href="/.cpr/deps/easymde.css" rel="stylesheet"> <link href="/.cpr/deps/easymde.css?_={{ ts }}" rel="stylesheet">
</head> </head>
<body> <body>
<div id="mw"> <div id="mw">
@@ -43,7 +43,7 @@ var lightswitch = (function () {
})(); })();
</script> </script>
<script src="/.cpr/util.js"></script> <script src="/.cpr/util.js?_={{ ts }}"></script>
<script src="/.cpr/deps/easymde.js"></script> <script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
<script src="/.cpr/mde.js"></script> <script src="/.cpr/mde.js?_={{ ts }}"></script>
</body></html> </body></html>

View File

@@ -6,7 +6,7 @@
<title>copyparty</title> <title>copyparty</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8"> <meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/msg.css"> <link rel="stylesheet" type="text/css" media="screen" href="/.cpr/msg.css?_={{ ts }}">
</head> </head>
<body> <body>

View File

@@ -26,10 +26,23 @@ a {
border-radius: .2em; border-radius: .2em;
padding: .2em .8em; padding: .2em .8em;
} }
td, th { table {
border-collapse: collapse;
}
.vols td,
.vols th {
padding: .3em .6em; padding: .3em .6em;
text-align: left; text-align: left;
} }
.num {
border-right: 1px solid #bbb;
}
.num td {
padding: .1em .7em .1em 0;
}
.num td:first-child {
text-align: right;
}
.btns { .btns {
margin: 1em 0; margin: 1em 0;
} }
@@ -58,3 +71,6 @@ html.dark input {
padding: .5em .7em; padding: .5em .7em;
margin: 0 .5em 0 0; margin: 0 .5em 0 0;
} }
html.dark .num {
border-color: #777;
}

View File

@@ -6,7 +6,7 @@
<title>copyparty</title> <title>copyparty</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8"> <meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/splash.css"> <link rel="stylesheet" type="text/css" media="screen" href="/.cpr/splash.css?_={{ ts }}">
</head> </head>
<body> <body>
@@ -15,7 +15,15 @@
{%- if avol %} {%- if avol %}
<h1>admin panel:</h1> <h1>admin panel:</h1>
<table> <table><tr><td> <!-- hehehe -->
<table class="num">
<tr><td>scanning</td><td>{{ scanning }}</td></tr>
<tr><td>hash-q</td><td>{{ hashq }}</td></tr>
<tr><td>tag-q</td><td>{{ tagq }}</td></tr>
<tr><td>mtp-q</td><td>{{ mtpq }}</td></tr>
</table>
</td><td>
<table class="vols">
<thead><tr><th>vol</th><th>action</th><th>status</th></tr></thead> <thead><tr><th>vol</th><th>action</th><th>status</th></tr></thead>
<tbody> <tbody>
{% for mp in avol %} {% for mp in avol %}
@@ -25,8 +33,9 @@
{% endfor %} {% endfor %}
</tbody> </tbody>
</table> </table>
</td></tr></table>
<div class="btns"> <div class="btns">
<a href="{{ avol[0] }}?stack">dump stack</a> <a href="/?stack">dump stack</a>
</div> </div>
{%- endif %} {%- endif %}
@@ -50,7 +59,7 @@
<h1>login for more:</h1> <h1>login for more:</h1>
<ul> <ul>
<form method="post" enctype="multipart/form-data" action="/{{ url_suf }}"> <form method="post" enctype="multipart/form-data" action="/">
<input type="hidden" name="act" value="login" /> <input type="hidden" name="act" value="login" />
<input type="password" name="cppwd" /> <input type="password" name="cppwd" />
<input type="submit" value="Login" /> <input type="submit" value="Login" />

File diff suppressed because it is too large

View File

@@ -211,36 +211,35 @@
box-shadow: none; box-shadow: none;
opacity: .2; opacity: .2;
} }
#u2cdesc {
position: absolute;
width: 34em;
left: calc(50% - 15em);
background: #222;
border: 0 solid #555;
text-align: center;
overflow: hidden;
margin: 0 -2em;
padding: 0 1em;
height: 0;
opacity: .1;
transition: all 0.14s ease-in-out;
box-shadow: 0 .2em .5em #222;
border-radius: .4em;
z-index: 1;
}
#u2cdesc.show {
padding: 1em;
height: auto;
border-width: .2em 0;
opacity: 1;
}
#u2foot { #u2foot {
color: #fff; color: #fff;
font-style: italic; font-style: italic;
} }
#u2foot .warn {
font-size: 1.3em;
padding: .5em .8em;
margin: 1em -.6em;
color: #f74;
background: #322;
border: 1px solid #633;
border-width: .1em 0;
text-align: center;
}
#u2foot .warn span {
color: #f86;
}
html.light #u2foot .warn {
color: #b00;
background: #fca;
border-color: #f70;
}
html.light #u2foot .warn span {
color: #930;
}
#u2foot span { #u2foot span {
color: #999; color: #999;
font-size: .9em; font-size: .9em;
font-weight: normal;
} }
#u2footfoot { #u2footfoot {
margin-bottom: -1em; margin-bottom: -1em;
@@ -258,6 +257,11 @@
float: right; float: right;
margin-bottom: -.3em; margin-bottom: -.3em;
} }
.fsearch_explain {
padding-left: .7em;
font-size: 1.1em;
line-height: 0;
}
@@ -286,10 +290,6 @@ html.light #u2conf .txtbox.err {
background: #f96; background: #f96;
color: #300; color: #300;
} }
html.light #u2cdesc {
background: #fff;
border: none;
}
html.light #op_up2k.srch #u2btn { html.light #op_up2k.srch #u2btn {
border-color: #a80; border-color: #a80;
} }

View File

@@ -1,103 +0,0 @@
<div id="op_bup" class="opview opbox act">
<div id="u2err"></div>
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="bput" />
<input type="file" name="f" multiple><br />
<input type="submit" value="start upload">
</form>
</div>
<div id="op_mkdir" class="opview opbox act">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="mkdir" />
<input type="text" name="name" size="30">
<input type="submit" value="mkdir">
</form>
</div>
<div id="op_new_md" class="opview opbox">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="new_md" />
<input type="text" name="name" size="30">
<input type="submit" value="create doc">
</form>
</div>
<div id="op_msg" class="opview opbox act">
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
<input type="text" name="msg" size="30">
<input type="submit" value="send msg">
</form>
</div>
<div id="op_up2k" class="opview">
<form id="u2form" method="post" enctype="multipart/form-data" onsubmit="return false;"></form>
<table id="u2conf">
<tr>
<td><br />parallel uploads:</td>
<td rowspan="2">
<input type="checkbox" id="multitask" />
<label for="multitask" alt="continue hashing other files while uploading">🏃</label>
</td>
<td rowspan="2">
<input type="checkbox" id="ask_up" />
<label for="ask_up" alt="ask for confirmation befofre upload starts">💭</label>
</td>
<td rowspan="2">
<input type="checkbox" id="flag_en" />
<label for="flag_en" alt="ensure only one tab is uploading at a time $N (other tabs must have this enabled too)">💤</label>
</td>
{%- if have_up2k_idx %}
<td data-perm="read" rowspan="2">
<input type="checkbox" id="fsearch" />
<label for="fsearch" alt="don't actually upload, instead check if the files already $N exist on the server (will scan all folders you can read)">🔎</label>
</td>
{%- endif %}
<td data-perm="read" rowspan="2" id="u2btn_cw"></td>
</tr>
<tr>
<td>
<a href="#" id="nthread_sub">&ndash;</a><input
class="txtbox" id="nthread" value="2"/><a
href="#" id="nthread_add">+</a><br />&nbsp;
</td>
</tr>
</table>
<div id="u2cdesc"></div>
<div id="u2notbtn"></div>
<div id="u2btn_ct">
<div id="u2btn">
<span id="u2bm"></span><br />
drag/drop files<br />
and folders here<br />
(or click me)
</div>
</div>
<div id="u2cards">
<a href="#" act="ok">ok <span>0</span></a><a
href="#" act="ng">ng <span>0</span></a><a
href="#" act="done">done <span>0</span></a><a
href="#" act="bz" class="act">busy <span>0</span></a><a
href="#" act="q">que <span>0</span></a>
</div>
<table id="u2tab">
<thead>
<tr>
<td>filename</td>
<td>status</td>
<td>progress<a href="#" id="u2cleanup">cleanup</a></td>
</tr>
</thead>
<tbody></tbody>
</table>
<p id="u2foot"></p>
<p id="u2footfoot" data-perm="write">( you can use the <a href="#" id="u2nope">basic uploader</a> if you don't need lastmod timestamps, resumable uploads, or progress bars )</p>
</div>

View File

@@ -6,21 +6,11 @@ if (!window['console'])
}; };
var clickev = window.Touch ? 'touchstart' : 'click', var is_touch = 'ontouchstart' in window,
ANDROID = /(android)/i.test(navigator.userAgent); ANDROID = /(android)/i.test(navigator.userAgent);
// error handler for mobile devices // error handler for mobile devices
function hcroak(msg) {
document.body.innerHTML = msg;
window.onerror = undefined;
throw 'fatal_err';
}
function croak(msg) {
document.body.textContent = msg;
window.onerror = undefined;
throw msg;
}
function esc(txt) { function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) { return txt.replace(/[&"<>]/g, function (c) {
return { return {
@@ -32,21 +22,28 @@ function esc(txt) {
}); });
} }
function vis_exh(msg, url, lineNo, columnNo, error) { function vis_exh(msg, url, lineNo, columnNo, error) {
if (!window.onerror)
return;
window.onerror = undefined; window.onerror = undefined;
window['vis_exh'] = null; window['vis_exh'] = null;
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>', var html = ['<h1>you hit a bug!</h1><p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();" style="text-decoration:underline;color:#fc0">reset copyparty settings</a> if you are stuck here</p><p>please send me a screenshot arigathanks gozaimuch: <code>ed/irc.rizon.net</code> or <code>ed#2644</code><br />&nbsp; (and if you can, press F12 and include the "Console" tab in the screenshot too)</p><p>',
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>']; esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)) + '</p>'];
if (error) { if (error) {
var find = ['desc', 'stack', 'trace']; var find = ['desc', 'stack', 'trace'];
for (var a = 0; a < find.length; a++) for (var a = 0; a < find.length; a++)
if (String(error[find[a]]) !== 'undefined') if (String(error[find[a]]) !== 'undefined')
html.push('<h2>' + find[a] + '</h2>' + html.push('<h3>' + find[a] + '</h3>' +
esc(String(error[find[a]])).replace(/\n/g, '<br />\n')); esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
} }
document.body.style.fontSize = '0.8em'; document.body.innerHTML = html.join('\n');
document.body.style.padding = '0 1em 1em 1em';
hcroak(html.join('\n')); var s = mknod('style');
s.innerHTML = 'body{background:#333;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em} h1{margin:.5em 1em 0 0;padding:0} h3{border-top:1px solid #999;margin:0} code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} *{line-height:1.5em}';
document.head.appendChild(s);
throw 'fatal_err';
} }
@@ -56,6 +53,11 @@ var ebi = document.getElementById.bind(document),
mknod = document.createElement.bind(document); mknod = document.createElement.bind(document);
function ctrl(e) {
return e && (e.ctrlKey || e.metaKey);
}
function ev(e) { function ev(e) {
e = e || window.event; e = e || window.event;
if (!e) if (!e)
@@ -67,6 +69,9 @@ function ev(e) {
if (e.stopPropagation) if (e.stopPropagation)
e.stopPropagation(); e.stopPropagation();
if (e.stopImmediatePropagation)
e.stopImmediatePropagation();
e.returnValue = false; e.returnValue = false;
return e; return e;
} }
@@ -285,63 +290,6 @@ function makeSortable(table, cb) {
} }
(function () {
var ops = QSA('#ops>a');
for (var a = 0; a < ops.length; a++) {
ops[a].onclick = opclick;
}
})();
function opclick(e) {
ev(e);
var dest = this.getAttribute('data-dest');
goto(dest);
swrite('opmode', dest || null);
var input = QS('.opview.act input:not([type="hidden"])')
if (input)
input.focus();
}
function goto(dest) {
var obj = QSA('.opview.act');
for (var a = obj.length - 1; a >= 0; a--)
clmod(obj[a], 'act');
obj = QSA('#ops>a');
for (var a = obj.length - 1; a >= 0; a--)
clmod(obj[a], 'act');
if (dest) {
var ui = ebi('op_' + dest);
clmod(ui, 'act', true);
QS('#ops>a[data-dest=' + dest + ']').className += " act";
var fn = window['goto_' + dest];
if (fn)
fn();
}
if (window['treectl'])
treectl.onscroll();
}
(function () {
goto();
var op = sread('opmode');
if (op !== null && op !== '.')
try {
goto(op);
}
catch (ex) { }
})();
function linksplit(rp) { function linksplit(rp) {
var ret = []; var ret = [];
var apath = '/'; var apath = '/';
@@ -416,6 +364,15 @@ function get_vpath() {
} }
function get_pwd() {
var pwd = ('; ' + document.cookie).split('; cppwd=');
if (pwd.length < 2)
return null;
return pwd[1].split(';')[0];
}
function unix2iso(ts) { function unix2iso(ts) {
return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5); return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
} }
@@ -437,6 +394,18 @@ function has(haystack, needle) {
} }
function apop(arr, v) {
var ofs = arr.indexOf(v);
if (ofs !== -1)
arr.splice(ofs, 1);
}
function jcp(obj) {
return JSON.parse(JSON.stringify(obj));
}
function sread(key) { function sread(key) {
if (window.localStorage) if (window.localStorage)
return localStorage.getItem(key); return localStorage.getItem(key);
@@ -528,3 +497,91 @@ function hist_replace(url) {
console.log("h-repl " + url); console.log("h-repl " + url);
history.replaceState(url, url, url); history.replaceState(url, url, url);
} }
var tt = (function () {
var r = {
"tt": mknod("div"),
"en": true,
"el": null,
"skip": false
};
r.tt.setAttribute('id', 'tt');
document.body.appendChild(r.tt);
r.show = function () {
if (r.skip) {
r.skip = false;
return;
}
var cfg = sread('tooltips');
if (cfg !== null && cfg != '1')
return;
var msg = this.getAttribute('tt');
if (!msg)
return;
r.el = this;
var pos = this.getBoundingClientRect(),
dir = this.getAttribute('ttd') || '',
left = pos.left < window.innerWidth / 2,
top = pos.top < window.innerHeight / 2,
big = this.className.indexOf(' ttb') !== -1;
if (dir.indexOf('u') + 1) top = false;
if (dir.indexOf('d') + 1) top = true;
if (dir.indexOf('l') + 1) left = false;
if (dir.indexOf('r') + 1) left = true;
clmod(r.tt, 'b', big);
r.tt.style.top = top ? pos.bottom + 'px' : 'auto';
r.tt.style.bottom = top ? 'auto' : (window.innerHeight - pos.top) + 'px';
r.tt.style.left = left ? pos.left + 'px' : 'auto';
r.tt.style.right = left ? 'auto' : (window.innerWidth - pos.right) + 'px';
r.tt.innerHTML = msg.replace(/\$N/g, "<br />");
r.el.addEventListener('mouseleave', r.hide);
clmod(r.tt, 'show', 1);
};
r.hide = function () {
clmod(r.tt, 'show');
if (r.el)
r.el.removeEventListener('mouseleave', r.hide);
};
r.tt.onclick = r.hide;
r.att = function (ctr) {
var _show = r.en ? r.show : null,
_hide = r.en ? r.hide : null,
o = ctr.querySelectorAll('*[tt]');
for (var a = o.length - 1; a >= 0; a--) {
o[a].onfocus = _show;
o[a].onblur = _hide;
o[a].onmouseenter = _show;
o[a].onmouseleave = _hide;
}
r.hide();
}
r.init = function () {
var ttb = ebi('tooltips');
if (ttb) {
ttb.onclick = function (e) {
ev(e);
r.en = !r.en;
bcfg_set('tooltips', r.en);
r.init();
};
r.en = bcfg_get('tooltips', true)
}
r.att(document);
};
return r;
})();

22
docs/README.md Normal file
View File

@@ -0,0 +1,22 @@
# example `.epilogue.html`
save one of these as `.epilogue.html` inside a folder to customize it:
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
# example browser-css
point `--css-browser` to one of these by URL:
* [`browser.css`](browser.css) changes the background
* [`browser-icons.css`](browser-icons.css) adds filetype icons
# other stuff
## [`rclone.md`](rclone.md)
* notes on using rclone as a fuse client/server
## [`example.conf`](example.conf)
* example config file for `-c` which never really happened
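a minimal usage sketch, assuming copyparty is started from the repo root so that `docs/` ends up inside the shared tree; the volume spec and URL below are assumptions for illustration, not taken from the repo:

# share the working directory read-only and apply the wallpaper css
python3 copyparty-sfx.py -v .::r --css-browser /docs/browser.css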

95
docs/biquad.html Normal file
View File

@@ -0,0 +1,95 @@
<!DOCTYPE html><html><head></head><body><script>
setTimeout(location.reload.bind(location), 700);
document.documentElement.scrollLeft = 0;
var can = document.createElement('canvas'),
cc = can.getContext('2d'),
w = 2048,
h = 1024;
w = 2048;
can.width = w;
can.height = h;
document.body.appendChild(can);
can.style.cssText = 'width:' + w + 'px;height:' + h + 'px';
cc.fillStyle = '#000';
cc.fillRect(0, 0, w, h);
var cfg = [ // hz, q, g
[31.25 * 0.88, 0, 1.4], // shelf
[31.25 * 1.04, 0.7, 0.96], // peak
[62.5, 0.7, 1],
[125, 0.8, 1],
[250, 0.9, 1.03],
[500, 0.9, 1.1],
[1000, 0.9, 1.1],
[2000, 0.9, 1.105],
[4000, 0.88, 1.05],
[8000 * 1.006, 0.73, 1.24],
//[16000 * 1.00, 0.5, 1.75], // peak.v1
//[16000 * 1.19, 0, 1.8] // shelf.v1
[16000 * 0.89, 0.7, 1.26], // peak
[16000 * 1.13, 0.82, 1.09], // peak
[16000 * 1.205, 0, 1.9] // shelf
];
var freqs = new Float32Array(22000),
sum = new Float32Array(freqs.length),
ac = new AudioContext(),
step = w / freqs.length,
colors = [
'rgba(255, 0, 0, 0.7)',
'rgba(0, 224, 0, 0.7)',
'rgba(0, 64, 255, 0.7)'
];
var order = [];
for (var a = 0; a < cfg.length; a += 2)
order.push(a);
for (var a = 1; a < cfg.length; a += 2)
order.push(a);
for (var ia = 0; ia < order.length; ia++) {
var a = order[ia],
fi = ac.createBiquadFilter(),
mag = new Float32Array(freqs.length),
phase = new Float32Array(freqs.length);
for (var b = 0; b < freqs.length; b++)
freqs[b] = b;
fi.type = a == 0 ? 'lowshelf' : a == cfg.length - 1 ? 'highshelf' : 'peaking';
fi.frequency.value = cfg[a][0];
fi.Q.value = cfg[a][1];
fi.gain.value = 1;
fi.getFrequencyResponse(freqs, mag, phase);
cc.fillStyle = colors[a % colors.length];
for (var b = 0; b < sum.length; b++) {
mag[b] -= 1;
sum[b] += mag[b] * cfg[a][2];
var y = h - (mag[b] * h * 3);
cc.fillRect(b * step, y, step, h - y);
cc.fillRect(b * step - 1, y - 1, 3, 3);
}
}
var min = 999999, max = 0;
for (var a = 0; a < sum.length; a++) {
min = Math.min(min, sum[a]);
max = Math.max(max, sum[a]);
}
cc.fillStyle = 'rgba(255,255,255,1)';
for (var a = 0; a < sum.length; a++) {
var v = (sum[a] - min) / (max - min);
cc.fillRect(a * step, 0, step, v * h / 2);
}
cc.fillRect(0, 460, w, 1);
</script></body></html>

66
docs/browser-icons.css Normal file
View File

@@ -0,0 +1,66 @@
/* put filetype icons inline with text
#ggrid>a>span:before,
#ggrid>a>span.dir:before {
display: inline;
line-height: 0;
font-size: 1.7em;
margin: -.7em .1em -.5em -.6em;
}
*/
/* move folder icons top-left */
#ggrid>a>span.dir:before {
content: initial;
}
#ggrid>a[href$="/"]:before {
content: '📂';
}
/* put filetype icons top-left */
#ggrid>a:before {
display: block;
position: absolute;
padding: .3em 0;
margin: -.4em;
text-shadow: 0 0 .1em #000;
background: linear-gradient(135deg,rgba(255,255,255,0) 50%,rgba(255,255,255,0.2));
border-radius: .3em;
font-size: 2em;
}
/* video */
#ggrid>a:is(
[href$=".mkv"i],
[href$=".mp4"i],
[href$=".webm"i],
):before {
content: '📺';
}
/* audio */
#ggrid>a:is(
[href$=".mp3"i],
[href$=".ogg"i],
[href$=".opus"i],
[href$=".flac"i],
[href$=".m4a"i],
[href$=".aac"i],
):before {
content: '🎵';
}
/* image */
#ggrid>a:is(
[href$=".jpg"i],
[href$=".jpeg"i],
[href$=".png"i],
[href$=".gif"i],
[href$=".webp"i],
):before {
content: '🎨';
}

29
docs/browser.css Normal file
View File

@@ -0,0 +1,29 @@
html {
background: #333 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed;
}
#files th {
background: rgba(32, 32, 32, 0.9) !important;
}
#ops,
#treeul,
#files td {
background: rgba(32, 32, 32, 0.3) !important;
}
html.light {
background: #eee url('/wp/wallhaven-dpxl6l.png') center / cover no-repeat fixed;
}
html.light #files th {
background: rgba(255, 255, 255, 0.9) !important;
}
html.light #ops,
html.light #treeul,
html.light #files td {
background: rgba(248, 248, 248, 0.8) !important;
}
#files * {
background: transparent !important;
}

51
docs/hls.html Normal file
View File

@@ -0,0 +1,51 @@
<!DOCTYPE html><html lang="en"><head>
<meta charset="utf-8">
<title>hls-test</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
</head><body>
<video id="vid" controls></video>
<script src="hls.light.js"></script>
<script>
var video = document.getElementById('vid');
var hls = new Hls({
debug: true,
autoStartLoad: false
});
hls.loadSource('live/v.m3u8');
hls.attachMedia(video);
hls.on(Hls.Events.MANIFEST_PARSED, function() {
hls.startLoad(0);
});
hls.on(Hls.Events.MEDIA_ATTACHED, function() {
video.muted = true;
video.play();
});
/*
general good news:
- doesn't need fixed-length segments; ok to let x264 pick optimal keyframes and slice on those
- hls.js polls the m3u8 for new segments, scales the duration accordingly, seeking works great
- the sfx will grow by 66 KiB since that's how small hls.js can get, wait that's not good
# vod, creates m3u8 at the end, fixed keyframes, v bad
ffmpeg -hide_banner -threads 0 -flags -global_header -i ..\CowboyBebopMovie-OP1.webm -vf scale=1280:-4,format=yuv420p -ac 2 -c:a libopus -b:a 128k -c:v libx264 -preset slow -crf 24 -maxrate:v 5M -bufsize:v 10M -g 120 -keyint_min 120 -sc_threshold 0 -hls_time 4 -hls_playlist_type vod -hls_segment_filename v%05d.ts v.m3u8
# live, updates m3u8 as it goes, dynamic keyframes, streamable with hls.js
ffmpeg -hide_banner -threads 0 -flags -global_header -i ..\..\CowboyBebopMovie-OP1.webm -vf scale=1280:-4,format=yuv420p -ac 2 -c:a libopus -b:a 128k -c:v libx264 -preset slow -crf 24 -maxrate:v 5M -bufsize:v 10M -f segment -segment_list v.m3u8 -segment_format mpegts -segment_list_flags live v%05d.ts
# fmp4 (fragmented mp4), doesn't work with hls.js, gets duration 149:07:51 (536871s), probably the tkhd/mdhd 0xffffffff (timebase 8000? ok)
ffmpeg -re -hide_banner -threads 0 -flags +cgop -i ..\..\CowboyBebopMovie-OP1.webm -vf scale=1280:-4,format=yuv420p -ac 2 -c:a libopus -b:a 128k -c:v libx264 -preset slow -crf 24 -maxrate:v 5M -bufsize:v 10M -f segment -segment_list v.m3u8 -segment_format fmp4 -segment_list_flags live v%05d.mp4
# try 2, works, uses tempfiles for m3u8 updates, good, 6% smaller
ffmpeg -re -hide_banner -threads 0 -flags +cgop -i ..\..\CowboyBebopMovie-OP1.webm -vf scale=1280:-4,format=yuv420p -ac 2 -c:a libopus -b:a 128k -c:v libx264 -preset slow -crf 24 -maxrate:v 5M -bufsize:v 10M -f hls -hls_segment_type fmp4 -hls_list_size 0 -hls_segment_filename v%05d.mp4 v.m3u8
more notes
- adding -hls_flags single_file makes duration wack during playback (for both fmp4 and ts), ok once finalized and refreshed, gives no size reduction anyways
- bebop op has good keyframe spacing for testing hls.js, in particular it hops one seg back and immediately resumes if it hits eof with the explicit hls.startLoad(0); otherwise it jumps into the middle of a seg and becomes art
- can probably -c:v copy most of the time, is there a way to check for cgop? todo
*/
</script>
</body></html>

View File

@@ -86,6 +86,9 @@ var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.quer
# get the size and video-id of all youtube vids in folder, assuming filename ends with -id.ext, and create a copyparty search query # get the size and video-id of all youtube vids in folder, assuming filename ends with -id.ext, and create a copyparty search query
find -maxdepth 1 -printf '%s %p\n' | sort -n | awk '!/-([0-9a-zA-Z_-]{11})\.(mkv|mp4|webm)$/{next} {sub(/\.[^\.]+$/,"");n=length($0);v=substr($0,n-10);print $1, v}' | tee /dev/stderr | awk 'BEGIN {p="("} {printf("%s name like *-%s.* ",p,$2);p="or"} END {print ")\n"}' | cat >&2 find -maxdepth 1 -printf '%s %p\n' | sort -n | awk '!/-([0-9a-zA-Z_-]{11})\.(mkv|mp4|webm)$/{next} {sub(/\.[^\.]+$/,"");n=length($0);v=substr($0,n-10);print $1, v}' | tee /dev/stderr | awk 'BEGIN {p="("} {printf("%s name like *-%s.* ",p,$2);p="or"} END {print ")\n"}' | cat >&2
# unique stacks in a stackdump
f=a; rm -rf stacks; mkdir stacks; grep -E '^#' $f | while IFS= read -r n; do awk -v n="$n" '!$0{o=0} o; $0==n{o=1}' <$f >stacks/f; h=$(sha1sum <stacks/f | cut -c-16); mv stacks/f stacks/$h-"$n"; done ; find stacks/ | sort | uniq -cw24
## ##
## sqlite3 stuff ## sqlite3 stuff
@@ -100,6 +103,15 @@ cat warks | while IFS= read -r x; do sqlite3 up2k.db "delete from mt where w = '
# dump all dbs # dump all dbs
find -iname up2k.db | while IFS= read -r x; do sqlite3 "$x" 'select substr(w,1,12), rd, fn from up' | sed -r 's/\|/ \| /g' | while IFS= read -r y; do printf '%s | %s\n' "$x" "$y"; done; done find -iname up2k.db | while IFS= read -r x; do sqlite3 "$x" 'select substr(w,1,12), rd, fn from up' | sed -r 's/\|/ \| /g' | while IFS= read -r y; do printf '%s | %s\n' "$x" "$y"; done; done
# unschedule mtp scan for all files somewhere under "enc/"
sqlite3 -readonly up2k.db 'select substr(up.w,1,16) from up inner join mt on mt.w = substr(up.w,1,16) where rd like "enc/%" and +mt.k = "t:mtp"' > keys; awk '{printf "delete from mt where w = \"%s\" and +k = \"t:mtp\";\n", $0}' <keys | tee /dev/stderr | sqlite3 up2k.db
# compare metadata key "key" between two databases
sqlite3 -readonly up2k.db.key-full 'select w, v from mt where k = "key" order by w' > k1; sqlite3 -readonly up2k.db 'select w, v from mt where k = "key" order by w' > k2; ok=0; ng=0; while IFS='|' read w k2; do k1="$(grep -E "^$w" k1 | sed -r 's/.*\|//')"; [ "$k1" = "$k2" ] && ok=$((ok+1)) || { ng=$((ng+1)); printf '%3s %3s %s\n' "$k1" "$k2" "$(sqlite3 -readonly up2k.db.key-full "select * from up where substr(w,1,16) = '$w'" | sed -r 's/\|/ | /g')"; }; done < <(cat k2); echo "match $ok diff $ng"
# actually this is much better
sqlite3 -readonly up2k.db.key-full 'select w, v from mt where k = "key" order by w' > k1; sqlite3 -readonly up2k.db 'select mt.w, mt.v, up.rd, up.fn from mt inner join up on mt.w = substr(up.w,1,16) where mt.k = "key" order by up.rd, up.fn' > k2; ok=0; ng=0; while IFS='|' read w k2 path; do k1="$(grep -E "^$w" k1 | sed -r 's/.*\|//')"; [ "$k1" = "$k2" ] && ok=$((ok+1)) || { ng=$((ng+1)); printf '%3s %3s %s\n' "$k1" "$k2" "$path"; }; done < <(cat k2); echo "match $ok diff $ng"
## ##
## media ## media
@@ -153,6 +165,12 @@ dbg.asyncStore.pendingBreakpoints = {}
# fix firefox phantom breakpoints # fix firefox phantom breakpoints
about:config >> devtools.debugger.prefs-schema-version = -1 about:config >> devtools.debugger.prefs-schema-version = -1
# determine server version
git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
# download all sfx versions
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | while read v t; do fn="copyparty $v $t.py"; [ -e $fn ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
## ##
## http 206 ## http 206
@@ -194,3 +212,4 @@ mk() { rm -rf /tmp/foo; sudo -u ed bash -c 'mkdir /tmp/foo; echo hi > /tmp/foo/b
mk && t0="$(date)" && while true; do date -s "$(date '+ 1 hour')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0" mk && t0="$(date)" && while true; do date -s "$(date '+ 1 hour')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
mk && sudo -u ed flock /tmp/foo sleep 40 & sleep 1; ps aux | grep -E 'sleep 40$' && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0" mk && sudo -u ed flock /tmp/foo sleep 40 & sleep 1; ps aux | grep -E 'sleep 40$' && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
mk && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; tar -cf/dev/null /tmp/foo; done; echo "$t0" mk && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; tar -cf/dev/null /tmp/foo; done; echo "$t0"

32
docs/tcp-debug.sh Normal file
View File

@@ -0,0 +1,32 @@
(cd ~/dev/copyparty && strace -Tttyyvfs 256 -o strace.strace python3 -um copyparty -i 127.0.0.1 --http-only --stackmon /dev/shm/cpps,10 ) 2>&1 | tee /dev/stderr > ~/log-copyparty-$(date +%Y-%m%d-%H%M%S).txt
14/Jun/2021:16:34:02 1623688447.212405 death
14/Jun/2021:16:35:02 1623688502.420860 back
tcpdump -nni lo -w /home/ed/lo.pcap
# 16:35:25.324662 IP 127.0.0.1.48632 > 127.0.0.1.3920: Flags [F.], seq 849, ack 544, win 359, options [nop,nop,TS val 809396796 ecr 809396796], length 0
tcpdump -nnr /home/ed/lo.pcap | awk '/ > 127.0.0.1.3920: /{sub(/ > .*/,"");sub(/.*\./,"");print}' | sort -n | uniq | while IFS= read -r port; do echo; tcpdump -nnr /home/ed/lo.pcap 2>/dev/null | grep -E "\.$port( > |: F)" | sed -r 's/ > .*, /, /'; done | grep -E '^16:35:0.*length [^0]' -C50
16:34:02.441732 IP 127.0.0.1.48638, length 0
16:34:02.441738 IP 127.0.0.1.3920, length 0
16:34:02.441744 IP 127.0.0.1.48638, length 0
16:34:02.441756 IP 127.0.0.1.48638, length 791
16:34:02.441759 IP 127.0.0.1.3920, length 0
16:35:02.445529 IP 127.0.0.1.48638, length 0
16:35:02.489194 IP 127.0.0.1.3920, length 0
16:35:02.515595 IP 127.0.0.1.3920, length 216
16:35:02.515600 IP 127.0.0.1.48638, length 0
grep 48638 "$(find ~ -maxdepth 1 -name log-copyparty-\*.txt | sort | tail -n 1)"
1623688502.510380 48638 rh
1623688502.511291 48638 Unrecv direct ...
1623688502.511827 48638 rh = 791
16:35:02.518 127.0.0.1 48638 shut(8): [Errno 107] Socket not connected
Exception in thread httpsrv-0.1-48638:
grep 48638 ~/dev/copyparty/strace.strace
14561 16:35:02.506310 <... accept4 resumed> {sa_family=AF_INET, sin_port=htons(48638), sin_addr=inet_addr("127.0.0.1")}, [16], SOCK_CLOEXEC) = 8<TCP:[127.0.0.1:3920->127.0.0.1:48638]> <0.000012>
15230 16:35:02.510725 write(1<pipe:[256639555]>, "1623688502.510380 48638 rh\n", 27 <unfinished ...>

View File

@@ -92,20 +92,34 @@ chmod 755 \
copyparty-extras/copyparty-*/{scripts,bin}/* copyparty-extras/copyparty-*/{scripts,bin}/*
# extract and repack the sfx with less features enabled # extract the sfx
( cd copyparty-extras/sfx-full/ ( cd copyparty-extras/sfx-full/
./copyparty-sfx.py -h ./copyparty-sfx.py -h
cd ../copyparty-*/
./scripts/make-sfx.sh re no-ogv no-cm
) )
# put new sfx into copyparty-extras/sfx-lite/, repack() {
# fuse client into copyparty-extras/,
# do the repack
(cd copyparty-extras/copyparty-*/
./scripts/make-sfx.sh $2
)
# put new sfx into copyparty-extras/$name/,
( cd copyparty-extras/
mv copyparty-*/dist/* $1/
)
}
repack sfx-full "re gz no-sh"
repack sfx-lite "re no-ogv no-cm"
repack sfx-lite "re no-ogv no-cm gz no-sh"
# move fuse client into copyparty-extras/,
# copy lite-sfx.py to ./copyparty, # copy lite-sfx.py to ./copyparty,
# delete extracted source code # delete extracted source code
( cd copyparty-extras/ ( cd copyparty-extras/
mv copyparty-*/dist/* sfx-lite/
mv copyparty-*/bin/copyparty-fuse.py . mv copyparty-*/bin/copyparty-fuse.py .
cp -pv sfx-lite/copyparty-sfx.py ../copyparty cp -pv sfx-lite/copyparty-sfx.py ../copyparty
rm -rf copyparty-{0..9}*.*.*{0..9} rm -rf copyparty-{0..9}*.*.*{0..9}
@@ -119,6 +133,7 @@ true
# create the bundle # create the bundle
printf '\n\n'
fn=copyparty-$(date +%Y-%m%d-%H%M%S).tgz fn=copyparty-$(date +%Y-%m%d-%H%M%S).tgz
tar -czvf "$od/$fn" * tar -czvf "$od/$fn" *
cd "$od" cd "$od"

View File

@@ -1,6 +1,7 @@
FROM alpine:3.13 FROM alpine:3.13
WORKDIR /z WORKDIR /z
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \ ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_hashwasm=4.7.0 \
ver_marked=1.1.0 \ ver_marked=1.1.0 \
ver_ogvjs=1.8.0 \ ver_ogvjs=1.8.0 \
ver_mde=2.14.0 \ ver_mde=2.14.0 \
@@ -9,12 +10,6 @@ ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_zopfli=1.0.3 ver_zopfli=1.0.3
# TODO
# sha512.hw.js https://github.com/Daninet/hash-wasm
# sha512.kc.js https://github.com/chm-diederichs/sha3-wasm
# awk '/HMAC state/{o=1} /var HEAP/{o=0} /function hmac_reset/{o=1} /return \{/{o=0} /var __extends =/{o=1} /var Hash =/{o=0} /hmac_|pbkdf2_/{next} o{next} {gsub(/IllegalStateError/,"Exception")} {sub(/^ +/,"");sub(/^\/\/ .*/,"");sub(/;$/," ;")} 1' <sha512.ac.js.orig >sha512.ac.js; for fn in sha512.ac.js.orig sha512.ac.js; do wc -c <$fn; wc -c <$fn.gz ; for n in {1..9}; do printf '%8d %d bz\n' $(bzip2 -c$n <$fn | wc -c) $n; done; done
# download; # download;
# the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap # the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
RUN mkdir -p /z/dist/no-pk \ RUN mkdir -p /z/dist/no-pk \
@@ -27,7 +22,11 @@ RUN mkdir -p /z/dist/no-pk \
&& wget https://github.com/codemirror/CodeMirror/archive/$ver_codemirror.tar.gz -O codemirror.tgz \ && wget https://github.com/codemirror/CodeMirror/archive/$ver_codemirror.tar.gz -O codemirror.tgz \
&& wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \ && wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
&& wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \ && wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
&& wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \
&& unzip ogvjs.zip \ && unzip ogvjs.zip \
&& (mkdir hash-wasm \
&& cd hash-wasm \
&& unzip ../hash-wasm.zip) \
&& (tar -xf asmcrypto.tgz \ && (tar -xf asmcrypto.tgz \
&& cd asmcrypto.js-$ver_asmcrypto \ && cd asmcrypto.js-$ver_asmcrypto \
&& npm install ) \ && npm install ) \
@@ -64,7 +63,12 @@ RUN tar -xf zopfli.tgz \
RUN cd asmcrypto.js-$ver_asmcrypto \ RUN cd asmcrypto.js-$ver_asmcrypto \
&& echo "export { Sha512 } from './hash/sha512/sha512';" > src/entry-export_all.ts \ && echo "export { Sha512 } from './hash/sha512/sha512';" > src/entry-export_all.ts \
&& node -r esm build.js \ && node -r esm build.js \
&& mv asmcrypto.all.es5.js /z/dist/sha512.js && awk '/HMAC state/{o=1} /var HEAP/{o=0} /function hmac_reset/{o=1} /return \{/{o=0} /var __extends =/{o=1} /var Hash =/{o=0} /hmac_|pbkdf2_/{next} o{next} {gsub(/IllegalStateError/,"Exception")} {sub(/^ +/,"");sub(/^\/\/ .*/,"");sub(/;$/," ;")} 1' < asmcrypto.all.es5.js > /z/dist/sha512.ac.js
# build hash-wasm
RUN cd hash-wasm \
&& mv sha512.umd.min.js /z/dist/sha512.hw.js
# build ogvjs # build ogvjs

View File

@@ -11,11 +11,20 @@ echo
# `re` does a repack of an sfx which you already executed once # `re` does a repack of an sfx which you already executed once
# (grabs files from the sfx-created tempdir), overrides `clean` # (grabs files from the sfx-created tempdir), overrides `clean`
# #
# `gz` creates a gzip-compressed python sfx instead of bzip2
#
# `no-sh` makes just the python sfx, skips the sh/unix sfx
#
# `no-ogv` saves ~500k by removing the opus/vorbis audio codecs # `no-ogv` saves ~500k by removing the opus/vorbis audio codecs
# (only affects apple devices; everything else has native support) # (only affects apple devices; everything else has native support)
# #
# `no-cm` saves ~90k by removing easymde/codemirror # `no-cm` saves ~90k by removing easymde/codemirror
# (the fancy markdown editor) # (the fancy markdown editor)
#
# `no-fnt` saves ~9k by removing the source-code-pro font
# (mainly used by the markdown viewer/editor)
#
# `no-dd` saves ~2k by removing the mouse cursor
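#
# a possible invocation combining the flags above (an illustrative sketch,
# not taken from this diff): repack a previously-executed sfx into a
# gzip-compressed python-only build with the optional codecs, the fancy
# markdown editor, the scp font and the mouse cursor all removed:
#
#   ./scripts/make-sfx.sh re no-ogv no-cm no-fnt no-dd gz no-sh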
# port install gnutar findutils gsed coreutils # port install gnutar findutils gsed coreutils
@@ -32,6 +41,10 @@ gtar=$(command -v gtar || command -v gnutar) || true
[ -e /opt/local/bin/bzip2 ] && [ -e /opt/local/bin/bzip2 ] &&
bzip2() { /opt/local/bin/bzip2 "$@"; } bzip2() { /opt/local/bin/bzip2 "$@"; }
} }
gawk=$(command -v gawk || command -v gnuawk || command -v awk)
awk() { $gawk "$@"; }
pybin=$(command -v python3 || command -v python) || { pybin=$(command -v python3 || command -v python) || {
echo need python echo need python
exit 1 exit 1
@@ -49,14 +62,18 @@ use_gz=
do_sh=1 do_sh=1
do_py=1 do_py=1
while [ ! -z "$1" ]; do while [ ! -z "$1" ]; do
[ "$1" = clean ] && clean=1 && shift && continue case $1 in
[ "$1" = re ] && repack=1 && shift && continue clean) clean=1 ; ;;
[ "$1" = gz ] && use_gz=1 && shift && continue re) repack=1 ; ;;
[ "$1" = no-ogv ] && no_ogv=1 && shift && continue gz) use_gz=1 ; ;;
[ "$1" = no-cm ] && no_cm=1 && shift && continue no-ogv) no_ogv=1 ; ;;
[ "$1" = no-sh ] && do_sh= && shift && continue no-fnt) no_fnt=1 ; ;;
[ "$1" = no-py ] && do_py= && shift && continue no-dd) no_dd=1 ; ;;
break no-cm) no_cm=1 ; ;;
no-sh) do_sh= ; ;;
no-py) do_py= ; ;;
esac
shift
done done
tmv() { tmv() {
@@ -163,7 +180,7 @@ find .. -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
find .. -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done find .. -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
echo use smol web deps echo use smol web deps
rm -f copyparty/web/deps/*.full.* copyparty/web/Makefile rm -f copyparty/web/deps/*.full.* copyparty/web/dbg-* copyparty/web/Makefile
# it's fine dw # it's fine dw
grep -lE '\.full\.(js|css)' copyparty/web/* | grep -lE '\.full\.(js|css)' copyparty/web/* |
@@ -182,6 +199,18 @@ done
sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f" sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f"
} }
[ $no_fnt ] && {
rm -f copyparty/web/deps/scp.woff2
f=copyparty/web/md.css
sed -r '/scp\.woff2/d' <$f >t && tmv "$f"
}
[ $no_dd ] && {
rm -rf copyparty/web/dd
f=copyparty/web/browser.css
sed -r 's/(cursor: )url\([^)]+\), (pointer)/\1\2/; /[0-9]+% \{cursor:/d; /animation: cursor/d' <$f >t && tmv "$f"
}
[ $repack ] || [ $repack ] ||
find | grep -E '\.py$' | find | grep -E '\.py$' |
grep -vE '__version__' | grep -vE '__version__' |
@@ -194,17 +223,46 @@ tmv "$f"
# up2k goes from 28k to 22k laff # up2k goes from 28k to 22k laff
echo entabbening echo entabbening
find | grep -E '\.(js|css|html)$' | while IFS= read -r f; do find | grep -E '\.css$' | while IFS= read -r f; do
awk '{
sub(/^[ \t]+/,"");
sub(/[ \t]+$/,"");
$0=gensub(/^([a-z-]+) *: *(.*[^ ]) *;$/,"\\1:\\2;","1");
sub(/ +\{$/,"{");
gsub(/, /,",")
}
!/\}$/ {printf "%s",$0;next}
1
' <$f | sed 's/;\}$/}/' >t
tmv "$f"
done
find | grep -E '\.(js|html)$' | while IFS= read -r f; do
unexpand -t 4 --first-only <"$f" >t unexpand -t 4 --first-only <"$f" >t
tmv "$f" tmv "$f"
done done
gzres() {
command -v pigz &&
pk='pigz -11 -J 34 -I 100' ||
pk='gzip'
echo "$pk"
find | grep -E '\.(js|css)$' | grep -vF /deps/ | while IFS= read -r f; do
echo -n .
$pk "$f"
done
echo
}
gzres
echo gen tarlist echo gen tarlist
for d in copyparty dep-j2; do find $d -type f; done | for d in copyparty dep-j2; do find $d -type f; done |
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort | sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1 sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
(grep -vE 'gz$' list1; grep -E 'gz$' list1) >list (grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1) >list || true
echo creating tar echo creating tar
args=(--owner=1000 --group=1000) args=(--owner=1000 --group=1000)

View File

@@ -3,10 +3,13 @@ set -ex
pids=() pids=()
for py in python{2,3}; do for py in python{2,3}; do
$py -m unittest discover -s tests >/dev/null & nice $py -m unittest discover -s tests >/dev/null &
pids+=($!) pids+=($!)
done done
python3 scripts/test/smoketest.py &
pids+=($!)
for pid in ${pids[@]}; do for pid in ${pids[@]}; do
wait $pid wait $pid
done done

View File

@@ -6,10 +6,10 @@ import re, os, sys, time, shutil, signal, threading, tarfile, hashlib, platform,
import subprocess as sp import subprocess as sp
""" """
run me with any version of python, i will unpack and run copyparty to edit this file, use HxD or "vim -b"
(there is compressed stuff at the end)
(but please don't edit this file with a text editor run me with any version of python, i will unpack and run copyparty
since that would probably corrupt the binary stuff at the end)
there's zero binaries! just plaintext python scripts all the way down there's zero binaries! just plaintext python scripts all the way down
so you can easily unpack the archive and inspect it for shady stuff so you can easily unpack the archive and inspect it for shady stuff
@@ -380,7 +380,7 @@ def run(tmp, j2):
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB) fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
except Exception as ex: except Exception as ex:
if not WINDOWS: if not WINDOWS:
msg("\033[31mflock:", repr(ex)) msg("\033[31mflock:{!r}\033[0m".format(ex))
t = threading.Thread(target=utime, args=(tmp,)) t = threading.Thread(target=utime, args=(tmp,))
t.daemon = True t.daemon = True

View File

@@ -47,7 +47,7 @@ grep -E '/(python|pypy)[0-9\.-]*$' >$dir/pys || true
printf '\033[1;30mlooking for jinja2 in [%s]\033[0m\n' "$_py" >&2 printf '\033[1;30mlooking for jinja2 in [%s]\033[0m\n' "$_py" >&2
$_py -c 'import jinja2' 2>/dev/null || continue $_py -c 'import jinja2' 2>/dev/null || continue
printf '%s\n' "$_py" printf '%s\n' "$_py"
mv $dir/{,x.}jinja2 mv $dir/{,x.}dep-j2
break break
done)" done)"

105
scripts/test/race.py Normal file
View File

@@ -0,0 +1,105 @@
#!/usr/bin/env python3
import os
import sys
import time
import json
import threading
import http.client
class Conn(object):
def __init__(self, ip, port):
self.s = http.client.HTTPConnection(ip, port, timeout=260)
self.st = []
def get(self, vpath):
self.st = [time.time()]
self.s.request("GET", vpath)
self.st.append(time.time())
ret = self.s.getresponse()
self.st.append(time.time())
if ret.status < 200 or ret.status >= 400:
raise Exception(ret.status)
ret = ret.read()
self.st.append(time.time())
return ret
def get_json(self, vpath):
ret = self.get(vpath)
return json.loads(ret)
class CState(threading.Thread):
def __init__(self, cs):
threading.Thread.__init__(self)
self.daemon = True
self.cs = cs
self.start()
def run(self):
colors = [5, 1, 3, 2, 7]
remotes = []
remotes_ok = False
while True:
time.sleep(0.001)
if not remotes_ok:
remotes = []
remotes_ok = True
for conn in self.cs:
try:
remotes.append(conn.s.sock.getsockname()[1])
except:
remotes.append("?")
remotes_ok = False
m = []
for conn, remote in zip(self.cs, remotes):
stage = len(conn.st)
m.append(f"\033[3{colors[stage]}m{remote}")
m = " ".join(m)
print(f"{m}\033[0m\n\033[A", end="")
def allget(cs, urls):
thrs = []
for c, url in zip(cs, urls):
t = threading.Thread(target=c.get, args=(url,))
t.start()
thrs.append(t)
for t in thrs:
t.join()
def main():
os.system("")
ip, port = sys.argv[1].split(":")
port = int(port)
cs = []
for _ in range(64):
cs.append(Conn(ip, 3923))
CState(cs)
urlbase = "/doujin/c95"
j = cs[0].get_json(f"{urlbase}?ls")
urls = []
for d in j["dirs"]:
urls.append(f"{urlbase}/{d['href']}?th=w")
for n in range(100):
print(n)
allget(cs, urls)
if __name__ == "__main__":
main()

209
scripts/test/smoketest.py Normal file
View File

@@ -0,0 +1,209 @@
import os
import sys
import time
import shlex
import shutil
import signal
import tempfile
import requests
import threading
import subprocess as sp
CPP = []
class Cpp(object):
def __init__(self, args):
args = [sys.executable, "-m", "copyparty"] + args
print(" ".join([shlex.quote(x) for x in args]))
self.ls_pre = set(list(os.listdir()))
self.p = sp.Popen(args)
# , stdout=sp.PIPE, stderr=sp.PIPE)
self.t = threading.Thread(target=self._run)
self.t.daemon = True
self.t.start()
def _run(self):
self.so, self.se = self.p.communicate()
def stop(self, wait):
if wait:
os.kill(self.p.pid, signal.SIGINT)
self.t.join(timeout=2)
else:
self.p.kill() # macos py3.8
def clean(self):
t = os.listdir()
for f in t:
if f not in self.ls_pre and f.startswith("up."):
os.unlink(f)
def await_idle(self, ub, timeout):
req = ["scanning</td><td>False", "hash-q</td><td>0", "tag-q</td><td>0"]
lim = int(timeout * 10)
u = ub + "?h"
for n in range(lim):
try:
time.sleep(0.1)
r = requests.get(u, timeout=0.1)
for x in req:
if x not in r.text:
print("ST: {}/{} miss {}".format(n, lim, x))
raise Exception()
print("ST: idle")
return
except:
pass
def tc1():
ub = "http://127.0.0.1:4321/"
td = os.path.join("srv", "smoketest")
try:
shutil.rmtree(td)
except:
if os.path.exists(td):
raise
for _ in range(10):
try:
os.mkdir(td)
except:
time.sleep(0.1) # win10
assert os.path.exists(td)
vidp = os.path.join(tempfile.gettempdir(), "smoketest.h264")
if not os.path.exists(vidp):
cmd = "ffmpeg -f lavfi -i testsrc=48x32:3 -t 1 -c:v libx264 -tune animation -preset veryslow -crf 69"
sp.check_call(cmd.split(" ") + [vidp])
with open(vidp, "rb") as f:
ovid = f.read()
args = [
"-p4321",
"-e2dsa",
"-e2tsr",
"--no-mutagen",
"--th-ff-jpg",
"--hist",
os.path.join(td, "dbm"),
]
pdirs = []
hpaths = {}
for d1 in ["r", "w", "a"]:
pdirs.append("{}/{}".format(td, d1))
pdirs.append("{}/{}/j".format(td, d1))
for d2 in ["r", "w", "a"]:
d = os.path.join(td, d1, "j", d2)
pdirs.append(d)
os.makedirs(d)
pdirs = [x.replace("\\", "/") for x in pdirs]
udirs = [x.split("/", 2)[2] for x in pdirs]
perms = [x.rstrip("j/")[-1] for x in pdirs]
for pd, ud, p in zip(pdirs, udirs, perms):
if ud[-1] == "j":
continue
hp = None
if pd.endswith("st/a"):
hp = hpaths[ud] = os.path.join(td, "db1")
elif pd[:-1].endswith("a/j/"):
hpaths[ud] = os.path.join(td, "dbm")
hp = None
else:
hp = "-"
hpaths[ud] = os.path.join(pd, ".hist")
arg = "{}:{}:{}".format(pd, ud, p, hp)
if hp:
arg += ":chist=" + hp
args += ["-v", arg]
# return
cpp = Cpp(args)
CPP.append(cpp)
cpp.await_idle(ub, 3)
for d in udirs:
vid = ovid + "\n{}".format(d).encode("utf-8")
try:
requests.post(ub + d, data={"act": "bput"}, files={"f": ("a.h264", vid)})
except:
pass
cpp.clean()
# GET permission
for d, p in zip(udirs, perms):
u = "{}{}/a.h264".format(ub, d)
r = requests.get(u)
ok = bool(r)
if ok != (p in ["a"]):
raise Exception("get {} with perm {} at {}".format(ok, p, u))
# stat filesystem
for d, p in zip(pdirs, perms):
u = "{}/a.h264".format(d)
ok = os.path.exists(u)
if ok != (p in ["a", "w"]):
raise Exception("stat {} with perm {} at {}".format(ok, p, u))
# GET thumbnail, verify contents
for d, p in zip(udirs, perms):
u = "{}{}/a.h264?th=j".format(ub, d)
r = requests.get(u)
ok = bool(r and r.content[:3] == b"\xff\xd8\xff")
if ok != (p in ["a"]):
raise Exception("thumb {} with perm {} at {}".format(ok, p, u))
# check tags
cpp.await_idle(ub, 5)
for d, p in zip(udirs, perms):
u = "{}{}?ls".format(ub, d)
r = requests.get(u)
j = r.json() if r else False
tag = None
if j:
for f in j["files"]:
tag = tag or f["tags"].get("res")
r_ok = bool(j)
w_ok = bool(r_ok and j.get("files"))
if not r_ok or w_ok != (p in ["a"]):
raise Exception("ls {} with perm {} at {}".format(ok, p, u))
if (tag and p != "a") or (not tag and p == "a"):
raise Exception("tag {} with perm {} at {}".format(tag, p, u))
if tag is not None and tag != "48x32":
raise Exception("tag [{}] at {}".format(tag, u))
cpp.stop(True)
def run(tc):
try:
tc()
finally:
try:
CPP[0].stop(False)
except:
pass
def main():
run(tc1)
if __name__ == "__main__":
main()

View File

@@ -23,12 +23,14 @@ def hdr(query):
class Cfg(Namespace): class Cfg(Namespace):
def __init__(self, a=[], v=[], c=None): def __init__(self, a=None, v=None, c=None):
super(Cfg, self).__init__( super(Cfg, self).__init__(
a=a, a=a or [],
v=v, v=v or [],
c=c, c=c,
rproxy=0,
ed=False, ed=False,
nw=False,
no_zip=False, no_zip=False,
no_scandir=False, no_scandir=False,
no_sendfile=True, no_sendfile=True,
@@ -37,6 +39,9 @@ class Cfg(Namespace):
nih=True, nih=True,
mtp=[], mtp=[],
mte="a", mte="a",
hist=None,
no_hash=False,
css_browser=None,
**{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()} **{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
) )
@@ -99,7 +104,7 @@ class TestHttpCli(unittest.TestCase):
pprint.pprint(vcfg) pprint.pprint(vcfg)
self.args = Cfg(v=vcfg, a=["o:o", "x:x"]) self.args = Cfg(v=vcfg, a=["o:o", "x:x"])
self.auth = AuthSrv(self.args, self.log) self.asrv = AuthSrv(self.args, self.log)
vfiles = [x for x in allfiles if x.startswith(top)] vfiles = [x for x in allfiles if x.startswith(top)]
for fp in vfiles: for fp in vfiles:
rok, wok = self.can_rw(fp) rok, wok = self.can_rw(fp)
@@ -188,12 +193,12 @@ class TestHttpCli(unittest.TestCase):
def put(self, url): def put(self, url):
buf = "PUT /{0} HTTP/1.1\r\nCookie: cppwd=o\r\nConnection: close\r\nContent-Length: {1}\r\n\r\nok {0}\n" buf = "PUT /{0} HTTP/1.1\r\nCookie: cppwd=o\r\nConnection: close\r\nContent-Length: {1}\r\n\r\nok {0}\n"
buf = buf.format(url, len(url) + 4).encode("utf-8") buf = buf.format(url, len(url) + 4).encode("utf-8")
conn = tu.VHttpConn(self.args, self.auth, self.log, buf) conn = tu.VHttpConn(self.args, self.asrv, self.log, buf)
HttpCli(conn).run() HttpCli(conn).run()
return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1) return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
def curl(self, url, binary=False): def curl(self, url, binary=False):
conn = tu.VHttpConn(self.args, self.auth, self.log, hdr(url)) conn = tu.VHttpConn(self.args, self.asrv, self.log, hdr(url))
HttpCli(conn).run() HttpCli(conn).run()
if binary: if binary:
h, b = conn.s._reply.split(b"\r\n\r\n", 1) h, b = conn.s._reply.split(b"\r\n\r\n", 1)

View File

@@ -11,16 +11,23 @@ from textwrap import dedent
from argparse import Namespace from argparse import Namespace
from tests import util as tu from tests import util as tu
from copyparty.authsrv import AuthSrv from copyparty.authsrv import AuthSrv, VFS
from copyparty import util from copyparty import util
class Cfg(Namespace): class Cfg(Namespace):
def __init__(self, a=[], v=[], c=None): def __init__(self, a=None, v=None, c=None):
ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()} ex = {k: False for k in "nw e2d e2ds e2dsa e2t e2ts e2tsr".split()}
ex["mtp"] = [] ex2 = {
ex["mte"] = "a" "mtp": [],
super(Cfg, self).__init__(a=a, v=v, c=c, **ex) "mte": "a",
"hist": None,
"no_hash": False,
"css_browser": None,
"rproxy": 0,
}
ex.update(ex2)
super(Cfg, self).__init__(a=a or [], v=v or [], c=c, **ex)
class TestVFS(unittest.TestCase): class TestVFS(unittest.TestCase):
@@ -47,6 +54,7 @@ class TestVFS(unittest.TestCase):
self.assertEqual(util.undot(query), response) self.assertEqual(util.undot(query), response)
def ls(self, vfs, vpath, uname): def ls(self, vfs, vpath, uname):
# type: (VFS, str, str) -> tuple[str, str, str]
"""helper for resolving and listing a folder""" """helper for resolving and listing a folder"""
vn, rem = vfs.get(vpath, uname, True, False) vn, rem = vfs.get(vpath, uname, True, False)
r1 = vn.ls(rem, uname, False) r1 = vn.ls(rem, uname, False)
@@ -112,13 +120,13 @@ class TestVFS(unittest.TestCase):
n = vfs.nodes["a"] n = vfs.nodes["a"]
self.assertEqual(len(vfs.nodes), 1) self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a") self.assertEqual(n.vpath, "a")
self.assertEqual(n.realpath, td + "/a") self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertEqual(n.uread, ["*", "k"]) self.assertEqual(n.uread, ["*", "k"])
self.assertEqual(n.uwrite, ["k"]) self.assertEqual(n.uwrite, ["k"])
n = n.nodes["ac"] n = n.nodes["ac"]
self.assertEqual(len(vfs.nodes), 1) self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a/ac") self.assertEqual(n.vpath, "a/ac")
self.assertEqual(n.realpath, td + "/a/ac") self.assertEqual(n.realpath, os.path.join(td, "a", "ac"))
self.assertEqual(n.uread, ["*", "k"]) self.assertEqual(n.uread, ["*", "k"])
self.assertEqual(n.uwrite, ["k"]) self.assertEqual(n.uwrite, ["k"])
n = n.nodes["acb"] n = n.nodes["acb"]
@@ -250,7 +258,7 @@ class TestVFS(unittest.TestCase):
n = au.vfs n = au.vfs
# root was not defined, so PWD with no access to anyone # root was not defined, so PWD with no access to anyone
self.assertEqual(n.vpath, "") self.assertEqual(n.vpath, "")
self.assertEqual(n.realpath, td) self.assertEqual(n.realpath, None)
self.assertEqual(n.uread, []) self.assertEqual(n.uread, [])
self.assertEqual(n.uwrite, []) self.assertEqual(n.uwrite, [])
self.assertEqual(len(n.nodes), 1) self.assertEqual(len(n.nodes), 1)

View File

@@ -60,12 +60,20 @@ def get_ramdisk():
if os.path.exists("/Volumes"): if os.path.exists("/Volumes"):
# hdiutil eject /Volumes/cptd/ # hdiutil eject /Volumes/cptd/
devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://65536") devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://131072")
devname = devname.strip() devname = devname.strip()
print("devname: [{}]".format(devname)) print("devname: [{}]".format(devname))
for _ in range(10): for _ in range(10):
try: try:
_, _ = chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname) _, _ = chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
with open("/Volumes/cptd/.metadata_never_index", "w") as f:
f.write("orz")
try:
shutil.rmtree("/Volumes/cptd/.fseventsd")
except:
pass
return subdir("/Volumes/cptd") return subdir("/Volumes/cptd")
except Exception as ex: except Exception as ex:
print(repr(ex)) print(repr(ex))
@@ -108,20 +116,24 @@ class VHttpSrv(object):
aliases = ["splash", "browser", "browser2", "msg", "md", "mde"] aliases = ["splash", "browser", "browser2", "msg", "md", "mde"]
self.j2 = {x: J2_FILES for x in aliases} self.j2 = {x: J2_FILES for x in aliases}
def cachebuster(self):
return "a"
class VHttpConn(object): class VHttpConn(object):
def __init__(self, args, auth, log, buf): def __init__(self, args, asrv, log, buf):
self.s = VSock(buf) self.s = VSock(buf)
self.sr = Unrecv(self.s) self.sr = Unrecv(self.s)
self.addr = ("127.0.0.1", "42069") self.addr = ("127.0.0.1", "42069")
self.args = args self.args = args
self.auth = auth self.asrv = asrv
self.nid = None
self.log_func = log self.log_func = log
self.log_src = "a" self.log_src = "a"
self.lf_url = None self.lf_url = None
self.hsrv = VHttpSrv() self.hsrv = VHttpSrv()
self.nreq = 0
self.nbyte = 0 self.nbyte = 0
self.workload = 0
self.ico = None self.ico = None
self.thumbcli = None self.thumbcli = None
self.t0 = time.time() self.t0 = time.time()