Compare commits

...

252 Commits

Author SHA1 Message Date
ed
3649e8288a v0.12.0 2021-07-28 01:47:42 +02:00
ed
9a45e26026 another windows sighandler fix 2021-07-28 01:18:51 +02:00
ed
e65f127571 list server ips on windows 2021-07-28 01:18:38 +02:00
ed
3bfc699787 block hotkeys when insufficient permissions 2021-07-27 23:16:50 +02:00
ed
955318428a font adjustments 2021-07-27 23:12:47 +02:00
ed
f6279b356a fix more signal handler jank 2021-07-27 22:11:33 +02:00
ed
4cc3cdc989 list server ips on macos 2021-07-27 20:39:16 +02:00
ed
f9aa20a3ad naming: navpane 2021-07-27 20:39:01 +02:00
ed
129d33f1a0 mv/del: recursive rmdir 2021-07-27 19:15:58 +02:00
ed
1ad7a3f378 await and monitor workers on startup 2021-07-27 15:48:00 +00:00
ed
b533be8818 actually this is much better 2021-07-27 12:26:34 +02:00
ed
fb729e5166 file selection scroll behavior 2021-07-27 12:13:00 +02:00
ed
d337ecdb20 fix color bleed 2021-07-27 12:02:55 +02:00
ed
5f1f0a48b0 toast appearance 2021-07-27 11:48:32 +02:00
ed
e0f1cb94a5 toast close-handle 2021-07-27 10:05:53 +02:00
ed
a362ee2246 dodge a bullet on centos7 2021-07-27 00:28:40 +02:00
ed
19f23c686e toasty 2021-07-27 00:18:08 +02:00
ed
23b20ff4a6 bos abspath 2021-07-26 23:53:13 +02:00
ed
72574da834 hide fileman buttons when argv-disabled 2021-07-26 23:35:55 +02:00
ed
d5a79455d1 cleanup 2021-07-26 23:31:45 +02:00
ed
070d4b9da9 allow regular hotkeys during file selection 2021-07-26 22:50:58 +02:00
ed
0ace22fffe file selection hotkeys 2021-07-26 22:47:54 +02:00
ed
9e483d7694 ctrl-a 2021-07-26 22:44:07 +02:00
ed
26458b7a06 keyboard file selection 2021-07-26 22:40:55 +02:00
ed
b6a4604952 show fileman buttons conditionally 2021-07-26 21:00:36 +02:00
ed
af752fbbc2 reload-signal to source folder on paste 2021-07-26 20:49:26 +02:00
ed
279c9d706a list volumes/permissions on startup 2021-07-26 20:07:23 +02:00
ed
806e7b5530 fix argv compat bug 2021-07-26 19:40:12 +02:00
ed
f3dc6a217b use the new toast in md-editor 2021-07-26 19:20:36 +02:00
ed
7671d791fa rename works + more symlink fixes 2021-07-26 17:44:20 +02:00
ed
8cd84608a5 toast coloring 2021-07-26 03:00:37 +02:00
ed
980c6fc810 add scheduled rescans + fix mv bugs 2021-07-26 02:34:56 +02:00
ed
fb40a484c5 mv(folder) works 2021-07-26 01:26:58 +02:00
ed
daa9dedcaa rm works 2021-07-26 00:29:28 +02:00
ed
0d634345ac signal handling was still busted 2021-07-26 00:19:33 +02:00
ed
e648252479 mv works (at least in trivial cases) 2021-07-25 21:15:43 +02:00
ed
179d7a9ad8 bikeshedding 2021-07-25 19:47:40 +02:00
ed
19bc962ad5 add toasts 2021-07-25 10:50:11 +02:00
ed
27cce086c6 fileman ui 2021-07-25 01:09:14 +02:00
ed
fec0c620d4 add accounts/volumes section 2021-07-24 22:26:52 +02:00
ed
05a1a31cab too soon 2021-07-24 22:20:02 +02:00
ed
d020527c6f centralize mojibake support stuff 2021-07-24 21:56:55 +02:00
ed
4451485664 mv/rm (serverside), 100% untested 2021-07-24 20:08:31 +02:00
ed
a4e1a3738a more deletion progress 2021-07-23 23:42:07 +02:00
ed
4339dbeb8d mv/rm handlers 2021-07-23 01:14:49 +02:00
ed
5b0605774c add move/delete permission flags 2021-07-22 23:48:29 +02:00
ed
e3684e25f8 treat symlinks as regular files in db 2021-07-22 19:34:40 +02:00
ed
1359213196 prefer native sqlite3 backup (journal-aware) 2021-07-22 19:10:42 +02:00
ed
03efc6a169 support ancient glibc 2021-07-22 19:04:59 +02:00
ed
15b5982211 v0.11.47 2021-07-22 10:09:04 +02:00
ed
0eb3a5d387 ignorable exceptions 2021-07-22 10:08:39 +02:00
Lytexx
7f8777389c fix typo 2021-07-22 09:34:04 +02:00
ed
4eb20f10ad v0.11.46 2021-07-22 08:42:27 +02:00
ed
daa11df558 avoid chrome bug 809574 2021-07-22 08:40:46 +02:00
ed
1bb0db30a0 fix logout link going 404 2021-07-21 01:30:27 +02:00
ed
02910b0020 v0.11.45 2021-07-20 23:23:08 +02:00
ed
23b8901c9c include localstore on the crashpage 2021-07-20 23:22:35 +02:00
ed
99f6ed0cd7 up2k-cli: avoid loading sha.js multiple times 2021-07-20 23:14:30 +02:00
ed
890c310880 another attempt at fixing tooltips on iphone 2021-07-20 23:07:15 +02:00
ed
0194eeb31f add login/permissions indicator 2021-07-20 22:42:03 +02:00
ed
f9be4c62b1 v0.11.44 2021-07-20 01:03:08 +02:00
ed
027e8c18f1 sfx: option to remove mouse cursor 2021-07-20 01:00:28 +02:00
ed
4a3bb35a95 sfx: option to remove scp.woff2 2021-07-20 00:45:54 +02:00
ed
4bfb0d4494 notes 2021-07-19 23:46:44 +02:00
ed
7e0ef03a1e fix audio player edgecase (continue into next folder with sidebar closed) 2021-07-19 23:10:48 +02:00
ed
f7dbd95a54 v0.11.43 2021-07-19 01:56:19 +02:00
ed
515ee2290b v0.11.42 2021-07-18 23:22:09 +02:00
ed
b0c78910bb fix tabchange triggering tooltips 2021-07-18 23:21:36 +02:00
ed
f4ca62b664 reattach tooltips on column show/hide 2021-07-18 23:14:57 +02:00
ed
8eb8043a3d fix 3rdparty namecase 2021-07-18 22:50:29 +02:00
ed
3e8541362a keep active dir scrolled into view on keybd nav 2021-07-18 22:32:34 +02:00
ed
789724e348 use preferred key notation in search results 2021-07-18 21:50:57 +02:00
ed
5125b9532f fix multiple whitespace in query translator 2021-07-18 21:39:28 +02:00
ed
ebc9de02b0 case-insensitive tag search 2021-07-18 21:34:36 +02:00
ed
ec788fa491 mutagen fixes:
* extract codec and format info
* add FFprobe as fallback when mutagen fails
* add option to blacklist FFprobe for tags
2021-07-18 19:57:31 +02:00
ed
9b5e264574 systemd: fix name in journalctl 2021-07-17 19:14:15 +02:00
ed
57c297274b v0.11.41 2021-07-17 17:53:34 +02:00
ed
e9bf092317 tweak audio drawer tab 2021-07-17 17:24:48 +02:00
ed
d173887324 explain confusing behavior in journalctl 2021-07-17 16:45:49 +02:00
ed
99820d854c oh that wasnt enough ok then 2021-07-17 16:45:25 +02:00
ed
62df0a0eb2 thx osx 2021-07-17 16:43:22 +02:00
ed
600e9ac947 try to workaround iphones not hiding tooltips 2021-07-17 16:03:21 +02:00
ed
3ca41be2b4 do up2k snapshot on shutdown 2021-07-17 14:48:35 +02:00
ed
5c7debd900 improve signal handling + emit sd-notify on start 2021-07-17 04:15:07 +02:00
ed
7fa5b23ce3 sfx: fix color bleed on flock errors 2021-07-17 04:12:14 +02:00
ed
ff82738aaf vscode: support whitespace in python binary path 2021-07-17 04:11:14 +02:00
ed
bf5ee9d643 colum header tooltips 2021-07-17 02:52:55 +02:00
ed
72a8593ecd gridmode shortcut in the audio drawer 2021-07-17 01:45:05 +02:00
ed
bc3bbe07d4 combine tabs on narrow screens 2021-07-17 01:21:49 +02:00
ed
c7cb64bfef gallery: add hotkey list button 2021-07-17 01:14:14 +02:00
ed
629f537d06 add more hotkey tooltips 2021-07-17 01:05:26 +02:00
ed
9e988041b8 cosmetics 2021-07-16 02:56:21 +02:00
ed
f9a8b5c9d7 update readme 2021-07-16 02:44:06 +02:00
ed
b9c3538253 nope, not doing this 2021-07-15 23:49:30 +02:00
ed
2bc0cdf017 fix md-editor hotkeys on dvorak 2021-07-15 23:24:10 +02:00
ed
02a91f60d4 playing some golf 2021-07-15 23:19:37 +02:00
ed
fae83da197 v0.11.40 2021-07-15 01:13:15 +02:00
ed
0fe4aa6418 ux tweaks 2021-07-15 01:04:38 +02:00
ed
21a51bf0dc make it feel like home 2021-07-15 00:50:43 +02:00
ed
bcb353cc30 allow ctrl-clicking primary tabs 2021-07-15 00:37:14 +02:00
ed
6af4508518 adjust the sfx edit warning 2021-07-15 00:26:33 +02:00
ed
6a559bc28a gallery: dispose videos to stop buffering 2021-07-15 00:22:26 +02:00
ed
0f5026cd20 gallery: option to autoplay next video on end 2021-07-15 00:04:33 +02:00
ed
a91b80a311 gallery: add video loop hotkey R 2021-07-14 09:42:38 +02:00
ed
ec534701c8 gallery: pause/resume audio player on video 2021-07-14 09:40:12 +02:00
ed
af5169f67f gallery: fix hotkeys + focus 2021-07-14 09:35:50 +02:00
ed
18676c5e65 better crash page 2021-07-14 09:34:42 +02:00
ed
e2df6fda7b update hotkeys 2021-07-13 02:20:52 +02:00
ed
e9ae9782fe v0.11.39 2021-07-13 00:54:23 +02:00
ed
016dba4ca9 v0.11.38 2021-07-13 00:35:34 +02:00
ed
39c7ef305f add a link to clear settings on the js crash page 2021-07-13 00:33:46 +02:00
ed
849c1dc848 video-player: add hotkeys m=mute, f=fullscreen 2021-07-13 00:23:48 +02:00
ed
61414014fe gallery: fix link overlapping image 2021-07-13 00:14:06 +02:00
ed
578a915884 stack/thread monitors in mpw + better thread names 2021-07-12 23:03:52 +02:00
ed
eacafb8a63 add option to log summary of running threads 2021-07-12 22:57:37 +02:00
ed
4446760f74 fix link to ?stack on rootless configs 2021-07-12 22:55:38 +02:00
ed
6da2a083f9 v0.11.37 2021-07-12 00:51:59 +02:00
ed
8837c8f822 print zip/tar errors to log 2021-07-12 00:47:22 +02:00
ed
bac301ed66 get rid of iffy default-args 2021-07-12 00:15:13 +02:00
ed
061db3906d v0.11.36 2021-07-11 06:39:58 +02:00
ed
fd7df5c952 v0.11.35 2021-07-11 06:22:56 +02:00
ed
a270019147 easier to tell youre trying to watch a video that firefox cant deal with 2021-07-11 06:21:25 +02:00
ed
55e0209901 add video-player keybinds 2021-07-11 06:12:24 +02:00
ed
2b255fbbed add in-gallery video playback 2021-07-11 03:25:46 +02:00
ed
8a2345a0fb top of the sandwich fell off 2021-07-11 02:06:18 +02:00
ed
bfa9f535aa more context in exceptions 2021-07-11 01:59:07 +02:00
ed
f757623ad8 make bdmv thumbnails 2021-07-09 20:09:32 +02:00
ed
3c7465e268 option to disable thumbcache eviction 2021-07-09 19:55:17 +02:00
ed
108665fc4f v0.11.34 2021-07-09 17:12:21 +02:00
ed
ed519c9138 add performance notes 2021-07-09 17:10:37 +02:00
ed
2dd2e2c57e discard logs in mpw 2021-07-09 17:01:11 +02:00
ed
6c3a976222 scale max-clients to mp-workers 2021-07-09 16:48:02 +02:00
ed
80cc26bd95 fix max-client limit 2021-07-09 16:33:11 +02:00
ed
970fb84fd8 hex looks better 2021-07-09 16:11:33 +02:00
ed
20cbcf6931 logging + shutdown cleanup 2021-07-09 16:07:16 +02:00
ed
8fcde2a579 move tcp accept into mp-worker 2021-07-09 15:49:36 +02:00
ed
b32d1f8ad3 make ?stack work anywhere 2021-07-09 13:46:42 +02:00
ed
03513e0cb1 effectively pointless but cool 2021-07-09 03:41:44 +02:00
ed
e041a2b197 fix centos7 support 2021-07-08 23:35:28 +02:00
ed
d7d625be2a v0.11.33 2021-07-07 10:45:47 +02:00
ed
4121266678 v0.11.32 2021-07-06 21:58:03 +02:00
ed
22971a6be4 up2k-cli: add turbo button 2021-07-06 21:43:07 +02:00
ed
efbf8d7e0d better handling of invalid requests 2021-07-06 01:03:09 +02:00
ed
397396ea4a apply -nw to PUT uploads too 2021-07-06 00:49:39 +02:00
ed
e59b077c21 announce the rotates 2021-07-06 00:43:37 +02:00
ed
4bc39f3084 add logrotate 2021-07-06 00:23:51 +02:00
ed
21c3570786 detect more recursive symlinks 2021-07-05 23:50:03 +02:00
ed
2f85c1fb18 add logging to file 2021-07-05 23:30:33 +02:00
ed
1e27a4c2df make thumb-dir.txt unretrievable 2021-07-05 00:21:33 +02:00
ed
456f575637 v0.11.31 2021-07-04 16:44:29 +02:00
ed
51546c9e64 add missing -nw check 2021-07-04 16:10:20 +02:00
ed
83b4b70ef4 add keepalive handshakes 2021-07-04 16:04:26 +02:00
ed
a5120d4f6f parallelize handshakes 2021-07-04 01:48:01 +02:00
ed
c95941e14f add testimonials, drop bad idea 2021-07-04 00:32:29 +02:00
ed
0dd531149d good 2021-07-03 18:11:52 +02:00
ed
67da1b5219 add ideas 2021-07-03 17:29:49 +02:00
ed
919bd16437 add hls notes 2021-07-03 01:32:36 +02:00
ed
ecead109ab v0.11.30 2021-07-01 22:27:19 +02:00
ed
765294c263 ignore dupe-chunk warnings; handshake takes care of it 2021-07-01 20:22:12 +02:00
ed
d6b5351207 add cachebuster because chrome ignores no-cache 2021-07-01 20:10:02 +02:00
ed
a2009bcc6b up2k-cli: recover from tcp/dns issues on upload 2021-07-01 00:52:09 +02:00
ed
12709a8a0a up2k-cli: recover from antivirus yanking files mid-read 2021-07-01 00:11:40 +02:00
ed
c055baefd2 up2k-client: maybe fix busy-tab (assumed linear progress) 2021-06-30 23:17:07 +02:00
ed
56522599b5 up2k-client: way faster init on large filedrops 2021-06-30 21:26:13 +02:00
ed
664f53b75d chrome gets stuck iterating over aux.h on win10 2021-06-30 19:26:06 +02:00
ed
87200d9f10 make -nw apply to more stuff 2021-06-30 19:23:45 +02:00
ed
5c3d0b6520 catch errors in onloads 2021-06-30 17:09:37 +02:00
ed
bd49979f4a v0.11.29 2021-06-30 01:51:57 +02:00
ed
7e606cdd9f make search rate-control less visually confusing 2021-06-30 01:44:25 +02:00
ed
8b4b7fa794 allow opening tree nodes in a new tab 2021-06-30 01:08:20 +02:00
ed
05345ddf8b add per-connection request counting 2021-06-30 01:00:00 +02:00
ed
66adb470ad optional progressbar tint 2021-06-30 00:55:57 +02:00
ed
e15c8fd146 add upload pause 2021-06-30 00:34:33 +02:00
ed
0f09b98a39 scan for additional folder thumbnails 2021-06-30 00:19:39 +02:00
ed
b4d6f4e24d american-friendly upload limits (allow additional bypass using manual text entry) 2021-06-30 00:11:23 +02:00
ed
3217fa625b more todo 2021-06-29 23:59:15 +02:00
ed
e719ff8a47 make sfx kipu-proof 2021-06-29 23:53:57 +02:00
ed
9fcf528d45 update readme 2021-06-29 23:32:21 +02:00
ed
1ddbf5a158 update todo 2021-06-29 23:00:28 +02:00
ed
64bf4574b0 add todo maybe 2021-06-28 20:38:59 +02:00
ed
5649d26077 v0.11.28 2021-06-28 15:36:13 +02:00
ed
92f923effe hotkey for adjusting tree width 2021-06-28 15:34:10 +02:00
ed
0d46d548b9 fix panic when zero accounts 2021-06-28 15:20:40 +02:00
ed
062df3f0c3 point control-panel link to / 2021-06-27 00:52:15 +02:00
ed
789fb53b8e tweaks 2021-06-27 00:49:28 +02:00
ed
351db5a18f ah yes trailing whitespace as markup my good old friend we meet again 2021-06-27 00:20:42 +02:00
ed
aabbd271c8 add debian howto 2021-06-27 00:19:37 +02:00
ed
aae8e0171e v0.11.27 2021-06-25 22:23:21 +02:00
ed
45827a2458 fix exit-search button in gridview 2021-06-25 22:18:16 +02:00
ed
726030296f apparently the html dom-property is not normalized 2021-06-25 22:07:37 +02:00
ed
6659ab3881 ajax subfolders from gridview 2021-06-25 21:49:09 +02:00
ed
c6a103609e fix gridview selection/baguettebox order 2021-06-25 21:35:45 +02:00
ed
c6b3f035e5 gridview audio playback in search results too 2021-06-25 21:12:49 +02:00
ed
2b0a7e378e persist url-password as cookie 2021-06-25 20:39:55 +02:00
ed
b75ce909c8 audio seek with scrollbar on progressbar 2021-06-25 20:24:30 +02:00
ed
229c3f5dab play audio from grid when widget open 2021-06-25 20:04:19 +02:00
ed
ec73094506 v0.11.26 2021-06-25 03:10:43 +02:00
ed
c7650c9326 v0.11.25 2021-06-25 03:06:15 +02:00
ed
d94c6d4e72 more rice 2021-06-25 03:02:04 +02:00
ed
3cc8760733 clear seekbar when switching folders 2021-06-25 02:56:21 +02:00
ed
a2f6973495 heh 2021-06-25 02:43:47 +02:00
ed
f8648fa651 always set mediasession play/pause state 2021-06-25 02:39:39 +02:00
ed
177aa038df send charset=utf8 for css, js files 2021-06-25 02:10:42 +02:00
ed
e0a14ec881 event hints for ogvjs playback 2021-06-25 02:03:18 +02:00
ed
9366512f2f audio player: add pause-fade + track-restart +
fix ogvjs paused-seek
2021-06-25 01:46:30 +02:00
ed
ea38b8041a actually fix autoplay on some chromes 2021-06-25 00:43:58 +02:00
ed
f1870daf0d retry filesearch when rate-limited 2021-06-23 22:01:06 +02:00
ed
9722441aad maybe fix autoplay on some chromes 2021-06-23 20:35:05 +02:00
ed
9d014087f4 censor passwords in logs 2021-06-23 00:04:11 +02:00
ed
83b4038b85 ok they actually served a purpose 2021-06-22 21:33:11 +00:00
ed
1e0a448feb audio-key: truncate at 5min + mojibake support 2021-06-22 22:21:39 +02:00
ed
fb81de3b36 v0.11.24 2021-06-22 17:28:09 +02:00
ed
aa4f352301 prefer audio tags in audio files 2021-06-22 17:21:24 +02:00
ed
f1a1c2ea45 recover from opening a corrupt database 2021-06-22 17:19:56 +02:00
ed
6249bd4163 add pebkac hints 2021-06-22 17:18:34 +02:00
ed
2579dc64ce update notes 2021-06-21 22:49:28 +00:00
ed
356512270a file extensions dont contain whitespace 2021-06-21 23:50:35 +02:00
ed
bed27f2b43 mention fix for the OSD popup on windows 2021-06-21 23:43:07 +02:00
ed
54013d861b v0.11.23 2021-06-21 21:15:56 +02:00
ed
ec100210dc support showing album-cover on windows lockscreen 2021-06-21 19:15:22 +00:00
ed
3ab1acf32c v0.11.22 2021-06-21 20:30:29 +02:00
ed
8c28266418 subscribe to media-keys globally as a media player 2021-06-21 20:26:11 +02:00
ed
7f8b8dcb92 scandir is not withable before py3.6 2021-06-21 20:23:35 +02:00
ed
6dd39811d4 disable u2idx if sqlite3 is unavailable 2021-06-21 20:22:54 +02:00
ed
35e2138e3e doc: macos support 2021-06-21 18:42:15 +02:00
ed
239b4e9fe6 v0.11.21 2021-06-20 21:25:18 +02:00
ed
2fcd0e7e72 abandon listing tags in browser when db busy 2021-06-20 21:19:47 +02:00
ed
357347ce3a lower timeout on db reads 2021-06-20 21:03:35 +02:00
ed
36dc1107fb update dbtool desc 2021-06-20 20:05:43 +02:00
ed
0a3bbc4b4a v0.11.20 for real 2021-06-20 19:32:17 +02:00
ed
855b93dcf6 v0.11.20 2021-06-20 18:53:58 +02:00
ed
89b79ba267 fix histpath getting indexed on windows 2021-06-20 17:59:27 +02:00
ed
f5651b7d94 dont include hidden colums in /np clips 2021-06-20 17:45:59 +02:00
ed
1881019ede support cygpaths for mtag binaries 2021-06-20 17:45:23 +02:00
ed
caba4e974c upgrade dbtool for v4 2021-06-20 17:44:24 +02:00
ed
bc3c9613bc cosmetic macos fix on shutdown 2021-06-20 15:50:37 +02:00
ed
15a3ee252e support backslash in filenames 2021-06-20 15:50:06 +02:00
ed
be055961ae adjust up2k hashlen to match base64 window 2021-06-20 15:32:36 +02:00
ed
e3031bdeec fix up2k folder-upload 2021-06-20 00:00:50 +00:00
ed
75917b9f7c better fallback 2021-06-19 16:21:39 +02:00
ed
910732e02c update build notes 2021-06-19 16:20:35 +02:00
ed
264b497681 v0.11.19 2021-06-19 01:32:17 +02:00
ed
372b949622 fix tooltip indicator 2021-06-19 01:25:07 +02:00
ed
789a602914 save some more bytes on the wire 2021-06-19 01:18:48 +02:00
ed
093e955100 move stuff that needs javascript out of the html 2021-06-19 01:10:40 +02:00
ed
c32a89bebf minor lightmode tweaks 2021-06-19 00:17:39 +02:00
ed
c0bebe9f9f eq-param error-hilight in lightmode 2021-06-18 23:51:26 +02:00
ed
57579b2fe5 fix android-chrome layout glitch in up2k 2021-06-18 23:38:43 +02:00
ed
51d14a6b4d fix toolbar tooltips on android 2021-06-18 22:11:01 +02:00
ed
c50f1b64e5 dodge android-chrome bug: canvas aspect ratio 2021-06-18 21:46:15 +02:00
ed
98aaab02c5 block scroll events, hilight selected radios 2021-06-18 20:49:38 +02:00
ed
0fc7973d8b add shadow to playback times 2021-06-18 20:24:36 +02:00
63 changed files with 5489 additions and 2291 deletions

.vscode/tasks.json (vendored): 5 changed lines

@@ -9,7 +9,10 @@
{
"label": "no_dbg",
"type": "shell",
"command": "${config:python.pythonPath} .vscode/launch.py"
"command": "${config:python.pythonPath}",
"args": [
".vscode/launch.py"
]
}
]
}

README.md: 294 changed lines

@@ -16,23 +16,31 @@ turn your phone or raspi into a portable file server with resumable uploads/down
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [thumbnails](#thumbnails) // [md-viewer](#markdown-viewer) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [ie4](#browser-support)
## breaking changes \o/
this is the readme for v0.12 which has a different expression for volume permissions (`-v`); see [the v0.11.x readme](https://github.com/9001/copyparty/tree/15b59822112dda56cee576df30f331252fc62628#readme) for stuff regarding the [current stable release](https://github.com/9001/copyparty/releases/tag/v0.11.47)
## readme toc
* top
* [quickstart](#quickstart)
* [on debian](#on-debian)
* [notes](#notes)
* [status](#status)
* [testimonials](#testimonials)
* [bugs](#bugs)
* [general bugs](#general-bugs)
* [not my bugs](#not-my-bugs)
* [the browser](#the-browser)
* [tabs](#tabs)
* [hotkeys](#hotkeys)
* [tree-mode](#tree-mode)
* [navpane](#navpane)
* [thumbnails](#thumbnails)
* [zip downloads](#zip-downloads)
* [uploading](#uploading)
* [file-search](#file-search)
* [file manager](#file-manager)
* [markdown viewer](#markdown-viewer)
* [other tricks](#other-tricks)
* [searching](#searching)
@@ -44,6 +52,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [browser support](#browser-support)
* [client examples](#client-examples)
* [up2k](#up2k)
* [performance](#performance)
* [dependencies](#dependencies)
* [optional dependencies](#optional-dependencies)
* [install recommended deps](#install-recommended-deps)
@@ -51,23 +60,26 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [sfx](#sfx)
* [sfx repack](#sfx-repack)
* [install on android](#install-on-android)
* [dev env setup](#dev-env-setup)
* [how to release](#how-to-release)
* [building](#building)
* [dev env setup](#dev-env-setup)
* [just the sfx](#just-the-sfx)
* [complete release](#complete-release)
* [todo](#todo)
* [discarded ideas](#discarded-ideas)
## quickstart
download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone full access to the current folder; see `-h` for help if you want accounts and volumes etc
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone full access to the current folder; see `-h` for help if you want [accounts and volumes](#accounts-and-volumes) etc
some recommended options:
* `-e2dsa` enables general file indexing, see [search configuration](#search-configuration)
* `-e2ts` enables audio metadata indexing (needs either FFprobe or mutagen), see [optional dependencies](#optional-dependencies)
* `-v /mnt/music:/music:r:afoo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, with user `foo` as `a`dmin (read/write), password `bar`
* replace `:r:afoo` with `:rfoo` to only make the folder readable by `foo` and nobody else
* in addition to `r`ead and `a`dmin, `w`rite makes a folder write-only, so cannot list/access files in it
* `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen), see [optional dependencies](#optional-dependencies)
* `-v /mnt/music:/music:r:rw,foo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, and read-write for user `foo`, password `bar`
* replace `:r:rw,foo` with `:r,foo` to only make the folder readable by `foo` and nobody else
* see [accounts and volumes](#accounts-and-volumes) for the syntax and other access levels (`r`ead, `w`rite, `m`ove, `d`elete)
* `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access
you may also want these, especially on servers:
@@ -75,6 +87,19 @@ you may also want these, especially on servers:
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)
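for example, combining the recommended options above into one command (just a sketch; the account name, password and paths are placeholders, using the new v0.12 volume syntax):

```sh
# index all volumes on startup (-e2dsa), read audio tags (-e2ts),
# share /mnt/music read-only for everyone and read-write for user foo
python3 copyparty-sfx.py \
  -e2dsa -e2ts \
  -a foo:bar \
  -v /mnt/music:/music:r:rw,foo
```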
### on debian
recommended steps to enable audio metadata and thumbnails (from images and videos):
* as root, run the following:
`apt install python3 python3-pip python3-dev ffmpeg`
* then, as the user which will be running copyparty (so hopefully not root), run this:
`python3 -m pip install --user -U Pillow pillow-avif-plugin`
(skipped `pyheif-pillow-opener` because apparently debian is too old to build it)
## notes
general:
@@ -95,9 +120,9 @@ summary: all planned features work! now please enjoy the bloatening
* backend stuff
* ☑ sanic multipart parser
* ☑ load balancer (multiprocessing)
* ☑ multiprocessing (actual multithreading)
* ☑ volumes (mountpoints)
* ☑ accounts
* ☑ [accounts](#accounts-and-volumes)
* upload
* ☑ basic: plain multipart, ie6 support
* ☑ up2k: js, resumable, multithreaded
@@ -108,15 +133,15 @@ summary: all planned features work! now please enjoy the bloatening
* ☑ folders as zip / tar files
* ☑ FUSE client (read-only)
* browser
* ☑ tree-view
* ☑ audio player
* ☑ navpane (directory tree sidebar)
* ☑ audio player (with OS media controls)
* ☑ thumbnails
* ☑ images using Pillow
* ☑ videos using FFmpeg
* ...of images using Pillow
* ...of videos using FFmpeg
* ☑ cache eviction (max-age; maybe max-size eventually)
* ☑ image gallery
* ☑ image gallery with webm player
* ☑ SPA (browse while uploading)
* if you use the file-tree on the left only, not folders in the file list
* if you use the navpane to navigate, not folders in the file list
* server indexing
* ☑ locate files by contents
* ☑ search by name/path/date/size
@@ -126,12 +151,19 @@ summary: all planned features work! now please enjoy the bloatening
* ☑ editor (sure why not)
## testimonials
small collection of user feedback
`good enough`, `surprisingly correct`, `certified good software`, `just works`, `why`
# bugs
* Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
* Windows: python 3.7 and older cannot read tags with FFprobe, so use Mutagen or upgrade
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* Windows: python 2.7 cannot handle filenames with mojibake
* MacOS: `--th-ff-jpg` may fix thumbnails using macports-FFmpeg
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions
## general bugs
@@ -141,9 +173,39 @@ summary: all planned features work! now please enjoy the bloatening
## not my bugs
* Windows: msys2-python 3.8.6 occasionally throws "RuntimeError: release unlocked lock" when leaving a scoped mutex in up2k
* Windows: folders cannot be accessed if the name ends with `.`
* python or windows bug
* Windows: msys2-python 3.8.6 occasionally throws `RuntimeError: release unlocked lock` when leaving a scoped mutex in up2k
* this is an msys2 bug, the regular windows edition of python is fine
* VirtualBox: sqlite throws `Disk I/O Error` when running in a VM and the up2k database is in a vboxsf
* use `--hist` or the `hist` volflag (`-v [...]:chist=/tmp/foo`) to place the db inside the vm instead
# accounts and volumes
* `-a usr:pwd` adds account `usr` with password `pwd`
* `-v .::r` adds current-folder `.` as the webroot, `r`eadable by anyone
* the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set
* when granting permissions to an account, the names are comma-separated: `-v .::r,usr1,usr2:rw,usr3,usr4`
permissions:
* `r` (read): browse folder contents, download files, download as zip/tar
* `w` (write): upload files, move files *into* folder
* `m` (move): move files/folders *from* folder
* `d` (delete): delete files/folders
example:
* add accounts named u1, u2, u3 with passwords p1, p2, p3: `-a u1:p1 -a u2:p2 -a u3:p3`
* make folder `/srv` the root of the filesystem, read-only by anyone: `-v /srv::r`
* make folder `/mnt/music` available at `/music`, read-only for u1 and u2, read-write for u3: `-v /mnt/music:music:r,u1,u2:rw,u3`
* unauthorized users accessing the webroot can see that the `music` folder exists, but cannot open it
* make folder `/mnt/incoming` available at `/inc`, write-only for u1, read-move for u2: `-v /mnt/incoming:inc:w,u1:rm,u2`
* unauthorized users accessing the webroot can see that the `inc` folder exists, but cannot open it
* `u1` can open the `inc` folder, but cannot see the contents, only upload new files to it
* `u2` can browse it and move files *from* `/inc` into any folder where `u2` has write-access
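putting the examples above together into a single (hypothetical) command:

```sh
# accounts u1/u2/u3, a read-only webroot at /srv,
# /music readable by u1+u2 and read-write for u3,
# /inc write-only for u1 and read+move for u2
python3 copyparty-sfx.py \
  -a u1:p1 -a u2:p2 -a u3:p3 \
  -v /srv::r \
  -v /mnt/music:music:r,u1,u2:rw,u3 \
  -v /mnt/incoming:inc:w,u1:rm,u2
```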
# the browser
@@ -157,40 +219,73 @@ summary: all planned features work! now please enjoy the bloatening
* `[📂]` mkdir, create directories
* `[📝]` new-md, create a new markdown document
* `[📟]` send-msg, either to server-log or into textfiles if `--urlform save`
* `[⚙️]` client configuration options
* `[🎺]` audio-player config options
* `[⚙️]` general client config options
## hotkeys
the browser has the following hotkeys
the browser has the following hotkeys (assumes qwerty, ignores actual layout)
* `B` toggle breadcrumbs / navpane
* `I/K` prev/next folder
* `P` parent folder
* `M` parent folder (or unexpand current)
* `G` toggle list / grid view
* `T` toggle thumbnails / icons
* `ctrl-X` cut selected files/folders
* `ctrl-V` paste
* `F2` rename selected file/folder
* when a file/folder is selected (in not-grid-view):
* `Up/Down` move cursor
* shift+`Up/Down` select and move cursor
* ctrl+`Up/Down` move cursor and scroll viewport
* `Space` toggle file selection
* `Ctrl-A` toggle select all
* when playing audio:
* `0..9` jump to 10%..90%
* `U/O` skip 10sec back/forward
* `J/L` prev/next song
* `M` play/pause (also starts playing the folder)
* `U/O` skip 10sec back/forward
* `0..9` jump to 0%..90%
* `P` play/pause (also starts playing the folder)
* when viewing images / playing videos:
* `J/L, Left/Right` prev/next file
* `Home/End` first/last file
* `Esc` close viewer
* videos:
* `U/O` skip 10sec back/forward
* `P/K/Space` play/pause
* `F` fullscreen
* `C` continue playing next video
* `R` loop
* `M` mute
* when the navpane is open:
* `A/D` adjust tree width
* in the grid view:
* `S` toggle multiselect
* `A/D` zoom
* shift+`A/D` zoom
* in the markdown editor:
* `^s` save
* `^h` header
* `^k` autoformat table
* `^u` jump to next unicode character
* `^e` toggle editor / preview
* `^up, ^down` jump paragraphs
## tree-mode
## navpane
by default there's a breadcrumbs path; you can replace this with a tree-browser sidebar thing by clicking the 🌲
by default there's a breadcrumbs path; you can replace this with a navpane (tree-browser sidebar thing) by clicking the `🌲` or pressing the `B` hotkey
click `[-]` and `[+]` to adjust the size, and the `[a]` toggles if the tree should widen dynamically as you go deeper or stay fixed-size
click `[-]` and `[+]` (or hotkeys `A`/`D`) to adjust the size, and the `[a]` toggles if the tree should widen dynamically as you go deeper or stay fixed-size
## thumbnails
![copyparty-thumbs-fs8](https://user-images.githubusercontent.com/241032/120070302-10836b00-c08a-11eb-8eb4-82004a34c342.png)
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how destructive your users are
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
images named `folder.jpg` and `folder.png` become the thumbnail of the folder they're in
images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
## zip downloads
@@ -205,9 +300,10 @@ the `zip` link next to folders can produce various types of zip/tar files using
| `zip_crc` | `?zip=crc` | cp437 with crc32 computed early for truly ancient software |
* hidden files (dotfiles) are excluded unless `-ed`
* the up2k.db is always excluded
* `up2k.db` and `dir.txt` are always excluded
* `zip_crc` will take longer to download since the server has to read each file twice
* please let me know if you find a program old enough to actually need this
* this is only to support MS-DOS PKZIP v2.04g (october 1993) and older
* how are you accessing copyparty actually
you can also zip a selection of files or folders by clicking them in the browser, that brings up a selection editor and zip button in the bottom right
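since these are plain URL parameters, the same downloads work from the command line too; a rough example (host, port and folder are placeholders, `?zip=crc` is the variant from the table above):

```sh
# grab the /music folder as a cp437+crc32 zip, same as clicking its zip_crc link
# (if the volume needs a login, the bundled clients send the password as a cppwd cookie)
curl -o music.zip "http://127.0.0.1:3923/music/?zip=crc"
```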
@@ -222,9 +318,11 @@ two upload methods are available in the html client:
up2k has several advantages:
* you can drop folders into the browser (files are added recursively)
* files are processed in chunks, and each chunk is checksummed
* uploads resume if they are interrupted (for example by a reboot)
* uploads autoresume if they are interrupted by network issues
* uploads resume if you reboot your browser or pc, just upload the same files again
* server detects any corruption; the client reuploads affected chunks
* the client doesn't upload anything that already exists on the server
* much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections
* the last-modified timestamp of the file is preserved
see [up2k](#up2k) for details on how it works
@@ -257,11 +355,18 @@ in the `[🚀 up2k]` tab, after toggling the `[🔎]` switch green, any files/fo
files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much
adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files
adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files (or just refresh the page)
note that since up2k has to read the file twice, `[🎈 bup]` can be up to 2x faster in extreme cases (if your internet connection is faster than the read-speed of your HDD)
up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well thanks to tls also functioning as an integrity check
up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well (thanks to tls also functioning as an integrity check)
## file manager
if you have the required permissions, you can cut/paste, rename, and delete files/folders
you can move files across browser tabs (cut in one tab, paste in another)
## markdown viewer
@@ -275,6 +380,8 @@ up2k has saved a few uploads from becoming corrupted in-transfer already; caught
* you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`
* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
# searching
@@ -297,11 +404,11 @@ searching relies on two databases, the up2k filetree (`-e2d`) and the metadata t
through arguments:
* `-e2d` enables file indexing on upload
* `-e2ds` scans writable folders on startup
* `-e2ds` scans writable folders for new files on startup
* `-e2dsa` scans all mounted volumes (including readonly ones)
* `-e2t` enables metadata indexing on upload
* `-e2ts` scans for tags in all files that don't have tags yet
* `-e2tsr` deletes all existing tags, so a full reindex
* `-e2tsr` deletes all existing tags, does a full reindex
the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
* `-v ~/music::r:ce2dsa:ce2tsr` does a full reindex of everything on startup
@@ -309,11 +416,11 @@ the same arguments can be set as volume flags, in addition to `d2d` and `d2t` fo
* `-v ~/music::r:cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`
note:
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
you can choose to only index filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash` or the volume-flag `cdhash`, this has the following consequences:
* initial indexing is way faster, especially when the volume is on a networked disk
* initial indexing is way faster, especially when the volume is on a network disk
* makes it impossible to [file-search](#file-search)
* if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected
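a combined sketch (paths are placeholders) using the arguments and volflags described above:

```sh
# index filenames and tags in /mnt/music on startup,
# but skip content-hashing on the big nas volume (cdhash volflag)
# so it only gets name/path/size/date search, no file-search or dedup
python3 copyparty-sfx.py \
  -e2dsa -e2ts \
  -v /mnt/music::r \
  -v /mnt/nas::r:cdhash
```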
@@ -346,17 +453,17 @@ tags that start with a `.` such as `.bpm` and `.dur`(ation) indicate numeric val
see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)
`--no-mutagen` disables mutagen and uses ffprobe instead, which...
* is about 20x slower than mutagen
* catches a few tags that mutagen doesn't
`--no-mutagen` disables Mutagen and uses FFprobe instead, which...
* is about 20x slower than Mutagen
* catches a few tags that Mutagen doesn't
* melodic key, video resolution, framerate, pixfmt
* avoids pulling any GPL code into copyparty
* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve
* more importantly runs FFprobe on incoming files which is bad if your FFmpeg has a cve
## file parser plugins
copyparty can invoke external programs to collect additional metadata for files using `mtp` (as argument or volume flag), there is a default timeout of 30sec
copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volume flag), there is a default timeout of 30sec
* `-mtp .bpm=~/bin/audio-bpm.py` will execute `~/bin/audio-bpm.py` with the audio file as argument 1 to provide the `.bpm` tag, if that does not exist in the audio metadata
* `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
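for illustration, a minimal parser could look like this (hypothetical script, not one of the bundled ones; like the parsers in `bin/mtag/` it gets the filepath as argument 1 and prints the tag value to stdout):

```sh
#!/bin/sh
# hypothetical mtp parser: reports the file extension as a tag value
printf '%s\n' "${1##*.}"
```

which could then be wired up with something like `-mtp ext=~/bin/print-ext.sh` (tag name and script path made up for the example)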
@@ -388,13 +495,15 @@ copyparty can invoke external programs to collect additional metadata for files
| send message | yep | yep | yep | yep | yep | yep | yep | yep |
| set sort order | - | yep | yep | yep | yep | yep | yep | yep |
| zip selection | - | yep | yep | yep | yep | yep | yep | yep |
| directory tree | - | - | `*1` | yep | yep | yep | yep | yep |
| navpane | - | - | `*1` | yep | yep | yep | yep | yep |
| up2k | - | - | yep | yep | yep | yep | yep | yep |
| icons work | - | - | yep | yep | yep | yep | yep | yep |
| markdown editor | - | - | yep | yep | yep | yep | yep | yep |
| markdown viewer | - | - | yep | yep | yep | yep | yep | yep |
| play mp3/m4a | - | yep | yep | yep | yep | yep | yep | yep |
| play ogg/opus | - | - | - | - | yep | yep | `*2` | yep |
| thumbnail view | - | - | - | - | yep | yep | yep | yep |
| image viewer | - | - | - | - | yep | yep | yep | yep |
| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
* internet explorer 6 to 8 behave the same
* firefox 52 and chrome 49 are the last winxp versions
@@ -412,7 +521,7 @@ quick summary of more eccentric web-browsers trying to view a directory index:
| **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
| **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
| **ie4** and **netscape** 4.0 | can browse (text is yellow on white), upload with `?b=u` |
| **SerenityOS** (22d13d8) | hits a page fault, works with `?b=u`, file input not-impl, url params are multiplying |
| **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |
# client examples
@@ -437,7 +546,7 @@ quick summary of more eccentric web-browsers trying to view a directory index:
copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads:
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
b512 <movie.mkv
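for example (hedged; host, port and target folder are placeholders, and the folder must allow writes):

```sh
# upload with a plain HTTP PUT, then compare the base64 hash in the
# response against the local `b512 <movie.mkv` output above
curl -T movie.mkv http://127.0.0.1:3923/incoming/
```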
@@ -457,6 +566,23 @@ quick outline of the up2k protocol, see [uploading](#uploading) for the web-clie
* client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload
# performance
defaults are good for most cases, don't mind the `cannot efficiently use multiple CPU cores` message, it's very unlikely to be a problem
below are some tweaks roughly ordered by usefulness:
* `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
* `--http-only` or `--https-only` (unless you want to support both protocols) will reduce the delay before a new connection is established
* `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
* `--no-hash` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
* `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example:
* huge amount of short-lived connections
* really heavy traffic (downloads/uploads)
...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u
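a rough example combining the tweaks above (paths and values are placeholders; benchmark before keeping `-j`):

```sh
# quiet logging to a file, http only, db/thumbs on an ssd,
# filename-only indexing, and multiprocessing enabled (-j0)
python3 copyparty-sfx.py \
  -q -lo cpp.log \
  --http-only \
  --hist /mnt/ssd/copyparty-hist \
  --no-hash \
  -j0 \
  -v /mnt/nas::r
```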
# dependencies
* `jinja2` (is built into the SFX)
@@ -466,18 +592,18 @@ quick outline of the up2k protocol, see [uploading](#uploading) for the web-clie
enable music tags:
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
* or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
* or `ffprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
enable image thumbnails:
enable thumbnails of images:
* `Pillow` (requires py2.7 or py3.5+)
enable video thumbnails:
enable thumbnails of videos:
* `ffmpeg` and `ffprobe` somewhere in `$PATH`
enable reading HEIF pictures:
enable thumbnails of HEIF pictures:
* `pyheif-pillow-opener` (requires Linux or a C compiler)
enable reading AVIF pictures:
enable thumbnails of AVIF pictures:
* `pillow-avif-plugin`
@@ -491,7 +617,7 @@ python -m pip install --user -U jinja2 mutagen Pillow
some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag)
these are standalone programs and will never be imported / evaluated by copyparty
these are standalone programs and will never be imported / evaluated by copyparty, and must be enabled through `-mtp` configs
# sfx
@@ -507,10 +633,10 @@ pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `
## sfx repack
if you don't need all the features you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except for either msys2 or WSL if you're on windows)
* `724K` original size as of v0.4.0
* `256K` after `./scripts/make-sfx.sh re no-ogv`
* `164K` after `./scripts/make-sfx.sh re no-ogv no-cm`
if you don't need all the features, you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except if you're on windows then you need msys2 or WSL)
* `525k` size of original sfx.py as of v0.11.30
* `315k` after `./scripts/make-sfx.sh re no-ogv`
* `223k` after `./scripts/make-sfx.sh re no-ogv no-cm`
the features you can opt to drop are
* `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files
@@ -532,18 +658,45 @@ echo $?
after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux
# dev env setup
# building
## dev env setup
mostly optional; if you need a working env for vscode or similar
```sh
python3 -m venv .venv
. .venv/bin/activate
pip install jinja2 # mandatory deps
pip install Pillow # thumbnail deps
pip install jinja2 # mandatory
pip install mutagen # audio metadata
pip install Pillow pyheif-pillow-opener pillow-avif-plugin # thumbnails
pip install black bandit pylint flake8 # vscode tooling
```
# how to release
## just the sfx
unless you need to modify something in the web-dependencies, it's faster to grab those from a previous release:
```sh
rm -rf copyparty/web/deps
curl -L https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py >x.py
python3 x.py -h
rm x.py
mv /tmp/pe-copyparty/copyparty/web/deps/ copyparty/web/deps/
```
then build the sfx using any of the following examples:
```sh
./scripts/make-sfx.sh # both python and sh editions
./scripts/make-sfx.sh no-sh gz # just python with gzip
```
## complete release
also builds the sfx so disregard the sfx section above
in the `scripts` folder:
@@ -558,14 +711,18 @@ in the `scripts` folder:
roughly sorted by priority
* hls framework for Someone Else to drop code into :^)
* readme.md as epilogue
* single sha512 across all up2k chunks? maybe
## discarded ideas
* reduce up2k roundtrips
* start from a chunk index and just go
* terminate client on bad data
discarded ideas
* not worth the effort, just throw enough connections at it
* single sha512 across all up2k chunks?
* crypto.subtle cannot into streaming, would have to use hashwasm, expensive
* separate sqlite table per tag
* performance fixed by skipping some indexes (`+mt.k`)
* audio fingerprinting
@@ -580,3 +737,6 @@ discarded ideas
* nah
* look into android thumbnail cache file format
* absolutely not
* indexedDB for hashes, cfg enable/clear/sz, 2gb avail, ~9k for 1g, ~4k for 100m, 500k items before autoeviction
* blank hashlist when up-ok to skip handshake
* too many confusing side-effects


@@ -48,15 +48,16 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
# [`dbtool.py`](dbtool.py)
upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty recommends to wipe the DB and reindex because it now collects additional metadata during analysis, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty is incompatible with the old DB and automatically rebuilds the DB from scratch, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
for that example (upgrading to v0.11.0), first move the old db aside, launch copyparty, let it rebuild the db until the point where it starts running mtp (colored messages as it adds the mtp tags), then CTRL-C and patch in the old mtp tags from the old db instead
for that example (upgrading to v0.11.20), first launch the new version of copyparty like usual, let it make a backup of the old db and rebuild the new db until the point where it starts running mtp (colored messages as it adds the mtp tags), that's when you hit CTRL-C and patch in the old mtp tags from the old db instead
so assuming you have `-mtp` parsers to provide the tags `key` and `.bpm`:
```
~/bin/dbtool.py -ls up2k.db
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -cmp
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy key
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy .bpm -vac
cd /mnt/nas/music/.hist
~/src/copyparty/bin/dbtool.py -ls up2k.db
~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -cmp
~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy key
~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy .bpm -vac
```


@@ -345,7 +345,7 @@ class Gateway(object):
except:
pass
def sendreq(self, *args, headers={}, **kwargs):
def sendreq(self, meth, path, headers, **kwargs):
if self.password:
headers["Cookie"] = "=".join(["cppwd", self.password])
@@ -354,21 +354,21 @@ class Gateway(object):
if c.rx_path:
raise Exception()
c.request(*list(args), headers=headers, **kwargs)
c.request(meth, path, headers=headers, **kwargs)
c.rx = c.getresponse()
return c
except:
tid = threading.current_thread().ident
dbg(
"\033[1;37;44mbad conn {:x}\n {}\n {}\033[0m".format(
tid, " ".join(str(x) for x in args), c.rx_path if c else "(null)"
"\033[1;37;44mbad conn {:x}\n {} {}\n {}\033[0m".format(
tid, meth, path, c.rx_path if c else "(null)"
)
)
self.closeconn(c)
c = self.getconn()
try:
c.request(*list(args), headers=headers, **kwargs)
c.request(meth, path, headers=headers, **kwargs)
c.rx = c.getresponse()
return c
except:
@@ -386,7 +386,7 @@ class Gateway(object):
path = dewin(path)
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
c = self.sendreq("GET", web_path)
c = self.sendreq("GET", web_path, {})
if c.rx.status != 200:
self.closeconn(c)
log(
@@ -440,7 +440,7 @@ class Gateway(object):
)
)
c = self.sendreq("GET", web_path, headers={"Range": hdr_range})
c = self.sendreq("GET", web_path, {"Range": hdr_range})
if c.rx.status != http.client.PARTIAL_CONTENT:
self.closeconn(c)
raise Exception(


@@ -54,10 +54,13 @@ MACOS = platform.system() == "Darwin"
info = log = dbg = None
print("{} v{} @ {}".format(
platform.python_implementation(),
".".join([str(x) for x in sys.version_info]),
sys.executable))
print(
"{} v{} @ {}".format(
platform.python_implementation(),
".".join([str(x) for x in sys.version_info]),
sys.executable,
)
)
try:
@@ -299,14 +302,14 @@ class Gateway(object):
except:
pass
def sendreq(self, *args, headers={}, **kwargs):
def sendreq(self, meth, path, headers, **kwargs):
tid = get_tid()
if self.password:
headers["Cookie"] = "=".join(["cppwd", self.password])
try:
c = self.getconn(tid)
c.request(*list(args), headers=headers, **kwargs)
c.request(meth, path, headers=headers, **kwargs)
return c.getresponse()
except:
dbg("bad conn")
@@ -314,7 +317,7 @@ class Gateway(object):
self.closeconn(tid)
try:
c = self.getconn(tid)
c.request(*list(args), headers=headers, **kwargs)
c.request(meth, path, headers=headers, **kwargs)
return c.getresponse()
except:
info("http connection failed:\n" + traceback.format_exc())
@@ -331,7 +334,7 @@ class Gateway(object):
path = dewin(path)
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots&ls"
r = self.sendreq("GET", web_path)
r = self.sendreq("GET", web_path, {})
if r.status != 200:
self.closeconn()
log(
@@ -368,7 +371,7 @@ class Gateway(object):
)
)
r = self.sendreq("GET", web_path, headers={"Range": hdr_range})
r = self.sendreq("GET", web_path, {"Range": hdr_range})
if r.status != http.client.PARTIAL_CONTENT:
self.closeconn()
raise Exception(


@@ -2,10 +2,13 @@
import os
import sys
import time
import shutil
import sqlite3
import argparse
DB_VER = 3
DB_VER1 = 3
DB_VER2 = 4
def die(msg):
@@ -45,18 +48,21 @@ def compare(n1, d1, n2, d2, verbose):
nt = next(d1.execute("select count(w) from up"))[0]
n = 0
miss = 0
for w, rd, fn in d1.execute("select w, rd, fn from up"):
for w1, rd, fn in d1.execute("select w, rd, fn from up"):
n += 1
if n % 25_000 == 0:
m = f"\033[36mchecked {n:,} of {nt:,} files in {n1} against {n2}\033[0m"
print(m)
q = "select w from up where substr(w,1,16) = ?"
hit = d2.execute(q, (w[:16],)).fetchone()
if rd.split("/", 1)[0] == ".hist":
continue
q = "select w from up where rd = ? and fn = ?"
hit = d2.execute(q, (rd, fn)).fetchone()
if not hit:
miss += 1
if verbose:
print(f"file in {n1} missing in {n2}: [{w}] {rd}/{fn}")
print(f"file in {n1} missing in {n2}: [{w1}] {rd}/{fn}")
print(f" {miss} files in {n1} missing in {n2}\n")
@@ -64,15 +70,30 @@ def compare(n1, d1, n2, d2, verbose):
n = 0
miss = {}
nmiss = 0
for w, k, v in d1.execute("select * from mt"):
for w1, k, v in d1.execute("select * from mt"):
n += 1
if n % 100_000 == 0:
m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m"
print(m)
v2 = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
if v2:
v2 = v2[0]
q = "select rd, fn from up where substr(w,1,16) = ?"
rd, fn = d1.execute(q, (w1,)).fetchone()
if rd.split("/", 1)[0] == ".hist":
continue
q = "select substr(w,1,16) from up where rd = ? and fn = ?"
w2 = d2.execute(q, (rd, fn)).fetchone()
if w2:
w2 = w2[0]
v2 = None
if w2:
v2 = d2.execute(
"select v from mt where w = ? and +k = ?", (w2, k)
).fetchone()
if v2:
v2 = v2[0]
# if v != v2 and v2 and k in [".bpm", "key"] and n2 == "src":
# print(f"{w} [{rd}/{fn}] {k} = [{v}] / [{v2}]")
@@ -99,9 +120,7 @@ def compare(n1, d1, n2, d2, verbose):
miss[k] = 1
if verbose:
q = "select rd, fn from up where substr(w,1,16) = ?"
rd, fn = d1.execute(q, (w,)).fetchone()
print(f"missing in {n2}: [{w}] [{rd}/{fn}] {k} = {v}")
print(f"missing in {n2}: [{w1}] [{rd}/{fn}] {k} = {v}")
for k, v in sorted(miss.items()):
if v:
@@ -114,24 +133,35 @@ def copy_mtp(d1, d2, tag, rm):
nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0]
n = 0
ndone = 0
for w, k, v in d1.execute("select * from mt where k = ?", (tag,)):
for w1, k, v in d1.execute("select * from mt where k = ?", (tag,)):
n += 1
if n % 25_000 == 0:
m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ndone} copied\033[0m"
print(m)
hit = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
q = "select rd, fn from up where substr(w,1,16) = ?"
rd, fn = d1.execute(q, (w1,)).fetchone()
if rd.split("/", 1)[0] == ".hist":
continue
q = "select substr(w,1,16) from up where rd = ? and fn = ?"
w2 = d2.execute(q, (rd, fn)).fetchone()
if not w2:
continue
w2 = w2[0]
hit = d2.execute("select v from mt where w = ? and +k = ?", (w2, k)).fetchone()
if hit:
hit = hit[0]
if hit != v:
ndone += 1
if hit is not None:
d2.execute("delete from mt where w = ? and +k = ?", (w, k))
d2.execute("delete from mt where w = ? and +k = ?", (w2, k))
d2.execute("insert into mt values (?,?,?)", (w, k, v))
d2.execute("insert into mt values (?,?,?)", (w2, k, v))
if rm:
d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w,))
d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w2,))
d2.commit()
print(f"copied {ndone} {tag} tags over")
@@ -140,7 +170,7 @@ def copy_mtp(d1, d2, tag, rm):
def main():
os.system("")
print()
ap = argparse.ArgumentParser()
ap.add_argument("db", help="database to work on")
ap.add_argument("-src", metavar="DB", type=str, help="database to copy from")
@@ -168,6 +198,23 @@ def main():
db = sqlite3.connect(ar.db)
ds = sqlite3.connect(ar.src) if ar.src else None
# revert journals
for d, p in [[db, ar.db], [ds, ar.src]]:
if not d:
continue
pj = "{}-journal".format(p)
if not os.path.exists(pj):
continue
d.execute("create table foo (bar int)")
d.execute("drop table foo")
if ar.copy:
db.close()
shutil.copy2(ar.db, "{}.bak.dbtool.{:x}".format(ar.db, int(time.time())))
db = sqlite3.connect(ar.db)
for d, n in [[ds, "src"], [db, "dst"]]:
if not d:
continue
@@ -176,8 +223,8 @@ def main():
if ver == "corrupt":
die("{} database appears to be corrupt, sorry")
if ver != DB_VER:
m = f"{n} db is version {ver}, this tool only supports version {DB_VER}, please upgrade it with copyparty first"
if ver < DB_VER1 or ver > DB_VER2:
m = f"{n} db is version {ver}, this tool only supports versions between {DB_VER1} and {DB_VER2}, please upgrade it with copyparty first"
die(m)
if ar.ls:


@@ -60,7 +60,7 @@ def main():
try:
det(tf)
except:
pass
pass # mute
finally:
os.unlink(tf)

bin/mtag/audio-key-slicing.py (new executable file): 123 added lines

@@ -0,0 +1,123 @@
#!/usr/bin/env python
import re
import os
import sys
import tempfile
import subprocess as sp
import keyfinder
from copyparty.util import fsenc
"""
dep: github/mixxxdj/libkeyfinder
dep: pypi/keyfinder
dep: ffmpeg
note: this is a janky edition of the regular audio-key.py,
slicing the files at 20sec intervals and keeping 5sec from each,
surprisingly accurate but still garbage (446 ok, 69 bad, 13% miss)
it is fast tho
"""
def get_duration():
# TODO provide ffprobe tags to mtp as json
# fmt: off
dur = sp.check_output([
"ffprobe",
"-hide_banner",
"-v", "fatal",
"-show_streams",
"-show_format",
fsenc(sys.argv[1])
])
# fmt: on
dur = dur.decode("ascii", "replace").split("\n")
dur = [x.split("=")[1] for x in dur if x.startswith("duration=")]
dur = [float(x) for x in dur if re.match(r"^[0-9\.,]+$", x)]
return list(sorted(dur))[-1] if dur else None
def get_segs(dur):
# keep first 5s of each 20s,
# keep entire last segment
ofs = 0
segs = []
while True:
seg = [ofs, 5]
segs.append(seg)
if dur - ofs < 20:
seg[-1] = int(dur - seg[0])
break
ofs += 20
return segs
def slice(tf):
dur = get_duration()
dur = min(dur, 600) # max 10min
segs = get_segs(dur)
# fmt: off
cmd = [
"ffmpeg",
"-nostdin",
"-hide_banner",
"-v", "fatal",
"-y"
]
for seg in segs:
cmd.extend([
"-ss", str(seg[0]),
"-i", fsenc(sys.argv[1])
])
filt = ""
for n, seg in enumerate(segs):
filt += "[{}:a:0]atrim=duration={}[a{}]; ".format(n, seg[1], n)
prev = "a0"
for n in range(1, len(segs)):
nxt = "b{}".format(n)
filt += "[{}][a{}]acrossfade=d=0.5[{}]; ".format(prev, n, nxt)
prev = nxt
cmd.extend([
"-filter_complex", filt[:-2],
"-map", "[{}]".format(nxt),
"-sample_fmt", "s16",
tf
])
# fmt: on
# print(cmd)
sp.check_call(cmd)
def det(tf):
slice(tf)
print(keyfinder.key(tf).camelot())
def main():
with tempfile.NamedTemporaryFile(suffix=".flac", delete=False) as f:
f.write(b"h")
tf = f.name
try:
det(tf)
finally:
os.unlink(tf)
pass
if __name__ == "__main__":
main()


@@ -1,18 +1,54 @@
#!/usr/bin/env python
import os
import sys
import tempfile
import subprocess as sp
import keyfinder
from copyparty.util import fsenc
"""
dep: github/mixxxdj/libkeyfinder
dep: pypi/keyfinder
dep: ffmpeg
note: cannot fsenc
"""
try:
print(keyfinder.key(sys.argv[1]).camelot())
except:
pass
# tried trimming the first/last 5th, bad idea,
# misdetects 9a law field (Sphere Caliber) as 10b,
# obvious when mixing 9a ghostly parapara ship
def det(tf):
# fmt: off
sp.check_call([
"ffmpeg",
"-nostdin",
"-hide_banner",
"-v", "fatal",
"-y", "-i", fsenc(sys.argv[1]),
"-t", "300",
"-sample_fmt", "s16",
tf
])
# fmt: on
print(keyfinder.key(tf).camelot())
def main():
with tempfile.NamedTemporaryFile(suffix=".flac", delete=False) as f:
f.write(b"h")
tf = f.name
try:
det(tf)
except:
pass # mute
finally:
os.unlink(tf)
if __name__ == "__main__":
main()


@@ -1,7 +1,15 @@
# when running copyparty behind a reverse-proxy,
# make sure that copyparty allows at least as many clients as the proxy does,
# so run copyparty with -nc 512 if your nginx has the default limits
# (worker_processes 1, worker_connections 512)
# when running copyparty behind a reverse proxy,
# the following arguments are recommended:
#
# -nc 512 important, see next paragraph
# --http-only lower latency on initial connection
# -i 127.0.0.1 only accept connections from nginx
#
# -nc must match or exceed the webserver's max number of concurrent clients;
# nginx default is 512 (worker_processes 1, worker_connections 512)
#
# you may also consider adding -j0 for CPU-intensive configurations
# (not that i can really think of any good examples)
upstream cpp {
server 127.0.0.1:3923;

View File

@@ -7,11 +7,19 @@
# you may want to:
# change '/usr/bin/python' to another interpreter
# change '/mnt::a' to another location or permission-set
#
# with `Type=notify`, copyparty will signal systemd when it is ready to
# accept connections; correctly delaying units depending on copyparty.
# But note that journalctl will get the timestamps wrong due to
# python disabling line-buffering, so messages are out-of-order:
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
[Unit]
Description=copyparty file server
[Service]
Type=notify
SyslogIdentifier=copyparty
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'

View File

@@ -9,6 +9,9 @@ import os
PY2 = sys.version_info[0] == 2
if PY2:
sys.dont_write_bytecode = True
unicode = unicode
else:
unicode = str
WINDOWS = False
if platform.system() == "Windows":

View File

@@ -20,10 +20,10 @@ import threading
import traceback
from textwrap import dedent
from .__init__ import E, WINDOWS, VT100, PY2
from .__init__ import E, WINDOWS, VT100, PY2, unicode
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
from .svchub import SvcHub
from .util import py_desc, align_tab, IMPLICATIONS, alltrace
from .util import py_desc, align_tab, IMPLICATIONS
HAVE_SSL = True
try:
@@ -31,6 +31,8 @@ try:
except:
HAVE_SSL = False
printed = ""
class RiceFormatter(argparse.HelpFormatter):
def _get_help_string(self, action):
@@ -61,8 +63,15 @@ class Dodge11874(RiceFormatter):
super(Dodge11874, self).__init__(*args, **kwargs)
def lprint(*a, **ka):
global printed
printed += " ".join(unicode(x) for x in a) + ka.get("end", "\n")
print(*a, **ka)
def warn(msg):
print("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
def ensure_locale():
@@ -73,7 +82,7 @@ def ensure_locale():
]:
try:
locale.setlocale(locale.LC_ALL, x)
print("Locale:", x)
lprint("Locale:", x)
break
except:
continue
@@ -94,7 +103,7 @@ def ensure_cert():
try:
if filecmp.cmp(cert_cfg, cert_insec):
print(
lprint(
"\033[33m using default TLS certificate; https will be insecure."
+ "\033[36m\n certificate location: {}\033[0m\n".format(cert_cfg)
)
@@ -123,7 +132,7 @@ def configure_ssl_ver(al):
if "help" in sslver:
avail = [terse_sslver(x[6:]) for x in flags]
avail = " ".join(sorted(avail) + ["all"])
print("\navailable ssl/tls versions:\n " + avail)
lprint("\navailable ssl/tls versions:\n " + avail)
sys.exit(0)
al.ssl_flags_en = 0
@@ -143,7 +152,7 @@ def configure_ssl_ver(al):
for k in ["ssl_flags_en", "ssl_flags_de"]:
num = getattr(al, k)
print("{}: {:8x} ({})".format(k, num, num))
lprint("{}: {:8x} ({})".format(k, num, num))
# think i need that beer now
@@ -160,13 +169,13 @@ def configure_ssl_ciphers(al):
try:
ctx.set_ciphers(al.ciphers)
except:
print("\n\033[1;31mfailed to set ciphers\033[0m\n")
lprint("\n\033[1;31mfailed to set ciphers\033[0m\n")
if not hasattr(ctx, "get_ciphers"):
print("cannot read cipher list: openssl or python too old")
lprint("cannot read cipher list: openssl or python too old")
else:
ciphers = [x["description"] for x in ctx.get_ciphers()]
print("\n ".join(["\nenabled ciphers:"] + align_tab(ciphers) + [""]))
lprint("\n ".join(["\nenabled ciphers:"] + align_tab(ciphers) + [""]))
if is_help:
sys.exit(0)
@@ -182,16 +191,6 @@ def sighandler(sig=None, frame=None):
print("\n".join(msg))
def stackmon(fp, ival):
ctr = 0
while True:
ctr += 1
time.sleep(ival)
st = "{}, {}\n{}".format(ctr, time.time(), alltrace())
with open(fp, "wb") as f:
f.write(st.encode("utf-8", "replace"))
def run_argparse(argv, formatter):
ap = argparse.ArgumentParser(
formatter_class=formatter,
@@ -200,24 +199,30 @@ def run_argparse(argv, formatter):
epilog=dedent(
"""
-a takes username:password,
-v takes src:dst:permset:permset:cflag:cflag:...
where "permset" is accesslevel followed by username (no separator)
-v takes src:dst:perm1:perm2:permN:cflag1:cflag2:cflagN:...
where "perm" is "accesslevels,username1,username2,..."
and "cflag" is config flags to set on this volume
list of accesslevels:
"r" (read): list folder contents, download files
"w" (write): upload files; need "r" to see the uploads
"m" (move): move files and folders; need "w" at destination
"d" (delete): permanently delete files and folders
list of cflags:
"cnodupe" rejects existing files (instead of symlinking them)
"ce2d" sets -e2d (all -e2* args can be set using ce2* cflags)
"cd2t" disables metadata collection, overrides -e2t*
"cd2d" disables all database stuff, overrides -e2*
"c,nodupe" rejects existing files (instead of symlinking them)
"c,e2d" sets -e2d (all -e2* args can be set using ce2* cflags)
"c,d2t" disables metadata collection, overrides -e2t*
"c,d2d" disables all database stuff, overrides -e2*
example:\033[35m
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
-a ed:hunter2 -v .::r:rw,ed -v ../inc:dump:w:rw,ed:c,nodupe \033[36m
mount current directory at "/" with
* r (read-only) for everyone
* a (read+write) for ed
* rw (read+write) for ed
mount ../inc at "/dump" with
* w (write-only) for everyone
* a (read+write) for ed
* rw (read+write) for ed
* reject duplicate files \033[0m
if no accounts or volumes are configured,
@@ -249,46 +254,53 @@ def run_argparse(argv, formatter):
),
)
# fmt: off
ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account, USER:PASS; example [ed:wark")
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
ap.add_argument("-ed", action="store_true", help="enable ?dots")
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
ap.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
u = unicode
ap2 = ap.add_argument_group('general options')
ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file")
ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark")
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
ap2.add_argument("-ed", action="store_true", help="enable ?dots")
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
ap2 = ap.add_argument_group('network options')
ap2.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
ap2.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")
ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
ap2.add_argument("--ssl-ver", metavar="LIST", type=str, help="set allowed ssl/tls versions; [help] shows available versions; default is what your python version considers safe")
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ssl/tls ciphers; [help] shows available ciphers")
ap2.add_argument("--ssl-ver", metavar="LIST", type=u, help="set allowed ssl/tls versions; [help] shows available versions; default is what your python version considers safe")
ap2.add_argument("--ciphers", metavar="LIST", type=u, help="set allowed ssl/tls ciphers; [help] shows available ciphers")
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets")
ap2 = ap.add_argument_group('opt-outs')
ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
ap2.add_argument("-nih", action="store_true", help="no info hostname")
ap2.add_argument("-nid", action="store_true", help="no info disk-usage")
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
ap2 = ap.add_argument_group('safety options')
ap2.add_argument("--ls", metavar="U[,V[,F]]", help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
ap2.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt")
ap2 = ap.add_argument_group('logging options')
ap2.add_argument("-q", action="store_true", help="quiet")
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
ap2.add_argument("--ihead", metavar="HEADER", action='append', help="dump incoming header")
ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
ap2.add_argument("--log-htp", action="store_true", help="print http-server threadpool scaling")
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
ap2 = ap.add_argument_group('admin panel options')
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
@@ -303,8 +315,9 @@ def run_argparse(argv, formatter):
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs")
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for")
ap2 = ap.add_argument_group('database options')
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
@@ -313,24 +326,29 @@ def run_argparse(argv, formatter):
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
ap2.add_argument("--hist", metavar="PATH", type=str, help="where to store volume state")
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume state")
ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead")
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead")
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader")
ap2.add_argument("--re-int", metavar="SEC", type=int, default=30, help="disk rescan check interval")
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval (0=off)")
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,ac,vc,res,.fps")
ap2.add_argument("-mtp", metavar="M=[f,]bin", action="append", type=str, help="read tag M using bin")
ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
ap2 = ap.add_argument_group('appearance options')
ap2.add_argument("--css-browser", metavar="L", help="URL to additional CSS to include")
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
ap2 = ap.add_argument_group('debug options')
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing")
ap2.add_argument("--stackmon", metavar="P,S", help="write stacktrace to Path every S second")
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second")
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")
return ap.parse_args(args=argv[1:])
# fmt: on
@@ -347,7 +365,7 @@ def main(argv=None):
desc = py_desc().replace("[", "\033[1;30m[")
f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0m\n'
print(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc))
lprint(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc))
ensure_locale()
if HAVE_SSL:
@@ -361,7 +379,7 @@ def main(argv=None):
continue
msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
print(msg.format(dk, nk))
lprint(msg.format(dk, nk))
argv[idx] = nk
time.sleep(2)
@@ -370,15 +388,35 @@ def main(argv=None):
except AssertionError:
al = run_argparse(argv, Dodge11874)
if al.stackmon:
fp, f = al.stackmon.rsplit(",", 1)
f = int(f)
t = threading.Thread(
target=stackmon,
args=(fp, f),
)
t.daemon = True
t.start()
nstrs = []
anymod = False
for ostr in al.v or []:
mod = False
oa = ostr.split(":")
na = oa[:2]
for opt in oa[2:]:
if re.match("c[^,]", opt):
mod = True
na.append("c," + opt[2:])
elif re.sub("^[rwmd]*", "", opt) and "," not in opt:
mod = True
perm = opt[0]
if perm == "a":
perm = "rw"
na.append(perm + "," + opt[1:])
else:
na.append(opt)
nstr = ":".join(na)
nstrs.append(nstr if mod else ostr)
if mod:
msg = "\033[1;31mWARNING:\033[0;1m\n -v {} \033[0;33mwas replaced with\033[0;1m\n -v {} \n\033[0m"
lprint(msg.format(ostr, nstr))
anymod = True
if anymod:
al.v = nstrs
time.sleep(2)
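# worked example (comment only) of the argv rewrite above, matching the new -v epilog:
#   -v ../inc:dump:w:aed:cnodupe  becomes  -v ../inc:dump:w:rw,ed:c,nodupe
# ("aed" = accesslevel a for user ed, widened to "rw,ed"; "cnodupe" -> "c,nodupe")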
# propagate implications
for k1, k2 in IMPLICATIONS:
@@ -410,12 +448,12 @@ def main(argv=None):
+ " (if you crash with codec errors then that is why)"
)
if WINDOWS and sys.version_info < (3, 6):
if sys.version_info < (3, 6):
al.no_scandir = True
# signal.signal(signal.SIGINT, sighandler)
SvcHub(al).run()
SvcHub(al, argv, printed).run()
if __name__ == "__main__":

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (0, 11, 18)
CODENAME = "the grid"
BUILD_DT = (2021, 6, 18)
VERSION = (0, 12, 0)
CODENAME = "fil\033[33med"
BUILD_DT = (2021, 7, 28)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -10,19 +10,35 @@ import hashlib
import threading
from .__init__ import WINDOWS
from .util import IMPLICATIONS, undot, Pebkac, fsdec, fsenc, statdir, nuprint
from .util import IMPLICATIONS, uncyg, undot, absreal, Pebkac, fsdec, fsenc, statdir
from .bos import bos
class AXS(object):
def __init__(self, uread=None, uwrite=None, umove=None, udel=None):
self.uread = {} if uread is None else {k: 1 for k in uread}
self.uwrite = {} if uwrite is None else {k: 1 for k in uwrite}
self.umove = {} if umove is None else {k: 1 for k in umove}
self.udel = {} if udel is None else {k: 1 for k in udel}
def __repr__(self):
return "AXS({})".format(
", ".join(
"{}={!r}".format(k, self.__dict__[k])
for k in "uread uwrite umove udel".split()
)
)
class VFS(object):
"""single level in the virtual fs"""
def __init__(self, realpath, vpath, uread=[], uwrite=[], uadm=[], flags={}):
def __init__(self, log, realpath, vpath, axs, flags):
self.log = log
self.realpath = realpath # absolute path on host filesystem
self.vpath = vpath # absolute path in the virtual filesystem
self.uread = uread # users who can read this
self.uwrite = uwrite # users who can write this
self.uadm = uadm # users who are regular admins
self.flags = flags # config switches
self.axs = axs # type: AXS
self.flags = flags # config options
self.nodes = {} # child nodes
self.histtab = None # all realpath->histpath
self.dbv = None # closest full/non-jump parent
@@ -30,15 +46,23 @@ class VFS(object):
if realpath:
self.histpath = os.path.join(realpath, ".hist") # db / thumbcache
self.all_vols = {vpath: self} # flattened recursive
self.aread = {}
self.awrite = {}
self.amove = {}
self.adel = {}
else:
self.histpath = None
self.all_vols = None
self.aread = None
self.awrite = None
self.amove = None
self.adel = None
def __repr__(self):
return "VFS({})".format(
", ".join(
"{}={!r}".format(k, self.__dict__[k])
for k in "realpath vpath uread uwrite uadm flags".split()
for k in "realpath vpath axs flags".split()
)
)
@@ -62,11 +86,10 @@ class VFS(object):
return self.nodes[name].add(src, dst)
vn = VFS(
self.log,
os.path.join(self.realpath, name) if self.realpath else None,
"{}/{}".format(self.vpath, name).lstrip("/"),
self.uread,
self.uwrite,
self.uadm,
self.axs,
self._copy_flags(name),
)
vn.dbv = self.dbv or self
@@ -79,7 +102,7 @@ class VFS(object):
# leaf does not exist; create and keep permissions blank
vp = "{}/{}".format(self.vpath, dst).lstrip("/")
vn = VFS(src, vp)
vn = VFS(self.log, src, vp, AXS(), {})
vn.dbv = self.dbv or self
self.nodes[dst] = vn
return vn
@@ -119,23 +142,32 @@ class VFS(object):
return [self, vpath]
def can_access(self, vpath, uname):
"""return [readable,writable]"""
# type: (str, str) -> tuple[bool, bool, bool, bool]
"""can Read,Write,Move,Delete"""
vn, _ = self._find(vpath)
c = vn.axs
return [
uname in vn.uread or "*" in vn.uread,
uname in vn.uwrite or "*" in vn.uwrite,
uname in c.uread or "*" in c.uread,
uname in c.uwrite or "*" in c.uwrite,
uname in c.umove or "*" in c.umove,
uname in c.udel or "*" in c.udel,
]
def get(self, vpath, uname, will_read, will_write):
# type: (str, str, bool, bool) -> tuple[VFS, str]
def get(self, vpath, uname, will_read, will_write, will_move=False, will_del=False):
# type: (str, str, bool, bool, bool, bool) -> tuple[VFS, str]
"""returns [vfsnode,fs_remainder] if user has the requested permissions"""
vn, rem = self._find(vpath)
c = vn.axs
if will_read and (uname not in vn.uread and "*" not in vn.uread):
raise Pebkac(403, "you don't have read-access for this location")
if will_write and (uname not in vn.uwrite and "*" not in vn.uwrite):
raise Pebkac(403, "you don't have write-access for this location")
for req, d, msg in [
[will_read, c.uread, "read"],
[will_write, c.uwrite, "write"],
[will_move, c.umove, "move"],
[will_del, c.udel, "delete"],
]:
if req and (uname not in d and "*" not in d):
m = "you don't have {}-access for this location"
raise Pebkac(403, m.format(msg))
return vn, rem
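# usage sketch (comment only, hypothetical call site): a handler that needs
# write+move access for user "ed" on some vpath would call
#   vn, rem = vfs.get("inc/foo", "ed", False, True, will_move=True)
# and any missing permission raises Pebkac(403) instead of returning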
@@ -148,68 +180,58 @@ class VFS(object):
vrem = "/".join([x for x in vrem if x])
return dbv, vrem
def canonical(self, rem):
def canonical(self, rem, resolve=True):
"""returns the canonical path (fully-resolved absolute fs path)"""
rp = self.realpath
if rem:
rp += "/" + rem
try:
return fsdec(os.path.realpath(fsenc(rp)))
except:
if not WINDOWS:
raise
return absreal(rp) if resolve else rp
# cpython bug introduced in 3.8, still exists in 3.9.1;
# some win7sp1 and win10:20H2 boxes cannot realpath a
# networked drive letter such as b"n:" or b"n:\\"
#
# requirements to trigger:
# * bytestring (not unicode str)
# * just the drive letter (subfolders are ok)
# * networked drive (regular disks and vmhgfs are ok)
# * on an enterprise network (idk, cannot repro with samba)
#
# hits the following exceptions in succession:
# * access denied at L601: "path = _getfinalpathname(path)"
# * "cant concat str to bytes" at L621: "return path + tail"
#
return os.path.realpath(rp)
def ls(self, rem, uname, scandir, incl_wo=False, lstat=False):
# type: (str, str, bool, bool, bool) -> tuple[str, str, dict[str, VFS]]
def ls(self, rem, uname, scandir, permsets, lstat=False):
# type: (str, str, bool, list[list[bool]], bool) -> tuple[str, str, dict[str, VFS]]
"""return user-readable [fsdir,real,virt] items at vpath"""
virt_vis = {} # nodes readable by user
abspath = self.canonical(rem)
real = list(statdir(nuprint, scandir, lstat, abspath))
real = list(statdir(self.log, scandir, lstat, abspath))
real.sort()
if not rem:
for name, vn2 in sorted(self.nodes.items()):
ok = uname in vn2.uread or "*" in vn2.uread
# no vfs nodes in the list of real inodes
real = [x for x in real if x[0] not in self.nodes]
if not ok and incl_wo:
ok = uname in vn2.uwrite or "*" in vn2.uwrite
for name, vn2 in sorted(self.nodes.items()):
ok = False
axs = vn2.axs
axs = [axs.uread, axs.uwrite, axs.umove, axs.udel]
for pset in permsets:
ok = True
for req, lst in zip(pset, axs):
if req and uname not in lst and "*" not in lst:
ok = False
if ok:
break
if ok:
virt_vis[name] = vn2
# no vfs nodes in the list of real inodes
real = [x for x in real if x[0] not in self.nodes]
return [abspath, real, virt_vis]
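# permsets illustration (comment only): each inner list is [read, write, move, delete],
# omitted positions are not required; a child volume is listed if it satisfies
# at least one of the sets, e.g.
#   [[True, True]]           -> must be readable AND writable
#   [[True], [False, True]]  -> readable OR writable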
def walk(self, rel, rem, seen, uname, dots, scandir, lstat):
def walk(self, rel, rem, seen, uname, permsets, dots, scandir, lstat):
"""
recursively yields from ./rem;
rel is a unix-style user-defined vpath (not vfs-related)
"""
fsroot, vfs_ls, vfs_virt = self.ls(
rem, uname, scandir, incl_wo=False, lstat=lstat
)
fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, permsets, lstat=lstat)
dbv, vrem = self.get_dbv(rem)
if seen and not fsroot.startswith(seen[-1]) and fsroot in seen:
print("bailing from symlink loop,\n {}\n {}".format(seen[-1], fsroot))
if (
seen
and (not fsroot.startswith(seen[-1]) or fsroot == seen[-1])
and fsroot in seen
):
m = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}/{}"
self.log("vfs.walk", m.format(seen[-1], fsroot, self.vpath, rem), 3)
return
seen = seen[:] + [fsroot]
@@ -219,7 +241,7 @@ class VFS(object):
rfiles.sort()
rdirs.sort()
yield rel, fsroot, rfiles, rdirs, vfs_virt
yield dbv, vrem, rel, fsroot, rfiles, rdirs, vfs_virt
for rdir, _ in rdirs:
if not dots and rdir.startswith("."):
@@ -227,7 +249,7 @@ class VFS(object):
wrel = (rel + "/" + rdir).lstrip("/")
wrem = (rem + "/" + rdir).lstrip("/")
for x in self.walk(wrel, wrem, seen, uname, dots, scandir, lstat):
for x in self.walk(wrel, wrem, seen, uname, permsets, dots, scandir, lstat):
yield x
for n, vfs in sorted(vfs_virt.items()):
@@ -235,16 +257,19 @@ class VFS(object):
continue
wrel = (rel + "/" + n).lstrip("/")
for x in vfs.walk(wrel, "", seen, uname, dots, scandir, lstat):
for x in vfs.walk(wrel, "", seen, uname, permsets, dots, scandir, lstat):
yield x
def zipgen(self, vrem, flt, uname, dots, scandir):
if flt:
flt = {k: True for k in flt}
for vpath, apath, files, rd, vd in self.walk(
"", vrem, [], uname, dots, scandir, False
):
f1 = "{0}.hist{0}up2k.".format(os.sep)
f2a = os.sep + "dir.txt"
f2b = "{0}.hist{0}".format(os.sep)
g = self.walk("", vrem, [], uname, [[True]], dots, scandir, False)
for _, _, vpath, apath, files, rd, vd in g:
if flt:
files = [x for x in files if x[0] in flt]
@@ -275,25 +300,15 @@ class VFS(object):
del vd[x]
# up2k filtering based on actual abspath
files = [x for x in files if "{0}.hist{0}up2k.".format(os.sep) not in x[1]]
files = [
x
for x in files
if f1 not in x[1] and (not x[1].endswith(f2a) or f2b not in x[1])
]
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
yield f
def user_tree(self, uname, readable, writable, admin):
is_readable = False
if uname in self.uread or "*" in self.uread:
readable.append(self.vpath)
is_readable = True
if uname in self.uwrite or "*" in self.uwrite:
writable.append(self.vpath)
if is_readable:
admin.append(self.vpath)
for _, vn in sorted(self.nodes.items()):
vn.user_tree(uname, readable, writable, admin)
class AuthSrv(object):
"""verifies users against given paths"""
@@ -326,7 +341,8 @@ class AuthSrv(object):
yield prev, True
def _parse_config_file(self, fd, user, mread, mwrite, madm, mflags, mount):
def _parse_config_file(self, fd, acct, daxs, mflags, mount):
# type: (any, str, dict[str, AXS], any, str) -> None
vol_src = None
vol_dst = None
self.line_ctr = 0
@@ -342,7 +358,7 @@ class AuthSrv(object):
if vol_src is None:
if ln.startswith("u "):
u, p = ln[2:].split(":", 1)
user[u] = p
acct[u] = p
else:
vol_src = ln
continue
@@ -353,50 +369,49 @@ class AuthSrv(object):
raise Exception('invalid mountpoint "{}"'.format(vol_dst))
# cfg files override arguments and previous files
vol_src = fsdec(os.path.abspath(fsenc(vol_src)))
vol_src = bos.path.abspath(vol_src)
vol_dst = vol_dst.strip("/")
mount[vol_dst] = vol_src
mread[vol_dst] = []
mwrite[vol_dst] = []
madm[vol_dst] = []
daxs[vol_dst] = AXS()
mflags[vol_dst] = {}
continue
if len(ln) > 1:
lvl, uname = ln.split(" ")
else:
try:
lvl, uname = ln.split(" ", 1)
except:
lvl = ln
uname = "*"
self._read_vol_str(
lvl,
uname,
mread[vol_dst],
mwrite[vol_dst],
madm[vol_dst],
mflags[vol_dst],
)
if lvl == "a":
m = "WARNING (config-file): permission flag 'a' is deprecated; please use 'rw' instead"
self.log(m, 1)
def _read_vol_str(self, lvl, uname, mr, mw, ma, mf):
self._read_vol_str(lvl, uname, daxs[vol_dst], mflags[vol_dst])
def _read_vol_str(self, lvl, uname, axs, flags):
# type: (str, str, AXS, any) -> None
if lvl == "c":
cval = True
if "=" in uname:
uname, cval = uname.split("=", 1)
self._read_volflag(mf, uname, cval, False)
self._read_volflag(flags, uname, cval, False)
return
if uname == "":
uname = "*"
if lvl in "ra":
mr.append(uname)
if "r" in lvl:
axs.uread[uname] = 1
if lvl in "wa":
mw.append(uname)
if "w" in lvl:
axs.uwrite[uname] = 1
if lvl == "a":
ma.append(uname)
if "m" in lvl:
axs.umove[uname] = 1
if "d" in lvl:
axs.udel[uname] = 1
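# illustration (comment only): _read_vol_str("rwd", "ed", axs, {}) marks ed as
# reader, writer and deleter of the volume; an empty username becomes "*"
# (everybody), and lvl "c" is routed to _read_volflag instead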
def _read_volflag(self, flags, name, value, is_list):
if name not in ["mtp"]:
@@ -418,63 +433,69 @@ class AuthSrv(object):
before finally building the VFS
"""
user = {} # username:password
mread = {} # mountpoint:[username]
mwrite = {} # mountpoint:[username]
madm = {} # mountpoint:[username]
acct = {} # username:password
daxs = {} # type: dict[str, AXS]
mflags = {} # mountpoint:[flag]
mount = {} # dst:src (mountpoint:realpath)
if self.args.a:
# list of username:password
for u, p in [x.split(":", 1) for x in self.args.a]:
user[u] = p
for x in self.args.a:
try:
u, p = x.split(":", 1)
acct[u] = p
except:
m = '\n invalid value "{}" for argument -a, must be username:password'
raise Exception(m.format(x))
if self.args.v:
# list of src:dst:permset:permset:...
# permset is [rwa]username or [c]flag
# permset is <rwmd>[,username][,username] or <c>,<flag>[=args]
for v_str in self.args.v:
m = self.re_vol.match(v_str)
if not m:
raise Exception("invalid -v argument: [{}]".format(v_str))
src, dst, perms = m.groups()
if WINDOWS and src.startswith("/"):
src = "{}:\\{}".format(src[1], src[3:])
if WINDOWS:
src = uncyg(src)
# print("\n".join([src, dst, perms]))
src = fsdec(os.path.abspath(fsenc(src)))
src = bos.path.abspath(src)
dst = dst.strip("/")
mount[dst] = src
mread[dst] = []
mwrite[dst] = []
madm[dst] = []
daxs[dst] = AXS()
mflags[dst] = {}
perms = perms.split(":")
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
self._read_vol_str(
lvl, uname, mread[dst], mwrite[dst], madm[dst], mflags[dst]
)
for x in perms.split(":"):
lvl, uname = x.split(",", 1) if "," in x else [x, ""]
self._read_vol_str(lvl, uname, daxs[dst], mflags[dst])
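# illustration (comment only): -v /mnt/nas/music:music:r:rw,ed:c,e2ds splits into
# src="/mnt/nas/music", dst="music" and tokens ["r", "rw,ed", "c,e2ds"];
# "r" -> lvl="r", uname="" (everybody), "rw,ed" -> lvl="rw", uname="ed",
# and "c,e2ds" is passed on as the volume flag e2ds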
if self.args.c:
for cfg_fn in self.args.c:
with open(cfg_fn, "rb") as f:
try:
self._parse_config_file(
f, user, mread, mwrite, madm, mflags, mount
)
self._parse_config_file(f, acct, daxs, mflags, mount)
except:
m = "\n\033[1;31m\nerror in config file {} on line {}:\n\033[0m"
print(m.format(cfg_fn, self.line_ctr))
self.log(m.format(cfg_fn, self.line_ctr), 1)
raise
# case-insensitive; normalize
if WINDOWS:
cased = {}
for k, v in mount.items():
cased[k] = absreal(v)
mount = cased
if not mount:
# -h says our defaults are CWD at root and read/write for everyone
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
axs = AXS(["*"], ["*"], None, None)
vfs = VFS(self.log_func, bos.path.abspath("."), "", axs, {})
elif "" not in mount:
# there's volumes but no root; make root inaccessible
vfs = VFS(None, "")
vfs = VFS(self.log_func, None, "", AXS(), {})
vfs.flags["d2d"] = True
maxdepth = 0
@@ -485,26 +506,34 @@ class AuthSrv(object):
if dst == "":
# rootfs was mapped; fully replaces the default CWD vfs
vfs = VFS(
mount[dst], dst, mread[dst], mwrite[dst], madm[dst], mflags[dst]
)
vfs = VFS(self.log_func, mount[dst], dst, daxs[dst], mflags[dst])
continue
v = vfs.add(mount[dst], dst)
v.uread = mread[dst]
v.uwrite = mwrite[dst]
v.uadm = madm[dst]
v.axs = daxs[dst]
v.flags = mflags[dst]
v.dbv = None
vfs.all_vols = {}
vfs.get_all_vols(vfs.all_vols)
for perm in "read write move del".split():
axs_key = "u" + perm
unames = ["*"] + list(acct.keys())
umap = {x: [] for x in unames}
for usr in unames:
for mp, vol in vfs.all_vols.items():
if usr in getattr(vol.axs, axs_key):
umap[usr].append(mp)
setattr(vfs, "a" + perm, umap)
all_users = {}
missing_users = {}
for d in [mread, mwrite]:
for _, ul in d.items():
for usr in ul:
if usr != "*" and usr not in user:
for axs in daxs.values():
for d in [axs.uread, axs.uwrite, axs.umove, axs.udel]:
for usr in d.keys():
all_users[usr] = 1
if usr != "*" and usr not in acct:
missing_users[usr] = 1
if missing_users:
@@ -524,16 +553,11 @@ class AuthSrv(object):
if vflag == "-":
pass
elif vflag:
if WINDOWS and vflag.startswith("/"):
vflag = "{}:\\{}".format(vflag[1], vflag[3:])
vol.histpath = vflag
vol.histpath = uncyg(vflag) if WINDOWS else vflag
elif self.args.hist:
for nch in range(len(hid)):
hpath = os.path.join(self.args.hist, hid[: nch + 1])
try:
os.makedirs(hpath)
except:
pass
bos.makedirs(hpath)
powner = os.path.join(hpath, "owner.txt")
try:
@@ -553,9 +577,9 @@ class AuthSrv(object):
vol.histpath = hpath
break
vol.histpath = os.path.realpath(vol.histpath)
vol.histpath = absreal(vol.histpath)
if vol.dbv:
if os.path.exists(os.path.join(vol.histpath, "up2k.db")):
if bos.path.exists(os.path.join(vol.histpath, "up2k.db")):
promote.append(vol)
vol.dbv = None
else:
@@ -581,7 +605,7 @@ class AuthSrv(object):
all_mte = {}
errors = False
for vol in vfs.all_vols.values():
if (self.args.e2ds and vol.uwrite) or self.args.e2dsa:
if (self.args.e2ds and vol.axs.uwrite) or self.args.e2dsa:
vol.flags["e2ds"] = True
if self.args.e2d or "e2ds" in vol.flags:
@@ -670,6 +694,27 @@ class AuthSrv(object):
vfs.bubble_flags()
m = "volumes and permissions:\n"
for v in vfs.all_vols.values():
if not self.warn_anonwrite:
break
m += '\n\033[36m"/{}" \033[33m{}\033[0m'.format(v.vpath, v.realpath)
for txt, attr in [
[" read", "uread"],
[" write", "uwrite"],
[" move", "umove"],
["delete", "udel"],
]:
u = list(sorted(getattr(v.axs, attr).keys()))
u = ", ".join("\033[35meverybody\033[0m" if x == "*" else x for x in u)
u = u if u else "\033[36m--none--\033[0m"
m += "\n| {}: {}".format(txt, u)
m += "\n"
if self.warn_anonwrite and not self.args.no_voldump:
self.log(m)
try:
v, _ = vfs.get("/", "*", False, True)
if self.warn_anonwrite and os.getcwd() == v.realpath:
@@ -681,11 +726,13 @@ class AuthSrv(object):
with self.mutex:
self.vfs = vfs
self.user = user
self.iuser = {v: k for k, v in user.items()}
self.acct = acct
self.iacct = {v: k for k, v in acct.items()}
# import pprint
# pprint.pprint({"usr": user, "rd": mread, "wr": mwrite, "mnt": mount})
self.re_pwd = None
pwds = [re.escape(x) for x in self.iacct.keys()]
if pwds:
self.re_pwd = re.compile("=(" + "|".join(pwds) + ")([]&; ]|$)")
def dbg_ls(self):
users = self.args.ls
@@ -704,12 +751,12 @@ class AuthSrv(object):
pass
if users == "**":
users = list(self.user.keys()) + ["*"]
users = list(self.acct.keys()) + ["*"]
else:
users = [users]
for u in users:
if u not in self.user and u != "*":
if u not in self.acct and u != "*":
raise Exception("user not found: " + u)
if vols == "*":
@@ -725,8 +772,10 @@ class AuthSrv(object):
raise Exception("volume not found: " + v)
self.log({"users": users, "vols": vols, "flags": flags})
m = "/{}: read({}) write({}) move({}) del({})"
for k, v in self.vfs.all_vols.items():
self.log("/{}: read({}) write({})".format(k, v.uread, v.uwrite))
vc = v.axs
self.log(m.format(k, vc.uread, vc.uwrite, vc.umove, vc.udel))
flag_v = "v" in flags
flag_ln = "ln" in flags
@@ -740,13 +789,15 @@ class AuthSrv(object):
for u in users:
self.log("checking /{} as {}".format(v, u))
try:
vn, _ = self.vfs.get(v, u, True, False)
vn, _ = self.vfs.get(v, u, True, False, False, False)
except:
continue
atop = vn.realpath
g = vn.walk("", "", [], u, True, not self.args.no_scandir, False)
for vpath, apath, files, _, _ in g:
g = vn.walk(
"", "", [], u, True, [[True]], not self.args.no_scandir, False
)
for _, _, vpath, apath, files, _, _ in g:
fnames = [n[0] for n in files]
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
vpaths = [vtop + x for x in vpaths]
@@ -766,7 +817,7 @@ class AuthSrv(object):
msg = [x[1] for x in files]
if msg:
nuprint("\n".join(msg))
self.log("\n" + "\n".join(msg))
if n_bads and flag_p:
raise Exception("found symlink leaving volume, and strict is set")

View File

59
copyparty/bos/bos.py Normal file
View File

@@ -0,0 +1,59 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
from ..util import fsenc, fsdec
from . import path
# grep -hRiE '(^|[^a-zA-Z_\.-])os\.' . | gsed -r 's/ /\n/g;s/\(/(\n/g' | grep -hRiE '(^|[^a-zA-Z_\.-])os\.' | sort | uniq -c
# printf 'os\.(%s)' "$(grep ^def bos/__init__.py | gsed -r 's/^def //;s/\(.*//' | tr '\n' '|' | gsed -r 's/.$//')"
def chmod(p, mode):
return os.chmod(fsenc(p), mode)
def listdir(p="."):
return [fsdec(x) for x in os.listdir(fsenc(p))]
def lstat(p):
return os.lstat(fsenc(p))
def makedirs(name, mode=0o755, exist_ok=True):
bname = fsenc(name)
try:
os.makedirs(bname, mode=mode)
except:
if not exist_ok or not os.path.isdir(bname):
raise
def mkdir(p, mode=0o755):
return os.mkdir(fsenc(p), mode=mode)
def rename(src, dst):
return os.rename(fsenc(src), fsenc(dst))
def replace(src, dst):
return os.replace(fsenc(src), fsenc(dst))
def rmdir(p):
return os.rmdir(fsenc(p))
def stat(p):
return os.stat(fsenc(p))
def unlink(p):
return os.unlink(fsenc(p))
def utime(p, times=None):
return os.utime(fsenc(p), times)
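# usage sketch (comment only, not part of this changeset): the helpers above are
# thin wrappers around the os.* calls of the same name, encoding path arguments
# with fsenc() (and decoding listdir results with fsdec); callers elsewhere do
#   from .bos import bos
#   bos.makedirs(dst)        # tolerates the folder already existing
#   st = bos.stat(fs_path)   # same as os.stat(fsenc(fs_path))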

33
copyparty/bos/path.py Normal file
View File

@@ -0,0 +1,33 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
from ..util import fsenc, fsdec
def abspath(p):
return fsdec(os.path.abspath(fsenc(p)))
def exists(p):
return os.path.exists(fsenc(p))
def getmtime(p):
return os.path.getmtime(fsenc(p))
def getsize(p):
return os.path.getsize(fsenc(p))
def isdir(p):
return os.path.isdir(fsenc(p))
def islink(p):
return os.path.islink(fsenc(p))
def realpath(p):
return fsdec(os.path.realpath(fsenc(p)))

View File

@@ -4,17 +4,11 @@ from __future__ import print_function, unicode_literals
import time
import threading
from .__init__ import PY2, WINDOWS, VT100
from .broker_util import try_exec
from .broker_mpw import MpWorker
from .util import mp
if PY2 and not WINDOWS:
from multiprocessing.reduction import ForkingPickler
from StringIO import StringIO as MemesIO # pylint: disable=import-error
class BrokerMp(object):
"""external api; manages MpWorkers"""
@@ -28,24 +22,19 @@ class BrokerMp(object):
self.retpend_mutex = threading.Lock()
self.mutex = threading.Lock()
cores = self.args.j
if not cores:
cores = mp.cpu_count()
self.log("broker", "booting {} subprocesses".format(cores))
for n in range(cores):
self.num_workers = self.args.j or mp.cpu_count()
self.log("broker", "booting {} subprocesses".format(self.num_workers))
for n in range(1, self.num_workers + 1):
q_pend = mp.Queue(1)
q_yield = mp.Queue(64)
proc = mp.Process(target=MpWorker, args=(q_pend, q_yield, self.args, n))
proc.q_pend = q_pend
proc.q_yield = q_yield
proc.nid = n
proc.clients = {}
proc.workload = 0
thr = threading.Thread(
target=self.collector, args=(proc,), name="mp-collector"
target=self.collector, args=(proc,), name="mp-sink-{}".format(n)
)
thr.daemon = True
thr.start()
@@ -53,13 +42,6 @@ class BrokerMp(object):
self.procs.append(proc)
proc.start()
if not self.args.q:
thr = threading.Thread(
target=self.debug_load_balancer, name="mp-dbg-loadbalancer"
)
thr.daemon = True
thr.start()
def shutdown(self):
self.log("broker", "shutting down")
for n, proc in enumerate(self.procs):
@@ -89,20 +71,6 @@ class BrokerMp(object):
if dest == "log":
self.log(*args)
elif dest == "workload":
with self.mutex:
proc.workload = args[0]
elif dest == "httpdrop":
addr = args[0]
with self.mutex:
del proc.clients[addr]
if not proc.clients:
proc.workload = 0
self.hub.tcpsrv.num_clients.add(-1)
elif dest == "retq":
# response from previous ipc call
with self.retpend_mutex:
@@ -128,38 +96,12 @@ class BrokerMp(object):
returns a Queue object which eventually contains the response if want_retval
(not-impl here since nothing uses it yet)
"""
if dest == "httpconn":
sck, addr = args
sck2 = sck
if PY2:
buf = MemesIO()
ForkingPickler(buf).dump(sck)
sck2 = buf.getvalue()
if dest == "listen":
for p in self.procs:
p.q_pend.put([0, dest, [args[0], len(self.procs)]])
proc = sorted(self.procs, key=lambda x: x.workload)[0]
proc.q_pend.put([0, dest, [sck2, addr]])
with self.mutex:
proc.clients[addr] = 50
proc.workload += 50
elif dest == "cb_httpsrv_up":
self.hub.cb_httpsrv_up()
else:
raise Exception("what is " + str(dest))
def debug_load_balancer(self):
fmt = "\033[1m{}\033[0;36m{:4}\033[0m "
if not VT100:
fmt = "({}{:4})"
last = ""
while self.procs:
msg = ""
for proc in self.procs:
msg += fmt.format(len(proc.clients), proc.workload)
if msg != last:
last = msg
with self.hub.log_mutex:
print(msg)
time.sleep(0.1)

View File

@@ -3,18 +3,13 @@ from __future__ import print_function, unicode_literals
from copyparty.authsrv import AuthSrv
import sys
import time
import signal
import threading
from .__init__ import PY2, WINDOWS
from .broker_util import ExceptionalQueue
from .httpsrv import HttpSrv
from .util import FAKE_MP
if PY2 and not WINDOWS:
import pickle # nosec
class MpWorker(object):
"""one single mp instance"""
@@ -25,22 +20,23 @@ class MpWorker(object):
self.args = args
self.n = n
self.log = self._log_disabled if args.q and not args.lo else self._log_enabled
self.retpend = {}
self.retpend_mutex = threading.Lock()
self.mutex = threading.Lock()
self.workload_thr_alive = False
# we inherited signal_handler from parent,
# replace it with something harmless
if not FAKE_MP:
signal.signal(signal.SIGINT, self.signal_handler)
for sig in [signal.SIGINT, signal.SIGTERM]:
signal.signal(sig, self.signal_handler)
# starting to look like a good idea
self.asrv = AuthSrv(args, None, False)
# instantiate all services here (TODO: inheritance?)
self.httpsrv = HttpSrv(self, True)
self.httpsrv.disconnect_func = self.httpdrop
self.httpsrv = HttpSrv(self, n)
# on winxp and some other platforms,
# use thr.join() to block all signals
@@ -49,19 +45,19 @@ class MpWorker(object):
thr.start()
thr.join()
def signal_handler(self, signal, frame):
def signal_handler(self, sig, frame):
# print('k')
pass
def log(self, src, msg, c=0):
def _log_enabled(self, src, msg, c=0):
self.q_yield.put([0, "log", [src, msg, c]])
def _log_disabled(self, src, msg, c=0):
pass
def logw(self, msg, c=0):
self.log("mp{}".format(self.n), msg, c)
def httpdrop(self, addr):
self.q_yield.put([0, "httpdrop", [addr]])
def main(self):
while True:
retq_id, dest, args = self.q_pend.get()
@@ -73,24 +69,8 @@ class MpWorker(object):
sys.exit(0)
return
elif dest == "httpconn":
sck, addr = args
if PY2:
sck = pickle.loads(sck) # nosec
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
self.httpsrv.accept(sck, addr)
with self.mutex:
if not self.workload_thr_alive:
self.workload_thr_alive = True
thr = threading.Thread(
target=self.thr_workload, name="mpw-workload"
)
thr.daemon = True
thr.start()
elif dest == "listen":
self.httpsrv.listen(args[0], args[1])
elif dest == "retq":
# response from previous ipc call
@@ -114,16 +94,3 @@ class MpWorker(object):
self.q_yield.put([retq_id, dest, args])
return retq
def thr_workload(self):
"""announce workloads to MpSrv (the mp controller / loadbalancer)"""
# avoid locking in extract_filedata by tracking difference here
while True:
time.sleep(0.2)
with self.mutex:
if self.httpsrv.num_clients() == 0:
# no clients rn, terminate thread
self.workload_thr_alive = False
return
self.q_yield.put([0, "workload", [self.httpsrv.workload]])

View File

@@ -3,7 +3,6 @@ from __future__ import print_function, unicode_literals
import threading
from .authsrv import AuthSrv
from .httpsrv import HttpSrv
from .broker_util import ExceptionalQueue, try_exec
@@ -18,10 +17,10 @@ class BrokerThr(object):
self.asrv = hub.asrv
self.mutex = threading.Lock()
self.num_workers = 1
# instantiate all services here (TODO: inheritance?)
self.httpsrv = HttpSrv(self)
self.httpsrv.disconnect_func = self.httpdrop
self.httpsrv = HttpSrv(self, None)
def shutdown(self):
# self.log("broker", "shutting down")
@@ -29,12 +28,8 @@ class BrokerThr(object):
pass
def put(self, want_retval, dest, *args):
if dest == "httpconn":
sck, addr = args
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
self.httpsrv.accept(sck, addr)
if dest == "listen":
self.httpsrv.listen(args[0], 1)
else:
# new ipc invoking managed service in hub
@@ -51,6 +46,3 @@ class BrokerThr(object):
retq = ExceptionalQueue(1)
retq.put(rv)
return retq
def httpdrop(self, addr):
self.hub.tcpsrv.num_clients.add(-1)

View File

@@ -10,19 +10,16 @@ import json
import string
import socket
import ctypes
import traceback
from datetime import datetime
import calendar
from .__init__ import E, PY2, WINDOWS, ANYWIN
from .__init__ import E, PY2, WINDOWS, ANYWIN, unicode
from .util import * # noqa # pylint: disable=unused-wildcard-import
from .bos import bos
from .authsrv import AuthSrv
from .szip import StreamZip
from .star import StreamTar
if not PY2:
unicode = str
NO_CACHE = {"Cache-Control": "no-cache"}
NO_STORE = {"Cache-Control": "no-store; max-age=0"}
@@ -41,7 +38,6 @@ class HttpCli(object):
self.ip = conn.addr[0]
self.addr = conn.addr # type: tuple[str, int]
self.args = conn.args
self.is_mp = conn.is_mp
self.asrv = conn.asrv # type: AuthSrv
self.ico = conn.ico
self.thumbcli = conn.thumbcli
@@ -50,12 +46,21 @@ class HttpCli(object):
self.tls = hasattr(self.s, "cipher")
self.bufsz = 1024 * 32
self.hint = None
self.absolute_urls = False
self.out_headers = {"Access-Control-Allow-Origin": "*"}
def log(self, msg, c=0):
ptn = self.asrv.re_pwd
if ptn and ptn.search(msg):
msg = ptn.sub(self.unpwd, msg)
self.log_func(self.log_src, msg, c)
def unpwd(self, m):
a, b = m.groups()
return "=\033[7m {} \033[27m{}".format(self.asrv.iacct[a], b)
def _check_nonfatal(self, ex):
return ex.code < 400 or ex.code in [404, 429]
@@ -64,14 +69,19 @@ class HttpCli(object):
if rem.startswith("/") or rem.startswith("../") or "/../" in rem:
raise Exception("that was close")
def j2(self, name, **kwargs):
def j2(self, name, **ka):
tpl = self.conn.hsrv.j2[name]
return tpl.render(**kwargs) if kwargs else tpl
if ka:
ka["ts"] = self.conn.hsrv.cachebuster()
return tpl.render(**ka)
return tpl
def run(self):
"""returns true if connection can be reused"""
self.keepalive = False
self.headers = {}
self.hint = None
try:
headerlines = read_header(self.sr)
if not headerlines:
@@ -85,9 +95,13 @@ class HttpCli(object):
try:
self.mode, self.req, self.http_ver = headerlines[0].split(" ")
except:
raise Pebkac(400, "bad headers:\n" + "\n".join(headerlines))
msg = " ]\n#[ ".join(headerlines)
raise Pebkac(400, "bad headers:\n#[ " + msg + " ]")
except Pebkac as ex:
self.mode = "GET"
self.req = "[junk]"
self.http_ver = "HTTP/1.1"
# self.log("pebkac at httpcli.run #1: " + repr(ex))
self.keepalive = self._check_nonfatal(ex)
self.loud_reply(unicode(ex), status=ex.code)
@@ -115,7 +129,7 @@ class HttpCli(object):
try:
self.ip = vs[n].strip()
except:
self.ip = vs[-1].strip()
self.ip = vs[0].strip()
self.log("rproxy={} oob x-fwd {}".format(self.args.rproxy, v), c=3)
self.log_src = self.conn.set_rproxy(self.ip)
@@ -130,6 +144,9 @@ class HttpCli(object):
if v is not None:
self.log("[H] {}: \033[33m[{}]".format(k, v), 6)
if "&" in self.req and "?" not in self.req:
self.hint = "did you mean '?' instead of '&'"
# split req into vpath + uparam
uparam = {}
if "?" not in self.req:
@@ -165,9 +182,14 @@ class HttpCli(object):
self.vpath = unquotep(vpath)
pwd = uparam.get("pw")
self.uname = self.asrv.iuser.get(pwd, "*")
self.rvol, self.wvol, self.avol = [[], [], []]
self.asrv.vfs.user_tree(self.uname, self.rvol, self.wvol, self.avol)
self.uname = self.asrv.iacct.get(pwd, "*")
self.rvol = self.asrv.vfs.aread[self.uname]
self.wvol = self.asrv.vfs.awrite[self.uname]
self.mvol = self.asrv.vfs.amove[self.uname]
self.dvol = self.asrv.vfs.adel[self.uname]
if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"):
self.out_headers["Set-Cookie"] = self.get_pwd_cookie(pwd)[0]
ua = self.headers.get("user-agent", "")
self.is_rclone = ua.startswith("rclone/")
@@ -199,12 +221,15 @@ class HttpCli(object):
self.log("{}\033[0m, {}".format(str(ex), self.vpath), 3)
msg = "<pre>{}\r\nURL: {}\r\n".format(str(ex), self.vpath)
if self.hint:
msg += "hint: {}\r\n".format(self.hint)
self.reply(msg.encode("utf-8", "replace"), status=ex.code)
return self.keepalive
except Pebkac:
return False
def send_headers(self, length, status=200, mime=None, headers={}):
def send_headers(self, length, status=200, mime=None, headers=None):
response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])]
if length is not None:
@@ -214,7 +239,8 @@ class HttpCli(object):
response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close"))
# headers{} overrides anything set previously
self.out_headers.update(headers)
if headers:
self.out_headers.update(headers)
# default to utf8 html if no content-type is set
if not mime:
@@ -231,7 +257,7 @@ class HttpCli(object):
except:
raise Pebkac(400, "client d/c while replying headers")
def reply(self, body, status=200, mime=None, headers={}):
def reply(self, body, status=200, mime=None, headers=None):
# TODO something to reply with user-supplied values safely
self.send_headers(len(body), status, mime, headers)
@@ -247,7 +273,7 @@ class HttpCli(object):
self.log(body.rstrip())
self.reply(b"<pre>" + body.encode("utf-8") + b"\r\n", *list(args), **kwargs)
def urlq(self, add={}, rm=[]):
def urlq(self, add, rm):
"""
generates url query based on uparam (b, pw, all others)
removing anything in rm, adding pairs in add
@@ -319,6 +345,9 @@ class HttpCli(object):
if "tree" in self.uparam:
return self.tx_tree()
if "stack" in self.uparam:
return self.tx_stack()
# conditional redirect to single volumes
if self.vpath == "" and not self.ouparam:
nread = len(self.rvol)
@@ -333,14 +362,21 @@ class HttpCli(object):
self.redirect(vpath, flavor="redirecting to", use302=True)
return True
self.readable, self.writable = self.asrv.vfs.can_access(self.vpath, self.uname)
if not self.readable and not self.writable:
x = self.asrv.vfs.can_access(self.vpath, self.uname)
self.can_read, self.can_write, self.can_move, self.can_delete = x
if not self.can_read and not self.can_write:
if self.vpath:
self.log("inaccessible: [{}]".format(self.vpath))
raise Pebkac(404)
self.uparam = {"h": False}
if "delete" in self.uparam:
return self.handle_rm()
if "move" in self.uparam:
return self.handle_mv()
if "h" in self.uparam:
self.vpath = None
return self.tx_mounts()
@@ -348,9 +384,6 @@ class HttpCli(object):
if "scan" in self.uparam:
return self.scanvol()
if "stack" in self.uparam:
return self.tx_stack()
return self.tx_browser()
def handle_options(self):
@@ -456,15 +489,17 @@ class HttpCli(object):
addr = self.ip.replace(":", ".")
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
path = os.path.join(fdir, fn)
if self.args.nw:
path = os.devnull
with open(fsenc(path), "wb", 512 * 1024) as f:
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
post_sz, _, sha_b64 = hashcopy(reader, f)
vfs, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put(
False, "up2k.hash_file", vfs.realpath, vfs.flags, vrem, fn
)
if not self.args.nw:
vfs, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put(
False, "up2k.hash_file", vfs.realpath, vfs.flags, vrem, fn
)
return post_sz, sha_b64, remains, path
@@ -481,7 +516,7 @@ class HttpCli(object):
spd1 = get_spd(nbytes, self.t0)
spd2 = get_spd(self.conn.nbyte, self.conn.t0)
return spd1 + " " + spd2
return "{} {} n{}".format(spd1, spd2, self.conn.nreq)
def handle_post_multipart(self):
self.parser = MultipartParser(self.log, self.sr, self.headers)
@@ -581,15 +616,18 @@ class HttpCli(object):
if sub:
try:
dst = os.path.join(vfs.realpath, rem)
os.makedirs(fsenc(dst))
if not bos.path.isdir(dst):
bos.makedirs(dst)
except OSError as ex:
if ex.errno == 13:
raise Pebkac(500, "the server OS denied write-access")
self.log("makedirs failed [{}]".format(dst))
if not bos.path.isdir(dst):
if ex.errno == 13:
raise Pebkac(500, "the server OS denied write-access")
if ex.errno == 17:
raise Pebkac(400, "some file got your folder name")
if ex.errno == 17:
raise Pebkac(400, "some file got your folder name")
raise Pebkac(500, min_ex())
raise Pebkac(500, min_ex())
except:
raise Pebkac(500, min_ex())
@@ -621,7 +659,7 @@ class HttpCli(object):
penalty = 0.7
t_idle = t0 - idx.p_end
if idx.p_dur > 0.7 and t_idle < penalty:
m = "rate-limit ({:.1f} sec), cost {:.2f}, idle {:.2f}"
m = "rate-limit {:.1f} sec, cost {:.2f}, idle {:.2f}"
raise Pebkac(429, m.format(penalty, idx.p_dur, t_idle))
if "srch" in body:
@@ -687,7 +725,7 @@ class HttpCli(object):
with open(fsenc(path), "rb+", 512 * 1024) as f:
f.seek(cstart[0])
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
post_sz, _, sha_b64 = hashcopy(reader, f)
if sha_b64 != chash:
raise Pebkac(
@@ -728,7 +766,7 @@ class HttpCli(object):
times = (int(time.time()), int(lastmod))
self.log("no more chunks, setting times {}".format(times))
try:
os.utime(fsenc(path), times)
bos.utime(path, times)
except:
self.log("failed to utime ({}, {})".format(path, times))
@@ -741,7 +779,13 @@ class HttpCli(object):
pwd = self.parser.require("cppwd", 64)
self.parser.drop()
if pwd in self.asrv.iuser:
ck, msg = self.get_pwd_cookie(pwd)
html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
return True
def get_pwd_cookie(self, pwd):
if pwd in self.asrv.iacct:
msg = "login ok"
dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365)
exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
@@ -751,9 +795,7 @@ class HttpCli(object):
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
ck = "cppwd={}; Path=/; Expires={}; SameSite=Lax".format(pwd, exp)
html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
return True
return [ck, msg]
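# illustration (comment only): a correct password yields roughly
#   cppwd=hunter2; Path=/; Expires=<one year from now>; SameSite=Lax
# while a wrong one gets the 1997 expiry above, which clears the cookie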
def handle_mkdir(self):
new_dir = self.parser.require("name", 512)
@@ -763,20 +805,20 @@ class HttpCli(object):
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem)
sanitized = sanitize_fn(new_dir)
sanitized = sanitize_fn(new_dir, "", [])
if not nullwrite:
fdir = os.path.join(vfs.realpath, rem)
fn = os.path.join(fdir, sanitized)
if not os.path.isdir(fsenc(fdir)):
if not bos.path.isdir(fdir):
raise Pebkac(500, "parent folder does not exist")
if os.path.isdir(fsenc(fn)):
if bos.path.isdir(fn):
raise Pebkac(500, "that folder exists already")
try:
os.mkdir(fsenc(fn))
bos.mkdir(fn)
except OSError as ex:
if ex.errno == 13:
raise Pebkac(500, "the server OS denied write-access")
@@ -800,13 +842,13 @@ class HttpCli(object):
if not new_file.endswith(".md"):
new_file += ".md"
sanitized = sanitize_fn(new_file)
sanitized = sanitize_fn(new_file, "", [])
if not nullwrite:
fdir = os.path.join(vfs.realpath, rem)
fn = os.path.join(fdir, sanitized)
if os.path.exists(fsenc(fn)):
if bos.path.exists(fn):
raise Pebkac(500, "that file exists already")
with open(fsenc(fn), "wb") as f:
@@ -833,10 +875,10 @@ class HttpCli(object):
if p_file and not nullwrite:
fdir = os.path.join(vfs.realpath, rem)
fname = sanitize_fn(
p_file, bad=[".prologue.html", ".epilogue.html"]
p_file, "", [".prologue.html", ".epilogue.html"]
)
if not os.path.isdir(fsenc(fdir)):
if not bos.path.isdir(fdir):
raise Pebkac(404, "that folder does not exist")
suffix = ".{:.6f}-{}".format(time.time(), self.ip)
@@ -850,7 +892,7 @@ class HttpCli(object):
with ren_open(fname, "wb", 512 * 1024, **open_args) as f:
f, fname = f["orz"]
self.log("writing to {}/{}".format(fdir, fname))
sz, sha512_hex, _ = hashcopy(self.conn, p_data, f)
sz, sha512_hex, _ = hashcopy(p_data, f)
if sz == 0:
raise Pebkac(400, "empty files in post")
@@ -875,10 +917,10 @@ class HttpCli(object):
suffix = ".PARTIAL"
try:
os.rename(fsenc(fp), fsenc(fp2 + suffix))
bos.rename(fp, fp2 + suffix)
except:
fp2 = fp2[: -len(suffix) - 1]
os.rename(fsenc(fp), fsenc(fp2 + suffix))
bos.rename(fp, fp2 + suffix)
raise
@@ -962,13 +1004,6 @@ class HttpCli(object):
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem)
# TODO:
# the per-volume read/write permissions must be replaced with permission flags
# which would decide how to handle uploads to filenames which are taken,
# current behavior of creating a new name is a good default for binary files
# but should also offer a flag to takeover the filename and rename the old one
#
# stopgap:
if not rem.endswith(".md"):
raise Pebkac(400, "only markdown pls")
@@ -983,7 +1018,7 @@ class HttpCli(object):
fp = os.path.join(vfs.realpath, rem)
srv_lastmod = srv_lastmod3 = -1
try:
st = os.stat(fsenc(fp))
st = bos.stat(fp)
srv_lastmod = st.st_mtime
srv_lastmod3 = int(srv_lastmod * 1000)
except OSError as ex:
@@ -1019,23 +1054,22 @@ class HttpCli(object):
self.reply(response.encode("utf-8"))
return True
# TODO another hack re: pending permissions rework
mdir, mfile = os.path.split(fp)
mfile2 = "{}.{:.3f}.md".format(mfile[:-3], srv_lastmod)
try:
os.mkdir(fsenc(os.path.join(mdir, ".hist")))
bos.mkdir(os.path.join(mdir, ".hist"))
except:
pass
os.rename(fsenc(fp), fsenc(os.path.join(mdir, ".hist", mfile2)))
bos.rename(fp, os.path.join(mdir, ".hist", mfile2))
p_field, _, p_data = next(self.parser.gen)
if p_field != "body":
raise Pebkac(400, "expected body, got {}".format(p_field))
with open(fsenc(fp), "wb", 512 * 1024) as f:
sz, sha512, _ = hashcopy(self.conn, p_data, f)
sz, sha512, _ = hashcopy(p_data, f)
new_lastmod = os.stat(fsenc(fp)).st_mtime
new_lastmod = bos.stat(fp).st_mtime
new_lastmod3 = int(new_lastmod * 1000)
sha512 = sha512[:56]
@@ -1080,7 +1114,7 @@ class HttpCli(object):
for ext in ["", ".gz", ".br"]:
try:
fs_path = req_path + ext
st = os.stat(fsenc(fs_path))
st = bos.stat(fs_path)
file_ts = max(file_ts, st.st_mtime)
editions[ext or "plain"] = [fs_path, st.st_size]
except:
@@ -1223,8 +1257,7 @@ class HttpCli(object):
if use_sendfile:
remains = sendfile_kern(lower, upper, f, self.s)
else:
actor = self.conn if self.is_mp else None
remains = sendfile_py(lower, upper, f, self.s, actor)
remains = sendfile_py(lower, upper, f, self.s)
if remains > 0:
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
@@ -1281,7 +1314,7 @@ class HttpCli(object):
fgen = vn.zipgen(rem, items, self.uname, dots, not self.args.no_scandir)
# for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]}))
bgen = packer(fgen, utf8="utf" in uarg, pre_crc="crc" in uarg)
bgen = packer(self.log, fgen, utf8="utf" in uarg, pre_crc="crc" in uarg)
bsent = 0
for buf in bgen.gen():
if not buf:
@@ -1303,7 +1336,7 @@ class HttpCli(object):
ext = "folder"
exact = True
bad = re.compile(r"[](){}/[]|^[0-9_-]*$")
bad = re.compile(r"[](){}/ []|^[0-9_-]*$")
n = ext.split(".")[::-1]
if not exact:
n = n[:-1]
@@ -1333,10 +1366,10 @@ class HttpCli(object):
html_path = os.path.join(E.mod, "web", "{}.html".format(tpl))
template = self.j2(tpl)
st = os.stat(fsenc(fs_path))
st = bos.stat(fs_path)
ts_md = st.st_mtime
st = os.stat(fsenc(html_path))
st = bos.stat(html_path)
ts_html = st.st_mtime
sz_md = 0
@@ -1345,7 +1378,7 @@ class HttpCli(object):
for c, v in [[b"&", 4], [b"<", 3], [b">", 3]]:
sz_md += (len(buf) - len(buf.replace(c, b""))) * v
file_ts = max(ts_md, ts_html)
file_ts = max(ts_md, ts_html, E.t0)
file_lastmod, do_send = self._chk_lastmod(file_ts)
self.out_headers["Last-Modified"] = file_lastmod
self.out_headers.update(NO_CACHE)
@@ -1359,6 +1392,7 @@ class HttpCli(object):
"md_plug": "true" if self.args.emp else "false",
"md_chk_rate": self.args.mcr,
"md": boundary,
"ts": self.conn.hsrv.cachebuster(),
}
html = template.render(**targs).encode("utf-8", "replace")
html = html.split(boundary.encode("utf-8"))
@@ -1391,13 +1425,14 @@ class HttpCli(object):
return True
def tx_mounts(self):
suf = self.urlq(rm=["h"])
suf = self.urlq({}, ["h"])
avol = [x for x in self.wvol if x in self.rvol]
rvol, wvol, avol = [
[("/" + x).rstrip("/") + "/" for x in y]
for y in [self.rvol, self.wvol, self.avol]
for y in [self.rvol, self.wvol, avol]
]
if self.avol and not self.args.no_rescan:
if avol and not self.args.no_rescan:
x = self.conn.hsrv.broker.put(True, "up2k.get_state")
vs = json.loads(x.get())
vstate = {("/" + k).rstrip("/") + "/": v for k, v in vs["volstate"].items()}
@@ -1422,8 +1457,8 @@ class HttpCli(object):
return True
def scanvol(self):
if not self.readable or not self.writable:
raise Pebkac(403, "not admin")
if not self.can_read or not self.can_write:
raise Pebkac(403, "not allowed for user " + self.uname)
if self.args.no_rescan:
raise Pebkac(403, "disabled by argv")
@@ -1441,8 +1476,8 @@ class HttpCli(object):
raise Pebkac(500, x)
def tx_stack(self):
if not self.readable or not self.writable:
raise Pebkac(403, "not admin")
if not [x for x in self.wvol if x in self.rvol]:
raise Pebkac(403, "not allowed for user " + self.uname)
if self.args.no_stack:
raise Pebkac(403, "disabled by argv")
@@ -1480,7 +1515,7 @@ class HttpCli(object):
try:
vn, rem = self.asrv.vfs.get(top, self.uname, True, False)
fsroot, vfs_ls, vfs_virt = vn.ls(
rem, self.uname, not self.args.no_scandir, incl_wo=True
rem, self.uname, not self.args.no_scandir, [[True], [False, True]]
)
except:
vfs_ls = []
@@ -1507,6 +1542,33 @@ class HttpCli(object):
ret["a"] = dirs
return ret
def handle_rm(self):
if not self.can_delete:
raise Pebkac(403, "not allowed for user " + self.uname)
if self.args.no_del:
raise Pebkac(403, "disabled by argv")
x = self.conn.hsrv.broker.put(True, "up2k.handle_rm", self.uname, self.vpath)
self.loud_reply(x.get())
def handle_mv(self):
if not self.can_move:
raise Pebkac(403, "not allowed for user " + self.uname)
if self.args.no_mv:
raise Pebkac(403, "disabled by argv")
# full path of new loc (incl filename)
dst = self.uparam.get("move")
if not dst:
raise Pebkac(400, "need dst vpath")
x = self.conn.hsrv.broker.put(
True, "up2k.handle_mv", self.uname, self.vpath, dst
)
self.loud_reply(x.get())
def tx_browser(self):
vpath = ""
vpnodes = [["", "/"]]
@@ -1519,28 +1581,28 @@ class HttpCli(object):
vpnodes.append([quotep(vpath) + "/", html_escape(node, crlf=True)])
vn, rem = self.asrv.vfs.get(
self.vpath, self.uname, self.readable, self.writable
)
vn, rem = self.asrv.vfs.get(self.vpath, self.uname, False, False)
abspath = vn.canonical(rem)
dbv, vrem = vn.get_dbv(rem)
try:
st = os.stat(fsenc(abspath))
st = bos.stat(abspath)
except:
raise Pebkac(404)
if self.readable:
if rem.startswith(".hist/up2k."):
if self.can_read:
if rem.startswith(".hist/up2k.") or (
rem.endswith("/dir.txt") and rem.startswith(".hist/th/")
):
raise Pebkac(403)
is_dir = stat.S_ISDIR(st.st_mode)
th_fmt = self.uparam.get("th")
if th_fmt is not None:
if is_dir:
for fn in ["folder.png", "folder.jpg"]:
for fn in self.args.th_covers.split(","):
fp = os.path.join(abspath, fn)
if os.path.exists(fp):
if bos.path.exists(fp):
vrem = "{}/{}".format(vrem.rstrip("/"), fn)
is_dir = False
break
@@ -1595,14 +1657,17 @@ class HttpCli(object):
srv_info = "</span> /// <span>".join(srv_info)
perms = []
if self.readable:
if self.can_read:
perms.append("read")
if self.writable:
if self.can_write:
perms.append("write")
if self.can_move:
perms.append("move")
if self.can_delete:
perms.append("delete")
url_suf = self.urlq()
url_suf = self.urlq({}, [])
is_ls = "ls" in self.uparam
ts = "" # "?{}".format(time.time())
tpl = "browser"
if "b" in self.uparam:
@@ -1611,7 +1676,7 @@ class HttpCli(object):
logues = ["", ""]
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
fn = os.path.join(abspath, fn)
if os.path.exists(fsenc(fn)):
if bos.path.exists(fn):
with open(fsenc(fn), "rb") as f:
logues[n] = f.read().decode("utf-8")
@@ -1620,6 +1685,7 @@ class HttpCli(object):
"files": [],
"taglist": [],
"srvinf": srv_info,
"acct": self.uname,
"perms": perms,
"logues": logues,
}
@@ -1627,20 +1693,22 @@ class HttpCli(object):
"vdir": quotep(self.vpath),
"vpnodes": vpnodes,
"files": [],
"ts": ts,
"acct": self.uname,
"perms": json.dumps(perms),
"taglist": [],
"tag_order": [],
"have_up2k_idx": ("e2d" in vn.flags),
"have_tags_idx": ("e2t" in vn.flags),
"have_mv": (not self.args.no_mv),
"have_del": (not self.args.no_del),
"have_zip": (not self.args.no_zip),
"have_b_u": (self.writable and self.uparam.get("b") == "u"),
"have_b_u": (self.can_write and self.uparam.get("b") == "u"),
"url_suf": url_suf,
"logues": logues,
"title": html_escape(self.vpath, crlf=True),
"srv_info": srv_info,
}
if not self.readable:
if not self.can_read:
if is_ls:
ret = json.dumps(ls_ret)
self.reply(
@@ -1663,7 +1731,7 @@ class HttpCli(object):
return self.tx_zip(k, v, vn, rem, [], self.args.ed)
fsroot, vfs_ls, vfs_virt = vn.ls(
rem, self.uname, not self.args.no_scandir, incl_wo=True
rem, self.uname, not self.args.no_scandir, [[True], [False, True]]
)
stats = {k: v for k, v in vfs_ls}
vfs_ls = [x[0] for x in vfs_ls]
@@ -1674,7 +1742,7 @@ class HttpCli(object):
histdir = os.path.join(fsroot, ".hist")
ptn = re.compile(r"(.*)\.([0-9]+\.[0-9]{3})(\.[^\.]+)$")
try:
for hfn in os.listdir(histdir):
for hfn in bos.listdir(histdir):
m = ptn.match(hfn)
if not m:
continue
@@ -1715,7 +1783,7 @@ class HttpCli(object):
fspath = fsroot + "/" + fn
try:
inf = stats.get(fn) or os.stat(fsenc(fspath))
inf = stats.get(fn) or bos.stat(fspath)
except:
self.log("broken symlink: {}".format(repr(fspath)))
continue
@@ -1763,28 +1831,44 @@ class HttpCli(object):
fn = f["name"]
rd = f["rd"]
del f["rd"]
if icur:
if vn != dbv:
_, rd = vn.get_dbv(rd)
if not icur:
break
if vn != dbv:
_, rd = vn.get_dbv(rd)
q = "select w from up where rd = ? and fn = ?"
r = None
try:
r = icur.execute(q, (rd, fn)).fetchone()
except Exception as ex:
if "database is locked" in str(ex):
break
q = "select w from up where rd = ? and fn = ?"
try:
r = icur.execute(q, (rd, fn)).fetchone()
except:
args = s3enc(idx.mem_cur, rd, fn)
r = icur.execute(q, args).fetchone()
except:
m = "tag list error, {}/{}\n{}"
self.log(m.format(rd, fn, min_ex()))
break
tags = {}
f["tags"] = tags
tags = {}
f["tags"] = tags
if not r:
continue
if not r:
continue
w = r[0][:16]
q = "select k, v from mt where w = ? and k != 'x'"
w = r[0][:16]
q = "select k, v from mt where w = ? and k != 'x'"
try:
for k, v in icur.execute(q, (w,)):
taglist[k] = True
tags[k] = v
except:
m = "tag read error, {}/{} [{}]:\n{}"
self.log(m.format(rd, fn, w, min_ex()))
break
if icur:
taglist = [k for k in vn.flags.get("mte", "").split(",") if k in taglist]


@@ -3,7 +3,6 @@ from __future__ import print_function, unicode_literals
import re
import os
import sys
import time
import socket
@@ -35,7 +34,6 @@ class HttpConn(object):
self.args = hsrv.args
self.asrv = hsrv.asrv
self.is_mp = hsrv.is_mp
self.cert_path = hsrv.cert_path
enth = HAVE_PIL and not self.args.no_thumb
@@ -44,8 +42,8 @@ class HttpConn(object):
self.t0 = time.time()
self.stopping = False
self.nreq = 0
self.nbyte = 0
self.workload = 0
self.u2idx = None
self.log_func = hsrv.log
self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None
@@ -172,7 +170,7 @@ class HttpConn(object):
self.log("client rejected our certificate (nice)")
elif "ALERT_CERTIFICATE_UNKNOWN" in em:
# chrome-android keeps doing this
# android-chrome keeps doing this
pass
else:
@@ -184,11 +182,7 @@ class HttpConn(object):
self.sr = Unrecv(self.s)
while not self.stopping:
if self.is_mp:
self.workload += 50
if self.workload >= 2 ** 31:
self.workload = 100
self.nreq += 1
cli = HttpCli(self)
if not cli.run():
return


@@ -4,6 +4,8 @@ from __future__ import print_function, unicode_literals
import os
import sys
import time
import math
import base64
import socket
import threading
@@ -24,10 +26,16 @@ except ImportError:
)
sys.exit(1)
from .__init__ import E, MACOS
from .authsrv import AuthSrv
from .__init__ import E, PY2, MACOS
from .util import spack, min_ex, start_stackmon, start_log_thrs
from .bos import bos
from .httpconn import HttpConn
if PY2:
import Queue as queue
else:
import queue
class HttpSrv(object):
"""
@@ -35,19 +43,28 @@ class HttpSrv(object):
relying on MpSrv for performance (HttpSrv is just plain threads)
"""
def __init__(self, broker, is_mp=False):
def __init__(self, broker, nid):
self.broker = broker
self.is_mp = is_mp
self.nid = nid
self.args = broker.args
self.log = broker.log
self.asrv = broker.asrv
self.disconnect_func = None
self.name = "httpsrv" + ("-n{}-i{:x}".format(nid, os.getpid()) if nid else "")
self.mutex = threading.Lock()
self.stopping = False
self.clients = {}
self.workload = 0
self.workload_thr_alive = False
self.tp_nthr = 0 # actual
self.tp_ncli = 0 # fading
self.tp_time = None # latest worker collect
self.tp_q = None if self.args.no_htp else queue.LifoQueue()
self.srvs = []
self.ncli = 0 # exact
self.clients = {} # laggy
self.nclimax = 0
self.cb_ts = 0
self.cb_v = 0
env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
@@ -57,29 +74,161 @@ class HttpSrv(object):
}
cert_path = os.path.join(E.cfg, "cert.pem")
if os.path.exists(cert_path):
if bos.path.exists(cert_path):
self.cert_path = cert_path
else:
self.cert_path = None
if self.tp_q:
self.start_threads(4)
name = "httpsrv-scaler" + ("-{}".format(nid) if nid else "")
t = threading.Thread(target=self.thr_scaler, name=name)
t.daemon = True
t.start()
if nid:
if self.args.stackmon:
start_stackmon(self.args.stackmon, nid)
if self.args.log_thrs:
start_log_thrs(self.log, self.args.log_thrs, nid)
def start_threads(self, n):
self.tp_nthr += n
if self.args.log_htp:
self.log(self.name, "workers += {} = {}".format(n, self.tp_nthr), 6)
for _ in range(n):
thr = threading.Thread(
target=self.thr_poolw,
name=self.name + "-poolw",
)
thr.daemon = True
thr.start()
def stop_threads(self, n):
self.tp_nthr -= n
if self.args.log_htp:
self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6)
for _ in range(n):
self.tp_q.put(None)
def thr_scaler(self):
while True:
time.sleep(2 if self.tp_ncli else 30)
with self.mutex:
self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
if self.tp_nthr > self.tp_ncli + 8:
self.stop_threads(4)
def listen(self, sck, nlisteners):
ip, port = sck.getsockname()
self.srvs.append(sck)
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
t = threading.Thread(
target=self.thr_listen,
args=(sck,),
name="httpsrv-n{}-listen-{}-{}".format(self.nid or "0", ip, port),
)
t.daemon = True
t.start()
def thr_listen(self, srv_sck):
"""listens on a shared tcp server"""
ip, port = srv_sck.getsockname()
fno = srv_sck.fileno()
msg = "subscribed @ {}:{} f{}".format(ip, port, fno)
self.log(self.name, msg)
self.broker.put(False, "cb_httpsrv_up")
while not self.stopping:
if self.args.log_conn:
self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30")
if self.ncli >= self.nclimax:
self.log(self.name, "at connection limit; waiting", 3)
while self.ncli >= self.nclimax:
time.sleep(0.1)
if self.args.log_conn:
self.log(self.name, "|%sC-acc1" % ("-" * 2,), c="1;30")
try:
sck, addr = srv_sck.accept()
except (OSError, socket.error) as ex:
self.log(self.name, "accept({}): {}".format(fno, ex), c=6)
time.sleep(0.02)
continue
if self.args.log_conn:
m = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
"-" * 3, ip, port % 8, port
)
self.log("%s %s" % addr, m, c="1;30")
self.accept(sck, addr)
def accept(self, sck, addr):
"""takes an incoming tcp connection and creates a thread to handle it"""
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30")
now = time.time()
if now - (self.tp_time or now) > 300:
self.tp_q = None
if self.tp_q:
self.tp_q.put((sck, addr))
with self.mutex:
self.ncli += 1
self.tp_time = self.tp_time or now
self.tp_ncli = max(self.tp_ncli, self.ncli + 1)
if self.tp_nthr < self.ncli + 4:
self.start_threads(8)
return
if not self.args.no_htp:
m = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n"
self.log(self.name, m, 1)
with self.mutex:
self.ncli += 1
thr = threading.Thread(
target=self.thr_client,
args=(sck, addr),
name="httpsrv-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]),
name="httpconn-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]),
)
thr.daemon = True
thr.start()
def num_clients(self):
with self.mutex:
return len(self.clients)
def thr_poolw(self):
while True:
task = self.tp_q.get()
if not task:
break
with self.mutex:
self.tp_time = None
try:
sck, addr = task
me = threading.current_thread()
me.name = "httpconn-{}-{}".format(
addr[0].split(".", 2)[-1][-6:], addr[1]
)
self.thr_client(sck, addr)
me.name = self.name + "-poolw"
except:
self.log(self.name, "thr_client: " + min_ex(), 3)
def shutdown(self):
self.stopping = True
for srv in self.srvs:
try:
srv.close()
except:
pass
clients = list(self.clients.keys())
for cli in clients:
try:
@@ -87,7 +236,14 @@ class HttpSrv(object):
except:
pass
self.log("httpsrv-n", "ok bye")
if self.tp_q:
self.stop_threads(self.tp_nthr)
for _ in range(10):
time.sleep(0.05)
if self.tp_q.empty():
break
self.log(self.name, "ok bye")
def thr_client(self, sck, addr):
"""thread managing one tcp client"""
@@ -97,25 +253,15 @@ class HttpSrv(object):
with self.mutex:
self.clients[cli] = 0
if self.is_mp:
self.workload += 50
if not self.workload_thr_alive:
self.workload_thr_alive = True
thr = threading.Thread(
target=self.thr_workload, name="httpsrv-workload"
)
thr.daemon = True
thr.start()
fno = sck.fileno()
try:
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-crun" % ("-" * 6,), c="1;30")
self.log("%s %s" % addr, "|%sC-crun" % ("-" * 4,), c="1;30")
cli.run()
except (OSError, socket.error) as ex:
if ex.errno not in [10038, 10054, 107, 57, 9]:
if ex.errno not in [10038, 10054, 107, 57, 49, 9]:
self.log(
"%s %s" % addr,
"run({}): {}".format(fno, ex),
@@ -125,7 +271,7 @@ class HttpSrv(object):
finally:
sck = cli.s
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30")
self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 5,), c="1;30")
try:
fno = sck.fileno()
@@ -138,42 +284,37 @@ class HttpSrv(object):
"shut({}): {}".format(fno, ex),
c="1;30",
)
if ex.errno not in [10038, 10054, 107, 57, 9]:
if ex.errno not in [10038, 10054, 107, 57, 49, 9]:
# 10038 No longer considered a socket
# 10054 Forcibly closed by remote
# 107 Transport endpoint not connected
# 57 Socket is not connected
# 49 Can't assign requested address (wifi down)
# 9 Bad file descriptor
raise
finally:
with self.mutex:
del self.clients[cli]
self.ncli -= 1
if self.disconnect_func:
self.disconnect_func(addr) # pylint: disable=not-callable
def cachebuster(self):
if time.time() - self.cb_ts < 1:
return self.cb_v
def thr_workload(self):
"""indicates the python interpreter workload caused by this HttpSrv"""
# avoid locking in extract_filedata by tracking difference here
while True:
time.sleep(0.2)
with self.mutex:
if not self.clients:
# no clients rn, terminate thread
self.workload_thr_alive = False
self.workload = 0
return
with self.mutex:
if time.time() - self.cb_ts < 1:
return self.cb_v
total = 0
with self.mutex:
for cli in self.clients.keys():
now = cli.workload
delta = now - self.clients[cli]
if delta < 0:
# was reset in HttpCli to prevent overflow
delta = now
v = E.t0
try:
with os.scandir(os.path.join(E.mod, "web")) as dh:
for fh in dh:
inf = fh.stat()
v = max(v, inf.st_mtime)
except:
pass
total += delta
self.clients[cli] = now
self.workload = total
v = base64.urlsafe_b64encode(spack(b">xxL", int(v)))
self.cb_v = v.decode("ascii")[-4:]
self.cb_ts = time.time()
return self.cb_v
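cachebuster() above derives a 4-character token from the newest mtime under web/ and caches it for one second; templates embed it as the "ts" variable seen earlier in this diff. A minimal standalone sketch of the same derivation, assuming spack is a thin wrapper around struct.pack (consistent with the util import):

    import base64, struct

    mtime = 1627429662  # hypothetical: newest st_mtime among the files in web/
    packed = struct.pack(b">xxL", int(mtime))  # 2 pad bytes + u32 = 6 bytes
    token = base64.urlsafe_b64encode(packed).decode("ascii")[-4:]
    # 6 bytes -> 8 base64 chars; the last 4 change whenever the web assets do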


@@ -7,11 +7,9 @@ import json
import shutil
import subprocess as sp
from .__init__ import PY2, WINDOWS
from .util import fsenc, fsdec, REKOBO_LKEY
if not PY2:
unicode = str
from .__init__ import PY2, WINDOWS, unicode
from .util import fsenc, fsdec, uncyg, REKOBO_LKEY
from .bos import bos
def have_ff(cmd):
@@ -44,7 +42,10 @@ class MParser(object):
while True:
try:
bp = os.path.expanduser(args)
if os.path.exists(bp):
if WINDOWS:
bp = uncyg(bp)
if bos.path.exists(bp):
self.bin = bp
return
except:
@@ -112,6 +113,19 @@ def parse_ffprobe(txt):
ret = {} # processed
md = {} # raw tags
is_audio = fmt.get("format_name") in ["mp3", "ogg", "flac", "wav"]
if fmt.get("filename", "").split(".")[-1].lower() in ["m4a", "aac"]:
is_audio = True
# if audio file, ensure audio stream appears first
if (
is_audio
and len(streams) > 2
and streams[1].get("codec_type") != "audio"
and streams[2].get("codec_type") == "audio"
):
streams = [fmt, streams[2], streams[1]] + streams[3:]
have = {}
for strm in streams:
typ = strm.get("codec_type")
@@ -131,9 +145,7 @@ def parse_ffprobe(txt):
]
if typ == "video":
if strm.get("DISPOSITION:attached_pic") == "1" or fmt.get(
"format_name"
) in ["mp3", "ogg", "flac"]:
if strm.get("DISPOSITION:attached_pic") == "1" or is_audio:
continue
kvm = [
@@ -177,7 +189,7 @@ def parse_ffprobe(txt):
k = k[4:].strip()
v = v.strip()
if k and v:
if k and v and k not in md:
md[k] = [v]
for k in [".q", ".vq", ".aq"]:
@@ -216,37 +228,47 @@ def parse_ffprobe(txt):
class MTag(object):
def __init__(self, log_func, args):
self.log_func = log_func
self.args = args
self.usable = True
self.prefer_mt = False
mappings = args.mtm
self.prefer_mt = not args.no_mtag_ff
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
or_ffprobe = " or ffprobe"
self.can_ffprobe = (
HAVE_FFPROBE
and not args.no_mtag_ff
and (not WINDOWS or sys.version_info >= (3, 8))
)
mappings = args.mtm
or_ffprobe = " or FFprobe"
if self.backend == "mutagen":
self.get = self.get_mutagen
try:
import mutagen
except:
self.log("could not load mutagen, trying ffprobe instead", c=3)
self.log("could not load Mutagen, trying FFprobe instead", c=3)
self.backend = "ffprobe"
if self.backend == "ffprobe":
self.usable = self.can_ffprobe
self.get = self.get_ffprobe
self.prefer_mt = True
# about 20x slower
self.usable = HAVE_FFPROBE
if self.usable and WINDOWS and sys.version_info < (3, 8):
self.usable = False
if not HAVE_FFPROBE:
pass
elif args.no_mtag_ff:
msg = "found FFprobe but it was disabled by --no-mtag-ff"
self.log(msg, c=3)
elif WINDOWS and sys.version_info < (3, 8):
or_ffprobe = " or python >= 3.8"
msg = "found ffprobe but your python is too old; need 3.8 or newer"
msg = "found FFprobe but your python is too old; need 3.8 or newer"
self.log(msg, c=1)
if not self.usable:
msg = "need mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
self.log(
msg.format(or_ffprobe, " " * 37, os.path.basename(sys.executable)), c=1
)
msg = "need Mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
pybin = os.path.basename(sys.executable)
self.log(msg.format(or_ffprobe, " " * 37, pybin), c=1)
return
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
@@ -376,7 +398,7 @@ class MTag(object):
v2 = r2.get(k)
if v1 == v2:
print(" ", k, v1)
elif v1 != "0000": # ffprobe date=0
elif v1 != "0000": # FFprobe date=0
diffs.append(k)
print(" 1", k, v1)
print(" 2", k, v2)
@@ -397,20 +419,33 @@ class MTag(object):
md = mutagen.File(fsenc(abspath), easy=True)
x = md.info.length
except Exception as ex:
return {}
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
ret = {}
try:
dur = int(md.info.length)
sz = bos.path.getsize(abspath)
ret = {".q": [0, int((sz / md.info.length) / 128)]}
for attr, k, norm in [
["codec", "ac", unicode],
["channels", "chs", int],
["sample_rate", ".hz", int],
["bitrate", ".aq", int],
["length", ".dur", int],
]:
try:
q = int(md.info.bitrate / 1024)
v = getattr(md.info, attr)
except:
q = int((os.path.getsize(fsenc(abspath)) / dur) / 128)
continue
ret[".dur"] = [0, dur]
ret[".q"] = [0, q]
except:
pass
if not v:
continue
if k == ".aq":
v /= 1000
if k == "ac" and v.startswith("mp4a.40."):
v = "aac"
ret[k] = [0, norm(v)]
return self.normalize_tags(ret, md)


@@ -1,12 +1,12 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import tarfile
import threading
from .sutil import errdesc
from .util import Queue, fsenc
from .bos import bos
class QFile(object):
@@ -33,10 +33,11 @@ class QFile(object):
class StreamTar(object):
"""construct in-memory tar file from the given path"""
def __init__(self, fgen, **kwargs):
def __init__(self, log, fgen, **kwargs):
self.ci = 0
self.co = 0
self.qfile = QFile()
self.log = log
self.fgen = fgen
self.errf = None
@@ -60,7 +61,7 @@ class StreamTar(object):
yield None
if self.errf:
os.unlink(self.errf["ap"])
bos.unlink(self.errf["ap"])
def ser(self, f):
name = f["vp"]
@@ -91,7 +92,8 @@ class StreamTar(object):
errors.append([f["vp"], repr(ex)])
if errors:
self.errf = errdesc(errors)
self.errf, txt = errdesc(errors)
self.log("\n".join(([repr(self.errf)] + txt[1:])))
self.ser(self.errf)
self.tar.close()


@@ -1,11 +1,12 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import time
import tempfile
from datetime import datetime
from .bos import bos
def errdesc(errors):
report = ["copyparty failed to add the following files to the archive:", ""]
@@ -20,9 +21,9 @@ def errdesc(errors):
dt = datetime.utcfromtimestamp(time.time())
dt = dt.strftime("%Y-%m%d-%H%M%S")
os.chmod(tf_path, 0o444)
bos.chmod(tf_path, 0o444)
return {
"vp": "archive-errors-{}.txt".format(dt),
"ap": tf_path,
"st": os.stat(tf_path),
}
"st": bos.stat(tf_path),
}, report


@@ -5,12 +5,16 @@ import re
import os
import sys
import time
import shlex
import string
import signal
import socket
import threading
from datetime import datetime, timedelta
import calendar
from .__init__ import PY2, WINDOWS, MACOS, VT100
from .util import mp
from .__init__ import E, PY2, WINDOWS, ANYWIN, MACOS, VT100, unicode
from .util import mp, start_log_thrs, start_stackmon, min_ex
from .authsrv import AuthSrv
from .tcpsrv import TcpSrv
from .up2k import Up2k
@@ -28,17 +32,31 @@ class SvcHub(object):
put() can return a queue (if want_reply=True) which has a blocking get() with the response.
"""
def __init__(self, args):
def __init__(self, args, argv, printed):
self.args = args
self.argv = argv
self.logf = None
self.stop_req = False
self.stopping = False
self.stop_cond = threading.Condition()
self.httpsrv_up = 0
self.ansi_re = re.compile("\033\\[[^m]*m")
self.log_mutex = threading.Lock()
self.next_day = 0
self.log = self._log_disabled if args.q else self._log_enabled
if args.lo:
self._setup_logfile(printed)
if args.stackmon:
start_stackmon(args.stackmon, 0)
if args.log_thrs:
start_log_thrs(self.log, args.log_thrs, 0)
# initiate all services to manage
self.asrv = AuthSrv(self.args, self.log, False)
self.asrv = AuthSrv(self.args, self.log)
if args.ls:
self.asrv.dbg_ls()
@@ -69,22 +87,138 @@ class SvcHub(object):
self.broker = Broker(self)
def run(self):
thr = threading.Thread(target=self.tcpsrv.run, name="svchub-main")
def thr_httpsrv_up(self):
time.sleep(5)
failed = self.broker.num_workers - self.httpsrv_up
if not failed:
return
m = "{}/{} workers failed to start"
m = m.format(failed, self.broker.num_workers)
self.log("root", m, 1)
os._exit(1)
def cb_httpsrv_up(self):
self.httpsrv_up += 1
if self.httpsrv_up != self.broker.num_workers:
return
self.log("root", "workers OK\n")
self.up2k.init_vols()
thr = threading.Thread(target=self.sd_notify, name="sd-notify")
thr.daemon = True
thr.start()
# winxp/py2.7 support: thr.join() kills signals
try:
while True:
time.sleep(9001)
def _logname(self):
dt = datetime.utcfromtimestamp(time.time())
fn = self.args.lo
for fs in "YmdHMS":
fs = "%" + fs
if fs in fn:
fn = fn.replace(fs, dt.strftime(fs))
except KeyboardInterrupt:
return fn
def _setup_logfile(self, printed):
base_fn = fn = sel_fn = self._logname()
if fn != self.args.lo:
ctr = 0
# yup this is a race; if started sufficiently concurrently, two
# copyparties can grab the same logfile (considered and ignored)
while os.path.exists(sel_fn):
ctr += 1
sel_fn = "{}.{}".format(fn, ctr)
fn = sel_fn
try:
import lzma
lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
except:
import codecs
lh = codecs.open(fn, "w", encoding="utf-8", errors="replace")
lh.base_fn = base_fn
argv = [sys.executable] + self.argv
if hasattr(shlex, "quote"):
argv = [shlex.quote(x) for x in argv]
else:
argv = ['"{}"'.format(x) for x in argv]
msg = "[+] opened logfile [{}]\n".format(fn)
printed += msg
lh.write("t0: {:.3f}\nargv: {}\n\n{}".format(E.t0, " ".join(argv), printed))
self.logf = lh
print(msg, end="")
def run(self):
self.tcpsrv.run()
thr = threading.Thread(target=self.thr_httpsrv_up)
thr.daemon = True
thr.start()
for sig in [signal.SIGINT, signal.SIGTERM]:
signal.signal(sig, self.signal_handler)
# macos hangs after shutdown on sigterm with while-sleep,
# windows cannot ^c stop_cond (and win10 does the macos thing but winxp is fine??)
# linux is fine with both,
# never lucky
if ANYWIN:
# msys-python probably fine but >msys-python
thr = threading.Thread(target=self.stop_thr, name="svchub-sig")
thr.daemon = True
thr.start()
try:
while not self.stop_req:
time.sleep(1)
except:
pass
self.shutdown()
thr.join()
else:
self.stop_thr()
def stop_thr(self):
while not self.stop_req:
with self.stop_cond:
self.stop_cond.wait(9001)
self.shutdown()
def signal_handler(self, sig, frame):
if self.stopping:
return
self.stop_req = True
with self.stop_cond:
self.stop_cond.notify_all()
def shutdown(self):
if self.stopping:
return
self.stopping = True
self.stop_req = True
with self.stop_cond:
self.stop_cond.notify_all()
ret = 1
try:
with self.log_mutex:
print("OPYTHAT")
self.tcpsrv.shutdown()
self.broker.shutdown()
self.up2k.shutdown()
if self.thumbsrv:
self.thumbsrv.shutdown()
@@ -97,11 +231,41 @@ class SvcHub(object):
print("waiting for thumbsrv (10sec)...")
print("nailed it", end="")
ret = 0
finally:
print("\033[0m")
if self.logf:
self.logf.close()
sys.exit(ret)
def _log_disabled(self, src, msg, c=0):
pass
if not self.logf:
return
with self.log_mutex:
ts = datetime.utcfromtimestamp(time.time())
ts = ts.strftime("%Y-%m%d-%H%M%S.%f")[:-3]
self.logf.write("@{} [{}] {}\n".format(ts, src, msg))
now = time.time()
if now >= self.next_day:
self._set_next_day()
def _set_next_day(self):
if self.next_day and self.logf and self.logf.base_fn != self._logname():
self.logf.close()
self._setup_logfile("")
dt = datetime.utcfromtimestamp(time.time())
# unix timestamp of next 00:00:00 (leap-seconds safe)
day_now = dt.day
while dt.day == day_now:
dt += timedelta(hours=12)
dt = dt.replace(hour=0, minute=0, second=0)
self.next_day = calendar.timegm(dt.utctimetuple())
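A quick trace of the rollover in _set_next_day, using an arbitrary example timestamp: stepping in 12-hour increments until the day changes (the "leap-seconds safe" approach noted in the comment) before snapping to midnight:

    from datetime import datetime, timedelta
    import calendar

    dt = datetime.utcfromtimestamp(1627473600)  # 2021-07-28 12:00:00 UTC (example)
    day_now = dt.day                            # 28
    while dt.day == day_now:
        dt += timedelta(hours=12)               # lands on 2021-07-29 00:00
    dt = dt.replace(hour=0, minute=0, second=0)
    calendar.timegm(dt.utctimetuple())          # 1627516800, the next UTC midnight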
def _log_enabled(self, src, msg, c=0):
"""handles logging from all components"""
@@ -110,14 +274,7 @@ class SvcHub(object):
if now >= self.next_day:
dt = datetime.utcfromtimestamp(now)
print("\033[36m{}\033[0m\n".format(dt.strftime("%Y-%m-%d")), end="")
# unix timestamp of next 00:00:00 (leap-seconds safe)
day_now = dt.day
while dt.day == day_now:
dt += timedelta(hours=12)
dt = dt.replace(hour=0, minute=0, second=0)
self.next_day = calendar.timegm(dt.utctimetuple())
self._set_next_day()
fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n"
if not VT100:
@@ -144,20 +301,20 @@ class SvcHub(object):
except:
print(msg.encode("ascii", "replace").decode(), end="")
if self.logf:
self.logf.write(msg)
def check_mp_support(self):
vmin = sys.version_info[1]
if WINDOWS:
msg = "need python 3.3 or newer for multiprocessing;"
if PY2:
# py2 pickler doesn't support winsock
return msg
elif vmin < 3:
if PY2 or vmin < 3:
return msg
elif MACOS:
return "multiprocessing is wonky on mac osx;"
else:
msg = "need python 2.7 or 3.3+ for multiprocessing;"
if not PY2 and vmin < 3:
msg = "need python 3.3+ for multiprocessing;"
if PY2 or vmin < 3:
return msg
try:
@@ -189,5 +346,24 @@ class SvcHub(object):
if not err:
return True
else:
self.log("root", err)
self.log("svchub", err)
return False
def sd_notify(self):
try:
addr = os.getenv("NOTIFY_SOCKET")
if not addr:
return
addr = unicode(addr)
if addr.startswith("@"):
addr = "\0" + addr[1:]
m = "".join(x for x in addr if x in string.printable)
self.log("sd_notify", m)
sck = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
sck.connect(addr)
sck.sendall(b"READY=1")
except:
self.log("sd_notify", min_ex())


@@ -4,15 +4,15 @@ from __future__ import print_function, unicode_literals
import os
import time
import zlib
import struct
from datetime import datetime
from .sutil import errdesc
from .util import yieldfile, sanitize_fn
from .util import yieldfile, sanitize_fn, spack, sunpack
from .bos import bos
def dostime2unix(buf):
t, d = struct.unpack("<HH", buf)
t, d = sunpack(b"<HH", buf)
ts = (t & 0x1F) * 2
tm = (t >> 5) & 0x3F
@@ -36,13 +36,13 @@ def unixtime2dos(ts):
bd = ((dy - 1980) << 9) + (dm << 5) + dd
bt = (th << 11) + (tm << 5) + ts // 2
return struct.pack("<HH", bt, bd)
return spack(b"<HH", bt, bd)
def gen_fdesc(sz, crc32, z64):
ret = b"\x50\x4b\x07\x08"
fmt = "<LQQ" if z64 else "<LLL"
ret += struct.pack(fmt, crc32, sz, sz)
fmt = b"<LQQ" if z64 else b"<LLL"
ret += spack(fmt, crc32, sz, sz)
return ret
@@ -66,7 +66,7 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
req_ver = b"\x2d\x00" if z64 else b"\x0a\x00"
if crc32:
crc32 = struct.pack("<L", crc32)
crc32 = spack(b"<L", crc32)
else:
crc32 = b"\x00" * 4
@@ -87,14 +87,14 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
# however infozip does actual sz and it even works on winxp
# (same reasoning for z64 extradata later)
vsz = 0xFFFFFFFF if z64 else sz
ret += struct.pack("<LL", vsz, vsz)
ret += spack(b"<LL", vsz, vsz)
# windows support (the "?" replace below too)
fn = sanitize_fn(fn, ok="/")
fn = sanitize_fn(fn, "/", [])
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
z64_len = len(z64v) * 8 + 4 if z64v else 0
ret += struct.pack("<HH", len(bfn), z64_len)
ret += spack(b"<HH", len(bfn), z64_len)
if h_pos is not None:
# 2b comment, 2b diskno
@@ -106,12 +106,12 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
ret += b"\x01\x00\x00\x00\xa4\x81"
# 4b local-header-ofs
ret += struct.pack("<L", min(h_pos, 0xFFFFFFFF))
ret += spack(b"<L", min(h_pos, 0xFFFFFFFF))
ret += bfn
if z64v:
ret += struct.pack("<HH" + "Q" * len(z64v), 1, len(z64v) * 8, *z64v)
ret += spack(b"<HH" + b"Q" * len(z64v), 1, len(z64v) * 8, *z64v)
return ret
@@ -136,7 +136,7 @@ def gen_ecdr(items, cdir_pos, cdir_end):
need_64 = nitems == 0xFFFF or 0xFFFFFFFF in [csz, cpos]
# 2b tnfiles, 2b dnfiles, 4b dir sz, 4b dir pos
ret += struct.pack("<HHLL", nitems, nitems, csz, cpos)
ret += spack(b"<HHLL", nitems, nitems, csz, cpos)
# 2b comment length
ret += b"\x00\x00"
@@ -163,7 +163,7 @@ def gen_ecdr64(items, cdir_pos, cdir_end):
# 8b tnfiles, 8b dnfiles, 8b dir sz, 8b dir pos
cdir_sz = cdir_end - cdir_pos
ret += struct.pack("<QQQQ", len(items), len(items), cdir_sz, cdir_pos)
ret += spack(b"<QQQQ", len(items), len(items), cdir_sz, cdir_pos)
return ret
@@ -178,13 +178,14 @@ def gen_ecdr64_loc(ecdr64_pos):
ret = b"\x50\x4b\x06\x07"
# 4b cdisk, 8b start of ecdr64, 4b ndisks
ret += struct.pack("<LQL", 0, ecdr64_pos, 1)
ret += spack(b"<LQL", 0, ecdr64_pos, 1)
return ret
class StreamZip(object):
def __init__(self, fgen, utf8=False, pre_crc=False):
def __init__(self, log, fgen, utf8=False, pre_crc=False):
self.log = log
self.fgen = fgen
self.utf8 = utf8
self.pre_crc = pre_crc
@@ -247,8 +248,8 @@ class StreamZip(object):
errors.append([f["vp"], repr(ex)])
if errors:
errf = errdesc(errors)
print(repr(errf))
errf, txt = errdesc(errors)
self.log("\n".join(([repr(errf)] + txt[1:])))
for x in self.ser(errf):
yield x
@@ -271,4 +272,4 @@ class StreamZip(object):
yield self._ct(ecdr)
if errors:
os.unlink(errf["ap"])
bos.unlink(errf["ap"])


@@ -2,11 +2,10 @@
from __future__ import print_function, unicode_literals
import re
import time
import socket
import select
from .util import chkcmd, Counter
from .__init__ import MACOS, ANYWIN
from .util import chkcmd
class TcpSrv(object):
@@ -20,7 +19,6 @@ class TcpSrv(object):
self.args = hub.args
self.log = hub.log
self.num_clients = Counter()
self.stopping = False
ip = "127.0.0.1"
@@ -32,14 +30,16 @@ class TcpSrv(object):
for x in nonlocals:
eps[x] = "external"
msgs = []
m = "available @ http://{}:{}/ (\033[33m{}\033[0m)"
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
for port in sorted(self.args.p):
self.log(
"tcpsrv",
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
ip, port, desc
),
)
msgs.append(m.format(ip, port, desc))
if msgs:
msgs[-1] += "\n"
for m in msgs:
self.log("tcpsrv", m)
self.srv = []
for ip in self.args.i:
@@ -66,37 +66,13 @@ class TcpSrv(object):
for srv in self.srv:
srv.listen(self.args.nc)
ip, port = srv.getsockname()
self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port))
fno = srv.fileno()
msg = "listening @ {}:{} f{}".format(ip, port, fno)
self.log("tcpsrv", msg)
if self.args.q:
print(msg)
while not self.stopping:
if self.args.log_conn:
self.log("tcpsrv", "|%sC-ncli" % ("-" * 1,), c="1;30")
if self.num_clients.v >= self.args.nc:
time.sleep(0.1)
continue
if self.args.log_conn:
self.log("tcpsrv", "|%sC-acc1" % ("-" * 2,), c="1;30")
ready, _, _ = select.select(self.srv, [], [])
for srv in ready:
if self.stopping:
break
sck, addr = srv.accept()
sip, sport = srv.getsockname()
if self.args.log_conn:
self.log(
"%s %s" % addr,
"|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
"-" * 3, sip, sport % 8, sport
),
c="1;30",
)
self.num_clients.add()
self.hub.broker.put(False, "httpconn", sck, addr)
self.hub.broker.put(False, "listen", srv)
def shutdown(self):
self.stopping = True
@@ -108,25 +84,100 @@ class TcpSrv(object):
self.log("tcpsrv", "ok bye")
def detect_interfaces(self, listen_ips):
def ips_linux(self):
eps = {}
# get all ips and their interfaces
try:
ip_addr, _ = chkcmd("ip", "addr")
txt, _ = chkcmd(["ip", "addr"])
except:
ip_addr = None
return eps
if ip_addr:
r = re.compile(r"^\s+inet ([^ ]+)/.* (.*)")
for ln in ip_addr.split("\n"):
try:
ip, dev = r.match(ln.rstrip()).groups()
for lip in listen_ips:
if lip in ["0.0.0.0", ip]:
eps[ip] = dev
except:
pass
r = re.compile(r"^\s+inet ([^ ]+)/.* (.*)")
for ln in txt.split("\n"):
try:
ip, dev = r.match(ln.rstrip()).groups()
eps[ip] = dev
except:
pass
return eps
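ips_linux relies on the inet regex above to scrape `ip addr`; against a typical output line (hypothetical addresses) it yields the ip/interface pair stored in eps:

    import re

    r = re.compile(r"^\s+inet ([^ ]+)/.* (.*)")
    ln = "    inet 192.168.1.5/24 brd 192.168.1.255 scope global dynamic eth0"
    ip, dev = r.match(ln).groups()  # ("192.168.1.5", "eth0")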
def ips_macos(self):
eps = {}
try:
txt, _ = chkcmd(["ifconfig"])
except:
return eps
rdev = re.compile(r"^([^ ]+):")
rip = re.compile(r"^\tinet ([0-9\.]+) ")
dev = None
for ln in txt.split("\n"):
m = rdev.match(ln)
if m:
dev = m.group(1)
m = rip.match(ln)
if m:
eps[m.group(1)] = dev
dev = None
return eps
def ips_windows_ipconfig(self):
eps = {}
try:
txt, _ = chkcmd(["ipconfig"])
except:
return eps
rdev = re.compile(r"(^[^ ].*):$")
rip = re.compile(r"^ +IPv?4? [^:]+: *([0-9\.]{7,15})$")
dev = None
for ln in txt.replace("\r", "").split("\n"):
m = rdev.match(ln)
if m:
dev = m.group(1).split(" adapter ", 1)[-1]
m = rip.match(ln)
if m and dev:
eps[m.group(1)] = dev
dev = None
return eps
def ips_windows_netsh(self):
eps = {}
try:
txt, _ = chkcmd("netsh interface ip show address".split())
except:
return eps
rdev = re.compile(r'.* "([^"]+)"$')
rip = re.compile(r".* IP\b.*: +([0-9\.]{7,15})$")
dev = None
for ln in txt.replace("\r", "").split("\n"):
m = rdev.match(ln)
if m:
dev = m.group(1)
m = rip.match(ln)
if m and dev:
eps[m.group(1)] = dev
dev = None
return eps
def detect_interfaces(self, listen_ips):
if MACOS:
eps = self.ips_macos()
elif ANYWIN:
eps = self.ips_windows_ipconfig() # sees more interfaces
eps.update(self.ips_windows_netsh()) # has better names
else:
eps = self.ips_linux()
if "0.0.0.0" not in listen_ips:
eps = {k: v for k, v in eps.items() if k in listen_ips}
default_route = None
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)


@@ -5,6 +5,7 @@ import os
from .util import Cooldown
from .th_srv import thumb_path, THUMBABLE, FMT_FF
from .bos import bos
class ThumbCli(object):
@@ -36,7 +37,7 @@ class ThumbCli(object):
tpath = thumb_path(histpath, rem, mtime, fmt)
ret = None
try:
st = os.stat(tpath)
st = bos.stat(tpath)
if st.st_size:
ret = tpath
else:


@@ -9,15 +9,12 @@ import hashlib
import threading
import subprocess as sp
from .__init__ import PY2
from .util import fsenc, runcmd, Queue, Cooldown, BytesIO, min_ex
from .__init__ import PY2, unicode
from .util import fsenc, vsplit, runcmd, Queue, Cooldown, BytesIO, min_ex
from .bos import bos
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
if not PY2:
unicode = str
HAVE_PIL = False
HAVE_HEIF = False
HAVE_AVIF = False
@@ -53,7 +50,7 @@ except:
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# ffmpeg -formats
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
if HAVE_HEIF:
FMT_PIL += " heif heifs heic heics"
@@ -77,21 +74,16 @@ def thumb_path(histpath, rem, mtime, fmt):
# base16 = 16 = 256
# b64-lc = 38 = 1444
# base64 = 64 = 4096
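    # (reading the table above: alphabet size -> buckets per 2-char path level;
    #  lowercased base64 keeps 26 letters + 10 digits + "-" + "_" = 38 chars,
    #  hence 38*38 = 1444; the rd hashing below uses two such levels)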
try:
rd, fn = rem.rsplit("/", 1)
except:
rd = ""
fn = rem
rd, fn = vsplit(rem)
if rd:
h = hashlib.sha512(fsenc(rd)).digest()[:24]
h = hashlib.sha512(fsenc(rd)).digest()
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
else:
rd = "top"
# could keep original filenames but this is safer re pathlen
h = hashlib.sha512(fsenc(fn)).digest()[:24]
h = hashlib.sha512(fsenc(fn)).digest()
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
return "{}/th/{}/{}.{:x}.{}".format(
@@ -125,18 +117,19 @@ class ThumbSrv(object):
if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE):
missing = []
if not HAVE_FFMPEG:
missing.append("ffmpeg")
missing.append("FFmpeg")
if not HAVE_FFPROBE:
missing.append("ffprobe")
missing.append("FFprobe")
msg = "cannot create video thumbnails because some of the required programs are not available: "
msg += ", ".join(missing)
self.log(msg, c=3)
t = threading.Thread(target=self.cleaner, name="thumb-cleaner")
t.daemon = True
t.start()
if self.args.th_clean:
t = threading.Thread(target=self.cleaner, name="thumb-cleaner")
t.daemon = True
t.start()
def log(self, msg, c=0):
self.log_func("thumb", msg, c)
@@ -162,13 +155,10 @@ class ThumbSrv(object):
self.log("wait {}".format(tpath))
except:
thdir = os.path.dirname(tpath)
try:
os.makedirs(thdir)
except:
pass
bos.makedirs(thdir)
inf_path = os.path.join(thdir, "dir.txt")
if not os.path.exists(inf_path):
if not bos.path.exists(inf_path):
with open(inf_path, "wb") as f:
f.write(fsenc(os.path.dirname(abspath)))
@@ -188,7 +178,7 @@ class ThumbSrv(object):
cond.wait(3)
try:
st = os.stat(tpath)
st = bos.stat(tpath)
if st.st_size:
return tpath
except:
@@ -205,7 +195,7 @@ class ThumbSrv(object):
abspath, tpath = task
ext = abspath.split(".")[-1].lower()
fun = None
if not os.path.exists(tpath):
if not bos.path.exists(tpath):
if ext in FMT_PIL:
fun = self.conv_pil
elif ext in FMT_FF:
@@ -263,7 +253,7 @@ class ThumbSrv(object):
pass # default q = 75
if im.mode not in fmts:
print("conv {}".format(im.mode))
# print("conv {}".format(im.mode))
im = im.convert("RGB")
im.save(tpath, quality=40, method=6)
@@ -316,7 +306,7 @@ class ThumbSrv(object):
cmd += [fsenc(tpath)]
ret, sout, serr = runcmd(*cmd)
ret, sout, serr = runcmd(cmd)
if ret != 0:
msg = ["ff: {}".format(x) for x in serr.split("\n")]
self.log("FFmpeg failed:\n" + "\n".join(msg), c="1;30")
@@ -331,7 +321,7 @@ class ThumbSrv(object):
p1 = os.path.dirname(tdir)
p2 = os.path.dirname(p1)
for dp in [tdir, p1, p2]:
os.utime(fsenc(dp), (ts, ts))
bos.utime(dp, (ts, ts))
except:
pass
@@ -358,7 +348,7 @@ class ThumbSrv(object):
prev_b64 = None
prev_fp = None
try:
ents = os.listdir(thumbpath)
ents = bos.listdir(thumbpath)
except:
return 0
@@ -369,7 +359,7 @@ class ThumbSrv(object):
# "top" or b64 prefix/full (a folder)
if len(f) <= 3 or len(f) == 24:
age = now - os.path.getmtime(fp)
age = now - bos.path.getmtime(fp)
if age > maxage:
with self.mutex:
safe = True
@@ -401,7 +391,7 @@ class ThumbSrv(object):
if b64 == prev_b64:
self.log("rm replaced [{}]".format(fp))
os.unlink(prev_fp)
bos.unlink(prev_fp)
prev_b64 = b64
prev_fp = fp


@@ -7,7 +7,9 @@ import time
import threading
from datetime import datetime
from .__init__ import unicode
from .util import s3dec, Pebkac, min_ex
from .bos import bos
from .up2k import up2k_wark_from_hashlist
@@ -26,7 +28,7 @@ class U2idx(object):
self.timeout = self.args.srch_time
if not HAVE_SQLITE3:
self.log("could not load sqlite3; searchign wqill be disabled")
self.log("your python does not have sqlite3; searching will be disabled")
return
self.cur = {}
@@ -57,16 +59,19 @@ class U2idx(object):
raise Pebkac(500, min_ex())
def get_cur(self, ptop):
if not HAVE_SQLITE3:
return None
cur = self.cur.get(ptop)
if cur:
return cur
histpath = self.asrv.vfs.histtab[ptop]
db_path = os.path.join(histpath, "up2k.db")
if not os.path.exists(db_path):
if not bos.path.exists(db_path):
return None
cur = sqlite3.connect(db_path).cursor()
cur = sqlite3.connect(db_path, 2).cursor()
self.cur[ptop] = cur
return cur
@@ -87,6 +92,8 @@ class U2idx(object):
mt_ctr = 0
mt_keycmp = "substr(up.w,1,16)"
mt_keycmp2 = None
ptn_lc = re.compile(r" (mt[0-9]+\.v) ([=<!>]+) \? $")
ptn_lcv = re.compile(r"[a-zA-Z]")
while True:
uq = uq.strip()
@@ -179,6 +186,21 @@ class U2idx(object):
va.append(v)
is_key = True
# lowercase tag searches
m = ptn_lc.search(q)
if not m or not ptn_lcv.search(unicode(v)):
continue
va.pop()
va.append(v.lower())
q = q[: m.start()]
field, oper = m.groups()
if oper in ["=", "=="]:
q += " {} like ? ".format(field)
else:
q += " lower({}) {} ? ".format(field, oper)
try:
return self.run_query(vols, joins + "where " + q, va)
except Exception as ex:


@@ -23,14 +23,20 @@ from .util import (
ProgressPrinter,
fsdec,
fsenc,
absreal,
sanitize_fn,
ren_open,
atomic_move,
vsplit,
s3enc,
s3dec,
rmdirs,
statdir,
s2hms,
min_ex,
)
from .bos import bos
from .authsrv import AuthSrv
from .mtag import MTag, MParser
try:
@@ -39,18 +45,13 @@ try:
except:
HAVE_SQLITE3 = False
DB_VER = 4
class Up2k(object):
"""
TODO:
* documentation
* registry persistence
* ~/.config flatfiles for active jobs
"""
def __init__(self, hub):
self.hub = hub
self.asrv = hub.asrv
self.asrv = hub.asrv # type: AuthSrv
self.args = hub.args
self.log_func = hub.log
@@ -64,6 +65,7 @@ class Up2k(object):
self.n_hashq = 0
self.n_tagq = 0
self.volstate = {}
self.need_rescan = {}
self.registry = {}
self.entags = {}
self.flags = {}
@@ -91,20 +93,21 @@ class Up2k(object):
thr.start()
# static
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
self.r_hash = re.compile("^[0-9a-zA-Z_-]{44}$")
if not HAVE_SQLITE3:
self.log("could not initialize sqlite3, will use in-memory registry only")
if self.args.no_fastboot:
self.deferred_init()
else:
t = threading.Thread(
target=self.deferred_init,
name="up2k-deferred-init",
)
t.daemon = True
t.start()
def init_vols(self):
if self.args.no_fastboot:
return
t = threading.Thread(target=self.deferred_init, name="up2k-deferred-init")
t.daemon = True
t.start()
def deferred_init(self):
all_vols = self.asrv.vfs.all_vols
@@ -119,6 +122,10 @@ class Up2k(object):
thr.daemon = True
thr.start()
thr = threading.Thread(target=self._sched_rescan, name="up2k-rescan")
thr.daemon = True
thr.start()
if self.mtag:
thr = threading.Thread(target=self._tagger, name="up2k-tagger")
thr.daemon = True
@@ -168,6 +175,38 @@ class Up2k(object):
t.start()
return None
def _sched_rescan(self):
maxage = self.args.re_maxage
volage = {}
while True:
time.sleep(self.args.re_int)
now = time.time()
vpaths = list(sorted(self.asrv.vfs.all_vols.keys()))
with self.mutex:
if maxage:
for vp in vpaths:
if vp not in volage:
volage[vp] = now
if now - volage[vp] >= maxage:
self.need_rescan[vp] = 1
if not self.need_rescan:
continue
vols = list(sorted(self.need_rescan.keys()))
self.need_rescan = {}
err = self.rescan(self.asrv.vfs.all_vols, vols)
if err:
for v in vols:
self.need_rescan[v] = True
continue
for v in vols:
volage[v] = now
def _vis_job_progress(self, job):
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
path = os.path.join(job["ptop"], job["prel"], job["name"])
@@ -190,7 +229,7 @@ class Up2k(object):
return True, ret
def init_indexes(self, all_vols, scan_vols=[]):
def init_indexes(self, all_vols, scan_vols=None):
self.pp = ProgressPrinter()
vols = all_vols.values()
t0 = time.time()
@@ -213,7 +252,7 @@ class Up2k(object):
# only need to protect register_vpath but all in one go feels right
for vol in vols:
try:
os.listdir(vol.realpath)
bos.listdir(vol.realpath)
except:
self.volstate[vol.vpath] = "OFFLINE (cannot access folder)"
self.log("cannot access " + vol.realpath, c=1)
@@ -299,7 +338,7 @@ class Up2k(object):
self.log(msg.format(len(vols), time.time() - t0))
if needed_mutagen:
msg = "could not read tags because no backends are available (mutagen or ffprobe)"
msg = "could not read tags because no backends are available (Mutagen or FFprobe)"
self.log(msg, c=1)
thr = None
@@ -339,18 +378,26 @@ class Up2k(object):
for k, v in flags.items()
]
if a:
self.log(" ".join(sorted(a)) + "\033[0m")
vpath = "?"
for k, v in self.asrv.vfs.all_vols.items():
if v.realpath == ptop:
vpath = k
if vpath:
vpath += "/"
self.log("/{} {}".format(vpath, " ".join(sorted(a))), "35")
reg = {}
path = os.path.join(histpath, "up2k.snap")
if "e2d" in flags and os.path.exists(path):
if "e2d" in flags and bos.path.exists(path):
with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8")
reg2 = json.loads(j)
for k, job in reg2.items():
path = os.path.join(job["ptop"], job["prel"], job["name"])
if os.path.exists(fsenc(path)):
if bos.path.exists(path):
reg[k] = job
job["poke"] = time.time()
else:
@@ -365,10 +412,7 @@ class Up2k(object):
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
return None
try:
os.makedirs(histpath)
except:
pass
bos.makedirs(histpath)
try:
cur = self._open_db(db_path)
@@ -398,7 +442,7 @@ class Up2k(object):
if WINDOWS:
excl = [x.replace("/", "\\") for x in excl]
n_add = self._build_dir(dbw, top, set(excl), top, nohash)
n_add = self._build_dir(dbw, top, set(excl), top, nohash, [])
n_rm = self._drop_lost(dbw[0], top)
if dbw[1]:
self.log("commit {} new files".format(dbw[1]))
@@ -406,11 +450,18 @@ class Up2k(object):
return True, n_add or n_rm or do_vac
def _build_dir(self, dbw, top, excl, cdir, nohash):
def _build_dir(self, dbw, top, excl, cdir, nohash, seen):
rcdir = absreal(cdir) # a bit expensive but worth
if rcdir in seen:
m = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}"
self.log(m.format(seen[-1], rcdir, cdir), 3)
return 0
seen = seen + [cdir]
self.pp.msg = "a{} {}".format(self.pp.n, cdir)
histpath = self.asrv.vfs.histtab[top]
ret = 0
g = statdir(self.log, not self.args.no_scandir, False, cdir)
g = statdir(self.log_func, not self.args.no_scandir, False, cdir)
for iname, inf in sorted(g):
abspath = os.path.join(cdir, iname)
lmod = int(inf.st_mtime)
@@ -419,10 +470,13 @@ class Up2k(object):
if abspath in excl or abspath == histpath:
continue
# self.log(" dir: {}".format(abspath))
ret += self._build_dir(dbw, top, excl, abspath, nohash)
ret += self._build_dir(dbw, top, excl, abspath, nohash, seen)
else:
# self.log("file: {}".format(abspath))
rp = abspath[len(top) :].replace("\\", "/").strip("/")
rp = abspath[len(top) + 1 :]
if WINDOWS:
rp = rp.replace("\\", "/").strip("/")
rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp]
sql = "select w, mt, sz from up where rd = ? and fn = ?"
try:
@@ -493,7 +547,7 @@ class Up2k(object):
# almost zero overhead dw
self.pp.msg = "b{} {}".format(nfiles - nchecked, abspath)
try:
if not os.path.exists(fsenc(abspath)):
if not bos.path.exists(abspath):
rm.append([drd, dfn])
except Exception as ex:
self.log("stat-rm: {} @ [{}]".format(repr(ex), abspath))
@@ -566,7 +620,7 @@ class Up2k(object):
c2 = conn.cursor()
c3 = conn.cursor()
n_left = cur.execute("select count(w) from up").fetchone()[0]
for w, rd, fn in cur.execute("select w, rd, fn from up"):
for w, rd, fn in cur.execute("select w, rd, fn from up order by rd, fn"):
n_left -= 1
q = "select w from mt where w = ?"
if c2.execute(q, (w[:16],)).fetchone():
@@ -647,7 +701,7 @@ class Up2k(object):
try:
parser = MParser(parser)
except:
self.log("invalid argument: " + parser, 1)
self.log("invalid argument (could not find program): " + parser, 1)
return
for tag in entags:
@@ -881,64 +935,55 @@ class Up2k(object):
# x.set_trace_callback(trace)
def _open_db(self, db_path):
existed = os.path.exists(db_path)
existed = bos.path.exists(db_path)
cur = self._orz(db_path)
ver = self._read_ver(cur)
if not existed and ver is None:
return self._create_db(db_path, cur)
orig_ver = ver
if not ver or ver < 3:
bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
db = cur.connection
cur.close()
db.close()
msg = "creating new DB (old is bad); backup: {}"
if ver:
msg = "creating backup before upgrade: {}"
self.log(msg.format(bak))
shutil.copy2(db_path, bak)
cur = self._orz(db_path)
if ver == 1:
cur = self._upgrade_v1(cur, db_path)
if cur:
ver = 2
if ver == 2:
cur = self._create_v3(cur)
ver = self._read_ver(cur) if cur else None
if ver == 3:
if orig_ver != ver:
cur.connection.commit()
cur.execute("vacuum")
cur.connection.commit()
if ver == DB_VER:
try:
nfiles = next(cur.execute("select count(w) from up"))[0]
self.log("OK: {} |{}|".format(db_path, nfiles))
return cur
except Exception as ex:
self.log("WARN: could not list files, DB corrupt?\n " + repr(ex))
except:
self.log("WARN: could not list files; DB corrupt?\n" + min_ex())
if cur:
db = cur.connection
cur.close()
db.close()
if (ver or 0) > DB_VER:
m = "database is version {}, this copyparty only supports versions <= {}"
raise Exception(m.format(ver, DB_VER))
msg = "creating new DB (old is bad); backup: {}"
if ver:
msg = "creating new DB (too old to upgrade); backup: {}"
cur = self._backup_db(db_path, cur, ver, msg)
db = cur.connection
cur.close()
db.close()
bos.unlink(db_path)
return self._create_db(db_path, None)
def _create_db(self, db_path, cur):
if not cur:
cur = self._orz(db_path)
def _backup_db(self, db_path, cur, ver, msg):
bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
self.log(msg + bak)
try:
c2 = sqlite3.connect(bak)
with c2:
cur.connection.backup(c2)
return cur
except:
m = "native sqlite3 backup failed; using fallback method:\n"
self.log(m + min_ex())
finally:
c2.close()
self._create_v2(cur)
self._create_v3(cur)
cur.connection.commit()
self.log("created DB at {}".format(db_path))
return cur
db = cur.connection
cur.close()
db.close()
shutil.copy2(fsenc(db_path), fsenc(bak))
return self._orz(db_path)
def _read_ver(self, cur):
for tab in ["ki", "kv"]:
@@ -951,72 +996,45 @@ class Up2k(object):
if rows:
return int(rows[0][0])
def _create_v2(self, cur):
for cmd in [
r"create table up (w text, mt int, sz int, rd text, fn text)",
r"create index up_rd on up(rd)",
r"create index up_fn on up(fn)",
]:
cur.execute(cmd)
return cur
def _create_v3(self, cur):
def _create_db(self, db_path, cur):
"""
collision in 2^(n/2) files where n = bits (6 bits/ch)
10*6/2 = 2^30 = 1'073'741'824, 24.1mb idx 1<<(3*10)
12*6/2 = 2^36 = 68'719'476'736, 24.8mb idx
16*6/2 = 2^48 = 281'474'976'710'656, 26.1mb idx
"""
for c, ks in [["drop table k", "isv"], ["drop index up_", "w"]]:
for k in ks:
try:
cur.execute(c + k)
except:
pass
if not cur:
cur = self._orz(db_path)
idx = r"create index up_w on up(substr(w,1,16))"
if self.no_expr_idx:
idx = r"create index up_w on up(w)"
for cmd in [
r"create table up (w text, mt int, sz int, rd text, fn text)",
r"create index up_rd on up(rd)",
r"create index up_fn on up(fn)",
idx,
r"create table mt (w text, k text, v int)",
r"create index mt_w on mt(w)",
r"create index mt_k on mt(k)",
r"create index mt_v on mt(v)",
r"create table kv (k text, v int)",
r"insert into kv values ('sver', 3)",
r"insert into kv values ('sver', {})".format(DB_VER),
]:
cur.execute(cmd)
cur.connection.commit()
self.log("created DB at {}".format(db_path))
return cur
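The collision figures in the _create_db docstring are the standard birthday bound: c base64 characters carry 6c bits, so a collision becomes likely only around 2^(6c/2) indexed files. A quick sanity check of the quoted numbers:

    for chars in (10, 12, 16):
        print(chars, 2 ** (chars * 6 // 2))
    # 10 1073741824
    # 12 68719476736
    # 16 281474976710656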
def _upgrade_v1(self, odb, db_path):
npath = db_path + ".next"
if os.path.exists(npath):
os.unlink(npath)
ndb = self._orz(npath)
self._create_v2(ndb)
c = odb.execute("select * from up")
for wark, ts, sz, rp in c:
rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp]
v = (wark, ts, sz, rd, fn)
ndb.execute("insert into up values (?,?,?,?,?)", v)
ndb.connection.commit()
ndb.connection.close()
odb.connection.close()
atomic_move(npath, db_path)
return self._orz(db_path)
def handle_json(self, cj):
with self.mutex:
if not self.register_vpath(cj["ptop"], cj["vcfg"]):
if cj["ptop"] not in self.registry:
raise Pebkac(410, "location unavailable")
cj["name"] = sanitize_fn(cj["name"], bad=[".prologue.html", ".epilogue.html"])
cj["name"] = sanitize_fn(cj["name"], "", [".prologue.html", ".epilogue.html"])
cj["poke"] = time.time()
wark = self._get_wark(cj)
now = time.time()
@@ -1039,7 +1057,7 @@ class Up2k(object):
dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
# relying on path.exists to return false on broken symlinks
if os.path.exists(fsenc(dp_abs)):
if bos.path.exists(dp_abs):
job = {
"name": dp_fn,
"prel": dp_dir,
@@ -1063,12 +1081,13 @@ class Up2k(object):
for fn in names:
path = os.path.join(job["ptop"], job["prel"], fn)
try:
if os.path.getsize(fsenc(path)) > 0:
if bos.path.getsize(path) > 0:
# upload completed or both present
break
except:
# missing; restart
job = None
if not self.args.nw:
job = None
break
else:
# file contents match, but not the path
@@ -1095,8 +1114,14 @@ class Up2k(object):
pdir = os.path.join(cj["ptop"], cj["prel"])
job["name"] = self._untaken(pdir, cj["name"], now, cj["addr"])
dst = os.path.join(job["ptop"], job["prel"], job["name"])
os.unlink(fsenc(dst)) # TODO ed pls
self._symlink(src, dst)
if not self.args.nw:
bos.unlink(dst) # TODO ed pls
self._symlink(src, dst)
if cur:
a = [cj[x] for x in "prel name lmod size".split()]
self.db_add(cur, wark, *a)
cur.connection.commit()
if not job:
job = {
@@ -1138,20 +1163,27 @@ class Up2k(object):
}
def _untaken(self, fdir, fname, ts, ip):
if self.args.nw:
return fname
# TODO broker which avoids this race and
# provides a new filename if taken (same as bup)
suffix = ".{:.6f}-{}".format(ts, ip)
with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
return f["orz"][1]
def _symlink(self, src, dst):
# TODO store this in linktab so we never delete src if there are links to it
self.log("linking dupe:\n {0}\n {1}".format(src, dst))
def _symlink(self, src, dst, verbose=True):
if verbose:
self.log("linking dupe:\n {0}\n {1}".format(src, dst))
if self.args.nw:
return
try:
lsrc = src
ldst = dst
fs1 = os.stat(fsenc(os.path.split(src)[0])).st_dev
fs2 = os.stat(fsenc(os.path.split(dst)[0])).st_dev
fs1 = bos.stat(os.path.dirname(src)).st_dev
fs2 = bos.stat(os.path.dirname(dst)).st_dev
if fs1 == 0:
# py2 on winxp or other unsupported combination
raise OSError()
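The st_dev comparison of the two parent directories is how _symlink decides whether src and dst live on the same filesystem before attempting a relative link; a standalone sketch of the same check (helper name is made up):
# illustrative only; same-device test as used by _symlink above
import os

def same_device(src, dst):
    sdir = os.path.dirname(src) or "."
    ddir = os.path.dirname(dst) or "."
    # st_dev differs across mountpoints / drive letters; 0 means unsupported platform
    return os.stat(sdir).st_dev == os.stat(ddir).st_dev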
@@ -1224,21 +1256,18 @@ class Up2k(object):
if ret > 0:
return ret, src
if self.args.nw:
# del self.registry[ptop][wark]
return ret, dst
atomic_move(src, dst)
if ANYWIN:
a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
self.lastmod_q.put(a)
# legit api sware 2 me mum
if self.idx_wark(
job["ptop"],
job["wark"],
job["prel"],
job["name"],
job["lmod"],
job["size"],
):
a = [job[x] for x in "ptop wark prel name lmod size".split()]
if self.idx_wark(*a):
del self.registry[ptop][wark]
# in-memory registry is reserved for unfinished uploads
@@ -1250,7 +1279,7 @@ class Up2k(object):
return False
self.db_rm(cur, rd, fn)
self.db_add(cur, wark, rd, fn, int(lmod), sz)
self.db_add(cur, wark, rd, fn, lmod, sz)
cur.connection.commit()
if "e2t" in self.flags[ptop]:
@@ -1273,9 +1302,251 @@ class Up2k(object):
db.execute(sql, v)
except:
rd, fn = s3enc(self.mem_cur, rd, fn)
v = (wark, ts, sz, rd, fn)
v = (wark, int(ts), sz, rd, fn)
db.execute(sql, v)
def handle_rm(self, uname, vpath):
permsets = [[True, False, False, True]]
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
ptop = vn.realpath
atop = vn.canonical(rem)
adir, fn = os.path.split(atop)
st = bos.lstat(atop)
scandir = not self.args.no_scandir
if stat.S_ISLNK(st.st_mode) or stat.S_ISREG(st.st_mode):
dbv, vrem = self.asrv.vfs.get(vpath, uname, *permsets[0])
dbv, vrem = dbv.get_dbv(vrem)
g = [[dbv, vrem, os.path.dirname(vpath), adir, [[fn, 0]], [], []]]
else:
g = vn.walk("", rem, [], uname, permsets, True, scandir, True)
n_files = 0
for dbv, vrem, _, adir, files, rd, vd in g:
for fn in [x[0] for x in files]:
n_files += 1
abspath = os.path.join(adir, fn)
vpath = "{}/{}".format(vrem, fn).strip("/")
self.log("rm {}\n {}".format(vpath, abspath))
_ = dbv.get(vrem, uname, *permsets[0])
with self.mutex:
try:
ptop = dbv.realpath
cur, wark, _, _ = self._find_from_vpath(ptop, vrem)
self._forget_file(ptop, vpath, cur, wark)
finally:
cur.connection.commit()
bos.unlink(abspath)
rm = rmdirs(self.log_func, scandir, True, atop)
ok = len(rm[0])
ng = len(rm[1])
return "deleted {} files (and {}/{} folders)".format(n_files, ok, ok + ng)
def handle_mv(self, uname, svp, dvp):
svn, srem = self.asrv.vfs.get(svp, uname, True, False, True)
svn, srem = svn.get_dbv(srem)
sabs = svn.canonical(srem, False)
if not srem:
raise Pebkac(400, "mv: cannot move a mountpoint")
st = bos.stat(sabs)
if stat.S_ISREG(st.st_mode):
return self._mv_file(uname, svp, dvp)
jail = svn.get_dbv(srem)[0]
permsets = [[True, False, True]]
scandir = not self.args.no_scandir
# following symlinks is too scary
g = svn.walk("", srem, [], uname, permsets, True, scandir, True)
for dbv, vrem, _, atop, files, rd, vd in g:
if dbv != jail:
# fail early (prevent partial moves)
raise Pebkac(400, "mv: source folder contains other volumes")
g = svn.walk("", srem, [], uname, permsets, True, scandir, True)
for dbv, vrem, _, atop, files, rd, vd in g:
if dbv != jail:
# the actual check (avoid toctou)
raise Pebkac(400, "mv: source folder contains other volumes")
for fn in files:
svpf = "/".join(x for x in [dbv.vpath, vrem, fn[0]] if x)
if not svpf.startswith(svp + "/"): # assert
raise Pebkac(500, "mv: bug at {}, top {}".format(svpf, svp))
dvpf = dvp + svpf[len(svp) :]
self._mv_file(uname, svpf, dvpf)
rmdirs(self.log_func, scandir, True, sabs)
return "k"
def _mv_file(self, uname, svp, dvp):
svn, srem = self.asrv.vfs.get(svp, uname, True, False, True)
svn, srem = svn.get_dbv(srem)
dvn, drem = self.asrv.vfs.get(dvp, uname, False, True)
dvn, drem = dvn.get_dbv(drem)
sabs = svn.canonical(srem, False)
dabs = dvn.canonical(drem)
drd, dfn = vsplit(drem)
if bos.path.exists(dabs):
raise Pebkac(400, "mv2: target file exists")
bos.makedirs(os.path.dirname(dabs))
if bos.path.islink(sabs):
dlabs = absreal(sabs)
m = "moving symlink from [{}] to [{}], target [{}]"
self.log(m.format(sabs, dabs, dlabs))
os.unlink(sabs)
self._symlink(dlabs, dabs, False)
# folders are too scary, schedule rescan of both vols
self.need_rescan[svn.vpath] = 1
self.need_rescan[dvn.vpath] = 1
return "k"
c1, w, ftime, fsize = self._find_from_vpath(svn.realpath, srem)
c2 = self.cur.get(dvn.realpath)
if ftime is None:
st = bos.stat(sabs)
ftime = st.st_mtime
fsize = st.st_size
if w:
if c2:
self._copy_tags(c1, c2, w)
self._forget_file(svn.realpath, srem, c1, w)
self._relink(w, svn.realpath, srem, dabs)
c1.connection.commit()
if c2:
self.db_add(c2, w, drd, dfn, ftime, fsize)
c2.connection.commit()
else:
self.log("not found in src db: [{}]".format(svp))
bos.rename(sabs, dabs)
return "k"
def _copy_tags(self, csrc, cdst, wark):
"""copy all tags for wark from src-db to dst-db"""
w = wark[:16]
if cdst.execute("select * from mt where w=? limit 1", (w,)).fetchone():
return # existing tags in dest db
for _, k, v in csrc.execute("select * from mt where w=?", (w,)):
cdst.execute("insert into mt values(?,?,?)", (w, k, v))
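_copy_tags simply replays the mt rows for one wark prefix from the source volume's database into the destination's; the same pattern in a self-contained sqlite3 sketch (schema taken from _create_db above, tag values hypothetical):
# illustrative only; copying tag rows between two sqlite databases
import sqlite3

src = sqlite3.connect(":memory:")
dst = sqlite3.connect(":memory:")
for db in (src, dst):
    db.execute("create table mt (w text, k text, v int)")

w = "aaaaaaaaaaaaaaaa"  # 16-char wark prefix
src.execute("insert into mt values (?,?,?)", (w, ".dur", 240))  # hypothetical tag row
if not dst.execute("select 1 from mt where w=? limit 1", (w,)).fetchone():
    for _, k, v in src.execute("select * from mt where w=?", (w,)):
        dst.execute("insert into mt values (?,?,?)", (w, k, v))
dst.commit()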
def _find_from_vpath(self, ptop, vrem):
cur = self.cur.get(ptop)
if not cur:
return None, None
rd, fn = vsplit(vrem)
q = "select w, mt, sz from up where rd=? and fn=? limit 1"
try:
c = cur.execute(q, (rd, fn))
except:
c = cur.execute(q, s3enc(self.mem_cur, rd, fn))
hit = c.fetchone()
if hit:
wark, ftime, fsize = hit
return cur, wark, ftime, fsize
return cur, None, None, None
def _forget_file(self, ptop, vrem, cur, wark):
"""forgets file in db, fixes symlinks, does not delete"""
srd, sfn = vsplit(vrem)
self.log("forgetting {}".format(vrem))
if wark:
self.log("found {} in db".format(wark))
self._relink(wark, ptop, vrem, None)
q = "delete from mt where w=?"
cur.execute(q, (wark[:16],))
self.db_rm(cur, srd, sfn)
reg = self.registry.get(ptop)
if reg:
if not wark:
wark = [
x
for x, y in reg.items()
if fn in [y["name"], y.get("tnam")] and y["prel"] == vrem
]
if wark and wark in reg:
m = "forgetting partial upload {} ({})"
p = self._vis_job_progress(wark)
self.log(m.format(wark, p))
del reg[wark]
def _relink(self, wark, sptop, srem, dabs):
"""
update symlinks from file at svn/srem to dabs (rename),
or to first remaining full if no dabs (delete)
"""
dupes = []
sabs = os.path.join(sptop, srem)
q = "select rd, fn from up where substr(w,1,16)=? and w=?"
for ptop, cur in self.cur.items():
for rd, fn in cur.execute(q, (wark[:16], wark)):
if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn)
dvrem = "/".join([rd, fn]).strip("/")
if ptop != sptop or srem != dvrem:
dupes.append([ptop, dvrem])
self.log("found {} dupe: [{}] {}".format(wark, ptop, dvrem))
if not dupes:
return
full = {}
links = {}
for ptop, vp in dupes:
ap = os.path.join(ptop, vp)
try:
d = links if bos.path.islink(ap) else full
d[ap] = [ptop, vp]
except:
self.log("relink: not found: [{}]".format(ap))
if not dabs and not full and links:
# deleting final remaining full copy; swap it with a symlink
slabs = list(sorted(links.keys()))[0]
ptop, rem = links.pop(slabs)
self.log("linkswap [{}] and [{}]".format(sabs, dabs))
bos.unlink(slabs)
bos.rename(sabs, slabs)
self._symlink(slabs, sabs, False)
full[slabs] = [ptop, rem]
if not dabs:
dabs = list(sorted(full.keys()))[0]
for alink in links.keys():
try:
if alink != sabs and absreal(alink) != sabs:
continue
self.log("relinking [{}] to [{}]".format(alink, dabs))
bos.unlink(alink)
except:
pass
self._symlink(dabs, alink, False)
def _get_wark(self, cj):
if len(cj["name"]) > 1024 or len(cj["hash"]) > 512 * 1024: # 16TiB
raise Pebkac(400, "name or numchunks not according to spec")
@@ -1297,7 +1568,7 @@ class Up2k(object):
def _hashlist_from_file(self, path):
pp = self.pp if hasattr(self, "pp") else None
fsz = os.path.getsize(fsenc(path))
fsz = bos.path.getsize(path)
csz = up2k_chunksize(fsz)
ret = []
with open(fsenc(path), "rb", 512 * 1024) as f:
@@ -1316,9 +1587,9 @@ class Up2k(object):
hashobj.update(buf)
rem -= len(buf)
digest = hashobj.digest()[:32]
digest = hashobj.digest()[:33]
digest = base64.urlsafe_b64encode(digest)
ret.append(digest.decode("utf-8").rstrip("="))
ret.append(digest.decode("utf-8"))
return ret
@@ -1333,6 +1604,10 @@ class Up2k(object):
if self.args.dotpart:
tnam = "." + tnam
if self.args.nw:
job["tnam"] = tnam
return
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
f, job["tnam"] = f["orz"]
@@ -1361,7 +1636,7 @@ class Up2k(object):
for path, sz, times in ready:
self.log("lmod: setting times {} on {}".format(times, path))
try:
os.utime(fsenc(path), times)
bos.utime(path, times)
except:
self.log("lmod: failed to utime ({}, {})".format(path, times))
@@ -1372,19 +1647,22 @@ class Up2k(object):
self.log("could not unsparse [{}]".format(path), 3)
def _snapshot(self):
persist_interval = 30 # persist unfinished uploads index every 30 sec
discard_interval = 21600 # drop unfinished uploads after 6 hours inactivity
prev = {}
self.snap_persist_interval = 300 # persist unfinished index every 5 min
self.snap_discard_interval = 21600 # drop unfinished after 6 hours inactivity
self.snap_prev = {}
while True:
time.sleep(persist_interval)
with self.mutex:
for k, reg in self.registry.items():
self._snap_reg(prev, k, reg, discard_interval)
time.sleep(self.snap_persist_interval)
self.do_snapshot()
def _snap_reg(self, prev, ptop, reg, discard_interval):
def do_snapshot(self):
with self.mutex:
for k, reg in self.registry.items():
self._snap_reg(k, reg)
def _snap_reg(self, ptop, reg):
now = time.time()
histpath = self.asrv.vfs.histtab[ptop]
rm = [x for x in reg.values() if now - x["poke"] > discard_interval]
rm = [x for x in reg.values() if now - x["poke"] > self.snap_discard_interval]
if rm:
m = "dropping {} abandoned uploads in {}".format(len(rm), ptop)
vis = [self._vis_job_progress(x) for x in rm]
@@ -1394,33 +1672,30 @@ class Up2k(object):
try:
# remove the filename reservation
path = os.path.join(job["ptop"], job["prel"], job["name"])
if os.path.getsize(fsenc(path)) == 0:
os.unlink(fsenc(path))
if bos.path.getsize(path) == 0:
bos.unlink(path)
if len(job["hash"]) == len(job["need"]):
# PARTIAL is empty, delete that too
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
os.unlink(fsenc(path))
bos.unlink(path)
except:
pass
path = os.path.join(histpath, "up2k.snap")
if not reg:
if ptop not in prev or prev[ptop] is not None:
prev[ptop] = None
if os.path.exists(fsenc(path)):
os.unlink(fsenc(path))
if ptop not in self.snap_prev or self.snap_prev[ptop] is not None:
self.snap_prev[ptop] = None
if bos.path.exists(path):
bos.unlink(path)
return
newest = max(x["poke"] for _, x in reg.items()) if reg else 0
etag = [len(reg), newest]
if etag == prev.get(ptop):
if etag == self.snap_prev.get(ptop):
return
try:
os.makedirs(histpath)
except:
pass
bos.makedirs(histpath)
path2 = "{}.{}".format(path, os.getpid())
j = json.dumps(reg, indent=2, sort_keys=True).encode("utf-8")
@@ -1430,7 +1705,7 @@ class Up2k(object):
atomic_move(path2, path)
self.log("snap: {} |{}|".format(path, len(reg.keys())))
prev[ptop] = etag
self.snap_prev[ptop] = etag
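The snapshot writer only touches up2k.snap when the registry actually changed; the change test is a tiny etag of [job count, newest poke]. A standalone sketch of that idea (names are made up):
# illustrative only; skip rewriting a snapshot when nothing changed
snap_prev = {}

def registry_changed(ptop, reg):
    newest = max(x["poke"] for x in reg.values()) if reg else 0
    etag = [len(reg), newest]
    if etag == snap_prev.get(ptop):
        return False  # same as last time; skip the write
    snap_prev[ptop] = etag
    return True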
def _tagger(self):
with self.mutex:
@@ -1485,7 +1760,7 @@ class Up2k(object):
abspath = os.path.join(ptop, rd, fn)
self.log("hashing " + abspath)
inf = os.stat(fsenc(abspath))
inf = bos.stat(abspath)
hashes = self._hashlist_from_file(abspath)
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
with self.mutex:
@@ -1498,6 +1773,11 @@ class Up2k(object):
self.n_hashq += 1
# self.log("hashq {} push {}/{}/{}".format(self.n_hashq, ptop, rd, fn))
def shutdown(self):
if hasattr(self, "snap_prev"):
self.log("writing snapshot")
self.do_snapshot()
def up2k_chunksize(filesize):
chunksize = 1024 * 1024
@@ -1513,17 +1793,17 @@ def up2k_chunksize(filesize):
def up2k_wark_from_hashlist(salt, filesize, hashes):
""" server-reproducible file identifier, independent of name or location """
"""server-reproducible file identifier, independent of name or location"""
ident = [salt, str(filesize)]
ident.extend(hashes)
ident = "\n".join(ident)
wark = hashlib.sha512(ident.encode("utf-8")).digest()
wark = hashlib.sha512(ident.encode("utf-8")).digest()[:33]
wark = base64.urlsafe_b64encode(wark)
return wark.decode("ascii")[:43]
return wark.decode("ascii")
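Both truncations to 33 bytes are what let the '='-stripping and the [:43] slice disappear: 33 bytes is 264 bits, which base64 encodes to exactly 44 characters with no padding. A quick standalone check:
# illustrative only; 33-byte digest -> 44 urlsafe-base64 chars, no padding
import base64, hashlib

digest = hashlib.sha512(b"example").digest()[:33]
wark = base64.urlsafe_b64encode(digest).decode("ascii")
assert len(wark) == 44 and not wark.endswith("=")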
def up2k_wark_from_metadata(salt, sz, lastmod, rd, fn):
ret = fsenc("{}\n{}\n{}\n{}\n{}".format(salt, lastmod, sz, rd, fn))
ret = base64.urlsafe_b64encode(hashlib.sha512(ret).digest())
return "#{}".format(ret[:42].decode("ascii"))
return "#{}".format(ret.decode("ascii"))[:44]

View File: copyparty/util.py

@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import re
import os
import sys
import stat
import time
import base64
import select
@@ -16,6 +17,7 @@ import mimetypes
import contextlib
import subprocess as sp # nosec
from datetime import datetime
from collections import Counter
from .__init__ import PY2, WINDOWS, ANYWIN
from .stolen import surrogateescape
@@ -42,6 +44,20 @@ else:
from Queue import Queue # pylint: disable=import-error,no-name-in-module
from StringIO import StringIO as BytesIO
try:
struct.unpack(b">i", b"idgi")
spack = struct.pack
sunpack = struct.unpack
except:
def spack(f, *a, **ka):
return struct.pack(f.decode("ascii"), *a, **ka)
def sunpack(f, *a, **ka):
return struct.unpack(f.decode("ascii"), *a, **ka)
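The probe above checks whether this interpreter's struct accepts a bytes format-string; if not, the wrappers decode it first. Either way spack/sunpack behave like struct.pack/unpack:
# usage of the shim defined above; prints (258,)
print(sunpack(b">H", spack(b">H", 258)))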
surrogateescape.register_surrogateescape()
FS_ENCODING = sys.getfilesystemencoding()
if WINDOWS and PY2:
@@ -123,20 +139,6 @@ REKOBO_KEY = {
REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}
class Counter(object):
def __init__(self, v=0):
self.v = v
self.mutex = threading.Lock()
def add(self, delta=1):
with self.mutex:
self.v += delta
def set(self, absval):
with self.mutex:
self.v = absval
class Cooldown(object):
def __init__(self, maxage):
self.maxage = maxage
@@ -231,7 +233,7 @@ def nuprint(msg):
def rice_tid():
tid = threading.current_thread().ident
c = struct.unpack(b"B" * 5, struct.pack(b">Q", tid)[-5:])
c = sunpack(b"B" * 5, spack(b">Q", tid)[-5:])
return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m"
@@ -282,15 +284,69 @@ def alltrace():
return "\n".join(rret + bret)
def start_stackmon(arg_str, nid):
suffix = "-{}".format(nid) if nid else ""
fp, f = arg_str.rsplit(",", 1)
f = int(f)
t = threading.Thread(
target=stackmon,
args=(fp, f, suffix),
name="stackmon" + suffix,
)
t.daemon = True
t.start()
def stackmon(fp, ival, suffix):
ctr = 0
while True:
ctr += 1
time.sleep(ival)
st = "{}, {}\n{}".format(ctr, time.time(), alltrace())
with open(fp + suffix, "wb") as f:
f.write(st.encode("utf-8", "replace"))
def start_log_thrs(logger, ival, nid):
ival = int(ival)
tname = lname = "log-thrs"
if nid:
tname = "logthr-n{}-i{:x}".format(nid, os.getpid())
lname = tname[3:]
t = threading.Thread(
target=log_thrs,
args=(logger, ival, lname),
name=tname,
)
t.daemon = True
t.start()
def log_thrs(log, ival, name):
while True:
time.sleep(ival)
tv = [x.name for x in threading.enumerate()]
tv = [
x.split("-")[0]
if x.startswith("httpconn-") or x.startswith("thumb-")
else "listen"
if "-listen-" in x
else x
for x in tv
if not x.startswith("pydevd.")
]
tv = ["{}\033[36m{}".format(v, k) for k, v in sorted(Counter(tv).items())]
log(name, "\033[0m \033[33m".join(tv), 3)
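This per-prefix tally is why the home-grown Counter class removed above gives way to collections.Counter, now imported from the stdlib. A standalone sketch of what log_thrs builds (thread names are made-up examples):
# illustrative only; the per-prefix tally built by log_thrs
from collections import Counter

names = ["httpconn", "httpconn", "thumb", "up2k-hasher"]
print(sorted(Counter(names).items()))
# [('httpconn', 2), ('thumb', 1), ('up2k-hasher', 1)]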
def min_ex():
et, ev, tb = sys.exc_info()
tb = traceback.extract_tb(tb, 2)
ex = [
"{} @ {} <{}>: {}".format(fp.split(os.sep)[-1], ln, fun, txt)
for fp, ln, fun, txt in tb
]
ex.append("{}: {}".format(et.__name__, ev))
return "\n".join(ex)
tb = traceback.extract_tb(tb)
fmt = "{} @ {} <{}>: {}"
ex = [fmt.format(fp.split(os.sep)[-1], ln, fun, txt) for fp, ln, fun, txt in tb]
ex.append("[{}] {}".format(et.__name__, ev))
return "\n".join(ex[-8:])
@contextlib.contextmanager
@@ -351,7 +407,7 @@ def ren_open(fname, *args, **kwargs):
if not b64:
b64 = (bname + ext).encode("utf-8", "replace")
b64 = hashlib.sha512(b64).digest()[:12]
b64 = base64.urlsafe_b64encode(b64).decode("utf-8").rstrip("=")
b64 = base64.urlsafe_b64encode(b64).decode("utf-8")
badlen = len(fname)
while len(fname) >= badlen:
@@ -648,6 +704,16 @@ def s2hms(s, optional_h=False):
return "{}:{:02}:{:02}".format(h, m, s)
def uncyg(path):
if len(path) < 2 or not path.startswith("/"):
return path
if len(path) > 2 and path[2] != "/":
return path
return "{}:\\{}".format(path[1], path[3:])
def undot(path):
ret = []
for node in path.split("/"):
@@ -664,7 +730,7 @@ def undot(path):
return "/".join(ret)
def sanitize_fn(fn, ok="", bad=[]):
def sanitize_fn(fn, ok, bad):
if "/" not in ok:
fn = fn.replace("\\", "/").split("/")[-1]
@@ -693,6 +759,19 @@ def sanitize_fn(fn, ok="", bad=[]):
return fn.strip()
def absreal(fpath):
try:
return fsdec(os.path.abspath(os.path.realpath(fsenc(fpath))))
except:
if not WINDOWS:
raise
# cpython bug introduced in 3.8, still exists in 3.9.1,
# some win7sp1 and win10:20H2 boxes cannot realpath a
# networked drive letter such as b"n:" or b"n:\\"
return os.path.abspath(os.path.realpath(fpath))
def u8safe(txt):
try:
return txt.encode("utf-8", "xmlcharrefreplace").decode("utf-8", "replace")
@@ -750,6 +829,13 @@ def unquotep(txt):
return w8dec(unq2)
def vsplit(vpath):
if "/" not in vpath:
return "", vpath
return vpath.rsplit("/", 1)
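vsplit is the virtual-path counterpart of os.path.split: it returns the parent directory and the final component, with an empty parent for top-level names; given the definition above:
# behaviour of vsplit() defined above
print(vsplit("music/album/01.opus"))  # ['music/album', '01.opus']
print(vsplit("readme.md"))            # ('', 'readme.md')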
def w8dec(txt):
"""decodes filesystem-bytes to wtf8"""
if PY2:
@@ -894,35 +980,24 @@ def yieldfile(fn):
yield buf
def hashcopy(actor, fin, fout):
is_mp = actor.is_mp
def hashcopy(fin, fout):
hashobj = hashlib.sha512()
tlen = 0
for buf in fin:
if is_mp:
actor.workload += 1
if actor.workload > 2 ** 31:
actor.workload = 100
tlen += len(buf)
hashobj.update(buf)
fout.write(buf)
digest32 = hashobj.digest()[:32]
digest_b64 = base64.urlsafe_b64encode(digest32).decode("utf-8").rstrip("=")
digest = hashobj.digest()[:33]
digest_b64 = base64.urlsafe_b64encode(digest).decode("utf-8")
return tlen, hashobj.hexdigest(), digest_b64
def sendfile_py(lower, upper, f, s, actor=None):
def sendfile_py(lower, upper, f, s):
remains = upper - lower
f.seek(lower)
while remains > 0:
if actor:
actor.workload += 1
if actor.workload > 2 ** 31:
actor.workload = 100
# time.sleep(0.01)
buf = f.read(min(1024 * 32, remains))
if not buf:
@@ -960,6 +1035,9 @@ def sendfile_kern(lower, upper, f, s):
def statdir(logger, scandir, lstat, top):
if lstat and not os.supports_follow_symlinks:
scandir = False
try:
btop = fsenc(top)
if scandir and hasattr(os, "scandir"):
@@ -969,8 +1047,7 @@ def statdir(logger, scandir, lstat, top):
try:
yield [fsdec(fh.name), fh.stat(follow_symlinks=not lstat)]
except Exception as ex:
msg = "scan-stat: \033[36m{} @ {}"
logger(msg.format(repr(ex), fsdec(fh.path)))
logger(src, "[s] {} @ {}".format(repr(ex), fsdec(fh.path)), 6)
else:
src = "listdir"
fun = os.lstat if lstat else os.stat
@@ -979,11 +1056,30 @@ def statdir(logger, scandir, lstat, top):
try:
yield [fsdec(name), fun(abspath)]
except Exception as ex:
msg = "list-stat: \033[36m{} @ {}"
logger(msg.format(repr(ex), fsdec(abspath)))
logger(src, "[s] {} @ {}".format(repr(ex), fsdec(abspath)), 6)
except Exception as ex:
logger("{}: \033[31m{} @ {}".format(src, repr(ex), top))
logger(src, "{} @ {}".format(repr(ex), top), 1)
def rmdirs(logger, scandir, lstat, top):
dirs = statdir(logger, scandir, lstat, top)
dirs = [x[0] for x in dirs if stat.S_ISDIR(x[1].st_mode)]
dirs = [os.path.join(top, x) for x in dirs]
ok = []
ng = []
for d in dirs[::-1]:
a, b = rmdirs(logger, scandir, lstat, d)
ok += a
ng += b
try:
os.rmdir(fsenc(top))
ok.append(top)
except:
ng.append(top)
return ok, ng
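rmdirs walks the tree bottom-up, removes every directory that ends up empty, and returns the paths it could and could not delete; handle_rm reports those counts in its status message. A hypothetical run on a posix box, relying on statdir/rmdirs as defined above:
# illustrative only; directory layout is hypothetical
import os

os.makedirs("/tmp/demo/a/b")
open("/tmp/demo/keep.txt", "w").close()
ok, ng = rmdirs(print, True, False, "/tmp/demo")
print(ok)  # ['/tmp/demo/a/b', '/tmp/demo/a']
print(ng)  # ['/tmp/demo']  (keep.txt is still in it)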
def unescape_cookie(orig):
@@ -1020,10 +1116,16 @@ def guess_mime(url, fallback="application/octet-stream"):
except:
return fallback
return MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback
ret = MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback
if ";" not in ret:
if ret.startswith("text/") or ret.endswith("/javascript"):
ret += "; charset=UTF-8"
return ret
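The guess_mime change appends an explicit UTF-8 charset to text and javascript types unless the mapped type already carries one; roughly:
# expected behaviour after this change (exact subtype may vary by platform)
print(guess_mime("notes.txt"))  # text/plain; charset=UTF-8
print(guess_mime("img.png"))    # image/png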
def runcmd(*argv):
def runcmd(argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
stdout, stderr = p.communicate()
stdout = stdout.decode("utf-8", "replace")
@@ -1031,8 +1133,8 @@ def runcmd(*argv):
return [p.returncode, stdout, stderr]
def chkcmd(*argv):
ok, sout, serr = runcmd(*argv)
def chkcmd(argv):
ok, sout, serr = runcmd(argv)
if ok != 0:
raise Exception(serr)
@@ -1054,10 +1156,7 @@ def gzip_orig_sz(fn):
with open(fsenc(fn), "rb") as f:
f.seek(-4, 2)
rv = f.read(4)
try:
return struct.unpack(b"I", rv)[0]
except:
return struct.unpack("I", rv)[0]
return sunpack(b"I", rv)[0]
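gzip_orig_sz reads ISIZE, the final four bytes of a gzip file, which per RFC 1952 hold the uncompressed size modulo 2^32 in little-endian order; the sunpack shim removes the need for the old bytes-vs-str fallback. A standalone check using only the stdlib:
# illustrative only; the last 4 bytes of a gzip file are the uncompressed size
import gzip, struct

with gzip.open("/tmp/demo.gz", "wb") as f:
    f.write(b"x" * 1000)

with open("/tmp/demo.gz", "rb") as f:
    f.seek(-4, 2)
    print(struct.unpack(b"<I", f.read(4))[0])  # 1000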
def py_desc():

View File: copyparty/web/baguettebox.js

@@ -13,7 +13,7 @@ window.baguetteBox = (function () {
captions: true,
buttons: 'auto',
noScrollbars: false,
bodyClass: 'baguetteBox-open',
bodyClass: 'bbox-open',
titleTag: false,
async: false,
preload: 2,
@@ -22,37 +22,46 @@ window.baguetteBox = (function () {
afterHide: null,
onChange: null,
},
overlay, slider, previousButton, nextButton, closeButton,
overlay, slider, btnPrev, btnNext, btnHelp, btnVmode, btnClose,
currentGallery = [],
currentIndex = 0,
isOverlayVisible = false,
touch = {}, // start-pos
touchFlag = false, // busy
regex = /.+\.(gif|jpe?g|png|webp)/i,
re_i = /.+\.(gif|jpe?g|png|webp)(\?|$)/i,
re_v = /.+\.(webm|mp4)(\?|$)/i,
data = {}, // all galleries
imagesElements = [],
documentLastFocus = null;
documentLastFocus = null,
isFullscreen = false,
vmute = false,
vloop = false,
vnext = false,
resume_mp = false;
var overlayClickHandler = function (event) {
if (event.target.id.indexOf('baguette-img') !== -1) {
var onFSC = function (e) {
isFullscreen = !!document.fullscreenElement;
};
var overlayClickHandler = function (e) {
if (e.target.id.indexOf('baguette-img') !== -1)
hideOverlay();
}
};
var touchstartHandler = function (event) {
var touchstartHandler = function (e) {
touch.count++;
if (touch.count > 1) {
if (touch.count > 1)
touch.multitouch = true;
}
touch.startX = event.changedTouches[0].pageX;
touch.startY = event.changedTouches[0].pageY;
touch.startX = e.changedTouches[0].pageX;
touch.startY = e.changedTouches[0].pageY;
};
var touchmoveHandler = function (event) {
if (touchFlag || touch.multitouch) {
var touchmoveHandler = function (e) {
if (touchFlag || touch.multitouch)
return;
}
event.preventDefault ? event.preventDefault() : event.returnValue = false;
var touchEvent = event.touches[0] || event.changedTouches[0];
e.preventDefault ? e.preventDefault() : e.returnValue = false;
var touchEvent = e.touches[0] || e.changedTouches[0];
if (touchEvent.pageX - touch.startX > 40) {
touchFlag = true;
showPreviousImage();
@@ -65,19 +74,19 @@ window.baguetteBox = (function () {
};
var touchendHandler = function () {
touch.count--;
if (touch.count <= 0) {
if (touch.count <= 0)
touch.multitouch = false;
}
touchFlag = false;
};
var contextmenuHandler = function () {
touchendHandler();
};
var trapFocusInsideOverlay = function (event) {
if (overlay.style.display === 'block' && (overlay.contains && !overlay.contains(event.target))) {
event.stopPropagation();
initFocus();
var trapFocusInsideOverlay = function (e) {
if (overlay.style.display === 'block' && (overlay.contains && !overlay.contains(e.target))) {
e.stopPropagation();
btnClose.focus();
}
};
@@ -88,7 +97,7 @@ window.baguetteBox = (function () {
}
function bindImageClickListeners(selector, userOptions) {
var galleryNodeList = document.querySelectorAll(selector);
var galleryNodeList = QSA(selector);
var selectorData = {
galleries: [],
nodeList: galleryNodeList
@@ -96,33 +105,26 @@ window.baguetteBox = (function () {
data[selector] = selectorData;
[].forEach.call(galleryNodeList, function (galleryElement) {
if (userOptions && userOptions.filter) {
regex = userOptions.filter;
}
var tagsNodeList = [];
if (galleryElement.tagName === 'A') {
if (galleryElement.tagName === 'A')
tagsNodeList = [galleryElement];
} else {
else
tagsNodeList = galleryElement.getElementsByTagName('a');
}
tagsNodeList = [].filter.call(tagsNodeList, function (element) {
if (element.className.indexOf(userOptions && userOptions.ignoreClass) === -1) {
return regex.test(element.href);
}
if (element.className.indexOf(userOptions && userOptions.ignoreClass) === -1)
return re_i.test(element.href) || re_v.test(element.href);
});
if (tagsNodeList.length === 0) {
if (!tagsNodeList.length)
return;
}
var gallery = [];
[].forEach.call(tagsNodeList, function (imageElement, imageIndex) {
var imageElementClickHandler = function (event) {
if (event && event.ctrlKey)
var imageElementClickHandler = function (e) {
if (ctrl(e))
return true;
event.preventDefault ? event.preventDefault() : event.returnValue = false;
e.preventDefault ? e.preventDefault() : e.returnValue = false;
prepareOverlay(gallery, userOptions);
showOverlay(imageIndex);
};
@@ -140,93 +142,186 @@ window.baguetteBox = (function () {
}
function clearCachedData() {
for (var selector in data) {
if (data.hasOwnProperty(selector)) {
for (var selector in data)
if (data.hasOwnProperty(selector))
removeFromCache(selector);
}
}
}
function removeFromCache(selector) {
if (!data.hasOwnProperty(selector)) {
if (!data.hasOwnProperty(selector))
return;
}
var galleries = data[selector].galleries;
[].forEach.call(galleries, function (gallery) {
[].forEach.call(gallery, function (imageItem) {
unbind(imageItem.imageElement, 'click', imageItem.eventHandler);
});
if (currentGallery === gallery) {
if (currentGallery === gallery)
currentGallery = [];
}
});
delete data[selector];
}
function buildOverlay() {
overlay = ebi('baguetteBox-overlay');
if (overlay) {
slider = ebi('baguetteBox-slider');
previousButton = ebi('previous-button');
nextButton = ebi('next-button');
closeButton = ebi('close-button');
return;
overlay = ebi('bbox-overlay');
if (!overlay) {
var ctr = mknod('div');
ctr.innerHTML = (
'<div id="bbox-overlay" role="dialog">' +
'<div id="bbox-slider"></div>' +
'<button id="bbox-prev" class="bbox-btn" type="button" aria-label="Previous">&lt;</button>' +
'<button id="bbox-next" class="bbox-btn" type="button" aria-label="Next">&gt;</button>' +
'<div id="bbox-btns">' +
'<button id="bbox-help" type="button">?</button>' +
'<button id="bbox-vmode" type="button" tt="a"></button>' +
'<button id="bbox-close" type="button" aria-label="Close">X</button>' +
'</div></div>'
);
overlay = ctr.firstChild;
QS('body').appendChild(overlay);
tt.att(overlay);
}
overlay = mknod('div');
overlay.setAttribute('role', 'dialog');
overlay.id = 'baguetteBox-overlay';
document.getElementsByTagName('body')[0].appendChild(overlay);
slider = mknod('div');
slider.id = 'baguetteBox-slider';
overlay.appendChild(slider);
previousButton = mknod('button');
previousButton.setAttribute('type', 'button');
previousButton.id = 'previous-button';
previousButton.setAttribute('aria-label', 'Previous');
previousButton.innerHTML = '&lt;';
overlay.appendChild(previousButton);
nextButton = mknod('button');
nextButton.setAttribute('type', 'button');
nextButton.id = 'next-button';
nextButton.setAttribute('aria-label', 'Next');
nextButton.innerHTML = '&gt;';
overlay.appendChild(nextButton);
closeButton = mknod('button');
closeButton.setAttribute('type', 'button');
closeButton.id = 'close-button';
closeButton.setAttribute('aria-label', 'Close');
closeButton.innerHTML = '&times;';
overlay.appendChild(closeButton);
previousButton.className = nextButton.className = closeButton.className = 'baguetteBox-button';
slider = ebi('bbox-slider');
btnPrev = ebi('bbox-prev');
btnNext = ebi('bbox-next');
btnHelp = ebi('bbox-help');
btnVmode = ebi('bbox-vmode');
btnClose = ebi('bbox-close');
bindEvents();
}
function keyDownHandler(event) {
switch (event.keyCode) {
case 37: // Left
showPreviousImage();
break;
case 39: // Right
showNextImage();
break;
case 27: // Esc
hideOverlay();
break;
case 36: // Home
showFirstImage(event);
break;
case 35: // End
showLastImage(event);
break;
function halp() {
if (ebi('bbox-halp'))
return;
var list = [
['<b># hotkey</b>', '<b># operation</b>'],
['escape', 'close'],
['left, J', 'previous file'],
['right, L', 'next file'],
['home', 'first file'],
['end', 'last file'],
['space, P, K', 'video: play / pause'],
['U', 'video: seek 10sec back'],
['O', 'video: seek 10sec ahead'],
['M', 'video: toggle mute'],
['R', 'video: toggle loop'],
['C', 'video: toggle auto-next'],
['F', 'video: toggle fullscreen'],
],
d = mknod('table'),
html = ['<tbody>'];
for (var a = 0; a < list.length; a++)
html.push('<tr><td>' + list[a][0] + '</td><td>' + list[a][1] + '</td></tr>');
d.innerHTML = html.join('\n') + '</tbody>';
d.setAttribute('id', 'bbox-halp');
d.onclick = function () {
overlay.removeChild(d);
};
overlay.appendChild(d);
}
function keyDownHandler(e) {
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
return;
var k = e.code + '', v = vid();
if (k == "ArrowLeft" || k == "KeyJ")
showPreviousImage();
else if (k == "ArrowRight" || k == "KeyL")
showNextImage();
else if (k == "Escape")
hideOverlay();
else if (k == "Home")
showFirstImage(e);
else if (k == "End")
showLastImage(e);
else if (k == "Space" || k == "KeyP" || k == "KeyK")
playpause();
else if (k == "KeyU" || k == "KeyO")
relseek(k == "KeyU" ? -10 : 10);
else if (k == "KeyM" && v) {
v.muted = vmute = !vmute;
mp_ctl();
}
else if (k == "KeyR" && v) {
vloop = !vloop;
vnext = vnext && !vloop;
setVmode();
}
else if (k == "KeyC" && v) {
vnext = !vnext;
vloop = vloop && !vnext;
setVmode();
}
else if (k == "KeyF")
try {
if (isFullscreen)
document.exitFullscreen();
else
v.requestFullscreen();
}
catch (ex) { }
}
function setVmode() {
var v = vid();
ebi('bbox-vmode').style.display = v ? '' : 'none';
if (!v)
return;
var msg = 'When video ends, ', tts = '', lbl;
if (vloop) {
lbl = 'Loop';
msg += 'repeat it';
tts = '$NHotkey: R';
}
else if (vnext) {
lbl = 'Cont';
msg += 'continue to next';
tts = '$NHotkey: C';
}
else {
lbl = 'Stop';
msg += 'just stop'
}
btnVmode.setAttribute('aria-label', msg);
btnVmode.setAttribute('tt', msg + tts);
btnVmode.textContent = lbl;
v.loop = vloop
if (vloop && v.paused)
v.play();
}
function tglVmode() {
if (vloop) {
vnext = true;
vloop = false;
}
else if (vnext)
vnext = false;
else
vloop = true;
setVmode();
if (tt.en)
tt.show.bind(this)();
}
function keyUpHandler(e) {
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
return;
var k = e.code + '';
if (k == "Space")
ev(e);
}
var passiveSupp = false;
@@ -248,9 +343,11 @@ window.baguetteBox = (function () {
function bindEvents() {
bind(overlay, 'click', overlayClickHandler);
bind(previousButton, 'click', showPreviousImage);
bind(nextButton, 'click', showNextImage);
bind(closeButton, 'click', hideOverlay);
bind(btnPrev, 'click', showPreviousImage);
bind(btnNext, 'click', showNextImage);
bind(btnClose, 'click', hideOverlay);
bind(btnVmode, 'click', tglVmode);
bind(btnHelp, 'click', halp);
bind(slider, 'contextmenu', contextmenuHandler);
bind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
bind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
@@ -260,9 +357,11 @@ window.baguetteBox = (function () {
function unbindEvents() {
unbind(overlay, 'click', overlayClickHandler);
unbind(previousButton, 'click', showPreviousImage);
unbind(nextButton, 'click', showNextImage);
unbind(closeButton, 'click', hideOverlay);
unbind(btnPrev, 'click', showPreviousImage);
unbind(btnNext, 'click', showNextImage);
unbind(btnClose, 'click', hideOverlay);
unbind(btnVmode, 'click', tglVmode);
unbind(btnHelp, 'click', halp);
unbind(slider, 'contextmenu', contextmenuHandler);
unbind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
unbind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
@@ -271,9 +370,9 @@ window.baguetteBox = (function () {
}
function prepareOverlay(gallery, userOptions) {
if (currentGallery === gallery) {
if (currentGallery === gallery)
return;
}
currentGallery = gallery;
setOptions(userOptions);
slider.innerHTML = '';
@@ -287,8 +386,8 @@ window.baguetteBox = (function () {
fullImage.id = 'baguette-img-' + i;
imagesElements.push(fullImage);
imagesFiguresIds.push('baguetteBox-figure-' + i);
imagesCaptionsIds.push('baguetteBox-figcaption-' + i);
imagesFiguresIds.push('bbox-figure-' + i);
imagesCaptionsIds.push('bbox-figcaption-' + i);
slider.appendChild(imagesElements[i]);
}
overlay.setAttribute('aria-labelledby', imagesFiguresIds.join(' '));
@@ -296,23 +395,21 @@ window.baguetteBox = (function () {
}
function setOptions(newOptions) {
if (!newOptions) {
if (!newOptions)
newOptions = {};
}
for (var item in defaults) {
options[item] = defaults[item];
if (typeof newOptions[item] !== 'undefined') {
if (typeof newOptions[item] !== 'undefined')
options[item] = newOptions[item];
}
}
slider.style.transition = (options.animation === 'fadeIn' ? 'opacity .4s ease' :
options.animation === 'slideIn' ? '' : 'none');
if (options.buttons === 'auto' && ('ontouchstart' in window || currentGallery.length === 1)) {
if (options.buttons === 'auto' && ('ontouchstart' in window || currentGallery.length === 1))
options.buttons = false;
}
previousButton.style.display = nextButton.style.display = (options.buttons ? '' : 'none');
btnPrev.style.display = btnNext.style.display = (options.buttons ? '' : 'none');
}
function showOverlay(chosenImageIndex) {
@@ -320,11 +417,12 @@ window.baguetteBox = (function () {
document.documentElement.style.overflowY = 'hidden';
document.body.style.overflowY = 'scroll';
}
if (overlay.style.display === 'block') {
if (overlay.style.display === 'block')
return;
}
bind(document, 'keydown', keyDownHandler);
bind(document, 'keyup', keyUpHandler);
bind(document, 'fullscreenchange', onFSC);
currentIndex = chosenImageIndex;
touch = {
count: 0,
@@ -341,50 +439,48 @@ window.baguetteBox = (function () {
// Fade in overlay
setTimeout(function () {
overlay.className = 'visible';
if (options.bodyClass && document.body.classList) {
if (options.bodyClass && document.body.classList)
document.body.classList.add(options.bodyClass);
}
if (options.afterShow) {
options.afterShow();
}
}, 50);
if (options.onChange) {
options.onChange(currentIndex, imagesElements.length);
}
documentLastFocus = document.activeElement;
initFocus();
isOverlayVisible = true;
}
function initFocus() {
if (options.buttons) {
previousButton.focus();
} else {
closeButton.focus();
}
if (options.afterShow)
options.afterShow();
}, 50);
if (options.onChange)
options.onChange(currentIndex, imagesElements.length);
documentLastFocus = document.activeElement;
btnClose.focus();
isOverlayVisible = true;
}
function hideOverlay(e) {
ev(e);
playvid(false);
if (options.noScrollbars) {
document.documentElement.style.overflowY = 'auto';
document.body.style.overflowY = 'auto';
}
if (overlay.style.display === 'none') {
if (overlay.style.display === 'none')
return;
}
unbind(document, 'keydown', keyDownHandler);
unbind(document, 'keyup', keyUpHandler);
unbind(document, 'fullscreenchange', onFSC);
// Fade out and hide the overlay
overlay.className = '';
setTimeout(function () {
overlay.style.display = 'none';
if (options.bodyClass && document.body.classList) {
if (options.bodyClass && document.body.classList)
document.body.classList.remove(options.bodyClass);
}
if (options.afterHide) {
var h = ebi('bbox-halp');
if (h)
h.parentNode.removeChild(h);
if (options.afterHide)
options.afterHide();
}
documentLastFocus && documentLastFocus.focus();
isOverlayVisible = false;
}, 500);
@@ -394,59 +490,68 @@ window.baguetteBox = (function () {
var imageContainer = imagesElements[index];
var galleryItem = currentGallery[index];
if (typeof imageContainer === 'undefined' || typeof galleryItem === 'undefined') {
if (typeof imageContainer === 'undefined' || typeof galleryItem === 'undefined')
return; // out-of-bounds or gallery dirty
}
if (imageContainer.getElementsByTagName('img')[0]) {
// image is loaded, cb and bail
if (callback) {
callback();
}
return;
}
if (imageContainer.querySelector('img, video'))
// was loaded, cb and bail
return callback ? callback() : null;
// maybe unloaded video
while (imageContainer.firstChild)
imageContainer.removeChild(imageContainer.firstChild);
var imageElement = galleryItem.imageElement,
imageSrc = imageElement.href,
thumbnailElement = imageElement.getElementsByTagName('img')[0],
is_vid = re_v.test(imageSrc),
thumbnailElement = imageElement.querySelector('img, video'),
imageCaption = typeof options.captions === 'function' ?
options.captions.call(currentGallery, imageElement) :
imageElement.getAttribute('data-caption') || imageElement.title;
imageSrc += imageSrc.indexOf('?') < 0 ? '?cache' : '&cache';
if (is_vid && index != currentIndex)
return; // no preload
var figure = mknod('figure');
figure.id = 'baguetteBox-figure-' + index;
figure.innerHTML = '<div class="baguetteBox-spinner">' +
'<div class="baguetteBox-double-bounce1"></div>' +
'<div class="baguetteBox-double-bounce2"></div>' +
figure.id = 'bbox-figure-' + index;
figure.innerHTML = '<div class="bbox-spinner">' +
'<div class="bbox-double-bounce1"></div>' +
'<div class="bbox-double-bounce2"></div>' +
'</div>';
if (options.captions && imageCaption) {
var figcaption = mknod('figcaption');
figcaption.id = 'baguetteBox-figcaption-' + index;
figcaption.id = 'bbox-figcaption-' + index;
figcaption.innerHTML = imageCaption;
figure.appendChild(figcaption);
}
imageContainer.appendChild(figure);
var image = mknod('img');
image.onload = function () {
var image = mknod(is_vid ? 'video' : 'img');
clmod(imageContainer, 'vid', is_vid);
image.addEventListener(is_vid ? 'loadedmetadata' : 'load', function () {
// Remove loader element
var spinner = document.querySelector('#baguette-img-' + index + ' .baguetteBox-spinner');
var spinner = QS('#baguette-img-' + index + ' .bbox-spinner');
figure.removeChild(spinner);
if (!options.async && callback) {
if (!options.async && callback)
callback();
}
};
});
image.setAttribute('src', imageSrc);
image.alt = thumbnailElement ? thumbnailElement.alt || '' : '';
if (options.titleTag && imageCaption) {
image.title = imageCaption;
if (is_vid) {
image.setAttribute('controls', 'controls');
image.onended = vidEnd;
}
image.alt = thumbnailElement ? thumbnailElement.alt || '' : '';
if (options.titleTag && imageCaption)
image.title = imageCaption;
figure.appendChild(image);
if (options.async && callback) {
if (options.async && callback)
callback();
}
}
function showNextImage(e) {
@@ -459,26 +564,20 @@ window.baguetteBox = (function () {
return show(currentIndex - 1);
}
function showFirstImage(event) {
if (event) {
event.preventDefault();
}
function showFirstImage(e) {
if (e)
e.preventDefault();
return show(0);
}
function showLastImage(event) {
if (event) {
event.preventDefault();
}
function showLastImage(e) {
if (e)
e.preventDefault();
return show(currentGallery.length - 1);
}
/**
* Move the gallery to a specific index
* @param `index` {number} - the position of the image
* @param `gallery` {array} - gallery which should be opened, if omitted assumes the currently opened one
* @return {boolean} - true on success or false if the index is invalid
*/
function show(index, gallery) {
if (!isOverlayVisible && index >= 0 && index < gallery.length) {
prepareOverlay(gallery, options);
@@ -486,18 +585,25 @@ window.baguetteBox = (function () {
return true;
}
if (index < 0) {
if (options.animation) {
if (options.animation)
bounceAnimation('left');
}
return false;
}
if (index >= imagesElements.length) {
if (options.animation) {
if (options.animation)
bounceAnimation('right');
}
return false;
}
var v = vid();
if (v) {
v.src = '';
v.load();
v.parentNode.removeChild(v);
}
currentIndex = index;
loadImage(currentIndex, function () {
preloadNext(currentIndex);
@@ -505,17 +611,49 @@ window.baguetteBox = (function () {
});
updateOffset();
if (options.onChange) {
if (options.onChange)
options.onChange(currentIndex, imagesElements.length);
}
return true;
}
/**
* Triggers the bounce animation
* @param {('left'|'right')} direction - Direction of the movement
*/
function vid() {
return imagesElements[currentIndex].querySelector('video');
}
function playvid(play) {
if (vid())
vid()[play ? 'play' : 'pause']();
}
function playpause() {
var v = vid();
if (v)
v[v.paused ? "play" : "pause"]();
}
function relseek(sec) {
if (vid())
vid().currentTime += sec;
}
function vidEnd() {
if (this == vid() && vnext)
showNextImage();
}
function mp_ctl() {
var v = vid();
if (!vmute && v && mp.au && !mp.au.paused) {
mp.fade_out();
resume_mp = true;
}
else if (resume_mp && (vmute || !v) && mp.au && mp.au.paused) {
mp.fade_in();
resume_mp = false;
}
}
function bounceAnimation(direction) {
slider.className = 'bounce-from-' + direction;
setTimeout(function () {
@@ -534,21 +672,30 @@ window.baguetteBox = (function () {
} else {
slider.style.transform = 'translate3d(' + offset + ',0,0)';
}
playvid(false);
var v = vid();
if (v) {
playvid(true);
v.muted = vmute;
v.loop = vloop;
}
mp_ctl();
setVmode();
}
function preloadNext(index) {
if (index - currentIndex >= options.preload) {
if (index - currentIndex >= options.preload)
return;
}
loadImage(index + 1, function () {
preloadNext(index + 1);
});
}
function preloadPrev(index) {
if (currentIndex - index >= options.preload) {
if (currentIndex - index >= options.preload)
return;
}
loadImage(index - 1, function () {
preloadPrev(index - 1);
});
@@ -566,7 +713,8 @@ window.baguetteBox = (function () {
unbindEvents();
clearCachedData();
unbind(document, 'keydown', keyDownHandler);
document.getElementsByTagName('body')[0].removeChild(ebi('baguetteBox-overlay'));
unbind(document, 'keyup', keyUpHandler);
document.getElementsByTagName('body')[0].removeChild(ebi('bbox-overlay'));
data = {};
currentGallery = [];
currentIndex = 0;
@@ -577,6 +725,8 @@ window.baguetteBox = (function () {
show: show,
showNext: showNextImage,
showPrevious: showPreviousImage,
relseek: relseek,
playpause: playpause,
hide: hideOverlay,
destroy: destroyPlugin
};

View File: copyparty/web/browser.css

@@ -25,34 +25,121 @@ html, body {
body {
padding-bottom: 5em;
}
#tt {
pre, code, tt {
font-family: monospace, monospace;
}
#tt, #toast {
position: fixed;
max-width: 34em;
background: #222;
border: 0 solid #555;
overflow: hidden;
margin-top: 1em;
padding: 0 1em;
height: 0;
opacity: .1;
transition: opacity 0.14s, height 0.14s, padding 0.14s;
border: 0 solid #777;
box-shadow: 0 .2em .5em #222;
border-radius: .4em;
z-index: 9001;
}
#tt.show {
padding: 1em;
height: auto;
border-width: .2em 0;
#tt {
overflow: hidden;
margin-top: 1em;
padding: 0 1.3em;
height: 0;
opacity: .1;
transition: opacity 0.14s, height 0.14s, padding 0.14s;
}
#toast {
top: 1.4em;
right: -1em;
line-height: 1.5em;
padding: 1em 1.3em;
border-width: .4em 0;
transform: translateX(100%);
transition:
transform .4s cubic-bezier(.2, 1.2, .5, 1),
right .4s cubic-bezier(.2, 1.2, .5, 1);
text-shadow: 1px 1px 0 #000;
color: #fff;
}
#toastc {
display: inline-block;
position: absolute;
overflow: hidden;
left: 0;
width: 0;
opacity: 0;
padding: .3em 0;
margin: -.3em 0 0 0;
line-height: 1.5em;
color: #000;
border: none;
outline: none;
text-shadow: none;
border-radius: .5em 0 0 .5em;
transition: left .3s, width .3s, padding .3s, opacity .3s;
}
#toast.vis {
right: 1.3em;
transform: unset;
}
#toast.vis #toastc {
left: -2em;
width: .4em;
padding: .3em .8em;
opacity: 1;
}
#toast.inf {
background: #07a;
border-color: #0be;
}
#toast.inf #toastc {
background: #0be;
}
#toast.ok {
background: #4a0;
border-color: #8e4;
}
#toast.ok #toastc {
background: #8e4;
}
#toast.warn {
background: #970;
border-color: #fc0;
}
#toast.warn #toastc {
background: #fc0;
}
#toast.err {
background: #900;
border-color: #d06;
}
#toast.err #toastc {
background: #d06;
}
#tt.b {
padding: 0 2em;
border-radius: .5em;
box-shadow: 0 .2em 1em #000;
}
#tt.show {
padding: 1em 1.3em;
border-width: .4em 0;
height: auto;
opacity: 1;
}
#tt.show.b {
padding: 1.5em 2em;
border-width: .5em 0;
}
#tt code {
background: #3c3c3c;
padding: .2em .3em;
padding: .1em .3em;
border-top: 1px solid #777;
border-radius: .3em;
font-family: monospace, monospace;
line-height: 2em;
line-height: 1.7em;
}
#tt em {
color: #f6a;
}
#path,
#path * {
@@ -84,6 +171,10 @@ body {
padding: .3em 0;
scroll-margin-top: 45vh;
}
#files tr {
scroll-margin-top: 25vh;
scroll-margin-bottom: 20vh;
}
#files tbody div a {
color: #f5a;
}
@@ -138,8 +229,7 @@ a, #files tbody div a:last-child {
border-top: 1px solid #383838;
}
#files tbody td:nth-child(3) {
font-family: monospace;
font-size: 1.3em;
font-family: monospace, monospace;
text-align: right;
padding-right: 1em;
white-space: nowrap;
@@ -199,15 +289,31 @@ a, #files tbody div a:last-child {
margin: .8em 0;
}
#srv_info {
opacity: .5;
font-size: .8em;
color: #fc5;
color: #a73;
background: #333;
position: absolute;
top: .5em;
font-size: .8em;
top: .5em;
left: 2em;
padding-right: .5em;
}
#srv_info span {
color: #fff;
color: #aaa;
}
#acc_info {
position: absolute;
font-size: .81em;
top: .5em;
right: 2em;
color: #999;
}
#acc_info span {
color: #999;
margin-right: .6em;
}
#acc_info span.warn {
color: #f4c;
border-bottom: 1px solid rgba(255,68,204,0.6);
}
#files tbody a.play {
color: #e70;
@@ -234,6 +340,7 @@ html.light #ggrid a.sel {
border-color: #c37;
}
#files tbody tr.sel:hover td,
#files tbody tr.sel:focus td,
#ggrid a.sel:hover,
html.light #ggrid a.sel:hover {
color: #fff;
@@ -268,6 +375,21 @@ html.light #ggrid a.sel {
color: #fff;
text-shadow: 0 0 1px #fff;
}
#files tr:focus {
outline: none;
position: relative;
}
#files tr:focus td {
background: #111;
border-color: #fc0 #111 #fc0 #111;
box-shadow: 0 .2em 0 #fc0, 0 -.2em 0 #fc0;
}
#files tr:focus td:first-child {
box-shadow: -.2em .2em 0 #fc0, -.2em -.2em 0 #fc0;
}
#files tr:focus+tr td {
border-top: 1px solid transparent;
}
#blocked {
position: fixed;
top: 0;
@@ -311,6 +433,7 @@ html.light #ggrid a.sel {
height: 6em;
width: 100%;
z-index: 3;
touch-action: none;
transition: bottom 0.15s;
}
#widget.open {
@@ -324,10 +447,18 @@ html.light #ggrid a.sel {
height: 100%;
background: #3c3c3c;
}
#wtgrid,
#wtico {
cursor: url(/.cpr/dd/4.png), pointer;
animation: cursor 500ms;
position: relative;
top: -.06em;
}
#wtgrid {
font-size: .8em;
top: -.12em;
}
#wtgrid:hover,
#wtico:hover {
animation: cursor 500ms infinite;
}
@@ -343,9 +474,9 @@ html.light #ggrid a.sel {
}
#wtoggle {
position: absolute;
white-space: nowrap;
top: -1.2em;
right: 0;
width: 1.2em;
height: 1em;
font-size: 2em;
line-height: 1em;
@@ -354,7 +485,7 @@ html.light #ggrid a.sel {
background: #3c3c3c;
box-shadow: 0 0 .5em #222;
border-radius: .3em 0 0 0;
padding: .2em 0 0 .07em;
padding: .2em .2em;
color: #fff;
}
#wzip, #wnp {
@@ -376,12 +507,6 @@ html.light #ggrid a.sel {
#wtoggle * {
line-height: 1em;
}
#wtoggle.np {
width: 5.5em;
}
#wtoggle.sel {
width: 6.4em;
}
#wtoggle.sel #wzip,
#wtoggle.np #wnp {
display: inline-block;
@@ -389,15 +514,42 @@ html.light #ggrid a.sel {
#wtoggle.sel.np #wnp {
display: none;
}
#wfm a,
#wzip a {
font-size: .4em;
font-size: .5em;
padding: 0 .3em;
margin: -.3em .2em;
position: relative;
display: inline-block;
}
#wzip a+a {
margin-left: .8em;
#wfm span {
font-size: .6em;
display: block;
}
#wfm a:not(.en) {
opacity: .3;
color: #f6c;
}
html.light #wfm a:not(.en) {
color: #c4a;
}
#files tbody tr.c1 td {
animation: fcut1 .5s ease-out;
}
#files tbody tr.c2 td {
animation: fcut2 .5s ease-out;
}
@keyframes fcut1 {
0% {opacity:0}
100% {opacity:1}
}
@keyframes fcut2 {
0% {opacity:0}
100% {opacity:1}
}
#wzip a {
font-size: .4em;
margin: -.3em .3em;
}
#wtoggle.sel #wzip #selzip {
top: -.6em;
@@ -463,6 +615,17 @@ html.light #ggrid a.sel {
max-width: 9em;
}
}
@media (max-width: 35em) {
#ops>a[data-dest="new_md"],
#ops>a[data-dest="msg"] {
display: none;
}
#op_mkdir.act+div,
#op_mkdir.act+div+div {
display: block;
margin-top: 1em;
}
}
@@ -535,7 +698,9 @@ html.light #ggrid a.sel {
border-radius: .2em;
padding: .2em .3em;
}
.opview input.err {
.opview input.err,
html.light .opview input[type="text"].err {
color: #fff;
background: #a20;
border-color: #f00;
box-shadow: 0 0 .7em #f00;
@@ -548,6 +713,12 @@ input[type="checkbox"]+label {
input[type="checkbox"]:checked+label {
color: #fc5;
}
input[type="radio"]:checked+label {
color: #fc0;
}
html.light input[type="radio"]:checked+label {
color: #07c;
}
input.eq_gain {
width: 3em;
text-align: center;
@@ -598,7 +769,7 @@ input.eq_gain {
#srch_q {
white-space: pre;
color: #f80;
height: 1em;
min-height: 1em;
margin: .2em 0 -1em 1.6em;
}
#tq_raw {
@@ -656,6 +827,7 @@ input.eq_gain {
}
#thx_ff {
padding: 5em 0;
/* widget */
}
#tree::-webkit-scrollbar-track,
#tree::-webkit-scrollbar {
@@ -716,10 +888,10 @@ input.eq_gain {
#treeul a.hl {
color: #400;
background: #fc4;
border-radius: .3em;
text-shadow: none;
}
#treeul a {
border-radius: .3em;
display: inline-block;
}
#treeul a+a {
@@ -751,9 +923,14 @@ input.eq_gain {
display: block;
width: 1em;
border-radius: .2em;
margin: -1.3em auto 0 auto;
margin: -1.2em auto 0 auto;
top: 2em;
position: relative;
background: #444;
}
#files th span {
position: relative;
}
#files>thead>tr>th.min,
#files td.min {
display: none;
@@ -801,10 +978,14 @@ input.eq_gain {
padding: 0;
border-bottom: 1px solid #555;
}
#thumbs {
#thumbs,
#au_osd_cv,
#u2tdate {
opacity: .3;
}
#griden.on+#thumbs {
#griden.on+#thumbs,
#au_os_ctl.on+#au_osd_cv,
#u2turbo.on+#u2tdate {
opacity: 1;
}
#ghead {
@@ -876,7 +1057,8 @@ html.light #ggrid a:hover {
#pvol,
#barbuf,
#barpos,
#u2conf label {
#u2conf label,
#ops {
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
@@ -909,13 +1091,19 @@ html.light {
}
html.light #tt {
background: #fff;
border-color: #888;
border-color: #888 #000 #777 #000;
}
html.light #tt,
html.light #toast {
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
}
html.light #tt code {
background: #060;
color: #fff;
}
html.light #tt em {
color: #d38;
}
html.light #ops,
html.light .opbox,
html.light #srch_form {
@@ -946,10 +1134,14 @@ html.light .tgl.btn.on {
}
html.light #srv_info {
color: #c83;
background: #eee;
}
html.light #srv_info,
html.light #acc_info {
text-shadow: 1px 1px 0 #fff;
}
html.light #srv_info span {
color: #000;
color: #777;
}
html.light #treeul a+a {
background: inherit;
@@ -959,8 +1151,14 @@ html.light #treeul a.hl {
background: #07a;
color: #fff;
}
html.light #treeul a.hl:hover {
background: #059;
}
html.light #tree li {
border-color: #ddd #fff #f7f7f7 #fff;
border-color: #f7f7f7 #fff #ddd #fff;
}
html.light #tree a:hover {
background: #fff;
}
html.light #tree ul {
border-color: #ccc;
@@ -978,18 +1176,29 @@ html.light #files {
}
html.light #files thead th {
background: #eee;
border-right: 1px solid #ccc;
border-bottom: 1px solid #ccc;
border: 1px solid #ccc;
border-top: none;
}
html.light #files thead th {
html.light #files thead th+th {
border-left: 1px solid #f7f7f7;
}
html.light #files td {
border-color: #ddd #fff #fff #ddd;
border-color: #fff #fff #ddd #ddd;
}
html.light #files tbody tr:last-child td {
border-bottom: .2em solid #ccc;
}
html.light #files tr:focus td {
background: #fff;
border-color: #c37;
box-shadow: 0 .2em 0 #e80 , 0 -.2em 0 #e80;
}
html.light #files tr:focus td:first-child {
box-shadow: -.2em .2em 0 #e80, -.2em -.2em 0 #e80;
}
html.light #files tr.sel td {
background: #925;
}
html.light #files td:nth-child(2n) {
color: #d38;
}
@@ -1040,7 +1249,11 @@ html.light #wzip,
html.light #wnp {
border-color: #ccc;
}
html.light #files tr.sel:hover td {
html.light #barbuf {
background: none;
}
html.light #files tr.sel:hover td,
html.light #files tr.sel:focus td {
background: #c37;
}
html.light #files tr.sel td {
@@ -1107,67 +1320,76 @@ html.light #tree::-webkit-scrollbar {
#baguetteBox-overlay {
#bbox-overlay {
display: none;
opacity: 0;
position: fixed;
overflow: hidden;
touch-action: none;
top: 0;
left: 0;
width: 100%;
height: 100%;
z-index: 1000000;
z-index: 10;
background: rgba(0, 0, 0, 0.8);
transition: opacity .3s ease;
}
#baguetteBox-overlay.visible {
#bbox-overlay.visible {
opacity: 1;
}
#baguetteBox-overlay .full-image {
.full-image {
display: inline-block;
position: relative;
width: 100%;
height: 100%;
text-align: center;
}
#baguetteBox-overlay .full-image figure {
.full-image figure {
display: inline;
margin: 0;
height: 100%;
}
#baguetteBox-overlay .full-image img {
.full-image img,
.full-image video {
display: inline-block;
width: auto;
height: auto;
max-height: 100%;
max-width: 100%;
max-height: 100%;
max-height: calc(100% - 1.4em);
margin-bottom: 1.4em;
vertical-align: middle;
box-shadow: 0 0 8px rgba(0, 0, 0, 0.6);
}
#baguetteBox-overlay .full-image figcaption {
.full-image video {
background: #333;
}
.full-image figcaption {
display: block;
position: absolute;
bottom: 0;
position: fixed;
bottom: .1em;
width: 100%;
text-align: center;
line-height: 1.8;
white-space: normal;
color: #ccc;
}
#baguetteBox-overlay figcaption a {
#bbox-overlay figcaption a {
background: rgba(0, 0, 0, 0.6);
border-radius: .4em;
padding: .3em .6em;
}
#baguetteBox-overlay .full-image:before {
html.light #bbox-overlay figcaption a {
color: #0bf;
}
.full-image:before {
content: "";
display: inline-block;
height: 50%;
width: 1px;
margin-right: -1px;
}
#baguetteBox-slider {
position: absolute;
#bbox-slider {
position: fixed;
left: 0;
top: 0;
height: 100%;
@@ -1175,10 +1397,10 @@ html.light #tree::-webkit-scrollbar {
white-space: nowrap;
transition: left .2s ease, transform .2s ease;
}
#baguetteBox-slider.bounce-from-right {
.bounce-from-right {
animation: bounceFromRight .4s ease-out;
}
#baguetteBox-slider.bounce-from-left {
.bounce-from-left {
animation: bounceFromLeft .4s ease-out;
}
@keyframes bounceFromRight {
@@ -1191,48 +1413,63 @@ html.light #tree::-webkit-scrollbar {
50% {margin-left: 30px}
100% {margin-left: 0}
}
.baguetteBox-button#next-button,
.baguetteBox-button#previous-button {
#bbox-next,
#bbox-prev {
top: 50%;
top: calc(50% - 30px);
width: 44px;
height: 60px;
}
.baguetteBox-button {
position: absolute;
.bbox-btn {
position: fixed;
}
#bbox-overlay button {
cursor: pointer;
outline: none;
padding: 0;
margin: 0;
padding: 0 .3em;
margin: 0 .4em;
border: 0;
border-radius: 15%;
background: rgba(50, 50, 50, 0.5);
color: #ddd;
font: 1.6em sans-serif;
color: rgba(255,255,255,0.7);
transition: background-color .3s ease;
transition: color .3s ease;
font-size: 1.4em;
line-height: 1.4em;
vertical-align: top;
}
.baguetteBox-button:focus,
.baguetteBox-button:hover {
#bbox-overlay button:focus,
#bbox-overlay button:hover {
color: rgba(255,255,255,0.9);
background: rgba(50, 50, 50, 0.9);
}
#next-button {
#bbox-next {
right: 1%;
}
#bbox-prev {
left: 1%;
}
#bbox-btns {
top: .5em;
right: 2%;
position: fixed;
}
#previous-button {
left: 2%;
}
#close-button {
top: 20px;
right: 2%;
width: 30px;
height: 30px;
}
.baguetteBox-button svg {
#bbox-halp {
color: #fff;
background: #333;
position: absolute;
left: 0;
top: 0;
left: 0;
z-index: 20;
padding: .4em;
}
.baguetteBox-spinner {
#bbox-halp td {
padding: .2em .5em;
}
#bbox-halp td:first-child {
text-align: right;
}
.bbox-spinner {
width: 40px;
height: 40px;
display: inline-block;
@@ -1242,8 +1479,8 @@ html.light #tree::-webkit-scrollbar {
margin-top: -20px;
margin-left: -20px;
}
.baguetteBox-double-bounce1,
.baguetteBox-double-bounce2 {
.bbox-double-bounce1,
.bbox-double-bounce2 {
width: 100%;
height: 100%;
border-radius: 50%;
@@ -1254,7 +1491,7 @@ html.light #tree::-webkit-scrollbar {
left: 0;
animation: bounce 2s infinite ease-in-out;
}
.baguetteBox-double-bounce2 {
.bbox-double-bounce2 {
animation-delay: -1s;
}
@keyframes bounce {

View File: copyparty/web/browser.html

@@ -2,140 +2,137 @@
<html lang="en">
<head>
<meta charset="utf-8">
<title>⇆🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css{{ ts }}">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css{{ ts }}">
{%- if css %}
<link rel="stylesheet" type="text/css" media="screen" href="{{ css }}{{ ts }}">
{%- endif %}
<meta charset="utf-8">
<title>⇆🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css?_={{ ts }}">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css?_={{ ts }}">
{%- if css %}
<link rel="stylesheet" type="text/css" media="screen" href="{{ css }}?_={{ ts }}">
{%- endif %}
</head>
<body>
<div id="ops">
<a href="#" data-dest="" tt="close submenu">---</a>
{%- if have_up2k_idx %}
<a href="#" data-perm="read" data-dest="search" tt="search for files by attributes, path/name, music tags, or any combination of those.&lt;br /&gt;&lt;br /&gt;&lt;code&gt;foo bar&lt;/code&gt; = must contain both foo and bar,&lt;br /&gt;&lt;code&gt;foo -bar&lt;/code&gt; = must contain foo but not bar,&lt;br /&gt;&lt;code&gt;^yana .opus$&lt;/code&gt; = must start with yana and have the opus extension">🔎</a>
<a href="#" data-dest="up2k" tt="up2k: upload files (if you have write-access) or toggle into the search-mode and drag files onto the search button to see if they exist somewhere on the server">🚀</a>
{%- else %}
<a href="#" data-perm="write" data-dest="up2k" tt="up2k: upload files with resume support (close your browser and drop the same files in later)">🚀</a>
{%- endif %}
<a href="#" data-perm="write" data-dest="bup" tt="bup: basic uploader, even supports netscape 4.0">🎈</a>
<a href="#" data-perm="write" data-dest="mkdir" tt="mkdir: create a new directory">📂</a>
<a href="#" data-perm="read write" data-dest="new_md" tt="new-md: create a new markdown document">📝</a>
<a href="#" data-perm="write" data-dest="msg" tt="msg: send a message to the server log">📟</a>
<a href="#" data-dest="player" tt="media player options">🎺</a>
<a href="#" data-dest="cfg" tt="configuration options">⚙️</a>
<div id="opdesc"></div>
</div>
<div id="ops"></div>
<div id="op_search" class="opview">
{%- if have_tags_idx %}
<div id="srch_form" class="tags"></div>
{%- else %}
<div id="srch_form"></div>
{%- endif %}
<div id="srch_q"></div>
</div>
<div id="op_search" class="opview">
{%- if have_tags_idx %}
<div id="srch_form" class="tags"></div>
{%- else %}
<div id="srch_form"></div>
{%- endif %}
<div id="srch_q"></div>
</div>
<div id="op_player" class="opview opbox opwide"></div>
<div id="op_player" class="opview opbox opwide"></div>
{%- include 'upload.html' %}
<div id="op_bup" class="opview opbox act">
<div id="u2err"></div>
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="bput" />
<input type="file" name="f" multiple><br />
<input type="submit" value="start upload">
</form>
</div>
<div id="op_cfg" class="opview opbox opwide">
<div>
<h3>switches</h3>
<div>
<a id="tooltips" class="tgl btn" href="#" tt="◔ ◡ ◔"> tooltips</a>
<a id="lightmode" class="tgl btn" href="#">☀️ lightmode</a>
<a id="griden" class="tgl btn" href="#" tt="toggle icons or list-view$NHotkey: G">田 the grid</a>
<a id="thumbs" class="tgl btn" href="#" tt="in icon view, toggle icons or thumbnails$NHotkey: T">🖼️ thumbs</a>
</div>
</div>
{%- if have_zip %}
<div><h3>folder download</h3><div id="arc_fmt"></div></div>
{%- endif %}
<div><h3>key notation</h3><div id="key_notation"></div></div>
<div class="fill"><h3>hidden columns</h3><div id="hcols"></div></div>
</div>
<h1 id="path">
<a href="#" id="entree" tt="show directory tree$NHotkey: B">🌲</a>
{%- for n in vpnodes %}
<a href="/{{ n[0] }}">{{ n[1] }}</a>
{%- endfor %}
</h1>
<div id="tree">
<div id="treeh">
<a href="#" id="detree" tt="show breadcrumbs$NHotkey: B">🍞...</a>
<a href="#" class="btn" step="2" id="twobytwo">+</a>
<a href="#" class="btn" step="-2" id="twig">&ndash;</a>
<a href="#" class="tgl btn" id="dyntree" tt="autogrow as tree expands">a</a>
</div>
<ul id="treeul"></ul>
<div id="thx_ff">&nbsp;</div>
</div>
<div id="op_mkdir" class="opview opbox act">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="mkdir" />
📂<input type="text" name="name" size="30">
<input type="submit" value="make directory">
</form>
</div>
<div id="op_new_md" class="opview opbox">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="new_md" />
📝<input type="text" name="name" size="30">
<input type="submit" value="new markdown doc">
</form>
</div>
<div id="op_msg" class="opview opbox act">
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
📟<input type="text" name="msg" size="30">
<input type="submit" value="send msg to server log">
</form>
</div>
<div id="op_up2k" class="opview"></div>
<div id="op_cfg" class="opview opbox opwide"></div>
<h1 id="path">
<a href="#" id="entree" tt="show navpane (directory tree sidebar)$NHotkey: B">🌲</a>
{%- for n in vpnodes %}
<a href="/{{ n[0] }}">{{ n[1] }}</a>
{%- endfor %}
</h1>
<div id="tree"></div>
<div id="wrap">
<div id="pro" class="logue">{{ logues[0] }}</div>
<div id="pro" class="logue">{{ logues[0] }}</div>
<table id="files">
<thead>
<tr>
<th name="lead"><span>c</span></th>
<th name="href"><span>File Name</span></th>
<th name="sz" sort="int"><span>Size</span></th>
{%- for k in taglist %}
{%- if k.startswith('.') %}
<th name="tags/{{ k }}" sort="int"><span>{{ k[1:] }}</span></th>
{%- else %}
<th name="tags/{{ k }}"><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
{%- endif %}
{%- endfor %}
<th name="ext"><span>T</span></th>
<th name="ts"><span>Date</span></th>
</tr>
</thead>
<tbody>
<table id="files">
<thead>
<tr>
<th name="lead"><span>c</span></th>
<th name="href"><span>File Name</span></th>
<th name="sz" sort="int"><span>Size</span></th>
{%- for k in taglist %}
{%- if k.startswith('.') %}
<th name="tags/{{ k }}" sort="int"><span>{{ k[1:] }}</span></th>
{%- else %}
<th name="tags/{{ k }}"><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
{%- endif %}
{%- endfor %}
<th name="ext"><span>T</span></th>
<th name="ts"><span>Date</span></th>
</tr>
</thead>
<tbody>
{%- for f in files %}
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
{%- if f.tags is defined %}
{%- for k in taglist %}
<td>{{ f.tags[k] }}</td>
{%- endfor %}
{%- endif %}
<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
{%- if f.tags is defined %}
{%- for k in taglist %}
<td>{{ f.tags[k] }}</td>
{%- endfor %}
{%- endif %}
<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
{%- endfor %}
</tbody>
</table>
<div id="epi" class="logue">{{ logues[1] }}</div>
</tbody>
</table>
<div id="epi" class="logue">{{ logues[1] }}</div>
<h2><a href="?h">control-panel</a></h2>
<h2><a href="/?h">control-panel</a></h2>
</div>
{%- if srv_info %}
<div id="srv_info"><span>{{ srv_info }}</span></div>
{%- endif %}
{%- if srv_info %}
<div id="srv_info"><span>{{ srv_info }}</span></div>
{%- endif %}
<div id="widget"></div>
<div id="widget"></div>
<script>
var tag_order_cfg = {{ tag_order }};
</script>
<script src="/.cpr/util.js{{ ts }}"></script>
<script src="/.cpr/browser.js{{ ts }}"></script>
<script src="/.cpr/up2k.js{{ ts }}"></script>
<script>
apply_perms({{ perms }});
</script>
<script>
var acct = "{{ acct }}",
perms = {{ perms }},
tag_order_cfg = {{ tag_order }},
have_up2k_idx = {{ have_up2k_idx|tojson }},
have_tags_idx = {{ have_tags_idx|tojson }},
have_mv = {{ have_mv|tojson }},
have_del = {{ have_del|tojson }},
have_zip = {{ have_zip|tojson }};
</script>
<script src="/.cpr/util.js?_={{ ts }}"></script>
<script src="/.cpr/browser.js?_={{ ts }}"></script>
<script src="/.cpr/up2k.js?_={{ ts }}"></script>
</body>
</html>
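The browser.html rewrite above strips most of the markup out of the template: the ops toolbar, config panel, upload/mkdir/msg forms and the navpane are now empty placeholder divs (presumably filled in by browser.js), static assets are cache-busted with ?_={{ ts }}, and server state is handed to the client as plain globals (acct, perms, tag_order_cfg, have_up2k_idx, have_tags_idx, have_mv, have_del, have_zip). Below is a minimal sketch of how client code could gate the new file-manager buttons on those globals; the helper, element ids and example values are made up for illustration, and perms is assumed to render as a list of permission names:

var perms = ["read", "write", "move", "delete"],   // example values; normally injected by the template
    have_mv = true,
    have_del = false;

function show_button(id, visible) {
    // hypothetical helper: toggle a toolbar button by element id
    var el = document.getElementById(id);
    if (el)
        el.style.display = visible ? '' : 'none';
}

function apply_fileman() {
    // a button is shown only if the server allows the feature (have_*)
    // and the current account has the matching permission in this volume
    show_button('fm_move', have_mv && perms.indexOf("move") !== -1);
    show_button('fm_del', have_del && perms.indexOf("delete") !== -1);
}

apply_fileman();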



@@ -2,59 +2,59 @@
<html lang="en">
<head>
<meta charset="utf-8">
<title>{{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<style>
html{font-family:sans-serif}
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
a{display:block}
</style>
<meta charset="utf-8">
<title>{{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<style>
html{font-family:sans-serif}
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
a{display:block}
</style>
</head>
<body>
{%- if srv_info %}
<p><span>{{ srv_info }}</span></p>
{%- endif %}
{%- if srv_info %}
<p><span>{{ srv_info }}</span></p>
{%- endif %}
{%- if have_b_u %}
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="bput" />
<input type="file" name="f" multiple /><br />
<input type="submit" value="start upload" />
</form>
<br />
{%- endif %}
{%- if have_b_u %}
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="bput" />
<input type="file" name="f" multiple /><br />
<input type="submit" value="start upload" />
</form>
<br />
{%- endif %}
{%- if logues[0] %}
<div>{{ logues[0] }}</div><br />
{%- endif %}
{%- if logues[0] %}
<div>{{ logues[0] }}</div><br />
{%- endif %}
<table id="files">
<thead>
<tr>
<th name="lead"><span>c</span></th>
<th name="href"><span>File Name</span></th>
<th name="sz" sort="int"><span>Size</span></th>
<th name="ts"><span>Date</span></th>
</tr>
</thead>
<tbody>
<tr><td></td><td><a href="../{{ url_suf }}">parent folder</a></td><td>-</td><td>-</td></tr>
<table id="files">
<thead>
<tr>
<th name="lead"><span>c</span></th>
<th name="href"><span>File Name</span></th>
<th name="sz" sort="int"><span>Size</span></th>
<th name="ts"><span>Date</span></th>
</tr>
</thead>
<tbody>
<tr><td></td><td><a href="../{{ url_suf }}">parent folder</a></td><td>-</td><td>-</td></tr>
{%- for f in files %}
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}{{ url_suf }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.dt }}</td></tr>
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}{{ url_suf }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.dt }}</td></tr>
{%- endfor %}
</tbody>
</table>
{%- if logues[1] %}
<div>{{ logues[1] }}</div><br />
{%- endif %}
<h2><a href="{{ url_suf }}{{ url_suf and '&amp;' or '?' }}h">control-panel</a></h2>
</tbody>
</table>
{%- if logues[1] %}
<div>{{ logues[1] }}</div><br />
{%- endif %}
<h2><a href="/{{ url_suf }}{{ url_suf and '&amp;' or '?' }}h">control-panel</a></h2>
</body>
</html>


@@ -8,6 +8,137 @@ html, body {
font-family: sans-serif;
line-height: 1.5em;
}
#tt, #toast {
position: fixed;
max-width: 34em;
background: #222;
border: 0 solid #777;
box-shadow: 0 .2em .5em #222;
border-radius: .4em;
z-index: 9001;
}
#tt {
overflow: hidden;
margin-top: 1em;
padding: 0 1.3em;
height: 0;
opacity: .1;
transition: opacity 0.14s, height 0.14s, padding 0.14s;
}
#toast {
top: 1.4em;
right: -1em;
line-height: 1.5em;
padding: 1em 1.3em;
border-width: .4em 0;
transform: translateX(100%);
transition:
transform .4s cubic-bezier(.2, 1.2, .5, 1),
right .4s cubic-bezier(.2, 1.2, .5, 1);
text-shadow: 1px 1px 0 #000;
color: #fff;
}
#toastc {
display: inline-block;
position: absolute;
overflow: hidden;
left: 0;
width: 0;
opacity: 0;
padding: .3em 0;
margin: -.3em 0 0 0;
line-height: 1.5em;
color: #000;
border: none;
outline: none;
text-shadow: none;
border-radius: .5em 0 0 .5em;
transition: left .3s, width .3s, padding .3s, opacity .3s;
}
#toast.vis {
right: 1.3em;
transform: unset;
}
#toast.vis #toastc {
left: -2em;
width: .4em;
padding: .3em .8em;
opacity: 1;
}
#toast.inf {
background: #07a;
border-color: #0be;
}
#toast.inf #toastc {
background: #0be;
}
#toast.ok {
background: #4a0;
border-color: #8e4;
}
#toast.ok #toastc {
background: #8e4;
}
#toast.warn {
background: #970;
border-color: #fc0;
}
#toast.warn #toastc {
background: #fc0;
}
#toast.err {
background: #900;
border-color: #d06;
}
#toast.err #toastc {
background: #d06;
}
#tt.b {
padding: 0 2em;
border-radius: .5em;
box-shadow: 0 .2em 1em #000;
}
#tt.show {
padding: 1em 1.3em;
border-width: .4em 0;
height: auto;
opacity: 1;
}
#tt.show.b {
padding: 1.5em 2em;
border-width: .5em 0;
}
#tt code {
background: #3c3c3c;
padding: .1em .3em;
border-top: 1px solid #777;
border-radius: .3em;
line-height: 1.7em;
}
#tt em {
color: #f6a;
}
html.light #tt {
background: #fff;
border-color: #888 #000 #777 #000;
}
html.light #tt,
html.light #toast {
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
}
html.light #tt code {
background: #060;
color: #fff;
}
html.light #tt em {
color: #d38;
}
#mtw {
display: none;
}
@@ -26,7 +157,7 @@ pre, code, a {
code {
font-size: .96em;
}
pre, code {
pre, code, tt {
font-family: 'scp', monospace, monospace;
white-space: pre-wrap;
word-break: break-all;
@@ -166,7 +297,7 @@ small {
z-index: 99;
position: relative;
display: inline-block;
font-family: monospace, monospace;
font-family: 'scp', monospace, monospace;
font-weight: bold;
font-size: 1.3em;
line-height: .1em;


@@ -3,9 +3,9 @@
<title>📝🎉 {{ title }}</title> <!-- 📜 -->
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7">
<link href="/.cpr/md.css" rel="stylesheet">
<link href="/.cpr/md.css?_={{ ts }}" rel="stylesheet">
{%- if edit %}
<link href="/.cpr/md2.css" rel="stylesheet">
<link href="/.cpr/md2.css?_={{ ts }}" rel="stylesheet">
{%- endif %}
</head>
<body>
@@ -14,9 +14,9 @@
<a id="lightswitch" href="#">go dark</a>
<a id="navtoggle" href="#">hide nav</a>
{%- if edit %}
<a id="save" href="?edit">save</a>
<a id="sbs" href="#">sbs</a>
<a id="nsbs" href="#">editor</a>
<a id="save" href="?edit" tt="Hotkey: ctrl-s">save</a>
<a id="sbs" href="#" tt="editor and preview side by side">sbs</a>
<a id="nsbs" href="#" tt="switch between editor and preview$NHotkey: ctrl-e">editor</a>
<div id="toolsbox">
<a id="tools" href="#">tools</a>
<a id="fmt_table" href="#">prettify table (ctrl-k)</a>
@@ -26,8 +26,8 @@
<a id="help" href="#">help</a>
</div>
{%- else %}
<a href="?edit">edit (basic)</a>
<a href="?edit2">edit (fancy)</a>
<a href="?edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
<a href="?edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
<a href="?raw">view raw</a>
{%- endif %}
</div>
@@ -131,25 +131,25 @@ var md_opt = {
};
(function () {
var btn = document.getElementById("lightswitch");
var toggle = function (e) {
if (e) e.preventDefault();
var dark = !document.documentElement.getAttribute("class");
document.documentElement.setAttribute("class", dark ? "dark" : "");
btn.innerHTML = "go " + (dark ? "light" : "dark");
if (window.localStorage)
localStorage.setItem('lightmode', dark ? 0 : 1);
};
btn.onclick = toggle;
if (window.localStorage && localStorage.getItem('lightmode') != 1)
toggle();
var l = localStorage,
drk = l.getItem('lightmode') != 1,
btn = document.getElementById("lightswitch"),
f = function (e) {
if (e) { e.preventDefault(); drk = !drk; }
document.documentElement.setAttribute("class", drk? "dark":"light");
btn.innerHTML = "go " + (drk ? "light":"dark");
l.setItem('lightmode', drk? 0:1);
};
btn.onclick = f;
f();
})();
</script>
<script src="/.cpr/util.js"></script>
<script src="/.cpr/deps/marked.js"></script>
<script src="/.cpr/md.js"></script>
<script src="/.cpr/util.js?_={{ ts }}"></script>
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
<script src="/.cpr/md.js?_={{ ts }}"></script>
{%- if edit %}
<script src="/.cpr/md2.js"></script>
<script src="/.cpr/md2.js?_={{ ts }}"></script>
{%- endif %}
</body></html>


@@ -176,7 +176,7 @@ function md_plug_err(ex, js) {
var lns = js.split('\n');
if (ln < lns.length) {
o = mknod('span');
o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block';
o.style.cssText = "color:#ac2;font-size:.9em;font-family:'scp',monospace,monospace;display:block";
o.textContent = lns[ln - 1];
}
}
@@ -530,3 +530,6 @@ dom_navtgl.onclick = function () {
if (sread('hidenav') == 1)
dom_navtgl.onclick();
if (window['tt'])
tt.init();


@@ -84,13 +84,10 @@ html.dark #save.force-save {
#save.disabled {
opacity: .4;
}
#helpbox,
#toast {
#helpbox {
background: #f7f7f7;
border-radius: .4em;
z-index: 9001;
}
#helpbox {
display: none;
position: fixed;
padding: 2em;
@@ -107,19 +104,7 @@ html.dark #save.force-save {
}
html.dark #helpbox {
box-shadow: 0 .5em 2em #444;
}
html.dark #helpbox,
html.dark #toast {
background: #222;
border: 1px solid #079;
border-width: 1px 0;
}
#toast {
font-weight: bold;
text-align: center;
padding: .6em 0;
position: fixed;
top: 30%;
transition: opacity 0.2s ease-in-out;
opacity: 1;
}


@@ -236,7 +236,7 @@ function Modpoll() {
var skip = null;
if (ebi('toast'))
if (toast.visible)
skip = 'toast';
else if (this.skip_one)
@@ -291,10 +291,9 @@ function Modpoll() {
"Press F5 or CTRL-R to refresh the page,<br />" +
"replacing your document with the server copy.",
"You can click this message to ignore and contnue."
"You can close this message to ignore and contnue."
];
return toast(false, "box-shadow:0 1em 2em rgba(64,64,64,0.8);font-weight:normal",
36, "<p>" + msg.join('</p>\n<p>') + '</p>');
return toast.warn(0, "<p>" + msg.join('</p>\n<p>') + '</p>');
}
console.log('modpoll eq');
@@ -323,16 +322,12 @@ function save(e) {
var save_btn = ebi("save"),
save_cls = save_btn.getAttribute('class') + '';
if (save_cls.indexOf('disabled') >= 0) {
toast(true, ";font-size:2em;color:#c90", 9, "no changes");
return;
}
if (save_cls.indexOf('disabled') >= 0)
return toast.inf(2, "no changes");
var force = (save_cls.indexOf('force-save') >= 0);
if (force && !confirm('confirm that you wish to lose the changes made on the server since you opened this document')) {
alert('ok, aborted');
return;
}
if (force && !confirm('confirm that you wish to lose the changes made on the server since you opened this document'))
return toast.inf(3, 'aborted');
var txt = dom_src.value;
@@ -357,18 +352,15 @@ function save_cb() {
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.status !== 200) {
alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
return;
}
if (this.status !== 200)
return alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
var r;
try {
r = JSON.parse(this.responseText);
}
catch (ex) {
alert('Failed to parse reply from server:\n\n' + this.responseText);
return;
return alert('Failed to parse reply from server:\n\n' + this.responseText);
}
if (!r.ok) {
@@ -443,46 +435,10 @@ function savechk_cb() {
last_modified = this.lastmod;
server_md = this.txt;
draw_md();
toast(true, ";font-size:6em;font-family:serif;color:#9b4", 4,
'OK✔<span style="font-size:.2em;color:#999;position:absolute">' + this.ntry + '</span>');
toast.ok(2, 'save OK' + (this.ntry ? '\nattempt ' + this.ntry : ''));
modpoll.disabled = false;
}
function toast(autoclose, style, width, msg) {
var ok = ebi("toast");
if (ok)
ok.parentNode.removeChild(ok);
style = "width:" + width + "em;left:calc(50% - " + (width / 2) + "em);" + style;
ok = mknod('div');
ok.setAttribute('id', 'toast');
ok.setAttribute('style', style);
ok.innerHTML = msg;
var parent = ebi('m');
document.documentElement.appendChild(ok);
var hide = function (delay) {
delay = delay || 0;
setTimeout(function () {
ok.style.opacity = 0;
}, delay);
setTimeout(function () {
if (ok.parentNode)
ok.parentNode.removeChild(ok);
}, delay + 250);
}
ok.onclick = function () {
hide(0);
};
if (autoclose)
hide(500);
}
// firefox bug: initial selection offset isn't cleared properly through js
var ff_clearsel = (function () {
@@ -761,7 +717,7 @@ function fmt_table(e) {
var ind2 = tab[a].match(re_ind)[0];
if (ind != ind2 && a != 1) // the table can be a list entry or something, ignore [0]
return alert(err + 'indentation mismatch on row#2 and ' + row_name + ',\n' + tab[a]);
return toast.err(7, err + 'indentation mismatch on row#2 and ' + row_name + ',\n' + tab[a]);
var t = tab[a].slice(ind.length);
t = t.replace(re_lpipe, "");
@@ -771,7 +727,7 @@ function fmt_table(e) {
if (a == 0)
ncols = tab[a].length;
else if (ncols < tab[a].length)
return alert(err + 'num.columns(' + row_name + ') exceeding row#2; ' + ncols + ' < ' + tab[a].length);
return toast.err(7, err + 'num.columns(' + row_name + ') exceeding row#2; ' + ncols + ' < ' + tab[a].length);
// if row has less columns than row2, fill them in
while (tab[a].length < ncols)
@@ -788,7 +744,7 @@ function fmt_table(e) {
for (var col = 0; col < tab[1].length; col++) {
var m = tab[1][col].match(re_align);
if (!m)
return alert(err + 'invalid column specification, row#2, col ' + (col + 1) + ', [' + tab[1][col] + ']');
return toast.err(7, err + 'invalid column specification, row#2, col ' + (col + 1) + ', [' + tab[1][col] + ']');
if (m[2]) {
if (m[1])
@@ -876,10 +832,9 @@ function mark_uni(e) {
ptn = new RegExp('([^' + js_uni_whitelist + ']+)', 'g'),
mod = txt.replace(/\r/g, "").replace(ptn, "\u2588\u2770$1\u2771");
if (txt == mod) {
alert('no results; no modifications were made');
return;
}
if (txt == mod)
return toast.inf(5, 'no results; no modifications were made');
dom_src.value = mod;
}
@@ -893,10 +848,9 @@ function iter_uni(e) {
re = new RegExp('([^' + js_uni_whitelist + ']+)'),
m = re.exec(txt.slice(ofs));
if (!m) {
alert('no more hits from cursor onwards');
return;
}
if (!m)
return toast.inf(5, 'no more hits from cursor onwards');
ofs += m.index;
dom_src.setSelectionRange(ofs, ofs + m[0].length, "forward");
@@ -924,10 +878,9 @@ function cfg_uni(e) {
(function () {
function keydown(ev) {
ev = ev || window.event;
var kc = ev.keyCode || ev.which;
var ctrl = ev.ctrlKey || ev.metaKey;
//console.log(ev.code, kc);
if (ctrl && (ev.code == "KeyS" || kc == 83)) {
var kc = ev.code || ev.keyCode || ev.which;
//console.log(ev.key, ev.code, ev.keyCode, ev.which);
if (ctrl(ev) && (ev.code == "KeyS" || kc == 83)) {
save();
return false;
}
@@ -936,23 +889,15 @@ function cfg_uni(e) {
if (d)
d.click();
}
if (document.activeElement == dom_src) {
if (ev.code == "Tab" || kc == 9) {
md_indent(ev.shiftKey);
return false;
}
if (ctrl && (ev.code == "KeyH" || kc == 72)) {
if (document.activeElement != dom_src)
return true;
if (ctrl(ev)) {
if (ev.code == "KeyH" || kc == 72) {
md_header(ev.shiftKey);
return false;
}
if (!ctrl && (ev.code == "Home" || kc == 36)) {
md_home(ev.shiftKey);
return false;
}
if (!ctrl && !ev.shiftKey && (ev.code == "Enter" || kc == 13)) {
return md_newline();
}
if (ctrl && (ev.code == "KeyZ" || kc == 90)) {
if (ev.code == "KeyZ" || kc == 90) {
if (ev.shiftKey)
action_stack.redo();
else
@@ -960,33 +905,45 @@ function cfg_uni(e) {
return false;
}
if (ctrl && (ev.code == "KeyY" || kc == 89)) {
if (ev.code == "KeyY" || kc == 89) {
action_stack.redo();
return false;
}
if (!ctrl && !ev.shiftKey && kc == 8) {
return md_backspace();
}
if (ctrl && (ev.code == "KeyK")) {
if (ev.code == "KeyK") {
fmt_table();
return false;
}
if (ctrl && (ev.code == "KeyU")) {
if (ev.code == "KeyU") {
iter_uni();
return false;
}
if (ctrl && (ev.code == "KeyE")) {
if (ev.code == "KeyE") {
dom_nsbs.click();
//fmt_table();
return false;
}
var up = ev.code == "ArrowUp" || kc == 38;
var dn = ev.code == "ArrowDown" || kc == 40;
if (ctrl && (up || dn)) {
if (up || dn) {
md_p_jump(dn);
return false;
}
}
else {
if (ev.code == "Tab" || kc == 9) {
md_indent(ev.shiftKey);
return false;
}
if (ev.code == "Home" || kc == 36) {
md_home(ev.shiftKey);
return false;
}
if (!ev.shiftKey && (ev.code == "Enter" || kc == 13)) {
return md_newline();
}
if (!ev.shiftKey && kc == 8) {
return md_backspace();
}
}
}
document.onkeydown = keydown;
ebi('save').onclick = save;
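The keydown refactor above switches the editor shortcuts from raw keyCodes to KeyboardEvent.code where available (keeping the numeric fallback for old browsers) and funnels all modifier checks through the shared ctrl() helper from util.js, splitting the handler into a ctrl-branch and a plain-editing branch. A condensed sketch of that dispatch shape follows; it is not a drop-in copy of the handler, and the console.log actions are placeholders:

function ctrl(e) {
    // same shape as the util.js helper: treat cmd (metaKey) on mac like ctrl
    return e && (e.ctrlKey || e.metaKey);
}

document.onkeydown = function (ev) {
    ev = ev || window.event;
    var kc = ev.code || ev.keyCode || ev.which;   // string like "KeyS" on modern browsers, number on old ones

    if (ctrl(ev)) {
        if (ev.code == "KeyS" || kc == 83) {
            console.log("would save here");       // placeholder action
            return false;                         // cancel the browser's own ctrl-s
        }
    }
    else {
        if (ev.code == "Tab" || kc == 9) {
            console.log("would indent here");     // placeholder action
            return false;
        }
    }
    return true;                                  // everything else passes through
};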


@@ -3,9 +3,9 @@
<title>📝🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7">
<link href="/.cpr/mde.css" rel="stylesheet">
<link href="/.cpr/deps/mini-fa.css" rel="stylesheet">
<link href="/.cpr/deps/easymde.css" rel="stylesheet">
<link href="/.cpr/mde.css?_={{ ts }}" rel="stylesheet">
<link href="/.cpr/deps/mini-fa.css?_={{ ts }}" rel="stylesheet">
<link href="/.cpr/deps/easymde.css?_={{ ts }}" rel="stylesheet">
</head>
<body>
<div id="mw">
@@ -30,20 +30,19 @@ var md_opt = {
};
var lightswitch = (function () {
var fun = function () {
var dark = !document.documentElement.getAttribute("class");
document.documentElement.setAttribute("class", dark ? "dark" : "");
if (window.localStorage)
localStorage.setItem('lightmode', dark ? 0 : 1);
};
if (window.localStorage && localStorage.getItem('lightmode') != 1)
fun();
return fun;
var l = localStorage,
drk = l.getItem('lightmode') != 1,
f = function (e) {
if (e) drk = !drk;
document.documentElement.setAttribute("class", drk? "dark":"light");
l.setItem('lightmode', drk? 0:1);
};
f();
return f;
})();
</script>
<script src="/.cpr/util.js"></script>
<script src="/.cpr/deps/easymde.js"></script>
<script src="/.cpr/mde.js"></script>
<script src="/.cpr/util.js?_={{ ts }}"></script>
<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
<script src="/.cpr/mde.js?_={{ ts }}"></script>
</body></html>


@@ -106,15 +106,12 @@ function md_changed(mde, on_srv) {
function save(mde) {
var save_btn = QS('.editor-toolbar button.save');
if (save_btn.classList.contains('disabled')) {
alert('there is nothing to save');
return;
}
if (save_btn.classList.contains('disabled'))
return toast.inf(2, 'no changes');
var force = save_btn.classList.contains('force-save');
if (force && !confirm('confirm that you wish to lose the changes made on the server since you opened this document')) {
alert('ok, aborted');
return;
}
if (force && !confirm('confirm that you wish to lose the changes made on the server since you opened this document'))
return toast.inf(3, 'aborted');
var txt = mde.value();
@@ -138,18 +135,15 @@ function save_cb() {
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.status !== 200) {
alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
return;
}
if (this.status !== 200)
return alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
var r;
try {
r = JSON.parse(this.responseText);
}
catch (ex) {
alert('Failed to parse reply from server:\n\n' + this.responseText);
return;
return alert('Failed to parse reply from server:\n\n' + this.responseText);
}
if (!r.ok) {


@@ -6,7 +6,7 @@
<title>copyparty</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/msg.css">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/msg.css?_={{ ts }}">
</head>
<body>


@@ -6,7 +6,7 @@
<title>copyparty</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/splash.css">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/splash.css?_={{ ts }}">
</head>
<body>
@@ -35,7 +35,7 @@
</table>
</td></tr></table>
<div class="btns">
<a href="{{ avol[0] }}?stack">dump stack</a>
<a href="/?stack">dump stack</a>
</div>
{%- endif %}
@@ -68,7 +68,7 @@
</div>
<script>
if (window.localStorage && localStorage.getItem('lightmode') != 1)
if (localStorage.getItem('lightmode') != 1)
document.documentElement.setAttribute("class", "dark");
</script>



@@ -87,8 +87,9 @@
#u2tab td:nth-child(3) {
width: 40%;
}
#op_up2k.srch #u2tab td:nth-child(3) {
#op_up2k.srch td.prog {
font-family: sans-serif;
font-size: 1em;
width: auto;
}
#u2tab tbody tr:hover td {
@@ -215,15 +216,37 @@
color: #fff;
font-style: italic;
}
#u2foot .warn {
font-size: 1.3em;
padding: .5em .8em;
margin: 1em -.6em;
color: #f74;
background: #322;
border: 1px solid #633;
border-width: .1em 0;
text-align: center;
}
#u2foot .warn span {
color: #f86;
}
html.light #u2foot .warn {
color: #b00;
background: #fca;
border-color: #f70;
}
html.light #u2foot .warn span {
color: #930;
}
#u2foot span {
color: #999;
font-size: .9em;
font-weight: normal;
}
#u2footfoot {
margin-bottom: -1em;
}
.prog {
font-family: monospace;
font-family: monospace, monospace;
}
#u2tab a>span {
font-weight: bold;
@@ -235,6 +258,11 @@
float: right;
margin-bottom: -.3em;
}
.fsearch_explain {
padding-left: .7em;
font-size: 1.1em;
line-height: 0;
}


@@ -1,101 +0,0 @@
<div id="op_bup" class="opview opbox act">
<div id="u2err"></div>
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="bput" />
<input type="file" name="f" multiple><br />
<input type="submit" value="start upload">
</form>
</div>
<div id="op_mkdir" class="opview opbox act">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="mkdir" />
<input type="text" name="name" size="30">
<input type="submit" value="mkdir">
</form>
</div>
<div id="op_new_md" class="opview opbox">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="new_md" />
<input type="text" name="name" size="30">
<input type="submit" value="create doc">
</form>
</div>
<div id="op_msg" class="opview opbox act">
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
<input type="text" name="msg" size="30">
<input type="submit" value="send msg">
</form>
</div>
<div id="op_up2k" class="opview">
<form id="u2form" method="post" enctype="multipart/form-data" onsubmit="return false;"></form>
<table id="u2conf">
<tr>
<td><br />parallel uploads:</td>
<td rowspan="2">
<input type="checkbox" id="multitask" />
<label for="multitask" tt="continue hashing other files while uploading">🏃</label>
</td>
<td rowspan="2">
<input type="checkbox" id="ask_up" />
<label for="ask_up" tt="ask for confirmation befofre upload starts">💭</label>
</td>
<td rowspan="2">
<input type="checkbox" id="flag_en" />
<label for="flag_en" tt="ensure only one tab is uploading at a time $N (other tabs must have this enabled too)">💤</label>
</td>
{%- if have_up2k_idx %}
<td data-perm="read" rowspan="2">
<input type="checkbox" id="fsearch" />
<label for="fsearch" tt="don't actually upload, instead check if the files already $N exist on the server (will scan all folders you can read)">🔎</label>
</td>
{%- endif %}
<td data-perm="read" rowspan="2" id="u2btn_cw"></td>
</tr>
<tr>
<td>
<a href="#" id="nthread_sub">&ndash;</a><input
class="txtbox" id="nthread" value="2"/><a
href="#" id="nthread_add">+</a><br />&nbsp;
</td>
</tr>
</table>
<div id="u2notbtn"></div>
<div id="u2btn_ct">
<div id="u2btn">
<span id="u2bm"></span><br />
drag/drop files<br />
and folders here<br />
(or click me)
</div>
</div>
<div id="u2cards">
<a href="#" act="ok" tt="completed successfully">ok <span>0</span></a><a
href="#" act="ng" tt="failed / rejected / not-found">ng <span>0</span></a><a
href="#" act="done" tt="ok and ng combined">done <span>0</span></a><a
href="#" act="bz" tt="hashing or uploading" class="act">busy <span>0</span></a><a
href="#" act="q" tt="idle, pending">que <span>0</span></a>
</div>
<table id="u2tab">
<thead>
<tr>
<td>filename</td>
<td>status</td>
<td>progress<a href="#" id="u2cleanup" tt="remove completed uploads$N(makes it possible to upload a file after searching for it)">cleanup</a></td>
</tr>
</thead>
<tbody></tbody>
</table>
<p id="u2foot"></p>
<p id="u2footfoot" data-perm="write">( you can use the <a href="#" id="u2nope">basic uploader</a> if you don't need lastmod timestamps, resumable uploads, or progress bars )</p>
</div>


@@ -6,21 +6,18 @@ if (!window['console'])
};
var clickev = window.Touch ? 'touchstart' : 'click',
ANDROID = /(android)/i.test(navigator.userAgent);
var is_touch = 'ontouchstart' in window,
IPHONE = /iPhone|iPad|iPod/i.test(navigator.userAgent),
ANDROID = /android/i.test(navigator.userAgent);
var ebi = document.getElementById.bind(document),
QS = document.querySelector.bind(document),
QSA = document.querySelectorAll.bind(document),
mknod = document.createElement.bind(document);
// error handler for mobile devices
function hcroak(msg) {
document.body.innerHTML = msg;
window.onerror = undefined;
throw 'fatal_err';
}
function croak(msg) {
document.body.textContent = msg;
window.onerror = undefined;
throw msg;
}
function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) {
return {
@@ -31,29 +28,65 @@ function esc(txt) {
}[c];
});
}
var crashed = false, ignexd = {};
function vis_exh(msg, url, lineNo, columnNo, error) {
window.onerror = undefined;
window['vis_exh'] = null;
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
if ((msg + '').indexOf('ResizeObserver') !== -1)
return; // chrome issue 809574 (benign, from <video>)
if (error) {
var find = ['desc', 'stack', 'trace'];
for (var a = 0; a < find.length; a++)
if (String(error[find[a]]) !== 'undefined')
html.push('<h2>' + find[a] + '</h2>' +
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
var ekey = url + '\n' + lineNo + '\n' + msg;
if (ignexd[ekey] || crashed)
return;
crashed = true;
window.onerror = undefined;
var html = ['<h1>you hit a bug!</h1><p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a></p><p>please send me a screenshot arigathanks gozaimuch: <code>ed/irc.rizon.net</code> or <code>ed#2644</code><br />&nbsp; (and if you can, press F12 and include the "Console" tab in the screenshot too)</p><p>',
esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)) + '</p>'];
try {
if (error) {
var find = ['desc', 'stack', 'trace'];
for (var a = 0; a < find.length; a++)
if (String(error[find[a]]) !== 'undefined')
html.push('<h3>' + find[a] + '</h3>' +
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
}
ignexd[ekey] = true;
html.push('<h3>localStore</h3>' + esc(JSON.stringify(localStorage)));
}
document.body.style.fontSize = '0.8em';
document.body.style.padding = '0 1em 1em 1em';
hcroak(html.join('\n'));
catch (e) { }
try {
var exbox = ebi('exbox');
if (!exbox) {
exbox = mknod('div');
exbox.setAttribute('id', 'exbox');
document.body.appendChild(exbox);
var s = mknod('style');
s.innerHTML = '#exbox{background:#333;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%} #exbox h1{margin:.5em 1em 0 0;padding:0} #exbox h3{border-top:1px solid #999;margin:1em 0 0 0} #exbox a{text-decoration:underline;color:#fc0} #exbox code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} #exbox *{line-height:1.5em}';
document.head.appendChild(s);
}
exbox.innerHTML = html.join('\n');
exbox.style.display = 'block';
}
catch (e) {
document.body.innerHTML = html.join('\n');
}
throw 'fatal_err';
}
function ignex(all) {
var o = ebi('exbox');
o.style.display = 'none';
o.innerHTML = '';
crashed = false;
if (!all)
window.onerror = vis_exh;
}
var ebi = document.getElementById.bind(document),
QS = document.querySelector.bind(document),
QSA = document.querySelectorAll.bind(document),
mknod = document.createElement.bind(document);
function ctrl(e) {
return e && (e.ctrlKey || e.metaKey);
}
function ev(e) {
@@ -67,6 +100,9 @@ function ev(e) {
if (e.stopPropagation)
e.stopPropagation();
if (e.stopImmediatePropagation)
e.stopImmediatePropagation();
e.returnValue = false;
return e;
}
@@ -87,6 +123,15 @@ if (!String.startsWith) {
return this.substring(i, i + s.length) === s;
};
}
if (!Element.prototype.closest) {
Element.prototype.closest = function (s) {
var el = this;
do {
if (el.msMatchesSelector(s)) return el;
el = el.parentElement || el.parentNode;
} while (el !== null && el.nodeType === 1);
}
}
// https://stackoverflow.com/a/950146
@@ -285,63 +330,6 @@ function makeSortable(table, cb) {
}
(function () {
var ops = QSA('#ops>a');
for (var a = 0; a < ops.length; a++) {
ops[a].onclick = opclick;
}
})();
function opclick(e) {
ev(e);
var dest = this.getAttribute('data-dest');
goto(dest);
swrite('opmode', dest || null);
var input = QS('.opview.act input:not([type="hidden"])')
if (input)
input.focus();
}
function goto(dest) {
var obj = QSA('.opview.act');
for (var a = obj.length - 1; a >= 0; a--)
clmod(obj[a], 'act');
obj = QSA('#ops>a');
for (var a = obj.length - 1; a >= 0; a--)
clmod(obj[a], 'act');
if (dest) {
var ui = ebi('op_' + dest);
clmod(ui, 'act', true);
QS('#ops>a[data-dest=' + dest + ']').className += " act";
var fn = window['goto_' + dest];
if (fn)
fn();
}
if (window['treectl'])
treectl.onscroll();
}
(function () {
goto();
var op = sread('opmode');
if (op !== null && op !== '.')
try {
goto(op);
}
catch (ex) { }
})();
function linksplit(rp) {
var ret = [];
var apath = '/';
@@ -373,6 +361,18 @@ function linksplit(rp) {
}
function vsplit(vp) {
if (vp.endsWith('/'))
vp = vp.slice(0, -1);
var ofs = vp.lastIndexOf('/') + 1,
base = vp.slice(0, ofs),
fn = vp.slice(ofs);
return [base, fn];
}
function uricom_enc(txt, do_fb_enc) {
try {
return encodeURIComponent(txt);
@@ -416,6 +416,15 @@ function get_vpath() {
}
function get_pwd() {
var pwd = ('; ' + document.cookie).split('; cppwd=');
if (pwd.length < 2)
return null;
return pwd[1].split(';')[0];
}
function unix2iso(ts) {
return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
}
@@ -437,20 +446,27 @@ function has(haystack, needle) {
}
function sread(key) {
if (window.localStorage)
return localStorage.getItem(key);
function apop(arr, v) {
var ofs = arr.indexOf(v);
if (ofs !== -1)
arr.splice(ofs, 1);
}
return null;
function jcp(obj) {
return JSON.parse(JSON.stringify(obj));
}
function sread(key) {
return localStorage.getItem(key);
}
function swrite(key, val) {
if (window.localStorage) {
if (val === undefined || val === null)
localStorage.removeItem(key);
else
localStorage.setItem(key, val);
}
if (val === undefined || val === null)
localStorage.removeItem(key);
else
localStorage.setItem(key, val);
}
function jread(key, fb) {
@@ -533,13 +549,20 @@ function hist_replace(url) {
var tt = (function () {
var r = {
"tt": mknod("div"),
"en": bcfg_get('tooltips', true),
"en": true,
"el": null,
"skip": false
};
r.tt.setAttribute('id', 'tt');
document.body.appendChild(r.tt);
function show() {
r.show = function () {
if (r.skip) {
r.skip = false;
return;
}
var cfg = sread('tooltips');
if (cfg !== null && cfg != '1')
return;
@@ -548,42 +571,127 @@ var tt = (function () {
if (!msg)
return;
r.el = this;
var pos = this.getBoundingClientRect(),
dir = this.getAttribute('ttd') || '',
left = pos.left < window.innerWidth / 2,
top = pos.top < window.innerHeight / 2;
top = pos.top < window.innerHeight / 2,
big = this.className.indexOf(' ttb') !== -1;
if (dir.indexOf('u') + 1) top = false;
if (dir.indexOf('d') + 1) top = true;
if (dir.indexOf('l') + 1) left = false;
if (dir.indexOf('r') + 1) left = true;
clmod(r.tt, 'b', big);
r.tt.style.top = top ? pos.bottom + 'px' : 'auto';
r.tt.style.bottom = top ? 'auto' : (window.innerHeight - pos.top) + 'px';
r.tt.style.left = left ? pos.left + 'px' : 'auto';
r.tt.style.right = left ? 'auto' : (window.innerWidth - pos.right) + 'px';
r.tt.innerHTML = msg.replace(/\$N/g, "<br />");
r.el.addEventListener('mouseleave', r.hide);
clmod(r.tt, 'show', 1);
}
};
function hide() {
r.hide = function (e) {
ev(e);
clmod(r.tt, 'show');
if (r.el)
r.el.removeEventListener('mouseleave', r.hide);
};
if (is_touch && IPHONE) {
var f1 = r.show,
f2 = r.hide;
r.show = function () {
setTimeout(f1.bind(this), 301);
};
r.hide = function () {
setTimeout(f2.bind(this), 301);
};
}
r.init = function () {
var _show = r.en ? show : null,
_hide = r.en ? hide : null;
r.tt.onclick = r.hide;
r.att = function (ctr) {
var _show = r.en ? r.show : null,
_hide = r.en ? r.hide : null,
o = ctr.querySelectorAll('*[tt]');
var o = QSA('*[tt]');
for (var a = o.length - 1; a >= 0; a--) {
o[a].onfocus = _show;
o[a].onblur = _hide;
o[a].onmouseenter = _show;
o[a].onmouseleave = _hide;
}
hide();
};
r.hide();
}
ebi('tooltips').onclick = function (e) {
ev(e);
r.en = !r.en;
bcfg_set('tooltips', r.en);
r.init();
r.init = function () {
var ttb = ebi('tooltips');
if (ttb) {
ttb.onclick = function (e) {
ev(e);
r.en = !r.en;
bcfg_set('tooltips', r.en);
r.init();
};
r.en = bcfg_get('tooltips', true)
}
r.att(document);
};
return r;
})();
var toast = (function () {
var r = {},
te = null,
visible = false,
obj = mknod('div');
obj.setAttribute('id', 'toast');
document.body.appendChild(obj);
r.hide = function (e) {
ev(e);
clearTimeout(te);
clmod(obj, 'vis');
r.visible = false;
};
r.show = function (cl, ms, txt) {
clearTimeout(te);
if (ms)
te = setTimeout(r.hide, ms * 1000);
var html = '', hp = txt.split(/(?=<.?pre>)/i);
for (var a = 0; a < hp.length; a++)
html += hp[a].startsWith('<pre>') ? hp[a] :
hp[a].replace(/<br ?.?>\n/g, '\n').replace(/\n<br ?.?>/g, '\n').replace(/\n/g, '<br />\n');
obj.innerHTML = '<a href="#" id="toastc">x</a>' + html;
obj.className = cl;
ms += obj.offsetWidth;
obj.className += ' vis';
ebi('toastc').onclick = r.hide;
r.visible = true;
};
r.ok = function (ms, txt) {
r.show('ok', ms, txt);
};
r.inf = function (ms, txt) {
r.show('inf', ms, txt);
};
r.warn = function (ms, txt) {
r.show('warn', ms, txt);
};
r.err = function (ms, txt) {
r.show('err', ms, txt);
};
return r;
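The new toast singleton above replaces the per-page toast()/alert() helpers that md2.js and mde.js used to carry: toast.ok / inf / warn / err each take a timeout in seconds (a falsy timeout appears to keep the toast up until it is dismissed) plus an HTML message, toast.visible is what Modpoll now checks, and the x close-handle is wired on every show. Roughly the call shapes used elsewhere in this diff; the timings and messages here are just examples:

// assuming util.js has loaded and defined the toast/tt singletons
toast.inf(2, "no changes");                        // blue info toast, auto-hides after ~2 seconds
toast.ok(2, "save OK\nattempt 2");                 // green; bare newlines get turned into <br />
toast.warn(0, "<p>the server copy changed</p>");   // falsy timeout: stays until the user closes it
toast.err(7, "indentation mismatch on row#2");     // red error toast

if (toast.visible)                                 // what the new Modpoll check looks at
    toast.hide();                                  // programmatic dismissal (same handler as the "x")

// tooltips follow the same singleton pattern; tt.init() rescans for [tt] attributes,
// which is why md.js now calls it once its DOM is built:
if (window['tt'])
    tt.init();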


@@ -15,11 +15,6 @@
}
#ggrid>a[href$="/"]:before {
content: '📂';
display: block;
position: absolute;
margin: -.1em -.4em;
text-shadow: 0 0 .1em #000;
font-size: 2em;
}
@@ -27,8 +22,11 @@
#ggrid>a:before {
display: block;
position: absolute;
margin: -.1em -.4em;
padding: .3em 0;
margin: -.4em;
text-shadow: 0 0 .1em #000;
background: linear-gradient(135deg,rgba(255,255,255,0) 50%,rgba(255,255,255,0.2));
border-radius: .3em;
font-size: 2em;
}


@@ -10,19 +10,25 @@ u k:k
# share "." (the current directory)
# as "/" (the webroot) for the following users:
# "r" grants read-access for anyone
# "a ed" grants read-write to ed
# "rw ed" grants read-write to ed
.
/
r
a ed
rw ed
# custom permissions for the "priv" folder:
# user "k" can see/read the contents
# and "ed" gets read-write access
# user "k" can only see/read the contents
# user "ed" gets read-write access
./priv
/priv
r k
a ed
rw ed
# this does the same thing:
./priv
/priv
r ed k
w ed
# share /home/ed/Music/ as /music and let anyone read it
# (this will replace any folder called "music" in the webroot)
@@ -41,5 +47,5 @@ c e2d
c nodupe
# this entire config file can be replaced with these arguments:
# -u ed:123 -u k:k -v .::r:aed -v priv:priv:rk:aed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w
# -u ed:123 -u k:k -v .::r:a,ed -v priv:priv:r,k:rw,ed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w:c,e2d:c,nodupe
# but note that the config file always wins in case of conflicts

docs/hls.html (new file, 51 lines)

@@ -0,0 +1,51 @@
<!DOCTYPE html><html lang="en"><head>
<meta charset="utf-8">
<title>hls-test</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
</head><body>
<video id="vid" controls></video>
<script src="hls.light.js"></script>
<script>
var video = document.getElementById('vid');
var hls = new Hls({
debug: true,
autoStartLoad: false
});
hls.loadSource('live/v.m3u8');
hls.attachMedia(video);
hls.on(Hls.Events.MANIFEST_PARSED, function() {
hls.startLoad(0);
});
hls.on(Hls.Events.MEDIA_ATTACHED, function() {
video.muted = true;
video.play();
});
/*
general good news:
- doesn't need fixed-length segments; ok to let x264 pick optimal keyframes and slice on those
- hls.js polls the m3u8 for new segments, scales the duration accordingly, seeking works great
- the sfx will grow by 66 KiB since that's how small hls.js can get, wait that's not good
# vod, creates m3u8 at the end, fixed keyframes, v bad
ffmpeg -hide_banner -threads 0 -flags -global_header -i ..\CowboyBebopMovie-OP1.webm -vf scale=1280:-4,format=yuv420p -ac 2 -c:a libopus -b:a 128k -c:v libx264 -preset slow -crf 24 -maxrate:v 5M -bufsize:v 10M -g 120 -keyint_min 120 -sc_threshold 0 -hls_time 4 -hls_playlist_type vod -hls_segment_filename v%05d.ts v.m3u8
# live, updates m3u8 as it goes, dynamic keyframes, streamable with hls.js
ffmpeg -hide_banner -threads 0 -flags -global_header -i ..\..\CowboyBebopMovie-OP1.webm -vf scale=1280:-4,format=yuv420p -ac 2 -c:a libopus -b:a 128k -c:v libx264 -preset slow -crf 24 -maxrate:v 5M -bufsize:v 10M -f segment -segment_list v.m3u8 -segment_format mpegts -segment_list_flags live v%05d.ts
# fmp4 (fragmented mp4), doesn't work with hls.js, gets duration 149:07:51 (536871s), probably the tkhd/mdhd 0xffffffff (timebase 8000? ok)
ffmpeg -re -hide_banner -threads 0 -flags +cgop -i ..\..\CowboyBebopMovie-OP1.webm -vf scale=1280:-4,format=yuv420p -ac 2 -c:a libopus -b:a 128k -c:v libx264 -preset slow -crf 24 -maxrate:v 5M -bufsize:v 10M -f segment -segment_list v.m3u8 -segment_format fmp4 -segment_list_flags live v%05d.mp4
# try 2, works, uses tempfiles for m3u8 updates, good, 6% smaller
ffmpeg -re -hide_banner -threads 0 -flags +cgop -i ..\..\CowboyBebopMovie-OP1.webm -vf scale=1280:-4,format=yuv420p -ac 2 -c:a libopus -b:a 128k -c:v libx264 -preset slow -crf 24 -maxrate:v 5M -bufsize:v 10M -f hls -hls_segment_type fmp4 -hls_list_size 0 -hls_segment_filename v%05d.mp4 v.m3u8
more notes
- adding -hls_flags single_file makes duration wack during playback (for both fmp4 and ts), ok once finalized and refreshed, gives no size reduction anyways
- bebop op has good keyframe spacing for testing hls.js, in particular it hops one seg back and immediately resumes if it hits eof with the explicit hls.startLoad(0); otherwise it jumps into the middle of a seg and becomes art
- can probably -c:v copy most of the time, is there a way to check for cgop? todo
*/
</script>
</body></html>
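The test page above hands the playlist straight to hls.js and defers hls.startLoad() until MANIFEST_PARSED, matching the note about resuming cleanly at eof. If this ever graduates from a scratch file, a slightly more defensive version would also check for MSE support and fall back to native HLS playback where hls.js is unnecessary; the following is only a sketch reusing the example's element id and playlist path, not code from the repo:

var video = document.getElementById('vid');
if (window.Hls && Hls.isSupported()) {
    // browsers with MediaSource support: drive playback through hls.js
    var hls = new Hls({ autoStartLoad: false });
    hls.loadSource('live/v.m3u8');
    hls.attachMedia(video);
    hls.on(Hls.Events.MANIFEST_PARSED, function () {
        hls.startLoad(0);   // same explicit start as the test page
    });
}
else if (video.canPlayType('application/vnd.apple.mpegurl')) {
    // safari and friends: built-in HLS, just point the video at the m3u8
    video.src = 'live/v.m3u8';
}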


@@ -103,6 +103,15 @@ cat warks | while IFS= read -r x; do sqlite3 up2k.db "delete from mt where w = '
# dump all dbs
find -iname up2k.db | while IFS= read -r x; do sqlite3 "$x" 'select substr(w,1,12), rd, fn from up' | sed -r 's/\|/ \| /g' | while IFS= read -r y; do printf '%s | %s\n' "$x" "$y"; done; done
# unschedule mtp scan for all files somewhere under "enc/"
sqlite3 -readonly up2k.db 'select substr(up.w,1,16) from up inner join mt on mt.w = substr(up.w,1,16) where rd like "enc/%" and +mt.k = "t:mtp"' > keys; awk '{printf "delete from mt where w = \"%s\" and +k = \"t:mtp\";\n", $0}' <keys | tee /dev/stderr | sqlite3 up2k.db
# compare metadata key "key" between two databases
sqlite3 -readonly up2k.db.key-full 'select w, v from mt where k = "key" order by w' > k1; sqlite3 -readonly up2k.db 'select w, v from mt where k = "key" order by w' > k2; ok=0; ng=0; while IFS='|' read w k2; do k1="$(grep -E "^$w" k1 | sed -r 's/.*\|//')"; [ "$k1" = "$k2" ] && ok=$((ok+1)) || { ng=$((ng+1)); printf '%3s %3s %s\n' "$k1" "$k2" "$(sqlite3 -readonly up2k.db.key-full "select * from up where substr(w,1,16) = '$w'" | sed -r 's/\|/ | /g')"; }; done < <(cat k2); echo "match $ok diff $ng"
# actually this is much better
sqlite3 -readonly up2k.db.key-full 'select w, v from mt where k = "key" order by w' > k1; sqlite3 -readonly up2k.db 'select mt.w, mt.v, up.rd, up.fn from mt inner join up on mt.w = substr(up.w,1,16) where mt.k = "key" order by up.rd, up.fn' > k2; ok=0; ng=0; while IFS='|' read w k2 path; do k1="$(grep -E "^$w" k1 | sed -r 's/.*\|//')"; [ "$k1" = "$k2" ] && ok=$((ok+1)) || { ng=$((ng+1)); printf '%3s %3s %s\n' "$k1" "$k2" "$path"; }; done < <(cat k2); echo "match $ok diff $ng"
##
## media
@@ -157,7 +166,10 @@ dbg.asyncStore.pendingBreakpoints = {}
about:config >> devtools.debugger.prefs-schema-version = -1
# determine server version
git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > /dev/shm/revs && cat /dev/shm/revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js 2>/dev/null | diff -wNarU0 - <(cat /mnt/Users/ed/Downloads/ref/{util,browser,up2k}.js) | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
# download all sfx versions
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | while read v t; do fn="copyparty $v $t.py"; [ -e $fn ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
##
@@ -200,3 +212,4 @@ mk() { rm -rf /tmp/foo; sudo -u ed bash -c 'mkdir /tmp/foo; echo hi > /tmp/foo/b
mk && t0="$(date)" && while true; do date -s "$(date '+ 1 hour')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
mk && sudo -u ed flock /tmp/foo sleep 40 & sleep 1; ps aux | grep -E 'sleep 40$' && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
mk && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; tar -cf/dev/null /tmp/foo; done; echo "$t0"


@@ -1,6 +1,7 @@
FROM alpine:3.13
WORKDIR /z
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_hashwasm=4.7.0 \
ver_marked=1.1.0 \
ver_ogvjs=1.8.0 \
ver_mde=2.14.0 \
@@ -9,12 +10,6 @@ ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_zopfli=1.0.3
# TODO
# sha512.hw.js https://github.com/Daninet/hash-wasm
# sha512.kc.js https://github.com/chm-diederichs/sha3-wasm
# awk '/HMAC state/{o=1} /var HEAP/{o=0} /function hmac_reset/{o=1} /return \{/{o=0} /var __extends =/{o=1} /var Hash =/{o=0} /hmac_|pbkdf2_/{next} o{next} {gsub(/IllegalStateError/,"Exception")} {sub(/^ +/,"");sub(/^\/\/ .*/,"");sub(/;$/," ;")} 1' <sha512.ac.js.orig >sha512.ac.js; for fn in sha512.ac.js.orig sha512.ac.js; do wc -c <$fn; wc -c <$fn.gz ; for n in {1..9}; do printf '%8d %d bz\n' $(bzip2 -c$n <$fn | wc -c) $n; done; done
# download;
# the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
RUN mkdir -p /z/dist/no-pk \
@@ -27,7 +22,11 @@ RUN mkdir -p /z/dist/no-pk \
&& wget https://github.com/codemirror/CodeMirror/archive/$ver_codemirror.tar.gz -O codemirror.tgz \
&& wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
&& wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
&& wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \
&& unzip ogvjs.zip \
&& (mkdir hash-wasm \
&& cd hash-wasm \
&& unzip ../hash-wasm.zip) \
&& (tar -xf asmcrypto.tgz \
&& cd asmcrypto.js-$ver_asmcrypto \
&& npm install ) \
@@ -64,7 +63,12 @@ RUN tar -xf zopfli.tgz \
RUN cd asmcrypto.js-$ver_asmcrypto \
&& echo "export { Sha512 } from './hash/sha512/sha512';" > src/entry-export_all.ts \
&& node -r esm build.js \
&& mv asmcrypto.all.es5.js /z/dist/sha512.js
&& awk '/HMAC state/{o=1} /var HEAP/{o=0} /function hmac_reset/{o=1} /return \{/{o=0} /var __extends =/{o=1} /var Hash =/{o=0} /hmac_|pbkdf2_/{next} o{next} {gsub(/IllegalStateError/,"Exception")} {sub(/^ +/,"");sub(/^\/\/ .*/,"");sub(/;$/," ;")} 1' < asmcrypto.all.es5.js > /z/dist/sha512.ac.js
# build hash-wasm
RUN cd hash-wasm \
&& mv sha512.umd.min.js /z/dist/sha512.hw.js
# build ogvjs


@@ -20,6 +20,11 @@ echo
#
# `no-cm` saves ~90k by removing easymde/codemirror
# (the fancy markdown editor)
#
# `no-fnt` saves ~9k by removing the source-code-pro font
# (mainly used by the markdown viewer/editor)
#
# `no-dd` saves ~2k by removing the mouse cursor
# port install gnutar findutils gsed coreutils
@@ -57,14 +62,18 @@ use_gz=
do_sh=1
do_py=1
while [ ! -z "$1" ]; do
[ "$1" = clean ] && clean=1 && shift && continue
[ "$1" = re ] && repack=1 && shift && continue
[ "$1" = gz ] && use_gz=1 && shift && continue
[ "$1" = no-ogv ] && no_ogv=1 && shift && continue
[ "$1" = no-cm ] && no_cm=1 && shift && continue
[ "$1" = no-sh ] && do_sh= && shift && continue
[ "$1" = no-py ] && do_py= && shift && continue
break
case $1 in
clean) clean=1 ; ;;
re) repack=1 ; ;;
gz) use_gz=1 ; ;;
no-ogv) no_ogv=1 ; ;;
no-fnt) no_fnt=1 ; ;;
no-dd) no_dd=1 ; ;;
no-cm) no_cm=1 ; ;;
no-sh) do_sh= ; ;;
no-py) do_py= ; ;;
esac
shift
done
tmv() {
@@ -190,6 +199,18 @@ done
sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f"
}
[ $no_fnt ] && {
rm -f copyparty/web/deps/scp.woff2
f=copyparty/web/md.css
sed -r '/scp\.woff2/d' <$f >t && tmv "$f"
}
[ $no_dd ] && {
rm -rf copyparty/web/dd
f=copyparty/web/browser.css
sed -r 's/(cursor: )url\([^)]+\), (pointer)/\1\2/; /[0-9]+% \{cursor:/d; /animation: cursor/d' <$f >t && tmv "$f"
}
[ $repack ] ||
find | grep -E '\.py$' |
grep -vE '__version__' |


@@ -6,10 +6,10 @@ import re, os, sys, time, shutil, signal, threading, tarfile, hashlib, platform,
import subprocess as sp
"""
run me with any version of python, i will unpack and run copyparty
to edit this file, use HxD or "vim -b"
(there is compressed stuff at the end)
(but please don't edit this file with a text editor
since that would probably corrupt the binary stuff at the end)
run me with any version of python, i will unpack and run copyparty
there's zero binaries! just plaintext python scripts all the way down
so you can easily unpack the archive and inspect it for shady stuff
@@ -380,7 +380,7 @@ def run(tmp, j2):
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
except Exception as ex:
if not WINDOWS:
msg("\033[31mflock:", repr(ex))
msg("\033[31mflock:{!r}\033[0m".format(ex))
t = threading.Thread(target=utime, args=(tmp,))
t.daemon = True


@@ -23,14 +23,18 @@ def hdr(query):
class Cfg(Namespace):
def __init__(self, a=[], v=[], c=None):
def __init__(self, a=None, v=None, c=None):
super(Cfg, self).__init__(
a=a,
v=v,
a=a or [],
v=v or [],
c=c,
rproxy=0,
ed=False,
nw=False,
no_mv=False,
no_del=False,
no_zip=False,
no_voldump=True,
no_scandir=False,
no_sendfile=True,
no_rescan=True,
@@ -89,7 +93,7 @@ class TestHttpCli(unittest.TestCase):
if not vol.startswith(top):
continue
mode = vol[-2]
mode = vol[-2].replace("a", "rwmd")
usr = vol[-1]
if usr == "a":
usr = ""
@@ -98,7 +102,7 @@ class TestHttpCli(unittest.TestCase):
vol += "/"
top, sub = vol.split("/", 1)
vcfg.append("{0}/{1}:{1}:{2}{3}".format(top, sub, mode, usr))
vcfg.append("{0}/{1}:{1}:{2},{3}".format(top, sub, mode, usr))
pprint.pprint(vcfg)


@@ -16,18 +16,19 @@ from copyparty import util
class Cfg(Namespace):
def __init__(self, a=[], v=[], c=None):
ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
def __init__(self, a=None, v=None, c=None):
ex = {k: False for k in "nw e2d e2ds e2dsa e2t e2ts e2tsr".split()}
ex2 = {
"mtp": [],
"mte": "a",
"hist": None,
"no_hash": False,
"css_browser": None,
"no_voldump": True,
"rproxy": 0,
}
ex.update(ex2)
super(Cfg, self).__init__(a=a, v=v, c=c, **ex)
super(Cfg, self).__init__(a=a or [], v=v or [], c=c, **ex)
class TestVFS(unittest.TestCase):
@@ -57,8 +58,8 @@ class TestVFS(unittest.TestCase):
# type: (VFS, str, str) -> tuple[str, str, str]
"""helper for resolving and listing a folder"""
vn, rem = vfs.get(vpath, uname, True, False)
r1 = vn.ls(rem, uname, False)
r2 = vn.ls(rem, uname, False)
r1 = vn.ls(rem, uname, False, [[True]])
r2 = vn.ls(rem, uname, False, [[True]])
self.assertEqual(r1, r2)
fsdir, real, virt = r1
@@ -68,6 +69,11 @@ class TestVFS(unittest.TestCase):
def log(self, src, msg, c=0):
pass
def assertAxs(self, dct, lst):
t1 = list(sorted(dct.keys()))
t2 = list(sorted(lst))
self.assertEqual(t1, t2)
def test(self):
td = os.path.join(self.td, "vfs")
os.mkdir(td)
@@ -88,53 +94,53 @@ class TestVFS(unittest.TestCase):
self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td)
self.assertEqual(vfs.uread, ["*"])
self.assertEqual(vfs.uwrite, ["*"])
self.assertAxs(vfs.axs.uread, ["*"])
self.assertAxs(vfs.axs.uwrite, ["*"])
# single read-only rootfs (relative path)
vfs = AuthSrv(Cfg(v=["a/ab/::r"]), self.log).vfs
self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab"))
self.assertEqual(vfs.uread, ["*"])
self.assertEqual(vfs.uwrite, [])
self.assertAxs(vfs.axs.uread, ["*"])
self.assertAxs(vfs.axs.uwrite, [])
# single read-only rootfs (absolute path)
vfs = AuthSrv(Cfg(v=[td + "//a/ac/../aa//::r"]), self.log).vfs
self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa"))
self.assertEqual(vfs.uread, ["*"])
self.assertEqual(vfs.uwrite, [])
self.assertAxs(vfs.axs.uread, ["*"])
self.assertAxs(vfs.axs.uwrite, [])
# read-only rootfs with write-only subdirectory (read-write for k)
vfs = AuthSrv(
Cfg(a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]),
Cfg(a=["k:k"], v=[".::r:rw,k", "a/ac/acb:a/ac/acb:w:rw,k"]),
self.log,
).vfs
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td)
self.assertEqual(vfs.uread, ["*", "k"])
self.assertEqual(vfs.uwrite, ["k"])
self.assertAxs(vfs.axs.uread, ["*", "k"])
self.assertAxs(vfs.axs.uwrite, ["k"])
n = vfs.nodes["a"]
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a")
self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertEqual(n.uread, ["*", "k"])
self.assertEqual(n.uwrite, ["k"])
self.assertAxs(n.axs.uread, ["*", "k"])
self.assertAxs(n.axs.uwrite, ["k"])
n = n.nodes["ac"]
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a/ac")
self.assertEqual(n.realpath, os.path.join(td, "a", "ac"))
self.assertEqual(n.uread, ["*", "k"])
self.assertEqual(n.uwrite, ["k"])
self.assertAxs(n.axs.uread, ["*", "k"])
self.assertAxs(n.axs.uwrite, ["k"])
n = n.nodes["acb"]
self.assertEqual(n.nodes, {})
self.assertEqual(n.vpath, "a/ac/acb")
self.assertEqual(n.realpath, os.path.join(td, "a", "ac", "acb"))
self.assertEqual(n.uread, ["k"])
self.assertEqual(n.uwrite, ["*", "k"])
self.assertAxs(n.axs.uread, ["k"])
self.assertAxs(n.axs.uwrite, ["*", "k"])
# something funky about the windows path normalization,
# doesn't really matter but makes the test messy, TODO?
@@ -173,24 +179,24 @@ class TestVFS(unittest.TestCase):
# admin-only rootfs with all-read-only subfolder
vfs = AuthSrv(
Cfg(a=["k:k"], v=[".::ak", "a:a:r"]),
Cfg(a=["k:k"], v=[".::rw,k", "a:a:r"]),
self.log,
).vfs
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td)
self.assertEqual(vfs.uread, ["k"])
self.assertEqual(vfs.uwrite, ["k"])
self.assertAxs(vfs.axs.uread, ["k"])
self.assertAxs(vfs.axs.uwrite, ["k"])
n = vfs.nodes["a"]
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a")
self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertEqual(n.uread, ["*"])
self.assertEqual(n.uwrite, [])
self.assertEqual(vfs.can_access("/", "*"), [False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True])
self.assertEqual(vfs.can_access("/a", "*"), [True, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False])
self.assertAxs(n.axs.uread, ["*"])
self.assertAxs(n.axs.uwrite, [])
self.assertEqual(vfs.can_access("/", "*"), [False, False, False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True, False, False])
self.assertEqual(vfs.can_access("/a", "*"), [True, False, False, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False, False, False])
# breadth-first construction
vfs = AuthSrv(
@@ -247,26 +253,26 @@ class TestVFS(unittest.TestCase):
./src
/dst
r a
a asd
rw asd
"""
).encode("utf-8")
)
au = AuthSrv(Cfg(c=[cfg_path]), self.log)
self.assertEqual(au.user["a"], "123")
self.assertEqual(au.user["asd"], "fgh:jkl")
self.assertEqual(au.acct["a"], "123")
self.assertEqual(au.acct["asd"], "fgh:jkl")
n = au.vfs
# root was not defined, so PWD with no access to anyone
self.assertEqual(n.vpath, "")
self.assertEqual(n.realpath, None)
self.assertEqual(n.uread, [])
self.assertEqual(n.uwrite, [])
self.assertAxs(n.axs.uread, [])
self.assertAxs(n.axs.uwrite, [])
self.assertEqual(len(n.nodes), 1)
n = n.nodes["dst"]
self.assertEqual(n.vpath, "dst")
self.assertEqual(n.realpath, os.path.join(td, "src"))
self.assertEqual(n.uread, ["a", "asd"])
self.assertEqual(n.uwrite, ["asd"])
self.assertAxs(n.axs.uread, ["a", "asd"])
self.assertAxs(n.axs.uwrite, ["asd"])
self.assertEqual(len(n.nodes), 0)
os.unlink(cfg_path)
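
Two API shifts show up in the assertions above: the accounts dict is now au.acct instead of au.user, and can_access returns four booleans rather than two. Given the no_mv/no_del flags and the r/w/m/d permission letters elsewhere in this diff, the extra slots are presumably move and delete access alongside read and write; that ordering is an assumption here, not something the test states. A hedged sketch of how a caller might unpack it:

    def fileman_allowed(vfs, vpath, uname):
        # illustrative helper, not copyparty code; the
        # (read, write, move, delete) ordering is inferred from the
        # r/w/m/d permission letters used in this diff
        can_read, can_write, can_move, can_delete = vfs.can_access(vpath, uname)
        return can_read and can_move and can_delete

    # e.g. fileman_allowed(au.vfs, "/dst", "asd") would gate the
    # cut/paste/delete buttons for that user
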

View File

@@ -31,7 +31,7 @@ if MACOS:
from copyparty.util import Unrecv
def runcmd(*argv):
def runcmd(argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
stdout, stderr = p.communicate()
stdout = stdout.decode("utf-8")
@@ -39,8 +39,8 @@ def runcmd(*argv):
return [p.returncode, stdout, stderr]
def chkcmd(*argv):
ok, sout, serr = runcmd(*argv)
def chkcmd(argv):
ok, sout, serr = runcmd(argv)
if ok != 0:
raise Exception(serr)
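
runcmd and chkcmd drop the *argv varargs in favor of a single list argument, which is what subprocess.Popen takes natively; call sites now pass either an explicit list or a .split() of the command string, as the hdiutil call below does. A minimal sketch of the new calling convention, mirroring the [returncode, stdout, stderr] return shape shown above:

    import subprocess as sp

    def runcmd(argv):
        # argv is a list of strings handed straight to Popen (no shell)
        p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
        stdout, stderr = p.communicate()
        return [p.returncode, stdout.decode("utf-8"), stderr.decode("utf-8")]

    def chkcmd(argv):
        rc, sout, serr = runcmd(argv)
        if rc != 0:
            raise Exception(serr)
        return sout, serr

    # both call styles seen in this diff:
    print(chkcmd(["echo", "hi"])[0])
    print(chkcmd("echo hi".split())[0])
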
@@ -60,12 +60,20 @@ def get_ramdisk():
if os.path.exists("/Volumes"):
# hdiutil eject /Volumes/cptd/
devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://131072")
devname, _ = chkcmd("hdiutil attach -nomount ram://131072".split())
devname = devname.strip()
print("devname: [{}]".format(devname))
for _ in range(10):
try:
_, _ = chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
_, _ = chkcmd(["diskutil", "eraseVolume", "HFS+", "cptd", devname])
with open("/Volumes/cptd/.metadata_never_index", "w") as f:
f.write("orz")
try:
shutil.rmtree("/Volumes/cptd/.fseventsd")
except:
pass
return subdir("/Volumes/cptd")
except Exception as ex:
print(repr(ex))
@@ -108,6 +116,9 @@ class VHttpSrv(object):
aliases = ["splash", "browser", "browser2", "msg", "md", "mde"]
self.j2 = {x: J2_FILES for x in aliases}
def cachebuster(self):
return "a"
class VHttpConn(object):
def __init__(self, args, asrv, log, buf):
@@ -116,13 +127,13 @@ class VHttpConn(object):
self.addr = ("127.0.0.1", "42069")
self.args = args
self.asrv = asrv
self.is_mp = False
self.nid = None
self.log_func = log
self.log_src = "a"
self.lf_url = None
self.hsrv = VHttpSrv()
self.nreq = 0
self.nbyte = 0
self.workload = 0
self.ico = None
self.thumbcli = None
self.t0 = time.time()