Compare commits


315 Commits

Author SHA1 Message Date
ed
633b1f0a78 v1.0.1 2021-09-09 00:59:55 +02:00
ed
6136b9bf9c don't double-eof 2021-09-09 00:54:09 +02:00
ed
524a3ba566 actually this is better 2021-09-09 00:41:23 +02:00
ed
58580320f9 make the primary tabs toggle-buttons 2021-09-09 00:35:07 +02:00
ed
759b0a994d alternative equalizer tuning 2021-09-09 00:27:18 +02:00
ed
d2800473e4 less aggressive searching, especially on phones 2021-09-08 23:24:32 +02:00
ed
f5b1a2065e multipart-parser needs exact reads 2021-09-08 21:07:34 +00:00
ed
5e62532295 minimal-up2k: remove filesearch dropzone 2021-09-08 09:16:02 +02:00
ed
c1bee96c40 fix filedrop trying to upload without write access 2021-09-08 00:19:48 +02:00
ed
f273253a2b ( ´ w `) 2021-09-08 00:16:08 +02:00
ed
012bbcf770 v1.0.0 2021-09-07 23:18:54 +02:00
ed
b54cb47b2e listen for filedrops in all tabs/modes 2021-09-07 22:44:48 +02:00
ed
1b15f43745 crashpage: add github-issue link 2021-09-07 22:30:50 +02:00
ed
96771bf1bd linken 2021-09-07 22:12:28 +02:00
ed
580078bddb more readme stuff 2021-09-07 22:10:59 +02:00
ed
c5c7080ec6 more readme fixup 2021-09-07 21:57:33 +02:00
ed
408339b51d mention the new dropzones 2021-09-07 21:49:00 +02:00
ed
02e3d44998 fix move/delete without -e2d (thx exci) 2021-09-07 21:20:34 +02:00
ed
156f13ded1 add 10-minute indicators to seekbar 2021-09-07 21:10:50 +02:00
ed
d288467cb7 separate dropzones for upload/search 2021-09-07 20:52:06 +02:00
ed
21662c9f3f error-message cleanup 2021-09-07 20:51:07 +02:00
ed
9149fe6cdd lightmode fix 2021-09-07 00:44:09 +02:00
ed
9a146192b7 don't unwrap single folders in zip/tar downloads 2021-09-07 00:43:51 +02:00
ed
3a9d3b7b61 rip hls 2021-09-07 00:05:51 +02:00
ed
f03f0973ab Create branch-rename.md 2021-09-06 23:42:42 +02:00
ed
7ec0881e8c Create CODE_OF_CONDUCT.md 2021-09-06 23:31:57 +02:00
ed
59e1ab42ff Create CONTRIBUTING.md 2021-09-06 22:18:41 +02:00
ed
722216b901 Update issue templates 2021-09-06 22:11:06 +02:00
ed
bd8f3dc368 Update issue templates 2021-09-06 22:09:10 +02:00
ed
33cd94a141 update TOC 2021-09-06 08:36:18 +02:00
ed
053ac74734 v0.13.14 2021-09-06 01:06:16 +02:00
ed
cced99fafa replace SCP with Consolas on no-fnt repack 2021-09-06 01:04:12 +02:00
ed
a009ff53f7 show README.md in directory listings 2021-09-06 00:23:35 +02:00
ed
ca16c4108d add options to disallow renaming/moving dotfiles 2021-09-06 00:17:35 +02:00
ed
d1b6c67dc3 fix misnomer 2021-09-06 00:13:52 +02:00
ed
a61f8133d5 add option to disable logues 2021-09-05 22:33:42 +02:00
ed
38d797a544 remove duplicate code 2021-09-05 22:32:34 +02:00
ed
16c1877f50 fix markdown scrollmap desync on offsite images 2021-09-05 21:44:17 +02:00
ed
da5f15a778 move general markdown to ui.css 2021-09-05 21:42:41 +02:00
ed
396c64ecf7 move sourcecodepro to ui.css 2021-09-05 18:55:28 +02:00
ed
252c3a7985 faster turbo 2021-09-05 18:51:01 +02:00
ed
a3ecbf0ae7 better fix for the up2k bounce 2021-09-05 18:50:24 +02:00
ed
314327d8f2 support alternative python impls 2021-09-05 18:48:58 +02:00
ed
bfacd06929 mention some more features 2021-09-04 21:40:22 +02:00
ed
4f5e8f8cf5 toc tweaks 2021-09-04 21:21:18 +02:00
ed
1fbb4c09cc readme/doc cleanup 2021-09-04 21:07:45 +02:00
ed
b332e1992b sfx-repack: fix git version numbers 2021-09-04 17:43:49 +02:00
ed
5955940b82 fix upload eta going bad after inactivity 2021-09-04 03:10:54 +02:00
ed
231a03bcfd v0.13.13 2021-09-03 21:21:17 +02:00
ed
bc85723657 more intense compressino 2021-09-03 21:20:40 +02:00
ed
be32b743c6 repl: select default text on load 2021-09-03 20:48:41 +02:00
ed
83c9843059 make-sfx: correct version number on repack 2021-09-03 20:38:41 +02:00
ed
11cf43626d make-sfx: fix no-dd css modifier 2021-09-03 20:38:14 +02:00
ed
a6dc5e2ce3 add some missing preventdefaults 2021-09-03 20:37:30 +02:00
ed
38593a0394 move column hider buttons above the header 2021-09-03 20:19:17 +02:00
ed
95309afeea fix file-list jumping around during uploads 2021-09-03 20:17:44 +02:00
ed
c2bf6fe2a3 add basic authentication 2021-09-03 20:15:24 +02:00
ed
99ac324fbd tweaks 2021-09-02 19:06:08 +02:00
ed
5562de330f slightly smaller jpeg thumbnails 2021-09-02 18:51:15 +02:00
ed
95014236ac js-repl presets 2021-09-02 18:50:47 +02:00
ed
6aa7386138 modals: onDisplay callback 2021-09-02 18:46:51 +02:00
ed
3226a1f588 crashpage: show recent console messages 2021-09-02 18:45:42 +02:00
ed
b4cf890cd8 emphasis 2021-09-02 18:42:53 +02:00
ed
ce09e323af ok/cancel buttons in platform-defined order 2021-09-02 18:42:12 +02:00
ed
941aedb177 v0.13.12 2021-09-01 23:48:01 +02:00
ed
87a0d502a3 crashpage: add useragent 2021-09-01 23:32:27 +02:00
ed
cab7c1b0b8 browser-icons: centered play button 2021-09-01 22:35:27 +02:00
ed
d5892341b6 prevent vertical toast overflow 2021-09-01 22:34:48 +02:00
ed
646557a43e crashpage: better localstore dump 2021-09-01 22:34:04 +02:00
ed
ed8d34ab43 dont try to play audio if js crashed 2021-09-01 22:28:15 +02:00
ed
5e34463c77 support massive cut/paste ops 2021-09-01 22:27:39 +02:00
ed
1b14eb7959 fix thumbnail-zoom hotkeys 2021-09-01 22:26:18 +02:00
ed
ed48c2d0ed v0.13.11 2021-08-30 22:32:16 +02:00
ed
26fe84b660 smaller sfx 2021-08-30 22:27:10 +02:00
ed
5938230270 more tray ui nitpicks 2021-08-30 22:25:07 +02:00
ed
1a33a047fa fix listening on single interface 2021-08-30 21:39:44 +02:00
ed
43a8bcefb9 v0.13.10 2021-08-30 03:02:11 +02:00
ed
2e740e513f cheap performance fix 2021-08-30 02:38:48 +02:00
ed
8a21a86b61 better iOS error-handling 2021-08-30 02:29:38 +02:00
ed
f600116205 login returns to volume listing 2021-08-30 01:55:24 +02:00
ed
1c03705de8 upload filedrops in alphabetical order 2021-08-30 01:50:12 +02:00
ed
f7e461fac6 add humantime 2021-08-30 01:16:20 +02:00
ed
03ce6c97ff better crash-handler ui 2021-08-30 01:15:37 +02:00
ed
ffd9e76e07 select all text in modal.prompt 2021-08-30 01:11:00 +02:00
ed
fc49cb1e67 add js repl 2021-08-30 01:09:27 +02:00
ed
f5712d9f25 v0.13.9 2021-08-29 02:24:09 +02:00
ed
161d57bdda v0.13.8 2021-08-29 01:38:06 +02:00
ed
bae0d440bf upgrade ogvjs to 1.8.4 2021-08-29 01:11:44 +02:00
ed
fff052dde1 explain the magic 2021-08-29 00:11:06 +02:00
ed
73b06eaa02 coerce iOS into playing opus in the background 2021-08-29 00:05:14 +02:00
ed
08a8ebed17 minor cleanup 2021-08-28 22:40:59 +02:00
ed
74d07426b3 make tray tab smaller 2021-08-28 22:37:39 +02:00
ed
69a2bba99a fix ogv.js crashing iOS 2021-08-28 22:35:47 +02:00
ed
4d685d78ee v0.13.7 2021-08-28 04:55:06 +02:00
ed
5845ec3f49 nevermind, nailed it 2021-08-28 04:08:22 +02:00
ed
13373426fe alright fine apple you win 2021-08-28 03:44:07 +02:00
ed
8e55551a06 positioning fixes 2021-08-28 03:27:14 +02:00
ed
12a3f0ac31 update the filetype icons example 2021-08-28 02:56:07 +02:00
ed
18e33edc88 hide tooltips on scroll 2021-08-28 02:46:06 +02:00
ed
c72c5ad4ee make the ellipsis more visible 2021-08-28 02:38:31 +02:00
ed
0fbc81ab2f missed some 2021-08-28 02:37:28 +02:00
ed
af0a34cf82 improve iphone fix 2021-08-28 02:11:40 +02:00
ed
b4590c5398 horizontally centered tooltips 2021-08-28 01:49:21 +02:00
ed
f787a66230 that was dumb 2021-08-28 01:47:36 +02:00
ed
b21a99fd62 only tooltip the ellipsed thumbnails 2021-08-28 01:25:27 +02:00
ed
eb16306cde misc cleanup 2021-08-28 00:03:30 +02:00
ed
7bc23687e3 this kinda broke ellipsing, hopefully not too expensive 2021-08-28 00:02:59 +02:00
ed
e1eaa057f2 optimize clmod 2021-08-27 23:58:23 +02:00
ed
97c264ca3e snappy taps 2021-08-27 23:57:46 +02:00
ed
cf848ab1f7 add ellipsing of thumbnail filename, fixes #3 (+ clamp zoom level) 2021-08-27 23:50:09 +02:00
ed
cf83f9b0fd v0.13.6 2021-08-27 00:09:36 +02:00
ed
d98e361083 quick debounce 2021-08-26 23:59:17 +02:00
ed
ce7f5309c7 tweak toast bg 2021-08-26 23:46:04 +02:00
ed
75c485ced7 misc toast rice and html escaping 2021-08-26 23:45:28 +02:00
ed
9c6e2ec012 misc modal rice and html escaping 2021-08-26 23:23:56 +02:00
ed
1a02948a61 prevent text selection on most buttons 2021-08-26 23:01:24 +02:00
ed
8b05ba4ba1 stop counting eta when we don't hold the flag 2021-08-26 22:51:07 +02:00
ed
21e2874cb7 warning when another browser tab holds the flag 2021-08-26 22:50:22 +02:00
ed
360ed5c46c release the up2k flag when disabling it 2021-08-26 22:48:57 +02:00
ed
5099bc365d better eta for fsearch 2021-08-26 22:47:43 +02:00
ed
12986da147 might be useful some time 2021-08-26 22:45:50 +02:00
ed
23e72797bc remove some more ansi escapes on win7 2021-08-26 22:45:36 +02:00
ed
ac7b6f8f55 update turbo hint for fsearch 2021-08-26 20:44:36 +02:00
ed
981b9ff11e more accurate eta 2021-08-26 20:43:52 +02:00
ed
4186906f4c pause hashing as well when parallel uploads is 0 2021-08-26 20:43:27 +02:00
ed
0850d24e0c improve spacing on narrow screens 2021-08-26 20:42:20 +02:00
ed
7ab8334c96 remove debug 2021-08-26 01:16:59 +02:00
ed
a4d7329ab7 revert to fixed MiB/s in upload tab 2021-08-26 01:13:20 +02:00
ed
3f4eae6bce yolo search + show in bz + md search 2021-08-26 00:57:49 +02:00
ed
518cf4be57 set fsearch tag on tasks 2021-08-26 00:54:00 +02:00
ed
71096182be toFixed is busted, workaround 2021-08-26 00:51:35 +02:00
ed
6452e927ea download-eta accuracy + misc ux 2021-08-26 00:40:12 +02:00
ed
bc70cfa6f0 fix tmi 2021-08-25 09:02:34 +02:00
ed
2b6e5ebd2d update minimal-up2k 2021-08-25 08:26:38 +02:00
ed
c761bd799a add pane with total eta for all uploads 2021-08-25 02:06:29 +02:00
ed
2f7c2fdee4 add colors to status column in up2k ui 2021-08-24 00:32:53 +02:00
ed
70a76ec343 add toast on upload/fsearch completion 2021-08-24 00:31:01 +02:00
ed
7c3f64abf2 fix navpane h.scroll bug 2021-08-24 00:29:11 +02:00
ed
f5f38f195c use scp.woff in browser too 2021-08-24 00:28:16 +02:00
ed
7e84f4f015 fence focus inside modals 2021-08-24 00:26:54 +02:00
ed
4802f8cf07 better msg when unposting a deleted file 2021-08-24 00:24:50 +02:00
ed
cc05e67d8f add summaries to readme toc 2021-08-22 17:23:42 +02:00
ed
2b6b174517 the smallest nitpick 2021-08-20 19:25:57 +02:00
ed
a1d05e6e12 folder thumbnail fix 2021-08-20 19:22:25 +02:00
ed
f95ceb6a9b fix toc 2021-08-17 08:54:19 +02:00
ed
8f91b0726d add missing hotkey hint 2021-08-17 00:24:27 +02:00
ed
97807f4383 update screenshots 2021-08-17 00:23:12 +02:00
ed
5f42237f2c v0.13.5 2021-08-16 08:40:26 +02:00
ed
68289cfa54 v0.13.4 2021-08-16 08:18:52 +02:00
ed
42ea30270f up2k-ui: post absolute URLs 2021-08-16 08:16:52 +02:00
ed
ebbbbf3d82 misc old-browser support 2021-08-16 00:22:30 +02:00
ed
27516e2d16 scroll navpane to open folder on load 2021-08-16 00:07:31 +02:00
ed
84bb6f915e fix unpost ui for nonroot volumes 2021-08-16 00:03:05 +02:00
ed
46752f758a fix bup into volumes with upload rules 2021-08-15 23:59:41 +02:00
ed
34c4c22e61 v0.13.3 2021-08-14 22:46:15 +02:00
ed
af2d0b8421 upgrade permsets in smoketest 2021-08-14 22:45:33 +02:00
ed
638b05a49a fix image-viewer touch handler 2021-08-14 22:40:54 +02:00
ed
7a13e8a7fc clear transform on 0deg rotate 2021-08-14 21:13:15 +02:00
ed
d9fa74711d cheaper shadows 2021-08-14 18:17:40 +02:00
ed
41867f578f image viewer: add rotation 2021-08-14 18:06:53 +02:00
ed
0bf41ed4ef exif orientation for thumbnails 2021-08-14 17:45:44 +02:00
ed
d080b4a731 v0.13.2 2021-08-12 22:42:36 +02:00
ed
ca4232ada9 move sortfiles from util to browser 2021-08-12 22:42:17 +02:00
ed
ad348f91c9 fix button placement in large modals 2021-08-12 22:31:28 +02:00
ed
990f915f42 ui tweaks 2021-08-12 22:31:07 +02:00
ed
53d720217b open videos in gallery 2021-08-12 22:30:52 +02:00
ed
7a06ff480d fix cut/paste on old chromes 2021-08-12 22:30:41 +02:00
ed
3ef551f788 selection-toggle in image viewer 2021-08-12 22:20:32 +02:00
ed
f0125cdc36 prevent massive stacks in chrome 2021-08-12 22:12:05 +02:00
ed
ed5f6736df add prisonparty systemd example 2021-08-10 23:29:14 +02:00
ed
15d8be0fae no more loops 2021-08-10 02:56:48 +02:00
ed
46f3e61360 no actually that is a terrible location 2021-08-09 23:53:09 +02:00
ed
87ad8c98d4 /var/empty is a good location 2021-08-09 23:37:01 +02:00
ed
9bbdc4100f fix permission flags in service scripts 2021-08-09 23:26:30 +02:00
ed
c80307e8ff v0.13.1 2021-08-09 22:28:54 +02:00
ed
c1d77e1041 add upload lifetimes 2021-08-09 22:17:41 +02:00
ed
d9e83650dc handle invalid XDG_CONFIG_HOME on linux 2021-08-09 22:13:16 +02:00
ed
f6d635acd9 sfx: return 1 on exception 2021-08-09 22:13:00 +02:00
ed
0dbd8a01ff mount PWD into chroot for config files 2021-08-09 22:12:39 +02:00
ed
8d755d41e0 per-volume rescan interval 2021-08-09 01:31:20 +02:00
ed
190473bd32 up2k-ui: fix hash-ahead button 2021-08-09 01:16:09 +02:00
ed
030d1ec254 no wait thats too much 2021-08-09 01:15:51 +02:00
ed
5a2b91a084 handle more exceptions + sanitize fs paths in msgs 2021-08-09 01:09:20 +02:00
ed
a50a05e4e7 git: set 0755 on binary 2021-08-09 00:44:19 +02:00
ed
6cb5a87c79 add chroot wrapper (tested on debian only) 2021-08-09 00:42:21 +02:00
ed
b9f89ca552 shared password for providers 2021-08-08 23:05:00 +02:00
ed
26c9fd5dea add converter to freg / yta-raw 2021-08-08 22:48:02 +02:00
ed
e81a9b6fe0 better error handling 2021-08-08 20:48:24 +02:00
ed
452450e451 improve youtube parser 2021-08-08 20:30:12 +02:00
ed
419dd2d1c7 v0.13.0 2021-08-08 04:14:59 +02:00
ed
ee86b06676 compat + perf + ux 2021-08-08 04:02:58 +02:00
ed
953183f16d add help sections and vt100 stripper 2021-08-08 02:47:42 +02:00
ed
228f71708b improve youtube collector/parser 2021-08-08 02:47:04 +02:00
ed
621471a7cb add streaming upload compression 2021-08-08 02:45:50 +02:00
ed
8b58e951e3 metadata search with keys containing _- 2021-08-07 21:38:52 +02:00
ed
1db489a0aa port changes to mde 2021-08-07 21:35:24 +02:00
ed
be65c3c6cf cleanup 2021-08-07 21:11:01 +02:00
ed
46e7fa31fe up2k-cli: handle subfolders better 2021-08-07 20:43:24 +02:00
ed
66e21bd499 up2k-ui: prevent accidentally showing huge lists 2021-08-07 20:08:41 +02:00
ed
8cab4c01fd chrome optimizations 2021-08-07 20:08:02 +02:00
ed
d52038366b reinventing alert/confirm/prompt was exactly what i had in mind for the weekend, thanks google 2021-08-07 18:41:06 +02:00
ed
4fcfd87f5b fix transfer limit 2021-08-07 18:40:28 +02:00
ed
f893c6baa4 add youtube manifest parser 2021-08-07 04:29:55 +02:00
ed
9a45549b66 adding upload rules 2021-08-07 03:45:50 +02:00
ed
ae3a01038b v0.12.12 2021-08-06 11:10:04 +02:00
ed
e47a2a4ca2 hyperlinks 2021-08-06 01:48:34 +02:00
ed
95ea6d5f78 v0.12.11 2021-08-06 00:53:44 +02:00
ed
7d290f6b8f fix volflag syntax in examples 2021-08-06 00:50:29 +02:00
ed
9db617ed5a new mtp: media-hash 2021-08-06 00:49:42 +02:00
ed
514456940a tooltips, examples, fwd ng in lpad 2021-08-05 23:56:09 +02:00
ed
33feefd9cd sup merge conflict 2021-08-05 23:14:19 +02:00
ed
65e14cf348 batch-rename: add functions and presets 2021-08-05 23:11:06 +02:00
ed
1d61bcc4f3 every time 2021-08-05 21:56:52 +02:00
ed
c38bbaca3c mention batch-rename in readme 2021-08-05 21:53:51 +02:00
ed
246d245ebc make it better 2021-08-05 21:53:08 +02:00
ed
f269a710e2 suspiciously working first attempt at batch-rename 2021-08-05 20:49:49 +02:00
ed
051998429c fix argv compat on windows paths 2021-08-05 20:46:08 +02:00
ed
432cdd640f video-thumbs: take first video stream + better errors 2021-08-05 20:44:04 +02:00
ed
9ed9b0964e nice race 2021-08-03 22:53:13 +00:00
ed
6a97b3526d why was that there 2021-08-03 21:16:26 +00:00
ed
451d757996 fix renaming single symlinks 2021-08-03 20:12:51 +02:00
ed
f9e9eba3b1 sfx-repack: fix no-fnt, no-dd 2021-08-03 20:12:21 +02:00
ed
2a9a6aebd9 systemd fun 2021-08-03 09:22:16 +02:00
ed
adbb6c449e v0.12.10 2021-08-02 00:49:31 +02:00
ed
3993605324 add -mth (deafult-hidden columns) 2021-08-02 00:47:07 +02:00
ed
0ae574ec2c better mutagen codec detection 2021-08-02 00:40:40 +02:00
ed
c56ded828c v0.12.9 2021-08-01 00:40:15 +02:00
ed
02c7061945 v0.12.8 2021-08-01 00:17:05 +02:00
ed
9209e44cd3 heh 2021-08-01 00:08:50 +02:00
ed
ebed37394e better rename ui 2021-08-01 00:04:53 +02:00
ed
4c7a2a7ec3 uridec alerts 2021-07-31 22:05:31 +02:00
ed
0a25a88a34 add mojibake fixer 2021-07-31 14:31:39 +02:00
ed
6aa9025347 v0.12.7 2021-07-31 13:21:43 +02:00
ed
a918cc67eb only drop tags when its safe 2021-07-31 13:19:02 +02:00
ed
08f4695283 v0.12.6 2021-07-31 12:38:53 +02:00
ed
44e76d5eeb optimize make-sfx 2021-07-31 12:38:17 +02:00
ed
cfa36fd279 phone-friendly toast positioning 2021-07-31 10:56:03 +02:00
ed
3d4166e006 dont thumbnail thumbnails 2021-07-31 10:51:18 +02:00
ed
07bac1c592 add option to show dotfiles 2021-07-31 10:44:35 +02:00
ed
755f2ce1ba more url encoding fun 2021-07-31 10:24:34 +02:00
ed
cca2844deb fix mode display for move 2021-07-31 07:19:10 +00:00
ed
24a2f760b7 v0.12.5 2021-07-30 19:28:14 +02:00
ed
79bbd8fe38 systemd: line-buffered logging 2021-07-30 10:39:46 +02:00
ed
35dce1e3e4 v0.12.4 2021-07-30 08:52:15 +02:00
ed
f886fdf913 mention unpost in the readme 2021-07-30 00:53:15 +02:00
ed
4476f2f0da v0.12.3 orz 2021-07-30 00:32:21 +02:00
ed
160f161700 v0.12.2 (1000GET) 2021-07-29 23:56:25 +02:00
ed
c164fc58a2 add unpost 2021-07-29 23:53:08 +02:00
ed
0c625a4e62 store upload ip and time 2021-07-29 00:30:10 +02:00
ed
bf3941cf7a v0.12.1 2021-07-28 01:55:01 +02:00
ed
3649e8288a v0.12.0 2021-07-28 01:47:42 +02:00
ed
9a45e26026 another windows sighandler fix 2021-07-28 01:18:51 +02:00
ed
e65f127571 list server ips on windows 2021-07-28 01:18:38 +02:00
ed
3bfc699787 block hotkeys when insufficient permissions 2021-07-27 23:16:50 +02:00
ed
955318428a font adjustments 2021-07-27 23:12:47 +02:00
ed
f6279b356a fix more signal handler jank 2021-07-27 22:11:33 +02:00
ed
4cc3cdc989 list server ips on macos 2021-07-27 20:39:16 +02:00
ed
f9aa20a3ad naming: navpane 2021-07-27 20:39:01 +02:00
ed
129d33f1a0 mv/del: recursive rmdir 2021-07-27 19:15:58 +02:00
ed
1ad7a3f378 await and monitor workers on startup 2021-07-27 15:48:00 +00:00
ed
b533be8818 actually this is much better 2021-07-27 12:26:34 +02:00
ed
fb729e5166 file selection scroll behavior 2021-07-27 12:13:00 +02:00
ed
d337ecdb20 fix color bleed 2021-07-27 12:02:55 +02:00
ed
5f1f0a48b0 toast appearance 2021-07-27 11:48:32 +02:00
ed
e0f1cb94a5 toast close-handle 2021-07-27 10:05:53 +02:00
ed
a362ee2246 dodge a bullet on centos7 2021-07-27 00:28:40 +02:00
ed
19f23c686e toasty 2021-07-27 00:18:08 +02:00
ed
23b20ff4a6 bos abspath 2021-07-26 23:53:13 +02:00
ed
72574da834 hide fileman buttons when argv-disabled 2021-07-26 23:35:55 +02:00
ed
d5a79455d1 cleanup 2021-07-26 23:31:45 +02:00
ed
070d4b9da9 allow regular hotkeys during file selection 2021-07-26 22:50:58 +02:00
ed
0ace22fffe file selection hotkeys 2021-07-26 22:47:54 +02:00
ed
9e483d7694 ctrl-a 2021-07-26 22:44:07 +02:00
ed
26458b7a06 keyboard file selection 2021-07-26 22:40:55 +02:00
ed
b6a4604952 show fileman buttons conditionally 2021-07-26 21:00:36 +02:00
ed
af752fbbc2 reload-signal to source folder on paste 2021-07-26 20:49:26 +02:00
ed
279c9d706a list volumes/permissions on startup 2021-07-26 20:07:23 +02:00
ed
806e7b5530 fix argv compat bug 2021-07-26 19:40:12 +02:00
ed
f3dc6a217b use the new toast in md-editor 2021-07-26 19:20:36 +02:00
ed
7671d791fa rename works + more symlink fixes 2021-07-26 17:44:20 +02:00
ed
8cd84608a5 toast coloring 2021-07-26 03:00:37 +02:00
ed
980c6fc810 add scheduled rescans + fix mv bugs 2021-07-26 02:34:56 +02:00
ed
fb40a484c5 mv(folder) works 2021-07-26 01:26:58 +02:00
ed
daa9dedcaa rm works 2021-07-26 00:29:28 +02:00
ed
0d634345ac signal handling was still busted 2021-07-26 00:19:33 +02:00
ed
e648252479 mv works (at least in trivial cases) 2021-07-25 21:15:43 +02:00
ed
179d7a9ad8 bikeshedding 2021-07-25 19:47:40 +02:00
ed
19bc962ad5 add toasts 2021-07-25 10:50:11 +02:00
ed
27cce086c6 fileman ui 2021-07-25 01:09:14 +02:00
ed
fec0c620d4 add accounts/volumes section 2021-07-24 22:26:52 +02:00
ed
05a1a31cab too soon 2021-07-24 22:20:02 +02:00
ed
d020527c6f centralize mojibake support stuff 2021-07-24 21:56:55 +02:00
ed
4451485664 mv/rm (serverside), 100% untested 2021-07-24 20:08:31 +02:00
ed
a4e1a3738a more deletion progress 2021-07-23 23:42:07 +02:00
ed
4339dbeb8d mv/rm handlers 2021-07-23 01:14:49 +02:00
ed
5b0605774c add move/delete permission flags 2021-07-22 23:48:29 +02:00
ed
e3684e25f8 treat symlinks as regular files in db 2021-07-22 19:34:40 +02:00
ed
1359213196 prefer native sqlite3 backup (journal-aware) 2021-07-22 19:10:42 +02:00
ed
03efc6a169 support ancient glibc 2021-07-22 19:04:59 +02:00
ed
15b5982211 v0.11.47 2021-07-22 10:09:04 +02:00
ed
0eb3a5d387 ignorable exceptions 2021-07-22 10:08:39 +02:00
Lytexx
7f8777389c fix typo 2021-07-22 09:34:04 +02:00
ed
4eb20f10ad v0.11.46 2021-07-22 08:42:27 +02:00
ed
daa11df558 avoid chrome bug 809574 2021-07-22 08:40:46 +02:00
ed
1bb0db30a0 fix logout link going 404 2021-07-21 01:30:27 +02:00
ed
02910b0020 v0.11.45 2021-07-20 23:23:08 +02:00
ed
23b8901c9c include localstore on the crashpage 2021-07-20 23:22:35 +02:00
ed
99f6ed0cd7 up2k-cli: avoid loading sha.js multiple times 2021-07-20 23:14:30 +02:00
ed
890c310880 another attempt at fixing tooltips on iphone 2021-07-20 23:07:15 +02:00
ed
0194eeb31f add login/permissions indicator 2021-07-20 22:42:03 +02:00
ed
f9be4c62b1 v0.11.44 2021-07-20 01:03:08 +02:00
ed
027e8c18f1 sfx: option to remove mouse cursor 2021-07-20 01:00:28 +02:00
ed
4a3bb35a95 sfx: option to remove scp.woff2 2021-07-20 00:45:54 +02:00
ed
4bfb0d4494 notes 2021-07-19 23:46:44 +02:00
ed
7e0ef03a1e fix audio player edgecase (continue into next folder with sidebar closed) 2021-07-19 23:10:48 +02:00
81 changed files with 7558 additions and 2534 deletions

.github/ISSUE_TEMPLATE/bug_report.md

@@ -0,0 +1,40 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: '9001'
---
NOTE:
all of the below are optional, consider them as inspiration, delete and rewrite at will, thx md
**Describe the bug**
a description of what the bug is
**To Reproduce**
List of steps to reproduce the issue, or, if it's hard to reproduce, then at least a detailed explanation of what you did to run into it
**Expected behavior**
a description of what you expected to happen
**Screenshots**
if applicable, add screenshots to help explain your problem, such as the kickass crashpage :^)
**Server details**
if the issue is possibly on the server-side, then mention some of the following:
* server OS / version:
* python version:
* copyparty arguments:
* filesystem (`lsblk -f` on linux):
**Client details**
if the issue is possibly on the client-side, then mention some of the following:
* the device type and model:
* OS version:
* browser version:
**Additional context**
any other context about the problem here


@@ -0,0 +1,22 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: '9001'
---
all of the below are optional, consider them as inspiration, delete and rewrite at will
**is your feature request related to a problem? Please describe.**
a description of what the problem is, for example, `I'm always frustrated when [...]` or `Why is it not possible to [...]`
**Describe the idea / solution you'd like**
a description of what you want to happen
**Describe any alternatives you've considered**
a description of any alternative solutions or features you've considered
**Additional context**
add any other context or screenshots about the feature request here


@@ -0,0 +1,10 @@
---
name: Something else
about: "┐(゚∀゚)┌"
title: ''
labels: ''
assignees: ''
---

.github/branch-rename.md

@@ -0,0 +1,7 @@
modernize your local checkout of the repo like so,
```sh
git branch -m master hovudstraum
git fetch origin
git branch -u origin/hovudstraum hovudstraum
git remote set-head origin -a
```

CODE_OF_CONDUCT.md

@@ -0,0 +1,24 @@
in the words of Abraham Lincoln:
> Be excellent to each other... and... PARTY ON, DUDES!
more specifically I'll paraphrase some examples from a german automotive corporation as they cover all the bases without being too wordy
## Examples of unacceptable behavior
* intimidation, harassment, trolling
* insulting, derogatory, harmful or prejudicial comments
* posting private information without permission
* political or personal attacks
## Examples of expected behavior
* being nice, friendly, welcoming, inclusive, mindful and empathetic
* acting considerate, modest, respectful
* using polite and inclusive language
* criticize constructively and accept constructive criticism
* respect different points of view
## finally and even more specifically,
* parse opinions and feedback objectively without prejudice
* it's the message that matters, not who said it
aaand that's how you say `be nice` in a way that fills half a floppy w

CONTRIBUTING.md

@@ -0,0 +1,3 @@
* do something cool
really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight

README.md

@@ -6,85 +6,100 @@
## summary
turn your phone or raspi into a portable file server with resumable uploads/downloads using IE6 or any other browser
turn your phone or raspi into a portable file server with resumable uploads/downloads using *any* web browser
* server runs on anything with `py2.7` or `py3.3+`
* server only needs `py2.7` or `py3.3+`, all dependencies optional
* browse/upload with IE4 / netscape4.0 on win3.11 (heh)
* *resumable* uploads need `firefox 34+` / `chrome 41+` / `safari 7+` for full speed
* code standard: `black`
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [thumbnails](#thumbnails) // [md-viewer](#markdown-viewer) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [ie4](#browser-support)
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer) // [ie4](#browser-support)
## readme toc
* top
* [quickstart](#quickstart)
* [on debian](#on-debian)
* [notes](#notes)
* [status](#status)
* [testimonials](#testimonials)
* **[quickstart](#quickstart)** - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
* [on servers](#on-servers) - you may also want these, especially on servers
* [on debian](#on-debian) - recommended additional steps on debian
* [notes](#notes) - general notes
* [status](#status) - feature summary
* [testimonials](#testimonials) - small collection of user feedback
* [bugs](#bugs)
* [general bugs](#general-bugs)
* [not my bugs](#not-my-bugs)
* [the browser](#the-browser)
* [tabs](#tabs)
* [hotkeys](#hotkeys)
* [tree-mode](#tree-mode)
* [thumbnails](#thumbnails)
* [zip downloads](#zip-downloads)
* [uploading](#uploading)
* [file-search](#file-search)
* [markdown viewer](#markdown-viewer)
* [accounts and volumes](#accounts-and-volumes) - per-folder, per-user permissions
* [the browser](#the-browser) - accessing a copyparty server using a web-browser
* [tabs](#tabs) - the main tabs in the ui
* [hotkeys](#hotkeys) - the browser has the following hotkeys
* [navpane](#navpane) - switching between breadcrumbs or navpane
* [thumbnails](#thumbnails) - press `g` to toggle grid-view instead of the file listing
* [zip downloads](#zip-downloads) - download folders (or file selections) as `zip` or `tar` files
* [uploading](#uploading) - drag files/folders into the web-browser to upload
* [file-search](#file-search) - dropping files into the browser also lets you see if they exist on the server
* [unpost](#unpost) - undo/delete accidental uploads
* [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
* [markdown viewer](#markdown-viewer) - and there are *two* editors
* [other tricks](#other-tricks)
* [searching](#searching)
* [search configuration](#search-configuration)
* [database location](#database-location)
* [metadata from audio files](#metadata-from-audio-files)
* [file parser plugins](#file-parser-plugins)
* [searching](#searching) - search by size, date, path/name, mp3-tags, ...
* [server config](#server-config)
* [file indexing](#file-indexing)
* [upload rules](#upload-rules) - set upload rules using volume flags
* [compress uploads](#compress-uploads) - files can be autocompressed on upload
* [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
* [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags
* [complete examples](#complete-examples)
* [browser support](#browser-support)
* [client examples](#client-examples)
* [up2k](#up2k)
* [performance](#performance)
* [dependencies](#dependencies)
* [optional dependencies](#optional-dependencies)
* [browser support](#browser-support) - TLDR: yes
* [client examples](#client-examples) - interact with copyparty using non-browser clients
* [up2k](#up2k) - quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
* [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
* [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
* [security](#security) - some notes on hardening
* [gotchas](#gotchas) - behavior that might be unexpected
* [dependencies](#dependencies) - mandatory deps
* [optional dependencies](#optional-dependencies) - install these to enable bonus features
* [install recommended deps](#install-recommended-deps)
* [optional gpl stuff](#optional-gpl-stuff)
* [sfx](#sfx)
* [sfx repack](#sfx-repack)
* [sfx](#sfx) - there are two self-contained "binaries"
* [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features
* [install on android](#install-on-android)
* [building](#building)
* [dev env setup](#dev-env-setup)
* [just the sfx](#just-the-sfx)
* [complete release](#complete-release)
* [todo](#todo)
* [todo](#todo) - roughly sorted by priority
* [discarded ideas](#discarded-ideas)
## quickstart
download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!
download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone full access to the current folder; see `-h` for help if you want accounts and volumes etc
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; see `-h` for help if you want [accounts and volumes](#accounts-and-volumes) etc
some recommended options:
* `-e2dsa` enables general file indexing, see [search configuration](#search-configuration)
* `-e2dsa` enables general [file indexing](#file-indexing)
* `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen), see [optional dependencies](#optional-dependencies)
* `-v /mnt/music:/music:r:afoo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, with user `foo` as `a`dmin (read/write), password `bar`
* the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set
* replace `:r:afoo` with `:rfoo` to only make the folder readable by `foo` and nobody else
* in addition to `r`ead and `a`dmin, `w`rite makes a folder write-only, so cannot list/access files in it
* `-v /mnt/music:/music:r:rw,foo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, and read-write for user `foo`, password `bar`
* replace `:r:rw,foo` with `:r,foo` to only make the folder readable by `foo` and nobody else
* see [accounts and volumes](#accounts-and-volumes) for the syntax and other access levels (`r`ead, `w`rite, `m`ove, `d`elete)
* `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access
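putting the recommended options together, a hypothetical invocation could look like this (account name, password, and paths are placeholders):

```sh
# indexing + tags, one account, and a music volume that is
# readable by everyone and read-write for user foo
python3 copyparty-sfx.py -e2dsa -e2ts -a foo:bar \
  -v /mnt/music:/music:r:rw,foo
```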
### on servers
you may also want these, especially on servers:
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
* [contrib/systemd/prisonparty.service](contrib/systemd/prisonparty.service) to run it in a chroot (for extra security)
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)
### on debian
recommended steps to enable audio metadata and thumbnails (from images and videos):
recommended additional steps on debian which enable audio metadata and thumbnails (from images and videos):
* as root, run the following:
`apt install python3 python3-pip python3-dev ffmpeg`
@@ -97,7 +112,7 @@ recommended steps to enable audio metadata and thumbnails (from images and video
## notes
general:
general notes:
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
* because no browsers currently implement the media-query to do this properly orz
@@ -106,43 +121,45 @@ browser-specific:
* Android-Chrome: increase "parallel uploads" for higher speed (android bug)
* Android-Firefox: takes a while to select files (their fix for ☝️)
* Desktop-Firefox: ~~may use gigabytes of RAM if your files are massive~~ *seems to be OK now*
* Desktop-Firefox: may stop you from deleting folders you've uploaded until you visit `about:memory` and click `Minimize memory usage`
* Desktop-Firefox: may stop you from deleting files you've uploaded until you visit `about:memory` and click `Minimize memory usage`
## status
summary: all planned features work! now please enjoy the bloatening
feature summary
* backend stuff
* ☑ sanic multipart parser
* ☑ multiprocessing (actual multithreading)
* ☑ volumes (mountpoints)
* ☑ accounts
* ☑ [accounts](#accounts-and-volumes)
* upload
* ☑ basic: plain multipart, ie6 support
* ☑ up2k: js, resumable, multithreaded
* ☑ [up2k](#uploading): js, resumable, multithreaded
* ☑ stash: simple PUT filedropper
* ☑ [unpost](#unpost): undo/delete accidental uploads
* ☑ symlink/discard existing files (content-matching)
* download
* ☑ single files in browser
* ☑ folders as zip / tar files
* ☑ [folders as zip / tar files](#zip-downloads)
* ☑ FUSE client (read-only)
* browser
* ☑ tree-view
* ☑ [navpane](#navpane) (directory tree sidebar)
* ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
* ☑ audio player (with OS media controls)
* ☑ thumbnails
* ☑ image gallery with webm player
* ☑ [thumbnails](#thumbnails)
* ☑ ...of images using Pillow
* ☑ ...of videos using FFmpeg
* ☑ cache eviction (max-age; maybe max-size eventually)
* ☑ image gallery with webm player
* ☑ SPA (browse while uploading)
* if you use the file-tree on the left only, not folders in the file list
* if you use the navpane to navigate, not folders in the file list
* server indexing
* ☑ locate files by contents
* ☑ [locate files by contents](#file-search)
* ☑ search by name/path/date/size
* ☑ search by ID3-tags etc.
* ☑ [search by ID3-tags etc.](#searching)
* markdown
* ☑ viewer
* ☑ [viewer](#markdown-viewer)
* ☑ editor (sure why not)
@@ -158,14 +175,11 @@ small collection of user feedback
* Windows: python 3.7 and older cannot read tags with FFprobe, so use Mutagen or upgrade
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* Windows: python 2.7 cannot handle filenames with mojibake
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
## general bugs
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
* cannot mount something at `/d1/d2/d3` unless `d2` exists inside `d1`
* dupe files will not have metadata (audio tags etc) displayed in the file listing
* because they don't get `up` entries in the db (probably best fix) and `tx_browser` does not `lstat`
* probably more, pls let me know
## not my bugs
@@ -177,33 +191,72 @@ small collection of user feedback
* this is an msys2 bug, the regular windows edition of python is fine
* VirtualBox: sqlite throws `Disk I/O Error` when running in a VM and the up2k database is in a vboxsf
* use `--hist` or the `hist` volflag (`-v [...]:chist=/tmp/foo`) to place the db inside the vm instead
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db inside the vm instead
# accounts and volumes
per-folder, per-user permissions
* `-a usr:pwd` adds account `usr` with password `pwd`
* `-v .::r` adds current-folder `.` as the webroot, `r`eadable by anyone
* the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set
* granting the same permissions to multiple accounts:
`-v .::r,usr1,usr2:rw,usr3,usr4` = usr1/2 read-only, 3/4 read-write
permissions:
* `r` (read): browse folder contents, download files, download as zip/tar
* `w` (write): upload files, move files *into* this folder
* `m` (move): move files/folders *from* this folder
* `d` (delete): delete files/folders
examples:
* add accounts named u1, u2, u3 with passwords p1, p2, p3: `-a u1:p1 -a u2:p2 -a u3:p3`
* make folder `/srv` the root of the filesystem, read-only by anyone: `-v /srv::r`
* make folder `/mnt/music` available at `/music`, read-only for u1 and u2, read-write for u3: `-v /mnt/music:music:r,u1,u2:rw,u3`
* unauthorized users accessing the webroot can see that the `music` folder exists, but cannot open it
* make folder `/mnt/incoming` available at `/inc`, write-only for u1, read-move for u2: `-v /mnt/incoming:inc:w,u1:rm,u2`
* unauthorized users accessing the webroot can see that the `inc` folder exists, but cannot open it
* `u1` can open the `inc` folder, but cannot see the contents, only upload new files to it
* `u2` can browse it and move files *from* `/inc` into any folder where `u2` has write-access
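as a sketch, the three examples above could be combined into a single (hypothetical) invocation:

```sh
# three accounts, a read-only webroot, a music volume (ro for u1/u2, rw for u3),
# and an inbox that is write-only for u1 and read-move for u2
python3 copyparty-sfx.py \
  -a u1:p1 -a u2:p2 -a u3:p3 \
  -v /srv::r \
  -v /mnt/music:music:r,u1,u2:rw,u3 \
  -v /mnt/incoming:inc:w,u1:rm,u2
```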
# the browser
![copyparty-browser-fs8](https://user-images.githubusercontent.com/241032/115978054-65106380-a57d-11eb-98f8-59e3dee73557.png)
accessing a copyparty server using a web-browser
![copyparty-browser-fs8](https://user-images.githubusercontent.com/241032/129635359-d6dd9b07-8079-4020-ad77-2bfdb9ebd8d5.png)
## tabs
* `[🔎]` search by size, date, path/name, mp3-tags ... see [searching](#searching)
* `[🚀]` and `[🎈]` are the uploaders, see [uploading](#uploading)
* `[📂]` mkdir, create directories
* `[📝]` new-md, create a new markdown document
* `[📟]` send-msg, either to server-log or into textfiles if `--urlform save`
the main tabs in the ui
* `[🔎]` [search](#searching) by size, date, path/name, mp3-tags ...
* `[🧯]` [unpost](#unpost): undo/delete accidental uploads
* `[🚀]` and `[🎈]` are the [uploaders](#uploading)
* `[📂]` mkdir: create directories
* `[📝]` new-md: create a new markdown document
* `[📟]` send-msg: either to server-log or into textfiles if `--urlform save`
* `[🎺]` audio-player config options
* `[⚙️]` general client config options
## hotkeys
the browser has the following hotkeys (assumes qwerty, ignores actual layout)
* `B` toggle breadcrumbs / directory tree
the browser has the following hotkeys (always qwerty)
* `B` toggle breadcrumbs / [navpane](#navpane)
* `I/K` prev/next folder
* `M` parent folder (or unexpand current)
* `G` toggle list / grid view
* `G` toggle list / [grid view](#thumbnails)
* `T` toggle thumbnails / icons
* `ctrl-X` cut selected files/folders
* `ctrl-V` paste
* `F2` [rename](#batch-rename) selected file/folder
* when a file/folder is selected (in not-grid-view):
* `Up/Down` move cursor
* shift+`Up/Down` select and move cursor
* ctrl+`Up/Down` move cursor and scroll viewport
* `Space` toggle file selection
* `Ctrl-A` toggle select all
* when playing audio:
* `J/L` prev/next song
* `U/O` skip 10sec back/forward
@@ -212,17 +265,19 @@ the browser has the following hotkeys (assumes qwerty, ignores actual layout)
* when viewing images / playing videos:
* `J/L, Left/Right` prev/next file
* `Home/End` first/last file
* `S` toggle selection
* `R` rotate clockwise (shift=ccw)
* `Esc` close viewer
* videos:
* `U/O` skip 10sec back/forward
* `P/K/Space` play/pause
* `F` fullscreen
* `C` continue playing next video
* `R` loop
* `V` loop
* `M` mute
* when tree-sidebar is open:
* when the navpane is open:
* `A/D` adjust tree width
* in the grid view:
* in the [grid view](#thumbnails):
* `S` toggle multiselect
* shift+`A/D` zoom
* in the markdown editor:
@@ -233,16 +288,23 @@ the browser has the following hotkeys (assumes qwerty, ignores actual layout)
* `^e` toggle editor / preview
* `^up, ^down` jump paragraphs
## tree-mode
by default there's a breadcrumbs path; you can replace this with a tree-browser sidebar thing by clicking the `🌲` or pressing the `B` hotkey
## navpane
click `[-]` and `[+]` (or hotkeys `A`/`D`) to adjust the size, and the `[a]` toggles if the tree should widen dynamically as you go deeper or stay fixed-size
switching between breadcrumbs or navpane
click the `🌲` or press the `B` hotkey to toggle between the breadcrumbs path (default) and the navpane (tree-browser sidebar thing)
* `[-]` and `[+]` (or hotkeys `A`/`D`) adjust the size
* `[v]` jumps to the currently open folder
* `[a]` toggles automatic widening as you go deeper
## thumbnails
![copyparty-thumbs-fs8](https://user-images.githubusercontent.com/241032/120070302-10836b00-c08a-11eb-8eb4-82004a34c342.png)
press `g` to toggle grid-view instead of the file listing, and `t` toggles icons / thumbnails
![copyparty-thumbs-fs8](https://user-images.githubusercontent.com/241032/129636211-abd20fa2-a953-4366-9423-1c88ebb96ba9.png)
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
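for example, a minimal sketch that keeps image thumbnails but turns off the FFmpeg-based video thumbnails:

```sh
# hypothetical: Pillow thumbnails stay on, video thumbnailing is disabled
python3 copyparty-sfx.py --no-vthumb
```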
@@ -253,7 +315,9 @@ in the grid/thumbnail view, if the audio player panel is open, songs will start
## zip downloads
the `zip` link next to folders can produce various types of zip/tar files using these alternatives in the browser settings tab:
download folders (or file selections) as `zip` or `tar` files
select which type of archive you want in the `[⚙️] config` tab:
| name | url-suffix | description |
|--|--|--|
@@ -270,13 +334,18 @@ the `zip` link next to folders can produce various types of zip/tar files using
you can also zip a selection of files or folders by clicking them in the browser, that brings up a selection editor and zip button in the bottom right
![copyparty-zipsel-fs8](https://user-images.githubusercontent.com/241032/116008321-372a2e00-a614-11eb-9a4a-4a1fd9074224.png)
![copyparty-zipsel-fs8](https://user-images.githubusercontent.com/241032/129635374-e5136e01-470a-49b1-a762-848e8a4c9cdc.png)
## uploading
two upload methods are available in the html client:
* `🎈 bup`, the basic uploader, supports almost every browser since netscape 4.0
* `🚀 up2k`, the fancy one
drag files/folders into the web-browser to upload
this initiates an upload using `up2k`; there are two uploaders available:
* `[🎈] bup`, the basic uploader, supports almost every browser since netscape 4.0
* `[🚀] up2k`, the fancy one
you can also undo/delete uploads by using `[🧯]` [unpost](#unpost)
up2k has several advantages:
* you can drop folders into the browser (files are added recursively)
@@ -290,43 +359,126 @@ up2k has several advantages:
see [up2k](#up2k) for details on how it works
![copyparty-upload-fs8](https://user-images.githubusercontent.com/241032/115978061-680b5400-a57d-11eb-9ef6-cbb5f60aeccc.png)
![copyparty-upload-fs8](https://user-images.githubusercontent.com/241032/129635371-48fc54ca-fa91-48e3-9b1d-ba413e4b68cb.png)
**protip:** you can avoid scaring away users with [docs/minimal-up2k.html](docs/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
the up2k UI is the epitome of polished inutitive experiences:
* "parallel uploads" specifies how many chunks to upload at the same time
* `[🏃]` analysis of other files should continue while one is uploading
* `[💭]` ask for confirmation before files are added to the list
* `[💤]` sync uploading between other copyparty tabs so only one is active
* `[🔎]` switch between upload and file-search mode
* `[💭]` ask for confirmation before files are added to the queue
* `[💤]` sync uploading between other copyparty browser-tabs so only one is active
* `[🔎]` switch between upload and [file-search](#file-search) mode
* ignore `[🔎]` if you add files by dragging them into the browser
and then there's the tabs below it,
* `[ok]` is uploads which completed successfully
* `[ng]` is the uploads which failed / got rejected (already exists, ...)
* `[ok]` is the files which completed successfully
* `[ng]` is the ones that failed / got rejected (already exists, ...)
* `[done]` shows a combined list of `[ok]` and `[ng]`, chronological order
* `[busy]` files which are currently hashing, pending-upload, or uploading
* plus up to 3 entries each from `[done]` and `[que]` for context
* `[que]` is all the files that are still queued
note that since up2k has to read each file twice, `[🎈 bup]` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD)
if you are resuming a massive upload and want to skip hashing the files which already finished, you can enable `turbo` in the `[⚙️] config` tab, but please read the tooltip on that button
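since the feature list also mentions a plain-PUT "stash" filedropper, a non-browser upload could look roughly like this (host, port, and path are placeholders; exact server-side handling of PUT uploads may differ):

```sh
# hypothetical: upload a single file with a plain HTTP PUT
curl -T song.opus http://192.168.1.9:3923/music/
```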
### file-search
![copyparty-fsearch-fs8](https://user-images.githubusercontent.com/241032/116008320-36919780-a614-11eb-803f-04162326a700.png)
dropping files into the browser also lets you see if they exist on the server
in the `[🚀 up2k]` tab, after toggling the `[🔎]` switch green, any files/folders you drop onto the dropzone will be hashed on the client-side. Each hash is sent to the server which checks if that file exists somewhere already
![copyparty-fsearch-fs8](https://user-images.githubusercontent.com/241032/129635361-c79286f0-b8f1-440e-aaf4-6e929428fac9.png)
when you drag/drop files into the browser, you will see two dropzones: `Upload` and `Search`
> on a phone? toggle the `[🔎]` switch green before tapping the big yellow Search button to select your files
the files will be hashed on the client-side, and each hash is sent to the server, which checks if that file exists somewhere
files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much
adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files (or just refresh the page)
note that since up2k has to read the file twice, `[🎈 bup]` can be up to 2x faster in extreme cases (if your internet connection is faster than the read-speed of your HDD)
up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well (thanks to tls also functioning as an integrity check)
### unpost
undo/delete accidental uploads
![copyparty-unpost-fs8](https://user-images.githubusercontent.com/241032/129635368-3afa6634-c20f-418c-90dc-ec411f3b3897.png)
you can unpost even if you don't have regular move/delete access, however only for files uploaded within the past `--unpost` seconds (default 12 hours) and the server must be running with `-e2d`
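a minimal sketch, assuming `--unpost` takes the window in seconds as described above:

```sh
# hypothetical: keep the upload database and allow unposting for 24 hours
python3 copyparty-sfx.py -e2d --unpost 86400
```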
## file manager
cut/paste, rename, and delete files/folders (if you have permission)
file selection: click somewhere on the line (not the link itself), then:
* `space` to toggle
* `up/down` to move
* `shift-up/down` to move-and-select
* `ctrl-shift-up/down` to also scroll
* cut: select some files and `ctrl-x`
* paste: `ctrl-v` in another folder
* rename: `F2`
you can move files across browser tabs (cut in one tab, paste in another)
## batch rename
select some files and press `F2` to bring up the rename UI
![batch-rename-fs8](https://user-images.githubusercontent.com/241032/128434204-eb136680-3c07-4ec7-92e0-ae86af20c241.png)
quick explanation of the buttons,
* `[✅ apply rename]` confirms and begins renaming
* `[❌ cancel]` aborts and closes the rename window
* `[↺ reset]` reverts any filename changes back to the original name
* `[decode]` does a URL-decode on the filename, fixing stuff like `&` and `%20`
* `[advanced]` toggles advanced mode
advanced mode: rename files based on rules to decide the new names, based on the original name (regex), or based on the tags collected from the file (artist/title/...), or a mix of both
in advanced mode,
* `[case]` toggles case-sensitive regex
* `regex` is the regex pattern to apply to the original filename; any files which don't match will be skipped
* `format` is the new filename, taking values from regex capturing groups and/or from file tags
* very loosely based on foobar2000 syntax
* `presets` lets you save rename rules for later
available functions:
* `$lpad(text, length, pad_char)`
* `$rpad(text, length, pad_char)`
so,
say you have a file named [`meganeko - Eclipse - 07 Sirius A.mp3`](https://www.youtube.com/watch?v=-dtb0vDPruI) (absolutely fantastic album btw) and the tags are: `Album:Eclipse`, `Artist:meganeko`, `Title:Sirius A`, `tn:7`
you could use just regex to rename it:
* `regex` = `(.*) - (.*) - ([0-9]{2}) (.*)`
* `format` = `(3). (1) - (4)`
* `output` = `07. meganeko - Sirius A.mp3`
or you could use just tags:
* `format` = `$lpad((tn),2,0). (artist) - (title).(ext)`
* `output` = `7. meganeko - Sirius A.mp3`
or a mix of both:
* `regex` = ` - ([0-9]{2}) `
* `format` = `(1). (artist) - (title).(ext)`
* `output` = `07. meganeko - Sirius A.mp3`
the metadata keys you can use in the format field are the ones in the file-browser table header (whatever is collected with `-mte` and `-mtp`)
## markdown viewer
and there are *two* editors
![copyparty-md-read-fs8](https://user-images.githubusercontent.com/241032/115978057-66419080-a57d-11eb-8539-d2be843991aa.png)
* the document preview has a max-width which is the same as an A4 paper when printed
@@ -338,10 +490,18 @@ up2k has saved a few uploads from becoming corrupted in-transfer already; caught
* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
* click the bottom-left `π` to open a javascript prompt for debugging
# searching
* files named `.prologue.html` / `.epilogue.html` will be rendered before/after directory listings unless `--no-logues`
![copyparty-search-fs8](https://user-images.githubusercontent.com/241032/115978060-6772bd80-a57d-11eb-81d3-174e869b72c3.png)
* files named `README.md` / `readme.md` will be rendered after directory listings unless `--no-readme` (but `.epilogue.html` takes precedence)
## searching
search by size, date, path/name, mp3-tags, ...
![copyparty-search-fs8](https://user-images.githubusercontent.com/241032/129635365-c0ff2a9f-0ee5-4fc3-8bb6-006033cf67b8.png)
when started with `-e2dsa` copyparty will scan/index all your files. This avoids duplicates on upload, and also makes the volumes searchable through the web-ui:
* make search queries by `size`/`date`/`directory-path`/`filename`, or...
@@ -351,44 +511,89 @@ path/name queries are space-separated, AND'ed together, and words are negated wi
* path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
* name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9)
add `-e2ts` to also scan/index tags from music files:
add the argument `-e2ts` to also scan/index tags from music files, which brings us over to:
## search configuration
# server config
searching relies on two databases, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`). Configuration can be done through arguments, volume flags, or a mix of both.
## file indexing
file indexing relies on two database tables, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`), stored in `.hist/up2k.db`. Configuration can be done through arguments, volume flags, or a mix of both.
through arguments:
* `-e2d` enables file indexing on upload
* `-e2ds` scans writable folders for new files on startup
* `-e2dsa` scans all mounted volumes (including readonly ones)
* `-e2ds` also scans writable folders for new files on startup
* `-e2dsa` also scans all mounted volumes (including readonly ones)
* `-e2t` enables metadata indexing on upload
* `-e2ts` scans for tags in all files that don't have tags yet
* `-e2tsr` deletes all existing tags, does a full reindex
* `-e2ts` also scans for tags in all files that don't have tags yet
* `-e2tsr` also deletes all existing tags, doing a full reindex
the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
* `-v ~/music::r:ce2dsa:ce2tsr` does a full reindex of everything on startup
* `-v ~/music::r:cd2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::r:cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`
* `-v ~/music::r:c,e2dsa:c,e2tsr` does a full reindex of everything on startup
* `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*`
note:
* the parser currently can't handle `c,e2dsa,e2tsr` so you have to `c,e2dsa:c,e2tsr`
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
you can choose to only index filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash` or the volume-flag `cdhash`, this has the following consequences:
to save some time, you can choose to only index filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash` or the volume-flag `:c,dhash`, this has the following consequences:
* initial indexing is way faster, especially when the volume is on a network disk
* makes it impossible to [file-search](#file-search)
* if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected
if you set `--no-hash`, you can enable hashing for specific volumes using flag `cehash`
if you set `--no-hash`, you can enable hashing for specific volumes using flag `:c,ehash`
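a sketch combining the arguments and volume flags above (paths are placeholders; exact flag combinations may need adjusting):

```sh
# hypothetical: full startup scan + tag indexing, skip content-hashing globally,
# but re-enable hashing for the music volume and disable tag-indexing for pics
python3 copyparty-sfx.py -e2dsa -e2ts --no-hash \
  -v ~/music::r:c,ehash \
  -v ~/pics::r:c,d2t
```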
## upload rules
set upload rules using volume flags, some examples:
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: b, k, m, g)
* `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`:
* `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1)
* `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format
* if someone uploads to `/foo/bar` the path would be rewritten to `/foo/bar/2021/08/06/23` for example
* but the actual value is not verified, just the structure, so the uploader can choose any values which conform to the format string
* just to avoid additional complexity in up2k which is enough of a mess already
* `:c,lifetime=300` delete uploaded files when they become 5 minutes old
you can also set transaction limits which apply per-IP and per-volume, but these assume `-j 1` (default) otherwise the limits will be off, for example `-j 4` would allow anywhere between 1x and 4x the limits you set depending on which processing node the client gets routed to
* `:c,maxn=250,3600` allows 250 files over 1 hour from each IP (tracked per-volume)
* `:c,maxb=1g,300` allows 1 GiB total over 5 minutes from each IP (tracked per-volume)
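as a sketch, several of the rules above chained onto one write-only inbox volume (the limits are arbitrary placeholders):

```sh
# hypothetical: size limit, per-IP rate limits, and 5-minute retention
python3 copyparty-sfx.py \
  -v /mnt/incoming:inc:w:c,sz=1k-3m:c,maxn=250,3600:c,maxb=1g,300:c,lifetime=300
```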
## compress uploads
files can be autocompressed on upload, either on user-request (if config allows) or forced by server-config
* volume flag `gz` allows gz compression
* volume flag `xz` allows lzma compression
* volume flag `pk` **forces** compression on all files
* url parameter `pk` requests compression with server-default algorithm
* url parameter `gz` or `xz` requests compression with a specific algorithm
* url parameter `xz` requests xz compression
things to note,
* the `gz` and `xz` arguments take a single optional argument, the compression level (range 0 to 9)
* the `pk` volume flag takes the optional argument `ALGORITHM,LEVEL` which will then be forced for all uploads, for example `gz,9` or `xz,0`
* default compression is gzip level 9
* all upload methods except up2k are supported
* the files will be indexed after compression, so dupe-detection and file-search will not work as expected
some examples,
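a couple of hypothetical sketches based on the flags and url parameters above (host, port, and filenames are placeholders; exact parameter handling may differ):

```sh
# allow gz and xz compression on a write-only volume (nothing is forced)
python3 copyparty-sfx.py -v /mnt/inc:inc:w:c,gz:c,xz

# a client can then request compression of a plain-PUT upload
curl -T access.log 'http://192.168.1.9:3923/inc/?gz'
curl -T access.log 'http://192.168.1.9:3923/inc/?xz'
```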
## database location
in-volume (`.hist/up2k.db`, default) or somewhere else
copyparty creates a subfolder named `.hist` inside each volume where it stores the database, thumbnails, and some other stuff
this can instead be kept in a single place using the `--hist` argument, or the `hist=` volume flag, or a mix of both:
* `--hist ~/.cache/copyparty -v ~/music::r:chist=-` sets `~/.cache/copyparty` as the default place to put volume info, but `~/music` gets the regular `.hist` subfolder (`-` restores default behavior)
* `--hist ~/.cache/copyparty -v ~/music::r:c,hist=-` sets `~/.cache/copyparty` as the default place to put volume info, but `~/music` gets the regular `.hist` subfolder (`-` restores default behavior)
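as a sketch of the volflag form, redirecting a single volume's database and thumbnails to a specific location (the paths are hypothetical):

```sh
python3 copyparty-sfx.py --hist ~/.cache/copyparty -v ~/pics::r:c,hist=/mnt/ssd/cpp-hist
```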
note:
* markdown edits are always stored in a local `.hist` subdirectory
@@ -398,16 +603,20 @@ note:
## metadata from audio files
set `-e2t` to index tags on upload
`-mte` decides which tags to index and display in the browser (and also the display order), this can be changed per-volume:
* `-v ~/music::r:cmte=title,artist` indexes and displays *title* followed by *artist*
* `-v ~/music::r:c,mte=title,artist` indexes and displays *title* followed by *artist*
if you add/remove a tag from `mte` you will need to run with `-e2tsr` once to rebuild the database, otherwise only new files will be affected
but instead of using `-mte`, `-mth` is a better way to hide tags in the browser: these tags will not be displayed by default, but they still get indexed and become searchable, and users can choose to unhide them in the `[⚙️] config` pane
`-mtm` can be used to add or redefine a metadata mapping, say you have media files with `foo` and `bar` tags and you want them to display as `qux` in the browser (preferring `foo` if both are present), then do `-mtm qux=foo,bar` and now you can `-mte artist,title,qux`
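a combined sketch of that (the `foo`/`bar`/`qux` tag names are made up):

```sh
# index everything; display artist and title followed by the merged "qux" tag
python3 copyparty-sfx.py -e2dsa -e2ts -mtm qux=foo,bar -mte artist,title,qux
```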
tags that start with a `.` such as `.bpm` and `.dur`(ation) indicate a numeric value
see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)
see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)
`--no-mutagen` disables Mutagen and uses FFprobe instead, which...
* is about 20x slower than Mutagen
@@ -419,11 +628,13 @@ see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copy
## file parser plugins
provide custom parsers to index additional tags
copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as an argument or a volume flag); there is a default timeout of 30sec
* `-mtp .bpm=~/bin/audio-bpm.py` will execute `~/bin/audio-bpm.py` with the audio file as argument 1 to provide the `.bpm` tag, if that does not exist in the audio metadata
* `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
* `-v ~/music::r:cmtp=.bpm=~/bin/audio-bpm.py:cmtp=key=f,t5,~/bin/audio-key.py` both as a per-volume config wow this is getting ugly
* `-v ~/music::r:c,mtp=.bpm=~/bin/audio-bpm.py:c,mtp=key=f,t5,~/bin/audio-key.py` both as a per-volume config wow this is getting ugly
*but wait, there's more!* `-mtp` can be used for non-audio files as well using the `a` flag: `ay` = only audio files, `an` = only non-audio files, `ad` = all files (d as in dontcare)
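as a sketch of what such a parser can look like: copyparty runs it with the file as argument 1, and a single-tag parser appears to just print the value on stdout (multi-tag parsers like `media-hash.py` further down print a JSON object instead). everything below is hypothetical except that calling convention:

```sh
#!/bin/sh
# hypothetical -mtp parser which reports the filesize in KiB as a ".kib" tag
# usage (presumably): -mtp .kib=ad,~/bin/kib.sh   (ad = run on all file types)
echo $(( $(stat -c%s -- "$1") / 1024 ))
```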
@@ -439,32 +650,38 @@ copyparty can invoke external programs to collect additional metadata for files
# browser support
TLDR: yes
![copyparty-ie4-fs8](https://user-images.githubusercontent.com/241032/118192791-fb31fe00-b446-11eb-9647-898ea8efc1f7.png)
`ie` = internet-explorer, `ff` = firefox, `c` = chrome, `iOS` = iPhone/iPad, `Andr` = Android
| feature | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
| --------------- | --- | --- | ---- | ---- | ----- | ---- | --- | ---- |
| browse files | yep | yep | yep | yep | yep | yep | yep | yep |
| basic uploader | yep | yep | yep | yep | yep | yep | yep | yep |
| make directory | yep | yep | yep | yep | yep | yep | yep | yep |
| send message | yep | yep | yep | yep | yep | yep | yep | yep |
| set sort order | - | yep | yep | yep | yep | yep | yep | yep |
| zip selection | - | yep | yep | yep | yep | yep | yep | yep |
| directory tree | - | - | `*1` | yep | yep | yep | yep | yep |
| up2k | - | - | yep | yep | yep | yep | yep | yep |
| markdown editor | - | - | yep | yep | yep | yep | yep | yep |
| markdown viewer | - | - | yep | yep | yep | yep | yep | yep |
| play mp3/m4a | - | yep | yep | yep | yep | yep | yep | yep |
| play ogg/opus | - | - | - | - | yep | yep | `*2` | yep |
| thumbnail view | - | - | - | - | yep | yep | yep | yep |
| image viewer | - | - | - | - | yep | yep | yep | yep |
| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
| feature | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
| --------------- | --- | ---- | ---- | ---- | ----- | ---- | --- | ---- |
| browse files | yep | yep | yep | yep | yep | yep | yep | yep |
| thumbnail view | - | yep | yep | yep | yep | yep | yep | yep |
| basic uploader | yep | yep | yep | yep | yep | yep | yep | yep |
| up2k | - | - | `*1` | `*1` | yep | yep | yep | yep |
| make directory | yep | yep | yep | yep | yep | yep | yep | yep |
| send message | yep | yep | yep | yep | yep | yep | yep | yep |
| set sort order | - | yep | yep | yep | yep | yep | yep | yep |
| zip selection | - | yep | yep | yep | yep | yep | yep | yep |
| file rename | - | yep | yep | yep | yep | yep | yep | yep |
| file cut/paste | - | yep | yep | yep | yep | yep | yep | yep |
| navpane | - | `*2` | yep | yep | yep | yep | yep | yep |
| image viewer | - | yep | yep | yep | yep | yep | yep | yep |
| video player | - | yep | yep | yep | yep | yep | yep | yep |
| markdown editor | - | - | yep | yep | yep | yep | yep | yep |
| markdown viewer | - | - | yep | yep | yep | yep | yep | yep |
| play mp3/m4a | - | yep | yep | yep | yep | yep | yep | yep |
| play ogg/opus | - | - | - | - | yep | yep | `*3` | yep |
| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
* internet explorer 6 to 8 behave the same
* firefox 52 and chrome 49 are the last winxp versions
* `*1` only public folders (login session is dropped) and no history / back-button
* `*2` using a wasm decoder which can sometimes get stuck and consumes a bit more power
* firefox 52 and chrome 49 are the final winxp versions
* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
* `*2` causes a full-page refresh on each navigation
* `*3` using a wasm decoder which consumes a bit more power
quick summary of more eccentric web-browsers trying to view a directory index:
@@ -476,22 +693,25 @@ quick summary of more eccentric web-browsers trying to view a directory index:
| **lynx** (2.8.9/macports) | can browse, login, upload/mkdir/msg |
| **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
| **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
| **opera** (11.60/winxp) | OK: thumbnails, image-viewer, zip-selection, rename/cut/paste. NG: up2k, navpane, markdown, audio |
| **ie4** and **netscape** 4.0 | can browse (text is yellow on white), upload with `?b=u` |
| **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |
# client examples
interact with copyparty using non-browser clients
* javascript: dump some state into a file (two separate examples)
* `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
* `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
* curl/wget: upload some files (post=file, chunk=stdin)
* `post(){ curl -b cppwd=wark http://127.0.0.1:3923/ -F act=bput -F f=@"$1";}`
* `post(){ curl -b cppwd=wark -F act=bput -F f=@"$1" http://127.0.0.1:3923/;}`
`post movie.mkv`
* `post(){ wget --header='Cookie: cppwd=wark' http://127.0.0.1:3923/?raw --post-file="$1" -O-;}`
* `post(){ wget --header='Cookie: cppwd=wark' --post-file="$1" -O- http://127.0.0.1:3923/?raw;}`
`post movie.mkv`
* `chunk(){ curl -b cppwd=wark http://127.0.0.1:3923/ -T-;}`
* `chunk(){ curl -b cppwd=wark -T- http://127.0.0.1:3923/;}`
`chunk <movie.mkv`
* FUSE: mount a copyparty server as a local filesystem
@@ -505,6 +725,8 @@ copyparty returns a truncated sha512sum of your PUT/POST as base64; you can gene
b512(){ printf "$( (sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
b512 <movie.mkv
you can provide passwords using cookie 'cppwd=hunter2', as a url query `?pw=hunter2`, or with basic-authentication (either as the username or password)
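for example (reusing the hypothetical password above):

```sh
curl 'http://127.0.0.1:3923/?pw=hunter2'       # password as a url query
curl -b cppwd=hunter2 http://127.0.0.1:3923/   # password as the cookie
```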
# up2k
@@ -521,10 +743,25 @@ quick outline of the up2k protocol, see [uploading](#uploading) for the web-clie
* server writes chunks into place based on the hash
* client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload
up2k has already saved a few uploads from in-transfer corruption; it caught an android phone on wifi red-handed in wireshark with a bitflip, although bup over https would *probably* have noticed as well (thanks to tls also functioning as an integrity check)
## why chunk-hashes
a single sha512 would be better, right?
chunk-hashes are used because `crypto.subtle` does not provide a streaming api (or the option to seed the sha512 hasher with a starting hash)
as a result, the hashes are much less useful than they could have been (search the server by sha512, provide the sha512 in the response http headers, ...)
hashwasm would solve the streaming issue but reduces hashing speed for sha512 (xxh128 does 6 GiB/s), and it would make old browsers and [iphones](https://bugs.webkit.org/show_bug.cgi?id=228552) unsupported
# performance
defaults are good for most cases, don't mind the `cannot efficiently use multiple CPU cores` message, it's very unlikely to be a problem
defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
you can ignore the `cannot efficiently use multiple CPU cores` message, very unlikely to be a problem
below are some tweaks roughly ordered by usefulness:
@@ -539,28 +776,43 @@ below are some tweaks roughly ordered by usefulness:
...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u
# security
some notes on hardening
on public copyparty instances with anonymous upload enabled:
* users can upload html/css/js which will evaluate for other visitors in a few ways,
* unless `--no-readme` is set: by uploading/modifying a file named `readme.md`
* if `move` access is granted AND none of `--no-logues`, `--no-dot-mv`, `--no-dot-ren` is set: by uploading some .html file and renaming it to `.epilogue.html` (uploading it directly is blocked)
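a cautious anonymous-upload setup might therefore disable those renderers outright; a sketch using the flags mentioned above (the volume path is made up):

```sh
# anonymous write-only inbox with the html/markdown renderers disabled
python3 copyparty-sfx.py -v /mnt/inc:inc:w --no-readme --no-logues
```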
## gotchas
behavior that might be unexpected
* users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for example to show a description of how to use the uploader
# dependencies
mandatory deps:
* `jinja2` (is built into the SFX)
## optional dependencies
install these to enable bonus features
enable music tags:
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
* or `ffprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
enable thumbnails of images:
* `Pillow` (requires py2.7 or py3.5+)
enable thumbnails of videos:
* `ffmpeg` and `ffprobe` somewhere in `$PATH`
enable thumbnails of HEIF pictures:
* `pyheif-pillow-opener` (requires Linux or a C compiler)
enable thumbnails of AVIF pictures:
* `pillow-avif-plugin`
enable [thumbnails](#thumbnails) of...
* **images:** `Pillow` (requires py2.7 or py3.5+)
* **videos:** `ffmpeg` and `ffprobe` somewhere in `$PATH`
* **HEIF pictures:** `pyheif-pillow-opener` (requires Linux or a C compiler)
* **AVIF pictures:** `pillow-avif-plugin`
## install recommended deps
@@ -578,7 +830,7 @@ these are standalone programs and will never be imported / evaluated by copypart
# sfx
currently there are two self-contained "binaries":
there are two self-contained "binaries":
* [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere, **recommended**
* [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos, kinda deprecated
@@ -589,14 +841,18 @@ pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `
## sfx repack
reduce the size of an sfx by removing features
if you don't need all the features, you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except on windows where you need msys2 or WSL)
* `525k` size of original sfx.py as of v0.11.30
* `315k` after `./scripts/make-sfx.sh re no-ogv`
* `223k` after `./scripts/make-sfx.sh re no-ogv no-cm`
the features you can opt to drop are
* `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files
* `cm`/easymde, the "fancy" markdown editor
* `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files, saves ~192k
* `cm`/easymde, the "fancy" markdown editor, saves ~92k
* `fnt`, source-code-pro, the monospace font, saves ~9k
* `dd`, the custom mouse cursor for the media player tray tab, saves ~2k
for the `re`pack to work, first run one of the sfx'es once to unpack it
@@ -632,7 +888,7 @@ pip install black bandit pylint flake8 # vscode tooling
## just the sfx
unless you need to modify something in the web-dependencies, it's faster to grab those from a previous release:
first grab the web-dependencies from a previous sfx (assuming you don't need to modify something in those):
```sh
rm -rf copyparty/web/deps
@@ -652,14 +908,14 @@ then build the sfx using any of the following examples:
## complete release
also builds the sfx so disregard the sfx section above
also builds the sfx so skip the sfx section above
in the `scripts` folder:
* run `make -C deps-docker` to build all dependencies
* `git tag v1.2.3 && git push origin --tags`
* create github release with `make-tgz-release.sh`
* upload to pypi with `make-pypi-release.(sh|bat)`
* create github release with `make-tgz-release.sh`
* create sfx with `make-sfx.sh`
@@ -667,8 +923,7 @@ in the `scripts` folder:
roughly sorted by priority
* hls framework for Someone Else to drop code into :^)
* readme.md as epilogue
* nothing! currently
## discarded ideas
@@ -696,3 +951,5 @@ roughly sorted by priority
* indexedDB for hashes, cfg enable/clear/sz, 2gb avail, ~9k for 1g, ~4k for 100m, 500k items before autoeviction
* blank hashlist when up-ok to skip handshake
* too many confusing side-effects
* hls framework for Someone Else to drop code into :^)
* probably not, too much stuff to consider -- seeking, start at offset, task stitching (probably np-hard), conditional passthru, rate-control (especially multi-consumer), session keepalive, cache mgmt...

View File

@@ -61,3 +61,8 @@ cd /mnt/nas/music/.hist
~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy key
~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy .bpm -vac
```
# [`prisonparty.sh`](prisonparty.sh)
* run copyparty in a chroot, preventing any accidental file access
* creates bindmounts for /bin, /lib, and so on, see `sysdirs=`
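usage, straight from the script's built-in help:

```sh
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- copyparty-sfx.py -v /mnt/nas/music::rwmd
```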

View File

@@ -22,7 +22,7 @@ dependencies:
note:
you probably want to run this on windows clients:
https://github.com/9001/copyparty/blob/master/contrib/explorer-nothumbs-nofoldertypes.reg
https://github.com/9001/copyparty/blob/hovudstraum/contrib/explorer-nothumbs-nofoldertypes.reg
get server cert:
awk '/-BEGIN CERTIFICATE-/ {a=1} a; /-END CERTIFICATE-/{exit}' <(openssl s_client -connect 127.0.0.1:3923 </dev/null 2>/dev/null) >cert.pem

View File

@@ -4,6 +4,7 @@ some of these rely on libraries which are not MIT-compatible
* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3
* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
# dependencies
@@ -18,7 +19,10 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ
# usage from copyparty
`copyparty -e2dsa -e2ts -mtp key=f,audio-key.py -mtp .bpm=f,audio-bpm.py`
`copyparty -e2dsa -e2ts` followed by any combination of these:
* `-mtp key=f,audio-key.py`
* `-mtp .bpm=f,audio-bpm.py`
* `-mtp ahash,vhash=f,media-hash.py`
* `f,` makes the detected value replace any existing values
* the `.` in `.bpm` indicates numeric value
@@ -29,6 +33,9 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ
## usage with volume-flags
instead of affecting all volumes, you can set the options for just one volume like so:
```
copyparty -v /mnt/nas/music:/music:r:cmtp=key=f,audio-key.py:cmtp=.bpm=f,audio-bpm.py:ce2dsa:ce2ts
```
`copyparty -v /mnt/nas/music:/music:r:c,e2dsa:c,e2ts` immediately followed by any combination of these:
* `:c,mtp=key=f,audio-key.py`
* `:c,mtp=.bpm=f,audio-bpm.py`
* `:c,mtp=ahash,vhash=f,media-hash.py`

bin/mtag/media-hash.py Normal file
View File

@@ -0,0 +1,73 @@
#!/usr/bin/env python
import re
import sys
import json
import time
import base64
import hashlib
import subprocess as sp
try:
    from copyparty.util import fsenc
except:

    def fsenc(p):
        return p


"""
dep: ffmpeg
"""


def det():
    # fmt: off
    cmd = [
        "ffmpeg",
        "-nostdin",
        "-hide_banner",
        "-v", "fatal",
        "-i", fsenc(sys.argv[1]),
        "-f", "framemd5",
        "-"
    ]
    # fmt: on

    p = sp.Popen(cmd, stdout=sp.PIPE)
    # ps = io.TextIOWrapper(p.stdout, encoding="utf-8")
    ps = p.stdout

    # header: map stream index to media type ("a"udio / "v"ideo)
    chans = {}
    for ln in ps:
        if ln.startswith(b"#stream#"):
            break

        m = re.match(r"^#media_type ([0-9]): ([a-zA-Z])", ln.decode("utf-8"))
        if m:
            chans[m.group(1)] = m.group(2)

    # one sha512 per stream, fed the per-frame checksum (last csv field of each line)
    hashers = [hashlib.sha512(), hashlib.sha512()]
    for ln in ps:
        n = int(ln[:1])
        v = ln.rsplit(b",", 1)[-1].strip()
        hashers[n].update(v)

    # print the tags for copyparty to index, e.g. {"ahash": ..., "vhash": ...}
    r = {}
    for k, v in chans.items():
        dg = hashers[int(k)].digest()[:12]
        dg = base64.urlsafe_b64encode(dg).decode("ascii")
        r[v[0].lower() + "hash"] = dg

    print(json.dumps(r, indent=4))


def main():
    try:
        det()
    except:
        pass # mute


if __name__ == "__main__":
    main()

bin/mtag/res/yt-ipr.conf Normal file
View File

@@ -0,0 +1,39 @@
# example config file to use copyparty as a youtube manifest collector,
# use with copyparty like: python copyparty.py -c yt-ipr.conf
#
# see docs/example.conf for a better explanation of the syntax, but
# newlines are block separators, so adding blank lines inside a volume definition is bad
# (use comments as separators instead)
# create user ed, password wark
u ed:wark
# create a volume at /ytm which stores files at ./srv/ytm
./srv/ytm
/ytm
# write-only, but read-write for user ed
w
rw ed
# rescan the volume on startup
c e2dsa
# collect tags from all new files since last scan
c e2ts
# optionally enable compression to make the files 50% smaller
c pk
# only allow uploads which are between 16k and 1m large
c sz=16k-1m
# allow up to 10 uploads over 5 minutes from each ip
c maxn=10,300
# move uploads into subfolders: YEAR-MONTH / DAY-HOUR / <upload>
c rotf=%Y-%m/%d-%H
# delete uploads when they are 24 hours old
c lifetime=86400
# add the parser and tell copyparty what tags it can expect from it
c mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
# decide which tags we want to index and in what order
c mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires
# create any other volumes you'd like down here, or merge this with an existing config file

View File

@@ -0,0 +1,47 @@
// ==UserScript==
// @name youtube-playerdata-hub
// @match https://youtube.com/*
// @match https://*.youtube.com/*
// @version 1.0
// @grant GM_addStyle
// ==/UserScript==
function main() {
    var server = 'https://127.0.0.1:3923/ytm?pw=wark',
        interval = 60; // sec

    var sent = {};
    function send(txt, mf_url, desc) {
        if (sent[mf_url])
            return;

        fetch(server + '&_=' + Date.now(), { method: "PUT", body: txt });
        console.log('[yt-pdh] yeet %d bytes, %s', txt.length, desc);
        sent[mf_url] = 1;
    }
    function collect() {
        try {
            var pd = document.querySelector('ytd-watch-flexy');
            if (!pd)
                return console.log('[yt-pdh] no video found');

            pd = pd.playerData;
            var mu = pd.streamingData.dashManifestUrl || pd.streamingData.hlsManifestUrl;
            if (!mu || !mu.length)
                return console.log('[yt-pdh] no manifest found');

            var desc = pd.videoDetails.videoId + ', ' + pd.videoDetails.title;
            send(JSON.stringify(pd), mu, desc);
        }
        catch (ex) {
            console.log("[yt-pdh]", ex);
        }
    }
    setInterval(collect, interval * 1000);
}
var scr = document.createElement('script');
scr.textContent = '(' + main.toString() + ')();';
(document.head || document.getElementsByTagName('head')[0]).appendChild(scr);
console.log('[yt-pdh] a');

bin/mtag/yt-ipr.py Normal file
View File

@@ -0,0 +1,198 @@
#!/usr/bin/env python
import re
import os
import sys
import gzip
import json
import base64
import string
import urllib.request
from datetime import datetime
"""
youtube initial player response
it's probably best to use this through a config file; see res/yt-ipr.conf
but if you want to use plain arguments instead then:
-v srv/ytm:ytm:w:rw,ed
:c,e2ts:c,e2dsa
:c,sz=16k-1m:c,maxn=10,300:c,rotf=%Y-%m/%d-%H
:c,mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
:c,mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires
see res/yt-ipr.user.js for the example userscript to go with this
"""
def main():
    try:
        with gzip.open(sys.argv[1], "rt", encoding="utf-8", errors="replace") as f:
            txt = f.read()
    except:
        with open(sys.argv[1], "r", encoding="utf-8", errors="replace") as f:
            txt = f.read()

    txt = "{" + txt.split("{", 1)[1]

    try:
        pd = json.loads(txt)
    except json.decoder.JSONDecodeError as ex:
        pd = json.loads(txt[: ex.pos])

    # print(json.dumps(pd, indent=2))

    if "videoDetails" in pd:
        parse_youtube(pd)
    else:
        parse_freg(pd)


def get_expiration(url):
    et = re.search(r"[?&]expire=([0-9]+)", url).group(1)
    et = datetime.utcfromtimestamp(int(et))
    return et.strftime("%Y-%m-%d, %H:%M")


def parse_youtube(pd):
    vd = pd["videoDetails"]
    sd = pd["streamingData"]

    et = sd["adaptiveFormats"][0]["url"]
    et = get_expiration(et)

    mf = []
    if "dashManifestUrl" in sd:
        mf.append("dash")
    if "hlsManifestUrl" in sd:
        mf.append("hls")

    r = {
        "yt-id": vd["videoId"],
        "yt-title": vd["title"],
        "yt-author": vd["author"],
        "yt-channel": vd["channelId"],
        "yt-views": vd["viewCount"],
        "yt-private": vd["isPrivate"],
        # "yt-expires": sd["expiresInSeconds"],
        "yt-manifest": ",".join(mf),
        "yt-expires": et,
    }
    print(json.dumps(r))
    freg_conv(pd)


def parse_freg(pd):
    md = pd["metadata"]
    r = {
        "yt-id": md["id"],
        "yt-title": md["title"],
        "yt-author": md["channelName"],
        "yt-channel": md["channelURL"].strip("/").split("/")[-1],
        "yt-expires": get_expiration(list(pd["video"].values())[0]),
    }
    print(json.dumps(r))


def freg_conv(pd):
    # based on getURLs.js v1.5 (2021-08-07)
    # fmt: off
    priority = {
        "video": [
            337, 315, 266, 138, # 2160p60
            313, 336, # 2160p
            308, # 1440p60
            271, 264, # 1440p
            335, 303, 299, # 1080p60
            248, 169, 137, # 1080p
            334, 302, 298, # 720p60
            247, 136 # 720p
        ],
        "audio": [
            251, 141, 171, 140, 250, 249, 139
        ]
    }
    vid_id = pd["videoDetails"]["videoId"]
    chan_id = pd["videoDetails"]["channelId"]
    try:
        thumb_url = pd["microformat"]["playerMicroformatRenderer"]["thumbnail"]["thumbnails"][0]["url"]
        start_ts = pd["microformat"]["playerMicroformatRenderer"]["liveBroadcastDetails"]["startTimestamp"]
    except:
        thumb_url = f"https://img.youtube.com/vi/{vid_id}/maxresdefault.jpg"
        start_ts = ""
    # fmt: on

    metadata = {
        "title": pd["videoDetails"]["title"],
        "id": vid_id,
        "channelName": pd["videoDetails"]["author"],
        "channelURL": "https://www.youtube.com/channel/" + chan_id,
        "description": pd["videoDetails"]["shortDescription"],
        "thumbnailUrl": thumb_url,
        "startTimestamp": start_ts,
    }

    if [x for x in vid_id if x not in string.ascii_letters + string.digits + "_-"]:
        print(f"malicious json", file=sys.stderr)
        return

    basepath = os.path.dirname(sys.argv[1])
    thumb_fn = f"{basepath}/{vid_id}.jpg"
    tmp_fn = f"{thumb_fn}.{os.getpid()}"
    if not os.path.exists(thumb_fn) and (
        thumb_url.startswith("https://img.youtube.com/vi/")
        or thumb_url.startswith("https://i.ytimg.com/vi/")
    ):
        try:
            with urllib.request.urlopen(thumb_url) as fi:
                with open(tmp_fn, "wb") as fo:
                    fo.write(fi.read())

            os.rename(tmp_fn, thumb_fn)
        except:
            if os.path.exists(tmp_fn):
                os.unlink(tmp_fn)

    try:
        with open(thumb_fn, "rb") as f:
            thumb = base64.b64encode(f.read()).decode("ascii")
    except:
        thumb = "/9j/4AAQSkZJRgABAQEASABIAAD/2wBDAAMCAgICAgMCAgIDAwMDBAYEBAQEBAgGBgUGCQgKCgkICQkKDA8MCgsOCwkJDRENDg8QEBEQCgwSExIQEw8QEBD/yQALCAABAAEBAREA/8wABgAQEAX/2gAIAQEAAD8A0s8g/9k="

    metadata["thumbnail"] = "data:image/jpeg;base64," + thumb

    ret = {
        "metadata": metadata,
        "version": "1.5",
        "createTime": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
    }

    for stream, itags in priority.items():
        for itag in itags:
            url = None
            for afmt in pd["streamingData"]["adaptiveFormats"]:
                if itag == afmt["itag"]:
                    url = afmt["url"]
                    break

            if url:
                ret[stream] = {itag: url}
                break

    fn = f"{basepath}/{vid_id}.urls.json"
    with open(fn, "w", encoding="utf-8", errors="replace") as f:
        f.write(json.dumps(ret, indent=4))


if __name__ == "__main__":
    try:
        main()
    except:
        # raise
        pass

bin/prisonparty.sh Normal file
View File

@@ -0,0 +1,99 @@
#!/bin/bash
set -e
# runs copyparty (or any other program really) in a chroot
#
# assumption: these directories, and everything within, are owned by root
sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr )
# error-handler
help() { cat <<'EOF'
usage:
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- copyparty-sfx.py [...]"
example:
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- copyparty-sfx.py -v /mnt/nas/music::rwmd"
EOF
exit 1
}
# read arguments
trap help EXIT
jail="$(realpath "$1")"; shift
uid="$1"; shift
gid="$1"; shift
vols=()
while true; do
    v="$1"; shift
    [ "$v" = -- ] && break # end of volumes
    [ "$#" -eq 0 ] && break # invalid usage
    vols+=( "$(realpath "$v")" )
done
pybin="$1"; shift
pybin="$(realpath "$pybin")"
cpp="$1"; shift
cpp="$(realpath "$cpp")"
cppdir="$(dirname "$cpp")"
trap - EXIT
# debug/vis
echo
echo "chroot-dir = $jail"
echo "user:group = $uid:$gid"
echo " copyparty = $cpp"
echo
printf '\033[33m%s\033[0m\n' "copyparty can access these folders and all their subdirectories:"
for v in "${vols[@]}"; do
printf '\033[36m ├─\033[0m %s \033[36m ── added by (You)\033[0m\n' "$v"
done
printf '\033[36m ├─\033[0m %s \033[36m ── where the copyparty binary is\033[0m\n' "$cppdir"
printf '\033[36m ╰─\033[0m %s \033[36m ── the folder you are currently in\033[0m\n' "$PWD"
vols+=("$cppdir" "$PWD")
echo
# remove any trailing slashes
jail="${jail%/}"
cppdir="${cppdir%/}"
# bind-mount system directories and volumes
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | LC_ALL=C sort |
while IFS= read -r v; do
    [ -e "$v" ] || {
        # printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v"
        continue
    }
    i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
    i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b)
    [ $i1 = $i2 ] && continue
    mkdir -p "$jail$v"
    mount --bind "$v" "$jail$v"
done
# create a tmp
mkdir -p "$jail/tmp"
chmod 777 "$jail/tmp"
# run copyparty
/sbin/chroot --userspec=$uid:$gid "$jail" "$pybin" "$cpp" "$@" && rv=0 || rv=$?
# cleanup if not in use
lsof "$jail" | grep -qF "$jail" &&
echo "chroot is in use, will not cleanup" ||
{
mount | grep -qF " on $jail" |
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
}
exit $rv

View File

@@ -29,7 +29,8 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
# OS integration
init-scripts to start copyparty as a service
* [`systemd/copyparty.service`](systemd/copyparty.service)
* [`systemd/copyparty.service`](systemd/copyparty.service) runs the sfx normally
* [`systemd/prisonparty.service`](systemd/prisonparty.service) runs the sfx in a chroot
* [`openrc/copyparty`](openrc/copyparty)
# Reverse-proxy

View File

@@ -8,11 +8,11 @@
#
# you may want to:
# change '/usr/bin/python' to another interpreter
# change '/mnt::a' to another location or permission-set
# change '/mnt::rw' to another location or permission-set
name="$SVCNAME"
command_background=true
pidfile="/var/run/$SVCNAME.pid"
command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
command_args="-q -v /mnt::a"
command_args="-q -v /mnt::rw"

View File

@@ -6,13 +6,20 @@
#
# you may want to:
# change '/usr/bin/python' to another interpreter
# change '/mnt::a' to another location or permission-set
# change '/mnt::rw' to another location or permission-set
#
# with `Type=notify`, copyparty will signal systemd when it is ready to
# accept connections; correctly delaying units depending on copyparty.
# But note that journalctl will get the timestamps wrong due to
# python disabling line-buffering, so messages are out-of-order:
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
#
# enable line-buffering for realtime logging (slight performance cost):
# modify ExecStart and prefix it with `/usr/bin/stdbuf -oL` like so:
# ExecStart=/usr/bin/stdbuf -oL /usr/bin/python3 [...]
# but some systemd versions require this instead (higher performance cost):
# inside the [Service] block, add the following line:
# Environment=PYTHONUNBUFFERED=x
[Unit]
Description=copyparty file server
@@ -20,7 +27,7 @@ Description=copyparty file server
[Service]
Type=notify
SyslogIdentifier=copyparty
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
[Install]

View File

@@ -0,0 +1,27 @@
# this will start `/usr/local/bin/copyparty-sfx.py`
# in a chroot, preventing accidental access elsewhere
# and share '/mnt' with anonymous read+write
#
# installation:
# 1) put copyparty-sfx.py and prisonparty.sh in /usr/local/bin
# 2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty
#
# you may want to:
# change '/mnt::rw' to another location or permission-set
# (remember to change the '/mnt' chroot arg too)
#
# enable line-buffering for realtime logging (slight performance cost):
# inside the [Service] block, add the following line:
# Environment=PYTHONUNBUFFERED=x
[Unit]
Description=copyparty file server
[Service]
SyslogIdentifier=prisonparty
WorkingDirectory=/usr/local/bin
ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \
/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
[Install]
WantedBy=multi-user.target

View File

@@ -25,6 +25,28 @@ ANYWIN = WINDOWS or sys.platform in ["msys"]
MACOS = platform.system() == "Darwin"
def get_unix_home():
    try:
        v = os.environ["XDG_CONFIG_HOME"]
        if not v:
            raise Exception()
        ret = os.path.normpath(v)
        os.listdir(ret)
        return ret
    except:
        pass

    try:
        v = os.path.expanduser("~/.config")
        if v.startswith("~"):
            raise Exception()
        ret = os.path.normpath(v)
        os.listdir(ret)
        return ret
    except:
        return "/tmp"
class EnvParams(object):
def __init__(self):
self.t0 = time.time()
@@ -37,10 +59,7 @@ class EnvParams(object):
elif sys.platform == "darwin":
self.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
else:
self.cfg = os.path.normpath(
os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
+ "/copyparty"
)
self.cfg = get_unix_home() + "/copyparty"
self.cfg = self.cfg.replace("\\", "/")
try:

View File

@@ -23,7 +23,8 @@ from textwrap import dedent
from .__init__ import E, WINDOWS, VT100, PY2, unicode
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
from .svchub import SvcHub
from .util import py_desc, align_tab, IMPLICATIONS
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re
from .authsrv import re_vol
HAVE_SSL = True
try:
@@ -66,8 +67,12 @@ class Dodge11874(RiceFormatter):
def lprint(*a, **ka):
global printed
printed += " ".join(unicode(x) for x in a) + ka.get("end", "\n")
print(*a, **ka)
txt = " ".join(unicode(x) for x in a) + ka.get("end", "\n")
printed += txt
if not VT100:
txt = ansi_re.sub("", txt)
print(txt, **ka)
def warn(msg):
@@ -196,27 +201,35 @@ def run_argparse(argv, formatter):
formatter_class=formatter,
prog="copyparty",
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
epilog=dedent(
"""
)
sects = [
[
"accounts",
"accounts and volumes",
dedent(
"""
-a takes username:password,
-v takes src:dst:permset:permset:cflag:cflag:...
where "permset" is accesslevel followed by username (no separator)
and "cflag" is config flags to set on this volume
-v takes src:dst:perm1:perm2:permN:volflag1:volflag2:volflagN:...
where "perm" is "accesslevels,username1,username2,..."
and "volflag" is config flags to set on this volume
list of cflags:
"cnodupe" rejects existing files (instead of symlinking them)
"ce2d" sets -e2d (all -e2* args can be set using ce2* cflags)
"cd2t" disables metadata collection, overrides -e2t*
"cd2d" disables all database stuff, overrides -e2*
list of accesslevels:
"r" (read): list folder contents, download files
"w" (write): upload files; need "r" to see the uploads
"m" (move): move files and folders; need "w" at destination
"d" (delete): permanently delete files and folders
too many volflags to list here, see the other sections
example:\033[35m
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
-a ed:hunter2 -v .::r:rw,ed -v ../inc:dump:w:rw,ed:c,nodupe \033[36m
mount current directory at "/" with
* r (read-only) for everyone
* a (read+write) for ed
* rw (read+write) for ed
mount ../inc at "/dump" with
* w (write-only) for everyone
* a (read+write) for ed
* rw (read+write) for ed
* reject duplicate files \033[0m
if no accounts or volumes are configured,
@@ -224,29 +237,86 @@ def run_argparse(argv, formatter):
consider the config file for more flexible account/volume management,
including dynamic reload at runtime (and being more readable w)
"""
),
],
[
"flags",
"list of volflags",
dedent(
"""
volflags are appended to volume definitions, for example,
to create a write-only volume with the \033[33mnodupe\033[0m and \033[32mnosub\033[0m flags:
\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub
\033[0muploads, general:
\033[36mnodupe\033[35m rejects existing files (instead of symlinking them)
\033[36mnosub\033[35m forces all uploads into the top folder of the vfs
\033[36mgz\033[35m allows server-side gzip of uploads with ?gz (also c,xz)
\033[36mpk\033[35m forces server-side compression, optional arg: xz,9
\033[0mupload rules:
\033[36mmaxn=250,600\033[35m max 250 uploads over 10min
\033[36mmaxb=1g,300\033[35m max 1 GiB over 5min (suffixes: b, k, m, g)
\033[36msz=1k-3m\033[35m allow filesizes between 1 KiB and 3 MiB
\033[0mupload rotation:
(moves all uploads into the specified folder structure)
\033[36mrotn=100,3\033[35m 3 levels of subfolders with 100 entries in each
\033[36mrotf=%Y-%m/%d-%H\033[35m date-formatted organizing
\033[36mlifetime=3600\033[35m uploads are deleted after 1 hour
\033[0mdatabase, general:
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
\033[36md2t\033[35m disables metadata collection, overrides -e2t*
\033[36md2d\033[35m disables all database stuff, overrides -e2*
\033[36mdhash\033[35m disables file hashing on initial scans, also ehash
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
\033[0mdatabase, audio tags:
"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...
\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
generate ".bpm" tags from uploads (f = overwrite tags)
\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
\033[0m"""
),
],
[
"urlform",
"",
dedent(
"""
values for --urlform:
"stash" dumps the data to file and returns length + checksum
"save,get" dumps to file and returns the page like a GET
"print,get" prints the data in the log and returns GET
\033[36mstash\033[35m dumps the data to file and returns length + checksum
\033[36msave,get\033[35m dumps to file and returns the page like a GET
\033[36mprint,get\033[35m prints the data in the log and returns GET
(leave out the ",get" to return an error instead)
values for --ls:
"USR" is a user to browse as; * is anonymous, ** is all users
"VOL" is a single volume to scan, default is * (all vols)
"FLAG" is flags;
"v" in addition to realpaths, print usernames and vpaths
"ln" only prints symlinks leaving the volume mountpoint
"p" exits 1 if any such symlinks are found
"r" resumes startup after the listing
"""
),
],
[
"ls",
"volume inspection",
dedent(
"""
\033[35m--ls USR,VOL,FLAGS
\033[36mUSR\033[0m is a user to browse as; * is anonymous, ** is all users
\033[36mVOL\033[0m is a single volume to scan, default is * (all vols)
\033[36mFLAG\033[0m is flags;
\033[36mv\033[0m in addition to realpaths, print usernames and vpaths
\033[36mln\033[0m only prints symlinks leaving the volume mountpoint
\033[36mp\033[0m exits 1 if any such symlinks are found
\033[36mr\033[0m resumes startup after the listing
examples:
--ls '**' # list all files which are possible to read
--ls '**,*,ln' # check for dangerous symlinks
--ls '**,*,ln,p,r' # check, then start normally if safe
\033[0m
"""
),
)
),
],
]
# fmt: off
u = unicode
ap2 = ap.add_argument_group('general options')
@@ -258,9 +328,12 @@ def run_argparse(argv, formatter):
ap2.add_argument("-ed", action="store_true", help="enable ?dots")
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
ap2 = ap.add_argument_group('upload options')
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
ap2 = ap.add_argument_group('network options')
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
@@ -277,17 +350,25 @@ def run_argparse(argv, formatter):
ap2 = ap.add_argument_group('opt-outs')
ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
ap2.add_argument("-nih", action="store_true", help="no info hostname")
ap2.add_argument("-nid", action="store_true", help="no info disk-usage")
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (lifetime volflag)")
ap2 = ap.add_argument_group('safety options')
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt")
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
ap2 = ap.add_argument_group('logging options')
ap2.add_argument("-q", action="store_true", help="quiet")
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
ap2.add_argument("--log-htp", action="store_true", help="print http-server threadpool scaling")
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
@@ -301,6 +382,7 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=0, help="max num cpu cores to use, 0=all")
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
@@ -310,23 +392,29 @@ def run_argparse(argv, formatter):
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for")
ap2 = ap.add_argument_group('database options')
ap2 = ap.add_argument_group('general db options')
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
ap2.add_argument("--re-int", metavar="SEC", type=int, default=30, help="disk rescan check interval")
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
ap2 = ap.add_argument_group('metadata db options')
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume state")
ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead")
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader")
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,ac,vc,res,.fps")
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash")
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.)",
default=".vq,.aq,vc,ac,res,.fps")
ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
ap2 = ap.add_argument_group('appearance options')
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
@@ -338,10 +426,22 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second")
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")
return ap.parse_args(args=argv[1:])
# fmt: on
ap2 = ap.add_argument_group("help sections")
for k, h, _ in sects:
ap2.add_argument("--help-" + k, action="store_true", help=h)
ret = ap.parse_args(args=argv[1:])
for k, h, t in sects:
k2 = "help_" + k.replace("-", "_")
if vars(ret)[k2]:
lprint("# {} help page".format(k))
lprint(t + "\033[0m")
sys.exit(0)
return ret
def main(argv=None):
time.strptime("19970815", "%Y%m%d") # python#7980
@@ -377,6 +477,42 @@ def main(argv=None):
except AssertionError:
al = run_argparse(argv, Dodge11874)
nstrs = []
anymod = False
for ostr in al.v or []:
m = re_vol.match(ostr)
if not m:
# not our problem
nstrs.append(ostr)
continue
src, dst, perms = m.groups()
na = [src, dst]
mod = False
for opt in perms.split(":"):
if re.match("c[^,]", opt):
mod = True
na.append("c," + opt[1:])
elif re.sub("^[rwmd]*", "", opt) and "," not in opt:
mod = True
perm = opt[0]
if perm == "a":
perm = "rw"
na.append(perm + "," + opt[1:])
else:
na.append(opt)
nstr = ":".join(na)
nstrs.append(nstr if mod else ostr)
if mod:
msg = "\033[1;31mWARNING:\033[0;1m\n -v {} \033[0;33mwas replaced with\033[0;1m\n -v {} \n\033[0m"
lprint(msg.format(ostr, nstr))
anymod = True
if anymod:
al.v = nstrs
time.sleep(2)
# propagate implications
for k1, k2 in IMPLICATIONS:
if getattr(al, k1):

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (0, 11, 43)
CODENAME = "the grid"
BUILD_DT = (2021, 7, 19)
VERSION = (1, 0, 1)
CODENAME = "sufficient"
BUILD_DT = (2021, 9, 9)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -5,41 +5,229 @@ import re
import os
import sys
import stat
import time
import base64
import hashlib
import threading
from datetime import datetime
from .__init__ import WINDOWS
from .util import IMPLICATIONS, uncyg, undot, Pebkac, fsdec, fsenc, statdir
from .util import (
IMPLICATIONS,
uncyg,
undot,
unhumanize,
absreal,
Pebkac,
fsenc,
statdir,
)
from .bos import bos
LEELOO_DALLAS = "leeloo_dallas"
class AXS(object):
def __init__(self, uread=None, uwrite=None, umove=None, udel=None):
self.uread = {} if uread is None else {k: 1 for k in uread}
self.uwrite = {} if uwrite is None else {k: 1 for k in uwrite}
self.umove = {} if umove is None else {k: 1 for k in umove}
self.udel = {} if udel is None else {k: 1 for k in udel}
def __repr__(self):
return "AXS({})".format(
", ".join(
"{}={!r}".format(k, self.__dict__[k])
for k in "uread uwrite umove udel".split()
)
)
class Lim(object):
def __init__(self):
self.nups = {} # num tracker
self.bups = {} # byte tracker list
self.bupc = {} # byte tracker cache
self.nosub = False # disallow subdirectories
self.smin = None # filesize min
self.smax = None # filesize max
self.bwin = None # bytes window
self.bmax = None # bytes max
self.nwin = None # num window
self.nmax = None # num max
self.rotn = None # rot num files
self.rotl = None # rot depth
self.rotf = None # rot datefmt
self.rot_re = None # rotf check
def set_rotf(self, fmt):
self.rotf = fmt
r = re.escape(fmt).replace("%Y", "[0-9]{4}").replace("%j", "[0-9]{3}")
r = re.sub("%[mdHMSWU]", "[0-9]{2}", r)
self.rot_re = re.compile("(^|/)" + r + "$")
def all(self, ip, rem, sz, abspath):
self.chk_nup(ip)
self.chk_bup(ip)
self.chk_rem(rem)
if sz != -1:
self.chk_sz(sz)
ap2, vp2 = self.rot(abspath)
if abspath == ap2:
return ap2, rem
return ap2, ("{}/{}".format(rem, vp2) if rem else vp2)
def chk_sz(self, sz):
if self.smin is not None and sz < self.smin:
raise Pebkac(400, "file too small")
if self.smax is not None and sz > self.smax:
raise Pebkac(400, "file too big")
def chk_rem(self, rem):
if self.nosub and rem:
raise Pebkac(500, "no subdirectories allowed")
def rot(self, path):
if not self.rotf and not self.rotn:
return path, ""
if self.rotf:
path = path.rstrip("/\\")
if self.rot_re.search(path.replace("\\", "/")):
return path, ""
suf = datetime.utcnow().strftime(self.rotf)
if path:
path += "/"
return path + suf, suf
ret = self.dive(path, self.rotl)
if not ret:
raise Pebkac(500, "no available slots in volume")
d = ret[len(path) :].strip("/\\").replace("\\", "/")
return ret, d
def dive(self, path, lvs):
items = bos.listdir(path)
if not lvs:
# at leaf level
return None if len(items) >= self.rotn else ""
dirs = [int(x) for x in items if x and all(y in "1234567890" for y in x)]
dirs.sort()
if not dirs:
# no branches yet; make one
sub = os.path.join(path, "0")
bos.mkdir(sub)
else:
# try newest branch only
sub = os.path.join(path, str(dirs[-1]))
ret = self.dive(sub, lvs - 1)
if ret is not None:
return os.path.join(sub, ret)
if len(dirs) >= self.rotn:
# full branch or root
return None
# make a branch
sub = os.path.join(path, str(dirs[-1] + 1))
bos.mkdir(sub)
ret = self.dive(sub, lvs - 1)
if ret is None:
raise Pebkac(500, "rotation bug")
return os.path.join(sub, ret)
def nup(self, ip):
try:
self.nups[ip].append(time.time())
except:
self.nups[ip] = [time.time()]
def bup(self, ip, nbytes):
v = [time.time(), nbytes]
try:
self.bups[ip].append(v)
self.bupc[ip] += nbytes
except:
self.bups[ip] = [v]
self.bupc[ip] = nbytes
def chk_nup(self, ip):
if not self.nmax or ip not in self.nups:
return
nups = self.nups[ip]
cutoff = time.time() - self.nwin
while nups and nups[0] < cutoff:
nups.pop(0)
if len(nups) >= self.nmax:
raise Pebkac(429, "too many uploads")
def chk_bup(self, ip):
if not self.bmax or ip not in self.bups:
return
bups = self.bups[ip]
cutoff = time.time() - self.bwin
mark = self.bupc[ip]
while bups and bups[0][0] < cutoff:
mark -= bups.pop(0)[1]
self.bupc[ip] = mark
if mark >= self.bmax:
raise Pebkac(429, "ingress saturated")
class VFS(object):
"""single level in the virtual fs"""
def __init__(self, log, realpath, vpath, uread, uwrite, uadm, flags):
def __init__(self, log, realpath, vpath, axs, flags):
self.log = log
self.realpath = realpath # absolute path on host filesystem
self.vpath = vpath # absolute path in the virtual filesystem
self.uread = uread # users who can read this
self.uwrite = uwrite # users who can write this
self.uadm = uadm # users who are regular admins
self.flags = flags # config switches
self.axs = axs # type: AXS
self.flags = flags # config options
self.nodes = {} # child nodes
self.histtab = None # all realpath->histpath
self.dbv = None # closest full/non-jump parent
self.lim = None # type: Lim # upload limits; only set for dbv
if realpath:
self.histpath = os.path.join(realpath, ".hist") # db / thumbcache
self.all_vols = {vpath: self} # flattened recursive
self.aread = {}
self.awrite = {}
self.amove = {}
self.adel = {}
else:
self.histpath = None
self.all_vols = None
self.aread = None
self.awrite = None
self.amove = None
self.adel = None
def __repr__(self):
return "VFS({})".format(
", ".join(
"{}={!r}".format(k, self.__dict__[k])
for k in "realpath vpath uread uwrite uadm flags".split()
for k in "realpath vpath axs flags".split()
)
)
@@ -66,9 +254,7 @@ class VFS(object):
self.log,
os.path.join(self.realpath, name) if self.realpath else None,
"{}/{}".format(self.vpath, name).lstrip("/"),
self.uread,
self.uwrite,
self.uadm,
self.axs,
self._copy_flags(name),
)
vn.dbv = self.dbv or self
@@ -81,7 +267,7 @@ class VFS(object):
# leaf does not exist; create and keep permissions blank
vp = "{}/{}".format(self.vpath, dst).lstrip("/")
vn = VFS(self.log, src, vp, [], [], [], {})
vn = VFS(self.log, src, vp, AXS(), {})
vn.dbv = self.dbv or self
self.nodes[dst] = vn
return vn
@@ -121,27 +307,37 @@ class VFS(object):
return [self, vpath]
def can_access(self, vpath, uname):
"""return [readable,writable]"""
# type: (str, str) -> tuple[bool, bool, bool, bool]
"""can Read,Write,Move,Delete"""
vn, _ = self._find(vpath)
c = vn.axs
return [
uname in vn.uread or "*" in vn.uread,
uname in vn.uwrite or "*" in vn.uwrite,
uname in c.uread or "*" in c.uread,
uname in c.uwrite or "*" in c.uwrite,
uname in c.umove or "*" in c.umove,
uname in c.udel or "*" in c.udel,
]
def get(self, vpath, uname, will_read, will_write):
# type: (str, str, bool, bool) -> tuple[VFS, str]
def get(self, vpath, uname, will_read, will_write, will_move=False, will_del=False):
# type: (str, str, bool, bool, bool, bool) -> tuple[VFS, str]
"""returns [vfsnode,fs_remainder] if user has the requested permissions"""
vn, rem = self._find(vpath)
c = vn.axs
if will_read and (uname not in vn.uread and "*" not in vn.uread):
raise Pebkac(403, "you don't have read-access for this location")
if will_write and (uname not in vn.uwrite and "*" not in vn.uwrite):
raise Pebkac(403, "you don't have write-access for this location")
for req, d, msg in [
[will_read, c.uread, "read"],
[will_write, c.uwrite, "write"],
[will_move, c.umove, "move"],
[will_del, c.udel, "delete"],
]:
if req and (uname not in d and "*" not in d) and uname != LEELOO_DALLAS:
m = "you don't have {}-access for this location"
raise Pebkac(403, m.format(msg))
return vn, rem
def get_dbv(self, vrem):
# type: (str) -> tuple[VFS, str]
dbv = self.dbv
if not dbv:
return self, vrem
@@ -150,65 +346,50 @@ class VFS(object):
vrem = "/".join([x for x in vrem if x])
return dbv, vrem
def canonical(self, rem):
def canonical(self, rem, resolve=True):
"""returns the canonical path (fully-resolved absolute fs path)"""
rp = self.realpath
if rem:
rp += "/" + rem
try:
return fsdec(os.path.realpath(fsenc(rp)))
except:
if not WINDOWS:
raise
return absreal(rp) if resolve else rp
# cpython bug introduced in 3.8, still exists in 3.9.1;
# some win7sp1 and win10:20H2 boxes cannot realpath a
# networked drive letter such as b"n:" or b"n:\\"
#
# requirements to trigger:
# * bytestring (not unicode str)
# * just the drive letter (subfolders are ok)
# * networked drive (regular disks and vmhgfs are ok)
# * on an enterprise network (idk, cannot repro with samba)
#
# hits the following exceptions in succession:
# * access denied at L601: "path = _getfinalpathname(path)"
# * "cant concat str to bytes" at L621: "return path + tail"
#
return os.path.realpath(rp)
def ls(self, rem, uname, scandir, incl_wo=False, lstat=False):
# type: (str, str, bool, bool, bool) -> tuple[str, str, dict[str, VFS]]
def ls(self, rem, uname, scandir, permsets, lstat=False):
# type: (str, str, bool, list[list[bool]], bool) -> tuple[str, str, dict[str, VFS]]
"""return user-readable [fsdir,real,virt] items at vpath"""
virt_vis = {} # nodes readable by user
abspath = self.canonical(rem)
real = list(statdir(self.log, scandir, lstat, abspath))
real.sort()
if not rem:
for name, vn2 in sorted(self.nodes.items()):
ok = uname in vn2.uread or "*" in vn2.uread
# no vfs nodes in the list of real inodes
real = [x for x in real if x[0] not in self.nodes]
if not ok and incl_wo:
ok = uname in vn2.uwrite or "*" in vn2.uwrite
for name, vn2 in sorted(self.nodes.items()):
ok = False
axs = vn2.axs
axs = [axs.uread, axs.uwrite, axs.umove, axs.udel]
for pset in permsets:
ok = True
for req, lst in zip(pset, axs):
if req and uname not in lst and "*" not in lst:
ok = False
if ok:
break
if ok:
virt_vis[name] = vn2
# no vfs nodes in the list of real inodes
real = [x for x in real if x[0] not in self.nodes]
return [abspath, real, virt_vis]
def walk(self, rel, rem, seen, uname, dots, scandir, lstat):
def walk(self, rel, rem, seen, uname, permsets, dots, scandir, lstat):
"""
recursively yields from ./rem;
rel is a unix-style user-defined vpath (not vfs-related)
"""
fsroot, vfs_ls, vfs_virt = self.ls(
rem, uname, scandir, incl_wo=False, lstat=lstat
)
fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, permsets, lstat=lstat)
dbv, vrem = self.get_dbv(rem)
if (
seen
@@ -226,7 +407,7 @@ class VFS(object):
rfiles.sort()
rdirs.sort()
yield rel, fsroot, rfiles, rdirs, vfs_virt
yield dbv, vrem, rel, fsroot, rfiles, rdirs, vfs_virt
for rdir, _ in rdirs:
if not dots and rdir.startswith("."):
@@ -234,7 +415,7 @@ class VFS(object):
wrel = (rel + "/" + rdir).lstrip("/")
wrem = (rem + "/" + rdir).lstrip("/")
for x in self.walk(wrel, wrem, seen, uname, dots, scandir, lstat):
for x in self.walk(wrel, wrem, seen, uname, permsets, dots, scandir, lstat):
yield x
for n, vfs in sorted(vfs_virt.items()):
@@ -242,7 +423,7 @@ class VFS(object):
continue
wrel = (rel + "/" + n).lstrip("/")
for x in vfs.walk(wrel, "", seen, uname, dots, scandir, lstat):
for x in vfs.walk(wrel, "", seen, uname, permsets, dots, scandir, lstat):
yield x
def zipgen(self, vrem, flt, uname, dots, scandir):
@@ -253,9 +434,12 @@ class VFS(object):
f2a = os.sep + "dir.txt"
f2b = "{0}.hist{0}".format(os.sep)
for vpath, apath, files, rd, vd in self.walk(
"", vrem, [], uname, dots, scandir, False
):
# if multiselect: add all items to archive root
# if single folder: the folder itself is the top-level item
folder = "" if flt else (vrem.split("/")[-1] or "top")
g = self.walk(folder, vrem, [], uname, [[True]], dots, scandir, False)
for _, _, vpath, apath, files, rd, vd in g:
if flt:
files = [x for x in files if x[0] in flt]
@@ -295,19 +479,11 @@ class VFS(object):
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
yield f
def user_tree(self, uname, readable, writable, admin):
is_readable = False
if uname in self.uread or "*" in self.uread:
readable.append(self.vpath)
is_readable = True
if uname in self.uwrite or "*" in self.uwrite:
writable.append(self.vpath)
if is_readable:
admin.append(self.vpath)
for _, vn in sorted(self.nodes.items()):
vn.user_tree(uname, readable, writable, admin)
if WINDOWS:
re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
else:
re_vol = re.compile(r"^([^:]*):([^:]*):(.*)$")
class AuthSrv(object):
@@ -319,11 +495,6 @@ class AuthSrv(object):
self.warn_anonwrite = warn_anonwrite
self.line_ctr = 0
if WINDOWS:
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
else:
self.re_vol = re.compile(r"^([^:]*):([^:]*):(.*)$")
self.mutex = threading.Lock()
self.reload()
@@ -341,7 +512,8 @@ class AuthSrv(object):
yield prev, True
def _parse_config_file(self, fd, user, mread, mwrite, madm, mflags, mount):
def _parse_config_file(self, fd, acct, daxs, mflags, mount):
# type: (any, str, dict[str, AXS], any, str) -> None
vol_src = None
vol_dst = None
self.line_ctr = 0
@@ -357,7 +529,7 @@ class AuthSrv(object):
if vol_src is None:
if ln.startswith("u "):
u, p = ln[2:].split(":", 1)
user[u] = p
acct[u] = p
else:
vol_src = ln
continue
@@ -368,50 +540,53 @@ class AuthSrv(object):
raise Exception('invalid mountpoint "{}"'.format(vol_dst))
# cfg files override arguments and previous files
vol_src = fsdec(os.path.abspath(fsenc(vol_src)))
vol_src = bos.path.abspath(vol_src)
vol_dst = vol_dst.strip("/")
mount[vol_dst] = vol_src
mread[vol_dst] = []
mwrite[vol_dst] = []
madm[vol_dst] = []
daxs[vol_dst] = AXS()
mflags[vol_dst] = {}
continue
if len(ln) > 1:
lvl, uname = ln.split(" ")
else:
try:
lvl, uname = ln.split(" ", 1)
except:
lvl = ln
uname = "*"
self._read_vol_str(
lvl,
uname,
mread[vol_dst],
mwrite[vol_dst],
madm[vol_dst],
mflags[vol_dst],
)
if lvl == "a":
m = "WARNING (config-file): permission flag 'a' is deprecated; please use 'rw' instead"
self.log(m, 1)
self._read_vol_str(lvl, uname, daxs[vol_dst], mflags[vol_dst])
def _read_vol_str(self, lvl, uname, axs, flags):
# type: (str, str, AXS, any) -> None
if lvl.strip("crwmd"):
raise Exception("invalid volume flag: {},{}".format(lvl, uname))
def _read_vol_str(self, lvl, uname, mr, mw, ma, mf):
if lvl == "c":
cval = True
if "=" in uname:
uname, cval = uname.split("=", 1)
self._read_volflag(mf, uname, cval, False)
self._read_volflag(flags, uname, cval, False)
return
if uname == "":
uname = "*"
if lvl in "ra":
mr.append(uname)
for un in uname.split(","):
if "r" in lvl:
axs.uread[un] = 1
if lvl in "wa":
mw.append(uname)
if "w" in lvl:
axs.uwrite[un] = 1
if lvl == "a":
ma.append(uname)
if "m" in lvl:
axs.umove[un] = 1
if "d" in lvl:
axs.udel[un] = 1
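Combined with the -v parsing further down in this hunk, _read_vol_str defines the new permission syntax: each colon-separated entry is either a set of permission letters followed by an optional comma-separated user list, or c,flag[=value] for a volflag. A hedged example of the resulting argv, with paths, account and values invented:
# illustrative sketch only, not part of the diff; paths/accounts invented
argv = [
    "-a", "ed:wark",                       # account "ed" with password "wark"
    "-v", "./music:music:r:rw,ed:c,scan=3600",
    #      source path : mountpoint
    #                   : "r"           -> read for everyone (empty user list means "*")
    #                   : "rw,ed"       -> read and write for account ed
    #                   : "c,scan=3600" -> volflag; rescan the volume every hour
]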
def _read_volflag(self, flags, name, value, is_list):
if name not in ["mtp"]:
@@ -433,23 +608,26 @@ class AuthSrv(object):
before finally building the VFS
"""
user = {} # username:password
mread = {} # mountpoint:[username]
mwrite = {} # mountpoint:[username]
madm = {} # mountpoint:[username]
acct = {} # username:password
daxs = {} # type: dict[str, AXS]
mflags = {} # mountpoint:[flag]
mount = {} # dst:src (mountpoint:realpath)
if self.args.a:
# list of username:password
for u, p in [x.split(":", 1) for x in self.args.a]:
user[u] = p
for x in self.args.a:
try:
u, p = x.split(":", 1)
acct[u] = p
except:
m = '\n invalid value "{}" for argument -a, must be username:password'
raise Exception(m.format(x))
if self.args.v:
# list of src:dst:permset:permset:...
# permset is [rwa]username or [c]flag
# permset is <rwmd>[,username][,username] or <c>,<flag>[=args]
for v_str in self.args.v:
m = self.re_vol.match(v_str)
m = re_vol.match(v_str)
if not m:
raise Exception("invalid -v argument: [{}]".format(v_str))
@@ -458,27 +636,21 @@ class AuthSrv(object):
src = uncyg(src)
# print("\n".join([src, dst, perms]))
src = fsdec(os.path.abspath(fsenc(src)))
src = bos.path.abspath(src)
dst = dst.strip("/")
mount[dst] = src
mread[dst] = []
mwrite[dst] = []
madm[dst] = []
daxs[dst] = AXS()
mflags[dst] = {}
perms = perms.split(":")
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
self._read_vol_str(
lvl, uname, mread[dst], mwrite[dst], madm[dst], mflags[dst]
)
for x in perms.split(":"):
lvl, uname = x.split(",", 1) if "," in x else [x, ""]
self._read_vol_str(lvl, uname, daxs[dst], mflags[dst])
if self.args.c:
for cfg_fn in self.args.c:
with open(cfg_fn, "rb") as f:
try:
self._parse_config_file(
f, user, mread, mwrite, madm, mflags, mount
)
self._parse_config_file(f, acct, daxs, mflags, mount)
except:
m = "\n\033[1;31m\nerror in config file {} on line {}:\n\033[0m"
self.log(m.format(cfg_fn, self.line_ctr), 1)
@@ -488,19 +660,17 @@ class AuthSrv(object):
if WINDOWS:
cased = {}
for k, v in mount.items():
try:
cased[k] = fsdec(os.path.realpath(fsenc(v)))
except:
cased[k] = v
cased[k] = absreal(v)
mount = cased
if not mount:
# -h says our defaults are CWD at root and read/write for everyone
vfs = VFS(self.log_func, os.path.abspath("."), "", ["*"], ["*"], ["*"], {})
axs = AXS(["*"], ["*"], None, None)
vfs = VFS(self.log_func, bos.path.abspath("."), "", axs, {})
elif "" not in mount:
# there's volumes but no root; make root inaccessible
vfs = VFS(self.log_func, None, "", [], [], [], {})
vfs = VFS(self.log_func, None, "", AXS(), {})
vfs.flags["d2d"] = True
maxdepth = 0
@@ -511,32 +681,34 @@ class AuthSrv(object):
if dst == "":
# rootfs was mapped; fully replaces the default CWD vfs
vfs = VFS(
self.log_func,
mount[dst],
dst,
mread[dst],
mwrite[dst],
madm[dst],
mflags[dst],
)
vfs = VFS(self.log_func, mount[dst], dst, daxs[dst], mflags[dst])
continue
v = vfs.add(mount[dst], dst)
v.uread = mread[dst]
v.uwrite = mwrite[dst]
v.uadm = madm[dst]
v.axs = daxs[dst]
v.flags = mflags[dst]
v.dbv = None
vfs.all_vols = {}
vfs.get_all_vols(vfs.all_vols)
for perm in "read write move del".split():
axs_key = "u" + perm
unames = ["*"] + list(acct.keys())
umap = {x: [] for x in unames}
for usr in unames:
for mp, vol in vfs.all_vols.items():
if usr in getattr(vol.axs, axs_key):
umap[usr].append(mp)
setattr(vfs, "a" + perm, umap)
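After this loop the root VFS carries one vpath-list per permission and user, which is what the HttpCli changes further down read back out as rvol/wvol/mvol/dvol; a small sketch of the resulting shape, with volume and account names invented:
# illustrative sketch only, not part of the diff; names invented
# vfs.aread  == {"*": ["pub"], "ed": ["pub", "music"]}
# vfs.awrite == {"*": [],      "ed": ["music"]}
# vfs.amove  == {"*": [],      "ed": []}
# vfs.adel   == {"*": [],      "ed": []}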
all_users = {}
missing_users = {}
for d in [mread, mwrite]:
for _, ul in d.items():
for usr in ul:
if usr != "*" and usr not in user:
for axs in daxs.values():
for d in [axs.uread, axs.uwrite, axs.umove, axs.udel]:
for usr in d.keys():
all_users[usr] = 1
if usr != "*" and usr not in acct:
missing_users[usr] = 1
if missing_users:
@@ -547,6 +719,9 @@ class AuthSrv(object):
)
raise Exception("invalid config")
if LEELOO_DALLAS in all_users:
raise Exception("sorry, reserved username: " + LEELOO_DALLAS)
promote = []
demote = []
for vol in vfs.all_vols.values():
@@ -560,10 +735,7 @@ class AuthSrv(object):
elif self.args.hist:
for nch in range(len(hid)):
hpath = os.path.join(self.args.hist, hid[: nch + 1])
try:
os.makedirs(hpath)
except:
pass
bos.makedirs(hpath)
powner = os.path.join(hpath, "owner.txt")
try:
@@ -583,9 +755,9 @@ class AuthSrv(object):
vol.histpath = hpath
break
vol.histpath = os.path.realpath(vol.histpath)
vol.histpath = absreal(vol.histpath)
if vol.dbv:
if os.path.exists(os.path.join(vol.histpath, "up2k.db")):
if bos.path.exists(os.path.join(vol.histpath, "up2k.db")):
promote.append(vol)
vol.dbv = None
else:
@@ -608,10 +780,55 @@ class AuthSrv(object):
vfs.histtab = {v.realpath: v.histpath for v in vfs.all_vols.values()}
for vol in vfs.all_vols.values():
lim = Lim()
use = False
if vol.flags.get("nosub"):
use = True
lim.nosub = True
v = vol.flags.get("sz")
if v:
use = True
lim.smin, lim.smax = [unhumanize(x) for x in v.split("-")]
v = vol.flags.get("rotn")
if v:
use = True
lim.rotn, lim.rotl = [int(x) for x in v.split(",")]
v = vol.flags.get("rotf")
if v:
use = True
lim.set_rotf(v)
v = vol.flags.get("maxn")
if v:
use = True
lim.nmax, lim.nwin = [int(x) for x in v.split(",")]
v = vol.flags.get("maxb")
if v:
use = True
lim.bmax, lim.bwin = [unhumanize(x) for x in v.split(",")]
if use:
vol.lim = lim
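The block above collects the new per-volume upload limits into a Lim object; a hedged summary of the flag formats as read from the parsing code, with example values invented and the window semantics inferred from the nup/bup/chk_* calls used elsewhere in this diff:
# illustrative sketch only, not part of the diff; values invented
example_flags = {
    "nosub": 1,        # refuse uploads into subdirectories
    "sz": "1k-3m",     # accepted filesize range         -> lim.smin, lim.smax
    "rotn": "100,3",   # folder rotation: count, depth   -> lim.rotn, lim.rotl
    "maxn": "250,600", # max uploads per time-window     -> lim.nmax, lim.nwin
    "maxb": "1g,300",  # max bytes uploaded per window   -> lim.bmax, lim.bwin
}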
for vol in vfs.all_vols.values():
if "pk" in vol.flags and "gz" not in vol.flags and "xz" not in vol.flags:
vol.flags["gz"] = False # def.pk
if "scan" in vol.flags:
vol.flags["scan"] = int(vol.flags["scan"])
elif self.args.re_maxage:
vol.flags["scan"] = self.args.re_maxage
all_mte = {}
errors = False
for vol in vfs.all_vols.values():
if (self.args.e2ds and vol.uwrite) or self.args.e2dsa:
if (self.args.e2ds and vol.axs.uwrite) or self.args.e2dsa:
vol.flags["e2ds"] = True
if self.args.e2d or "e2ds" in vol.flags:
@@ -629,9 +846,11 @@ class AuthSrv(object):
if k1 in vol.flags:
vol.flags[k2] = True
# default tag-list if unset
# default tag cfgs if unset
if "mte" not in vol.flags:
vol.flags["mte"] = self.args.mte
if "mth" not in vol.flags:
vol.flags["mth"] = self.args.mth
# append parsers from argv to volume-flags
self._read_volflag(vol.flags, "mtp", self.args.mtp, True)
@@ -700,6 +919,27 @@ class AuthSrv(object):
vfs.bubble_flags()
m = "volumes and permissions:\n"
for v in vfs.all_vols.values():
if not self.warn_anonwrite:
break
m += '\n\033[36m"/{}" \033[33m{}\033[0m'.format(v.vpath, v.realpath)
for txt, attr in [
[" read", "uread"],
[" write", "uwrite"],
[" move", "umove"],
["delete", "udel"],
]:
u = list(sorted(getattr(v.axs, attr).keys()))
u = ", ".join("\033[35meverybody\033[0m" if x == "*" else x for x in u)
u = u if u else "\033[36m--none--\033[0m"
m += "\n| {}: {}".format(txt, u)
m += "\n"
if self.warn_anonwrite and not self.args.no_voldump:
self.log(m)
try:
v, _ = vfs.get("/", "*", False, True)
if self.warn_anonwrite and os.getcwd() == v.realpath:
@@ -711,17 +951,14 @@ class AuthSrv(object):
with self.mutex:
self.vfs = vfs
self.user = user
self.iuser = {v: k for k, v in user.items()}
self.acct = acct
self.iacct = {v: k for k, v in acct.items()}
self.re_pwd = None
pwds = [re.escape(x) for x in self.iuser.keys()]
pwds = [re.escape(x) for x in self.iacct.keys()]
if pwds:
self.re_pwd = re.compile("=(" + "|".join(pwds) + ")([]&; ]|$)")
# import pprint
# pprint.pprint({"usr": user, "rd": mread, "wr": mwrite, "mnt": mount})
def dbg_ls(self):
users = self.args.ls
vols = "*"
@@ -739,12 +976,12 @@ class AuthSrv(object):
pass
if users == "**":
users = list(self.user.keys()) + ["*"]
users = list(self.acct.keys()) + ["*"]
else:
users = [users]
for u in users:
if u not in self.user and u != "*":
if u not in self.acct and u != "*":
raise Exception("user not found: " + u)
if vols == "*":
@@ -760,8 +997,10 @@ class AuthSrv(object):
raise Exception("volume not found: " + v)
self.log({"users": users, "vols": vols, "flags": flags})
m = "/{}: read({}) write({}) move({}) del({})"
for k, v in self.vfs.all_vols.items():
self.log("/{}: read({}) write({})".format(k, v.uread, v.uwrite))
vc = v.axs
self.log(m.format(k, vc.uread, vc.uwrite, vc.umove, vc.udel))
flag_v = "v" in flags
flag_ln = "ln" in flags
@@ -775,13 +1014,15 @@ class AuthSrv(object):
for u in users:
self.log("checking /{} as {}".format(v, u))
try:
vn, _ = self.vfs.get(v, u, True, False)
vn, _ = self.vfs.get(v, u, True, False, False, False)
except:
continue
atop = vn.realpath
g = vn.walk("", "", [], u, True, not self.args.no_scandir, False)
for vpath, apath, files, _, _ in g:
g = vn.walk(
vn.vpath, "", [], u, [[True]], True, not self.args.no_scandir, False
)
for _, _, vpath, apath, files, _, _ in g:
fnames = [n[0] for n in files]
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
vpaths = [vtop + x for x in vpaths]

copyparty/bos/bos.py

@@ -0,0 +1,59 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
from ..util import fsenc, fsdec
from . import path
# grep -hRiE '(^|[^a-zA-Z_\.-])os\.' . | gsed -r 's/ /\n/g;s/\(/(\n/g' | grep -hRiE '(^|[^a-zA-Z_\.-])os\.' | sort | uniq -c
# printf 'os\.(%s)' "$(grep ^def bos/__init__.py | gsed -r 's/^def //;s/\(.*//' | tr '\n' '|' | gsed -r 's/.$//')"
def chmod(p, mode):
return os.chmod(fsenc(p), mode)
def listdir(p="."):
return [fsdec(x) for x in os.listdir(fsenc(p))]
def lstat(p):
return os.lstat(fsenc(p))
def makedirs(name, mode=0o755, exist_ok=True):
bname = fsenc(name)
try:
os.makedirs(bname, mode=mode)
except:
if not exist_ok or not os.path.isdir(bname):
raise
def mkdir(p, mode=0o755):
return os.mkdir(fsenc(p), mode=mode)
def rename(src, dst):
return os.rename(fsenc(src), fsenc(dst))
def replace(src, dst):
return os.replace(fsenc(src), fsenc(dst))
def rmdir(p):
return os.rmdir(fsenc(p))
def stat(p):
return os.stat(fsenc(p))
def unlink(p):
return os.unlink(fsenc(p))
def utime(p, times=None):
return os.utime(fsenc(p), times)

copyparty/bos/path.py

@@ -0,0 +1,33 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
from ..util import fsenc, fsdec
def abspath(p):
return fsdec(os.path.abspath(fsenc(p)))
def exists(p):
return os.path.exists(fsenc(p))
def getmtime(p):
return os.path.getmtime(fsenc(p))
def getsize(p):
return os.path.getsize(fsenc(p))
def isdir(p):
return os.path.isdir(fsenc(p))
def islink(p):
return os.path.islink(fsenc(p))
def realpath(p):
return fsdec(os.path.realpath(fsenc(p)))
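The two new bos modules above wrap os and os.path so call-sites can pass plain strings and leave the fsenc/fsdec dance to one place; a minimal usage sketch, with paths invented:
# illustrative sketch only, not part of the diff; paths invented
from copyparty.bos import bos

bos.makedirs("srv/incoming")                 # exist_ok by default, fsenc applied internally
if not bos.path.exists("srv/incoming/readme.md"):
    bos.rename("readme.md", "srv/incoming/readme.md")
print(bos.stat("srv/incoming").st_mtime)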


@@ -22,12 +22,9 @@ class BrokerMp(object):
self.retpend_mutex = threading.Lock()
self.mutex = threading.Lock()
cores = self.args.j
if not cores:
cores = mp.cpu_count()
self.log("broker", "booting {} subprocesses".format(cores))
for n in range(1, cores + 1):
self.num_workers = self.args.j or mp.cpu_count()
self.log("broker", "booting {} subprocesses".format(self.num_workers))
for n in range(1, self.num_workers + 1):
q_pend = mp.Queue(1)
q_yield = mp.Queue(64)
@@ -103,5 +100,8 @@ class BrokerMp(object):
for p in self.procs:
p.q_pend.put([0, dest, [args[0], len(self.procs)]])
elif dest == "cb_httpsrv_up":
self.hub.cb_httpsrv_up()
else:
raise Exception("what is " + str(dest))


@@ -1,6 +1,5 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
from copyparty.authsrv import AuthSrv
import sys
import signal
@@ -9,6 +8,7 @@ import threading
from .broker_util import ExceptionalQueue
from .httpsrv import HttpSrv
from .util import FAKE_MP
from copyparty.authsrv import AuthSrv
class MpWorker(object):


@@ -17,6 +17,7 @@ class BrokerThr(object):
self.asrv = hub.asrv
self.mutex = threading.Lock()
self.num_workers = 1
# instantiate all services here (TODO: inheritance?)
self.httpsrv = HttpSrv(self, None)


@@ -7,15 +7,22 @@ import gzip
import time
import copy
import json
import base64
import string
import socket
import ctypes
from datetime import datetime
import calendar
try:
import lzma
except:
pass
from .__init__ import E, PY2, WINDOWS, ANYWIN, unicode
from .util import * # noqa # pylint: disable=unused-wildcard-import
from .authsrv import AuthSrv
from .bos import bos
from .authsrv import AuthSrv, Lim
from .szip import StreamZip
from .star import StreamTar
@@ -58,9 +65,12 @@ class HttpCli(object):
def unpwd(self, m):
a, b = m.groups()
return "=\033[7m {} \033[27m{}".format(self.asrv.iuser[a], b)
return "=\033[7m {} \033[27m{}".format(self.asrv.iacct[a], b)
def _check_nonfatal(self, ex, post):
if post:
return ex.code < 300
def _check_nonfatal(self, ex):
return ex.code < 400 or ex.code in [404, 429]
def _assert_safe_rem(self, rem):
@@ -102,8 +112,8 @@ class HttpCli(object):
self.req = "[junk]"
self.http_ver = "HTTP/1.1"
# self.log("pebkac at httpcli.run #1: " + repr(ex))
self.keepalive = self._check_nonfatal(ex)
self.loud_reply(unicode(ex), status=ex.code)
self.keepalive = False
self.loud_reply(unicode(ex), status=ex.code, volsan=True)
return self.keepalive
# time.sleep(0.4)
@@ -176,14 +186,33 @@ class HttpCli(object):
if kc in cookies and ku not in uparam:
uparam[ku] = cookies[kc]
if len(uparam) > 10 or len(cookies) > 50:
raise Pebkac(400, "u wot m8")
self.uparam = uparam
self.cookies = cookies
self.vpath = unquotep(vpath)
self.vpath = unquotep(vpath) # not query, so + means +
pwd = uparam.get("pw")
self.uname = self.asrv.iuser.get(pwd, "*")
self.rvol, self.wvol, self.avol = [[], [], []]
self.asrv.vfs.user_tree(self.uname, self.rvol, self.wvol, self.avol)
pwd = None
ba = self.headers.get("authorization")
if ba:
try:
ba = ba.split(" ")[1].encode("ascii")
ba = base64.b64decode(ba).decode("utf-8")
# try "pwd", "x:pwd", "pwd:x"
for ba in [ba] + ba.split(":", 1)[::-1]:
if self.asrv.iacct.get(ba):
pwd = ba
break
except:
pass
pwd = uparam.get("pw") or pwd
self.uname = self.asrv.iacct.get(pwd, "*")
self.rvol = self.asrv.vfs.aread[self.uname]
self.wvol = self.asrv.vfs.awrite[self.uname]
self.mvol = self.asrv.vfs.amove[self.uname]
self.dvol = self.asrv.vfs.adel[self.uname]
if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"):
self.out_headers["Set-Cookie"] = self.get_pwd_cookie(pwd)[0]
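With the header parsing above, a password can now arrive as ?pw= in the URL, as the whole basic-auth string, or as either half of a user:password pair; a minimal sketch of building the header form (the password is invented):
# illustrative sketch only, not part of the diff; "hunter2" is an invented password
#   URL form:    GET /music/?pw=hunter2
#   basic-auth:  base64("hunter2")  or  base64("k:hunter2")  (either side may hold the password)
import base64
hdrs = {"Authorization": "Basic " + base64.b64encode(b"k:hunter2").decode("ascii")}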
@@ -210,18 +239,24 @@ class HttpCli(object):
else:
raise Pebkac(400, 'invalid HTTP mode "{0}"'.format(self.mode))
except Pebkac as ex:
except Exception as ex:
pex = ex
if not hasattr(ex, "code"):
pex = Pebkac(500)
try:
# self.log("pebkac at httpcli.run #2: " + repr(ex))
if not self._check_nonfatal(ex):
post = self.mode in ["POST", "PUT"] or "content-length" in self.headers
if not self._check_nonfatal(pex, post):
self.keepalive = False
self.log("{}\033[0m, {}".format(str(ex), self.vpath), 3)
msg = str(ex) if pex == ex else min_ex()
self.log("{}\033[0m, {}".format(msg, self.vpath), 3)
msg = "<pre>{}\r\nURL: {}\r\n".format(str(ex), self.vpath)
if self.hint:
msg += "hint: {}\r\n".format(self.hint)
self.reply(msg.encode("utf-8", "replace"), status=ex.code)
self.reply(msg.encode("utf-8", "replace"), status=pex.code, volsan=True)
return self.keepalive
except Pebkac:
return False
@@ -254,8 +289,12 @@ class HttpCli(object):
except:
raise Pebkac(400, "client d/c while replying headers")
def reply(self, body, status=200, mime=None, headers=None):
def reply(self, body, status=200, mime=None, headers=None, volsan=False):
# TODO something to reply with user-supplied values safely
if volsan:
body = vol_san(self.asrv.vfs.all_vols.values(), body)
self.send_headers(len(body), status, mime, headers)
try:
@@ -339,11 +378,36 @@ class HttpCli(object):
static_path = os.path.join(E.mod, "web/", self.vpath[5:])
return self.tx_file(static_path)
x = self.asrv.vfs.can_access(self.vpath, self.uname)
self.can_read, self.can_write, self.can_move, self.can_delete = x
if not self.can_read and not self.can_write:
if self.vpath:
self.log("inaccessible: [{}]".format(self.vpath))
raise Pebkac(404)
self.uparam["h"] = False
if "tree" in self.uparam:
return self.tx_tree()
if "stack" in self.uparam:
return self.tx_stack()
if "delete" in self.uparam:
return self.handle_rm()
if "move" in self.uparam:
return self.handle_mv()
if "scan" in self.uparam:
return self.scanvol()
if not self.vpath:
if "stack" in self.uparam:
return self.tx_stack()
if "ups" in self.uparam:
return self.tx_ups()
if "h" in self.uparam:
return self.tx_mounts()
# conditional redirect to single volumes
if self.vpath == "" and not self.ouparam:
@@ -359,21 +423,6 @@ class HttpCli(object):
self.redirect(vpath, flavor="redirecting to", use302=True)
return True
self.readable, self.writable = self.asrv.vfs.can_access(self.vpath, self.uname)
if not self.readable and not self.writable:
if self.vpath:
self.log("inaccessible: [{}]".format(self.vpath))
raise Pebkac(404)
self.uparam = {"h": False}
if "h" in self.uparam:
self.vpath = None
return self.tx_mounts()
if "scan" in self.uparam:
return self.scanvol()
return self.tx_browser()
def handle_options(self):
@@ -456,7 +505,7 @@ class HttpCli(object):
if "get" in opt:
return self.handle_get()
raise Pebkac(405, "POST({}) is disabled".format(ctype))
raise Pebkac(405, "POST({}) is disabled in server config".format(ctype))
raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
@@ -474,7 +523,11 @@ class HttpCli(object):
def dump_to_file(self):
reader, remains = self.get_body_reader()
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
lim = vfs.get_dbv(rem)[0].lim
fdir = os.path.join(vfs.realpath, rem)
if lim:
fdir, rem = lim.all(self.ip, rem, remains, fdir)
bos.makedirs(fdir)
addr = self.ip.replace(":", ".")
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
@@ -482,13 +535,81 @@ class HttpCli(object):
if self.args.nw:
path = os.devnull
with open(fsenc(path), "wb", 512 * 1024) as f:
open_f = open
open_a = [fsenc(path), "wb", 512 * 1024]
open_ka = {}
# user-request || config-force
if ("gz" in vfs.flags or "xz" in vfs.flags) and (
"pk" in vfs.flags
or "pk" in self.uparam
or "gz" in self.uparam
or "xz" in self.uparam
):
fb = {"gz": 9, "xz": 0} # default/fallback level
lv = {} # selected level
alg = None # selected algo (gz=preferred)
# user-prefs first
if "gz" in self.uparam or "pk" in self.uparam: # def.pk
alg = "gz"
if "xz" in self.uparam:
alg = "xz"
if alg:
v = self.uparam.get(alg)
lv[alg] = fb[alg] if v is None else int(v)
if alg not in vfs.flags:
alg = "gz" if "gz" in vfs.flags else "xz"
# then server overrides
pk = vfs.flags.get("pk")
if pk is not None:
# config-forced on
alg = alg or "gz" # def.pk
try:
# config-forced opts
alg, lv = pk.split(",")
lv[alg] = int(lv)
except:
pass
lv[alg] = lv.get(alg) or fb.get(alg)
self.log("compressing with {} level {}".format(alg, lv.get(alg)))
if alg == "gz":
open_f = gzip.GzipFile
open_a = [fsenc(path), "wb", lv[alg], None, 0x5FEE6600] # 2021-01-01
elif alg == "xz":
open_f = lzma.open
open_a = [fsenc(path), "wb"]
open_ka = {"preset": lv[alg]}
else:
self.log("fallthrough? thats a bug", 1)
with open_f(*open_a, **open_ka) as f:
post_sz, _, sha_b64 = hashcopy(reader, f)
if lim:
lim.nup(self.ip)
lim.bup(self.ip, post_sz)
try:
lim.chk_sz(post_sz)
except:
bos.unlink(path)
raise
if not self.args.nw:
vfs, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put(
False, "up2k.hash_file", vfs.realpath, vfs.flags, vrem, fn
False,
"up2k.hash_file",
vfs.realpath,
vfs.flags,
vrem,
fn,
self.ip,
time.time(),
)
return post_sz, sha_b64, remains, path
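Given the gz/xz branch above, a PUT upload can ask the server to compress the stream before it hits disk, provided the volume carries the gz or xz volflag; a hedged client sketch assuming the default port, with folder, password and filename invented:
# illustrative sketch only, not part of the diff; URL, folder and password invented
import urllib.request

with open("backup.tar", "rb") as f:
    body = f.read()
req = urllib.request.Request(
    "http://127.0.0.1:3923/dump/?pw=hunter2&gz=6",  # "xz=3" would pick lzma instead
    data=body, method="PUT")
urllib.request.urlopen(req)
# the server picks the filename itself, roughly put-<timestamp>-<ip>.bin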
@@ -559,7 +680,7 @@ class HttpCli(object):
try:
remains = int(self.headers["content-length"])
except:
raise Pebkac(400, "you must supply a content-length for JSON POST")
raise Pebkac(411)
if remains > 1024 * 1024:
raise Pebkac(413, "json 2big")
@@ -582,17 +703,17 @@ class HttpCli(object):
if "srch" in self.uparam or "srch" in body:
return self.handle_search(body)
if "delete" in self.uparam:
return self.handle_rm(body)
# up2k-php compat
for k in "chunkpit.php", "handshake.php":
if self.vpath.endswith(k):
self.vpath = self.vpath[: -len(k)]
sub = None
name = undot(body["name"])
if "/" in name:
sub, name = name.rsplit("/", 1)
self.vpath = "/".join([self.vpath, sub]).strip("/")
body["name"] = name
raise Pebkac(400, "your client is old; press CTRL-SHIFT-R and try again")
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
dbv, vrem = vfs.get_dbv(rem)
@@ -603,14 +724,14 @@ class HttpCli(object):
body["addr"] = self.ip
body["vcfg"] = dbv.flags
if sub:
if rem:
try:
dst = os.path.join(vfs.realpath, rem)
if not os.path.isdir(fsenc(dst)):
os.makedirs(fsenc(dst))
if not bos.path.isdir(dst):
bos.makedirs(dst)
except OSError as ex:
self.log("makedirs failed [{}]".format(dst))
if not os.path.isdir(fsenc(dst)):
if not bos.path.isdir(dst):
if ex.errno == 13:
raise Pebkac(500, "the server OS denied write-access")
@@ -623,9 +744,6 @@ class HttpCli(object):
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
ret = x.get()
if sub:
ret["name"] = "/".join([sub, ret["name"]])
ret = json.dumps(ret)
self.log(ret)
self.reply(ret.encode("utf-8"), mime="application/json")
@@ -756,7 +874,7 @@ class HttpCli(object):
times = (int(time.time()), int(lastmod))
self.log("no more chunks, setting times {}".format(times))
try:
os.utime(fsenc(path), times)
bos.utime(path, times)
except:
self.log("failed to utime ({}, {})".format(path, times))
@@ -770,12 +888,12 @@ class HttpCli(object):
self.parser.drop()
ck, msg = self.get_pwd_cookie(pwd)
html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
html = self.j2("msg", h1=msg, h2='<a href="/?h">ack</a>', redir="/?h")
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
return True
def get_pwd_cookie(self, pwd):
if pwd in self.asrv.iuser:
if pwd in self.asrv.iacct:
msg = "login ok"
dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365)
exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
@@ -801,14 +919,14 @@ class HttpCli(object):
fdir = os.path.join(vfs.realpath, rem)
fn = os.path.join(fdir, sanitized)
if not os.path.isdir(fsenc(fdir)):
if not bos.path.isdir(fdir):
raise Pebkac(500, "parent folder does not exist")
if os.path.isdir(fsenc(fn)):
if bos.path.isdir(fn):
raise Pebkac(500, "that folder exists already")
try:
os.mkdir(fsenc(fn))
bos.mkdir(fn)
except OSError as ex:
if ex.errno == 13:
raise Pebkac(500, "the server OS denied write-access")
@@ -838,7 +956,7 @@ class HttpCli(object):
fdir = os.path.join(vfs.realpath, rem)
fn = os.path.join(fdir, sanitized)
if os.path.exists(fsenc(fn)):
if bos.path.exists(fn):
raise Pebkac(500, "that file exists already")
with open(fsenc(fn), "wb") as f:
@@ -853,6 +971,15 @@ class HttpCli(object):
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem)
upload_vpath = self.vpath
lim = vfs.get_dbv(rem)[0].lim
fdir_base = os.path.join(vfs.realpath, rem)
if lim:
fdir_base, rem = lim.all(self.ip, rem, -1, fdir_base)
upload_vpath = "{}/{}".format(vfs.vpath, rem).strip("/")
if not nullwrite:
bos.makedirs(fdir_base)
files = []
errmsg = ""
t0 = time.time()
@@ -862,13 +989,10 @@ class HttpCli(object):
self.log("discarding incoming file without filename")
# fallthrough
fdir = fdir_base
fname = sanitize_fn(p_file, "", [".prologue.html", ".epilogue.html"])
if p_file and not nullwrite:
fdir = os.path.join(vfs.realpath, rem)
fname = sanitize_fn(
p_file, "", [".prologue.html", ".epilogue.html"]
)
if not os.path.isdir(fsenc(fdir)):
if not bos.path.isdir(fdir):
raise Pebkac(404, "that folder does not exist")
suffix = ".{:.6f}-{}".format(time.time(), self.ip)
@@ -878,25 +1002,41 @@ class HttpCli(object):
fname = os.devnull
fdir = ""
if lim:
lim.chk_bup(self.ip)
lim.chk_nup(self.ip)
try:
with ren_open(fname, "wb", 512 * 1024, **open_args) as f:
f, fname = f["orz"]
self.log("writing to {}/{}".format(fdir, fname))
abspath = os.path.join(fdir, fname)
self.log("writing to {}".format(abspath))
sz, sha512_hex, _ = hashcopy(p_data, f)
if sz == 0:
raise Pebkac(400, "empty files in post")
files.append([sz, sha512_hex, p_file, fname])
dbv, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put(
False,
"up2k.hash_file",
dbv.realpath,
dbv.flags,
vrem,
fname,
)
self.conn.nbyte += sz
if lim:
lim.nup(self.ip)
lim.bup(self.ip, sz)
try:
lim.chk_sz(sz)
except:
bos.unlink(abspath)
raise
files.append([sz, sha512_hex, p_file, fname])
dbv, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put(
False,
"up2k.hash_file",
dbv.realpath,
dbv.flags,
vrem,
fname,
self.ip,
time.time(),
)
self.conn.nbyte += sz
except Pebkac:
if fname != os.devnull:
@@ -907,15 +1047,17 @@ class HttpCli(object):
suffix = ".PARTIAL"
try:
os.rename(fsenc(fp), fsenc(fp2 + suffix))
bos.rename(fp, fp2 + suffix)
except:
fp2 = fp2[: -len(suffix) - 1]
os.rename(fsenc(fp), fsenc(fp2 + suffix))
bos.rename(fp, fp2 + suffix)
raise
except Pebkac as ex:
errmsg = unicode(ex)
errmsg = vol_san(
self.asrv.vfs.all_vols.values(), unicode(ex).encode("utf-8")
).decode("utf-8")
td = max(0.1, time.time() - t0)
sz_total = sum(x[0] for x in files)
@@ -935,7 +1077,7 @@ class HttpCli(object):
errmsg = "ERROR: " + errmsg
for sz, sha512, ofn, lfn in files:
vpath = (self.vpath + "/" if self.vpath else "") + lfn
vpath = "{}/{}".format(upload_vpath, lfn).strip("/")
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format(
sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True)
)
@@ -994,13 +1136,20 @@ class HttpCli(object):
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem)
# TODO:
# the per-volume read/write permissions must be replaced with permission flags
# which would decide how to handle uploads to filenames which are taken,
# current behavior of creating a new name is a good default for binary files
# but should also offer a flag to takeover the filename and rename the old one
#
# stopgap:
clen = int(self.headers.get("content-length", -1))
if clen == -1:
raise Pebkac(411)
rp, fn = vsplit(rem)
fp = os.path.join(vfs.realpath, rp)
lim = vfs.get_dbv(rem)[0].lim
if lim:
fp, rp = lim.all(self.ip, rp, clen, fp)
bos.makedirs(fp)
fp = os.path.join(fp, fn)
rem = "{}/{}".format(rp, fn).strip("/")
if not rem.endswith(".md"):
raise Pebkac(400, "only markdown pls")
@@ -1012,10 +1161,9 @@ class HttpCli(object):
self.reply(response.encode("utf-8"))
return True
fp = os.path.join(vfs.realpath, rem)
srv_lastmod = srv_lastmod3 = -1
try:
st = os.stat(fsenc(fp))
st = bos.stat(fp)
srv_lastmod = st.st_mtime
srv_lastmod3 = int(srv_lastmod * 1000)
except OSError as ex:
@@ -1051,14 +1199,13 @@ class HttpCli(object):
self.reply(response.encode("utf-8"))
return True
# TODO another hack re: pending permissions rework
mdir, mfile = os.path.split(fp)
mfile2 = "{}.{:.3f}.md".format(mfile[:-3], srv_lastmod)
try:
os.mkdir(fsenc(os.path.join(mdir, ".hist")))
bos.mkdir(os.path.join(mdir, ".hist"))
except:
pass
os.rename(fsenc(fp), fsenc(os.path.join(mdir, ".hist", mfile2)))
bos.rename(fp, os.path.join(mdir, ".hist", mfile2))
p_field, _, p_data = next(self.parser.gen)
if p_field != "body":
@@ -1067,7 +1214,16 @@ class HttpCli(object):
with open(fsenc(fp), "wb", 512 * 1024) as f:
sz, sha512, _ = hashcopy(p_data, f)
new_lastmod = os.stat(fsenc(fp)).st_mtime
if lim:
lim.nup(self.ip)
lim.bup(self.ip, sz)
try:
lim.chk_sz(sz)
except:
bos.unlink(fp)
raise
new_lastmod = bos.stat(fp).st_mtime
new_lastmod3 = int(new_lastmod * 1000)
sha512 = sha512[:56]
@@ -1112,7 +1268,7 @@ class HttpCli(object):
for ext in ["", ".gz", ".br"]:
try:
fs_path = req_path + ext
st = os.stat(fsenc(fs_path))
st = bos.stat(fs_path)
file_ts = max(file_ts, st.st_mtime)
editions[ext or "plain"] = [fs_path, st.st_size]
except:
@@ -1289,11 +1445,9 @@ class HttpCli(object):
else:
fn = self.headers.get("host", "hey")
afn = "".join(
[x if x in (string.ascii_letters + string.digits) else "_" for x in fn]
)
bascii = unicode(string.ascii_letters + string.digits).encode("utf-8")
safe = (string.ascii_letters + string.digits).replace("%", "")
afn = "".join([x if x in safe.replace('"', "") else "_" for x in fn])
bascii = unicode(safe).encode("utf-8")
ufn = fn.encode("utf-8", "xmlcharrefreplace")
if PY2:
ufn = [unicode(x) if x in bascii else "%{:02x}".format(ord(x)) for x in ufn]
@@ -1308,6 +1462,7 @@ class HttpCli(object):
cdis = "attachment; filename=\"{}.{}\"; filename*=UTF-8''{}.{}"
cdis = cdis.format(afn, fmt, ufn, fmt)
self.log(cdis)
self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis})
fgen = vn.zipgen(rem, items, self.uname, dots, not self.args.no_scandir)
@@ -1364,10 +1519,10 @@ class HttpCli(object):
html_path = os.path.join(E.mod, "web", "{}.html".format(tpl))
template = self.j2(tpl)
st = os.stat(fsenc(fs_path))
st = bos.stat(fs_path)
ts_md = st.st_mtime
st = os.stat(fsenc(html_path))
st = bos.stat(html_path)
ts_html = st.st_mtime
sz_md = 0
@@ -1424,12 +1579,13 @@ class HttpCli(object):
def tx_mounts(self):
suf = self.urlq({}, ["h"])
avol = [x for x in self.wvol if x in self.rvol]
rvol, wvol, avol = [
[("/" + x).rstrip("/") + "/" for x in y]
for y in [self.rvol, self.wvol, self.avol]
for y in [self.rvol, self.wvol, avol]
]
if self.avol and not self.args.no_rescan:
if avol and not self.args.no_rescan:
x = self.conn.hsrv.broker.put(True, "up2k.get_state")
vs = json.loads(x.get())
vstate = {("/" + k).rstrip("/") + "/": v for k, v in vs["volstate"].items()}
@@ -1454,11 +1610,11 @@ class HttpCli(object):
return True
def scanvol(self):
if not self.readable or not self.writable:
raise Pebkac(403, "not admin")
if not self.can_read or not self.can_write:
raise Pebkac(403, "not allowed for user " + self.uname)
if self.args.no_rescan:
raise Pebkac(403, "disabled by argv")
raise Pebkac(403, "the rescan feature is disabled in server config")
vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True)
@@ -1473,11 +1629,11 @@ class HttpCli(object):
raise Pebkac(500, x)
def tx_stack(self):
if not self.avol:
raise Pebkac(403, "not admin")
if not [x for x in self.wvol if x in self.rvol]:
raise Pebkac(403, "not allowed for user " + self.uname)
if self.args.no_stack:
raise Pebkac(403, "disabled by argv")
raise Pebkac(403, "the stackdump feature is disabled in server config")
ret = "<pre>{}\n{}".format(time.time(), alltrace())
self.reply(ret.encode("utf-8"))
@@ -1512,7 +1668,7 @@ class HttpCli(object):
try:
vn, rem = self.asrv.vfs.get(top, self.uname, True, False)
fsroot, vfs_ls, vfs_virt = vn.ls(
rem, self.uname, not self.args.no_scandir, incl_wo=True
rem, self.uname, not self.args.no_scandir, [[True], [False, True]]
)
except:
vfs_ls = []
@@ -1539,6 +1695,74 @@ class HttpCli(object):
ret["a"] = dirs
return ret
def tx_ups(self):
if not self.args.unpost:
raise Pebkac(400, "the unpost feature is disabled in server config")
filt = self.uparam.get("filter")
lm = "ups [{}]".format(filt)
self.log(lm)
ret = []
t0 = time.time()
idx = self.conn.get_u2idx()
lim = time.time() - self.args.unpost
for vol in self.asrv.vfs.all_vols.values():
cur = idx.get_cur(vol.realpath)
if not cur:
continue
q = "select sz, rd, fn, at from up where ip=? and at>?"
for sz, rd, fn, at in cur.execute(q, (self.ip, lim)):
vp = "/" + "/".join(x for x in [vol.vpath, rd, fn] if x)
if filt and filt not in vp:
continue
ret.append({"vp": vp, "sz": sz, "at": at})
if len(ret) > 3000:
ret.sort(key=lambda x: x["at"], reverse=True)
ret = ret[:2000]
ret.sort(key=lambda x: x["at"], reverse=True)
ret = ret[:2000]
jtxt = json.dumps(ret, indent=2, sort_keys=True).encode("utf-8", "replace")
self.log("{} #{} {:.2f}sec".format(lm, len(ret), time.time() - t0))
self.reply(jtxt, mime="application/json")
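tx_ups above backs the new unpost feature: assuming the server was started with an unpost window, it returns the caller's own recent uploads (matched on IP and upload time) as JSON; a minimal client sketch with host, filter and password invented:
# illustrative sketch only, not part of the diff; host/filter/password invented
import json, urllib.request

url = "http://127.0.0.1:3923/?ups&filter=.flac&pw=hunter2"
for up in json.loads(urllib.request.urlopen(url).read().decode("utf-8")):
    print(up["at"], up["sz"], up["vp"])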
def handle_rm(self, req=None):
if not req and not self.can_delete:
raise Pebkac(403, "not allowed for user " + self.uname)
if self.args.no_del:
raise Pebkac(403, "the delete feature is disabled in server config")
if not req:
req = [self.vpath]
x = self.conn.hsrv.broker.put(True, "up2k.handle_rm", self.uname, self.ip, req)
self.loud_reply(x.get())
def handle_mv(self):
if not self.can_move:
raise Pebkac(403, "not allowed for user " + self.uname)
if self.args.no_mv:
raise Pebkac(403, "the rename/move feature is disabled in server config")
# full path of new loc (incl filename)
dst = self.uparam.get("move")
if not dst:
raise Pebkac(400, "need dst vpath")
# x-www-form-urlencoded (url query part) uses
# either + or %20 for 0x20 so handle both
dst = unquotep(dst.replace("+", " "))
x = self.conn.hsrv.broker.put(
True, "up2k.handle_mv", self.uname, self.vpath, dst
)
self.loud_reply(x.get())
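handle_rm and handle_mv are reached through the new ?delete and ?move query parameters, judging by the dispatch block earlier in this file; a hedged sketch of the client side, with host, vpaths and password invented:
# illustrative sketch only, not part of the diff; host/vpaths/password invented
import urllib.request

base = "http://127.0.0.1:3923"
# delete (requires the d permission):
urllib.request.urlopen(base + "/music/old.flac?delete&pw=hunter2")
# rename/move (requires the m permission); dst is the full new vpath, + or %20 both mean space:
urllib.request.urlopen(base + "/music/old.flac?move=/archive/old.flac&pw=hunter2")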
def tx_browser(self):
vpath = ""
vpnodes = [["", "/"]]
@@ -1551,18 +1775,16 @@ class HttpCli(object):
vpnodes.append([quotep(vpath) + "/", html_escape(node, crlf=True)])
vn, rem = self.asrv.vfs.get(
self.vpath, self.uname, self.readable, self.writable
)
vn, rem = self.asrv.vfs.get(self.vpath, self.uname, False, False)
abspath = vn.canonical(rem)
dbv, vrem = vn.get_dbv(rem)
try:
st = os.stat(fsenc(abspath))
st = bos.stat(abspath)
except:
raise Pebkac(404)
if self.readable:
if self.can_read:
if rem.startswith(".hist/up2k.") or (
rem.endswith("/dir.txt") and rem.startswith(".hist/th/")
):
@@ -1574,8 +1796,8 @@ class HttpCli(object):
if is_dir:
for fn in self.args.th_covers.split(","):
fp = os.path.join(abspath, fn)
if os.path.exists(fp):
vrem = "{}/{}".format(vrem.rstrip("/"), fn)
if bos.path.exists(fp):
vrem = "{}/{}".format(vrem.rstrip("/"), fn).strip("/")
is_dir = False
break
@@ -1629,10 +1851,14 @@ class HttpCli(object):
srv_info = "</span> /// <span>".join(srv_info)
perms = []
if self.readable:
if self.can_read:
perms.append("read")
if self.writable:
if self.can_write:
perms.append("write")
if self.can_move:
perms.append("move")
if self.can_delete:
perms.append("delete")
url_suf = self.urlq({}, [])
is_ls = "ls" in self.uparam
@@ -1642,37 +1868,54 @@ class HttpCli(object):
tpl = "browser2"
logues = ["", ""]
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
fn = os.path.join(abspath, fn)
if os.path.exists(fsenc(fn)):
with open(fsenc(fn), "rb") as f:
logues[n] = f.read().decode("utf-8")
if not self.args.no_logues:
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
fn = os.path.join(abspath, fn)
if bos.path.exists(fn):
with open(fsenc(fn), "rb") as f:
logues[n] = f.read().decode("utf-8")
readme = ""
if not self.args.no_readme and not logues[1]:
for fn in ["README.md", "readme.md"]:
fn = os.path.join(abspath, fn)
if bos.path.exists(fn):
with open(fsenc(fn), "rb") as f:
readme = f.read().decode("utf-8")
break
ls_ret = {
"dirs": [],
"files": [],
"taglist": [],
"srvinf": srv_info,
"acct": self.uname,
"perms": perms,
"logues": logues,
"readme": readme,
}
j2a = {
"vdir": quotep(self.vpath),
"vpnodes": vpnodes,
"files": [],
"acct": self.uname,
"perms": json.dumps(perms),
"taglist": [],
"tag_order": [],
"def_hcols": [],
"have_up2k_idx": ("e2d" in vn.flags),
"have_tags_idx": ("e2t" in vn.flags),
"have_mv": (not self.args.no_mv),
"have_del": (not self.args.no_del),
"have_zip": (not self.args.no_zip),
"have_b_u": (self.writable and self.uparam.get("b") == "u"),
"have_unpost": (self.args.unpost > 0),
"have_b_u": (self.can_write and self.uparam.get("b") == "u"),
"url_suf": url_suf,
"logues": logues,
"readme": readme,
"title": html_escape(self.vpath, crlf=True),
"srv_info": srv_info,
}
if not self.readable:
if not self.can_read:
if is_ls:
ret = json.dumps(ls_ret)
self.reply(
@@ -1695,7 +1938,7 @@ class HttpCli(object):
return self.tx_zip(k, v, vn, rem, [], self.args.ed)
fsroot, vfs_ls, vfs_virt = vn.ls(
rem, self.uname, not self.args.no_scandir, incl_wo=True
rem, self.uname, not self.args.no_scandir, [[True], [False, True]]
)
stats = {k: v for k, v in vfs_ls}
vfs_ls = [x[0] for x in vfs_ls]
@@ -1706,7 +1949,7 @@ class HttpCli(object):
histdir = os.path.join(fsroot, ".hist")
ptn = re.compile(r"(.*)\.([0-9]+\.[0-9]{3})(\.[^\.]+)$")
try:
for hfn in os.listdir(histdir):
for hfn in bos.listdir(histdir):
m = ptn.match(hfn)
if not m:
continue
@@ -1747,7 +1990,7 @@ class HttpCli(object):
fspath = fsroot + "/" + fn
try:
inf = stats.get(fn) or os.stat(fsenc(fspath))
inf = stats.get(fn) or bos.stat(fspath)
except:
self.log("broken symlink: {}".format(repr(fspath)))
continue
@@ -1856,8 +2099,8 @@ class HttpCli(object):
j2a["logues"] = logues
j2a["taglist"] = taglist
if "mte" in vn.flags:
j2a["tag_order"] = json.dumps(vn.flags["mte"].split(","))
if "mth" in vn.flags:
j2a["def_hcols"] = vn.flags["mth"].split(",")
if self.args.css_browser:
j2a["css"] = self.args.css_browser


@@ -28,6 +28,7 @@ except ImportError:
from .__init__ import E, PY2, MACOS
from .util import spack, min_ex, start_stackmon, start_log_thrs
from .bos import bos
from .httpconn import HttpConn
if PY2:
@@ -73,7 +74,7 @@ class HttpSrv(object):
}
cert_path = os.path.join(E.cfg, "cert.pem")
if os.path.exists(cert_path):
if bos.path.exists(cert_path):
self.cert_path = cert_path
else:
self.cert_path = None
@@ -140,6 +141,7 @@ class HttpSrv(object):
fno = srv_sck.fileno()
msg = "subscribed @ {}:{} f{}".format(ip, port, fno)
self.log(self.name, msg)
self.broker.put(False, "cb_httpsrv_up")
while not self.stopping:
if self.args.log_conn:
self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30")
@@ -172,25 +174,26 @@ class HttpSrv(object):
now = time.time()
if now - (self.tp_time or now) > 300:
m = "httpserver threadpool died: tpt {:.2f}, now {:.2f}, nthr {}, ncli {}"
self.log(self.name, m.format(self.tp_time, now, self.tp_nthr, self.ncli), 1)
self.tp_time = None
self.tp_q = None
if self.tp_q:
self.tp_q.put((sck, addr))
with self.mutex:
self.ncli += 1
with self.mutex:
self.ncli += 1
if self.tp_q:
self.tp_time = self.tp_time or now
self.tp_ncli = max(self.tp_ncli, self.ncli + 1)
self.tp_ncli = max(self.tp_ncli, self.ncli)
if self.tp_nthr < self.ncli + 4:
self.start_threads(8)
return
self.tp_q.put((sck, addr))
return
if not self.args.no_htp:
m = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n"
self.log(self.name, m, 1)
with self.mutex:
self.ncli += 1
thr = threading.Thread(
target=self.thr_client,
args=(sck, addr),
@@ -307,7 +310,7 @@ class HttpSrv(object):
try:
with os.scandir(os.path.join(E.mod, "web")) as dh:
for fh in dh:
inf = fh.stat(follow_symlinks=False)
inf = fh.stat()
v = max(v, inf.st_mtime)
except:
pass


@@ -9,6 +9,7 @@ import subprocess as sp
from .__init__ import PY2, WINDOWS, unicode
from .util import fsenc, fsdec, uncyg, REKOBO_LKEY
from .bos import bos
def have_ff(cmd):
@@ -44,7 +45,7 @@ class MParser(object):
if WINDOWS:
bp = uncyg(bp)
if os.path.exists(bp):
if bos.path.exists(bp):
self.bin = bp
return
except:
@@ -420,7 +421,7 @@ class MTag(object):
except Exception as ex:
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
sz = os.path.getsize(fsenc(abspath))
sz = bos.path.getsize(abspath)
ret = {".q": [0, int((sz / md.info.length) / 128)]}
for attr, k, norm in [
@@ -433,7 +434,15 @@ class MTag(object):
try:
v = getattr(md.info, attr)
except:
continue
if k != "ac":
continue
try:
v = str(md.info).split(".")[1]
if v.startswith("ogg"):
v = v[3:]
except:
continue
if not v:
continue


@@ -1,12 +1,12 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import tarfile
import threading
from .sutil import errdesc
from .util import Queue, fsenc
from .bos import bos
class QFile(object):
@@ -61,7 +61,7 @@ class StreamTar(object):
yield None
if self.errf:
os.unlink(self.errf["ap"])
bos.unlink(self.errf["ap"])
def ser(self, f):
name = f["vp"]


@@ -1,3 +1,5 @@
# coding: utf-8
"""
This is Victor Stinner's pure-Python implementation of PEP 383: the "surrogateescape" error
handler of Python 3.
@@ -171,7 +173,7 @@ FS_ENCODING = sys.getfilesystemencoding()
if WINDOWS and not PY3:
# py2 thinks win* is mbcs, probably a bug? anyways this works
FS_ENCODING = 'utf-8'
FS_ENCODING = "utf-8"
# normalize the filesystem encoding name.


@@ -1,11 +1,12 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import time
import tempfile
from datetime import datetime
from .bos import bos
def errdesc(errors):
report = ["copyparty failed to add the following files to the archive:", ""]
@@ -17,12 +18,11 @@ def errdesc(errors):
tf_path = tf.name
tf.write("\r\n".join(report).encode("utf-8", "replace"))
dt = datetime.utcfromtimestamp(time.time())
dt = dt.strftime("%Y-%m%d-%H%M%S")
dt = datetime.utcnow().strftime("%Y-%m%d-%H%M%S")
os.chmod(tf_path, 0o444)
bos.chmod(tf_path, 0o444)
return {
"vp": "archive-errors-{}.txt".format(dt),
"ap": tf_path,
"st": os.stat(tf_path),
"st": bos.stat(tf_path),
}, report


@@ -13,8 +13,8 @@ import threading
from datetime import datetime, timedelta
import calendar
from .__init__ import E, PY2, WINDOWS, MACOS, VT100, unicode
from .util import mp, start_log_thrs, start_stackmon, min_ex
from .__init__ import E, PY2, WINDOWS, ANYWIN, MACOS, VT100, unicode
from .util import mp, start_log_thrs, start_stackmon, min_ex, ansi_re
from .authsrv import AuthSrv
from .tcpsrv import TcpSrv
from .up2k import Up2k
@@ -39,8 +39,8 @@ class SvcHub(object):
self.stop_req = False
self.stopping = False
self.stop_cond = threading.Condition()
self.httpsrv_up = 0
self.ansi_re = re.compile("\033\\[[^m]*m")
self.log_mutex = threading.Lock()
self.next_day = 0
@@ -55,7 +55,7 @@ class SvcHub(object):
start_log_thrs(self.log, args.log_thrs, 0)
# initiate all services to manage
self.asrv = AuthSrv(self.args, self.log, False)
self.asrv = AuthSrv(self.args, self.log)
if args.ls:
self.asrv.dbg_ls()
@@ -86,8 +86,31 @@ class SvcHub(object):
self.broker = Broker(self)
def thr_httpsrv_up(self):
time.sleep(5)
failed = self.broker.num_workers - self.httpsrv_up
if not failed:
return
m = "{}/{} workers failed to start"
m = m.format(failed, self.broker.num_workers)
self.log("root", m, 1)
os._exit(1)
def cb_httpsrv_up(self):
self.httpsrv_up += 1
if self.httpsrv_up != self.broker.num_workers:
return
self.log("root", "workers OK\n")
self.up2k.init_vols()
thr = threading.Thread(target=self.sd_notify, name="sd-notify")
thr.daemon = True
thr.start()
def _logname(self):
dt = datetime.utcfromtimestamp(time.time())
dt = datetime.utcnow()
fn = self.args.lo
for fs in "YmdHMS":
fs = "%" + fs
@@ -135,24 +158,33 @@ class SvcHub(object):
def run(self):
self.tcpsrv.run()
thr = threading.Thread(target=self.sd_notify, name="sd-notify")
thr.daemon = True
thr.start()
thr = threading.Thread(target=self.stop_thr, name="svchub-sig")
thr = threading.Thread(target=self.thr_httpsrv_up)
thr.daemon = True
thr.start()
for sig in [signal.SIGINT, signal.SIGTERM]:
signal.signal(sig, self.signal_handler)
try:
while not self.stop_req:
time.sleep(9001)
except:
pass
# macos hangs after shutdown on sigterm with while-sleep,
# windows cannot ^c stop_cond (and win10 does the macos thing but winxp is fine??)
# linux is fine with both,
# never lucky
if ANYWIN:
# msys-python probably fine but >msys-python
thr = threading.Thread(target=self.stop_thr, name="svchub-sig")
thr.daemon = True
thr.start()
self.shutdown()
try:
while not self.stop_req:
time.sleep(1)
except:
pass
self.shutdown()
thr.join()
else:
self.stop_thr()
def stop_thr(self):
while not self.stop_req:
@@ -161,7 +193,7 @@ class SvcHub(object):
self.shutdown()
def signal_handler(self):
def signal_handler(self, sig, frame):
if self.stopping:
return
@@ -175,6 +207,10 @@ class SvcHub(object):
self.stopping = True
self.stop_req = True
with self.stop_cond:
self.stop_cond.notify_all()
ret = 1
try:
with self.log_mutex:
print("OPYTHAT")
@@ -194,18 +230,20 @@ class SvcHub(object):
print("waiting for thumbsrv (10sec)...")
print("nailed it", end="")
ret = 0
finally:
print("\033[0m")
if self.logf:
self.logf.close()
sys.exit(ret)
def _log_disabled(self, src, msg, c=0):
if not self.logf:
return
with self.log_mutex:
ts = datetime.utcfromtimestamp(time.time())
ts = ts.strftime("%Y-%m%d-%H%M%S.%f")[:-3]
ts = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")[:-3]
self.logf.write("@{} [{}] {}\n".format(ts, src, msg))
now = time.time()
@@ -217,7 +255,7 @@ class SvcHub(object):
self.logf.close()
self._setup_logfile("")
dt = datetime.utcfromtimestamp(time.time())
dt = datetime.utcnow()
# unix timestamp of next 00:00:00 (leap-seconds safe)
day_now = dt.day
@@ -240,9 +278,9 @@ class SvcHub(object):
if not VT100:
fmt = "{} {:21} {}\n"
if "\033" in msg:
msg = self.ansi_re.sub("", msg)
msg = ansi_re.sub("", msg)
if "\033" in src:
src = self.ansi_re.sub("", src)
src = ansi_re.sub("", src)
elif c:
if isinstance(c, int):
msg = "\033[3{}m{}".format(c, msg)


@@ -8,6 +8,7 @@ from datetime import datetime
from .sutil import errdesc
from .util import yieldfile, sanitize_fn, spack, sunpack
from .bos import bos
def dostime2unix(buf):
@@ -271,4 +272,4 @@ class StreamZip(object):
yield self._ct(ecdr)
if errors:
os.unlink(errf["ap"])
bos.unlink(errf["ap"])


@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import re
import socket
from .__init__ import MACOS, ANYWIN
from .util import chkcmd
@@ -29,14 +30,16 @@ class TcpSrv(object):
for x in nonlocals:
eps[x] = "external"
msgs = []
m = "available @ http://{}:{}/ (\033[33m{}\033[0m)"
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
for port in sorted(self.args.p):
self.log(
"tcpsrv",
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
ip, port, desc
),
)
msgs.append(m.format(ip, port, desc))
if msgs:
msgs[-1] += "\n"
for m in msgs:
self.log("tcpsrv", m)
self.srv = []
for ip in self.args.i:
@@ -81,25 +84,100 @@ class TcpSrv(object):
self.log("tcpsrv", "ok bye")
def detect_interfaces(self, listen_ips):
def ips_linux(self):
eps = {}
# get all ips and their interfaces
try:
ip_addr, _ = chkcmd("ip", "addr")
txt, _ = chkcmd(["ip", "addr"])
except:
ip_addr = None
return eps
if ip_addr:
r = re.compile(r"^\s+inet ([^ ]+)/.* (.*)")
for ln in ip_addr.split("\n"):
try:
ip, dev = r.match(ln.rstrip()).groups()
for lip in listen_ips:
if lip in ["0.0.0.0", ip]:
eps[ip] = dev
except:
pass
r = re.compile(r"^\s+inet ([^ ]+)/.* (.*)")
for ln in txt.split("\n"):
try:
ip, dev = r.match(ln.rstrip()).groups()
eps[ip] = dev
except:
pass
return eps
def ips_macos(self):
eps = {}
try:
txt, _ = chkcmd(["ifconfig"])
except:
return eps
rdev = re.compile(r"^([^ ]+):")
rip = re.compile(r"^\tinet ([0-9\.]+) ")
dev = None
for ln in txt.split("\n"):
m = rdev.match(ln)
if m:
dev = m.group(1)
m = rip.match(ln)
if m:
eps[m.group(1)] = dev
dev = None
return eps
def ips_windows_ipconfig(self):
eps = {}
try:
txt, _ = chkcmd(["ipconfig"])
except:
return eps
rdev = re.compile(r"(^[^ ].*):$")
rip = re.compile(r"^ +IPv?4? [^:]+: *([0-9\.]{7,15})$")
dev = None
for ln in txt.replace("\r", "").split("\n"):
m = rdev.match(ln)
if m:
dev = m.group(1).split(" adapter ", 1)[-1]
m = rip.match(ln)
if m and dev:
eps[m.group(1)] = dev
dev = None
return eps
def ips_windows_netsh(self):
eps = {}
try:
txt, _ = chkcmd("netsh interface ip show address".split())
except:
return eps
rdev = re.compile(r'.* "([^"]+)"$')
rip = re.compile(r".* IP\b.*: +([0-9\.]{7,15})$")
dev = None
for ln in txt.replace("\r", "").split("\n"):
m = rdev.match(ln)
if m:
dev = m.group(1)
m = rip.match(ln)
if m and dev:
eps[m.group(1)] = dev
dev = None
return eps
def detect_interfaces(self, listen_ips):
if MACOS:
eps = self.ips_macos()
elif ANYWIN:
eps = self.ips_windows_ipconfig() # sees more interfaces
eps.update(self.ips_windows_netsh()) # has better names
else:
eps = self.ips_linux()
if "0.0.0.0" not in listen_ips:
eps = {k: v for k, v in eps.items() if k in listen_ips}
default_route = None
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)


@@ -5,6 +5,7 @@ import os
from .util import Cooldown
from .th_srv import thumb_path, THUMBABLE, FMT_FF
from .bos import bos
class ThumbCli(object):
@@ -25,6 +26,9 @@ class ThumbCli(object):
if is_vid and self.args.no_vthumb:
return None
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg"]:
return os.path.join(ptop, rem)
if fmt == "j" and self.args.th_no_jpg:
fmt = "w"
@@ -36,7 +40,7 @@ class ThumbCli(object):
tpath = thumb_path(histpath, rem, mtime, fmt)
ret = None
try:
st = os.stat(tpath)
st = bos.stat(tpath)
if st.st_size:
ret = tpath
else:


@@ -10,7 +10,8 @@ import threading
import subprocess as sp
from .__init__ import PY2, unicode
from .util import fsenc, runcmd, Queue, Cooldown, BytesIO, min_ex
from .util import fsenc, vsplit, runcmd, Queue, Cooldown, BytesIO, min_ex
from .bos import bos
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
@@ -20,7 +21,7 @@ HAVE_AVIF = False
HAVE_WEBP = False
try:
from PIL import Image, ImageOps
from PIL import Image, ImageOps, ExifTags
HAVE_PIL = True
try:
@@ -73,12 +74,7 @@ def thumb_path(histpath, rem, mtime, fmt):
# base16 = 16 = 256
# b64-lc = 38 = 1444
# base64 = 64 = 4096
try:
rd, fn = rem.rsplit("/", 1)
except:
rd = ""
fn = rem
rd, fn = vsplit(rem)
if rd:
h = hashlib.sha512(fsenc(rd)).digest()
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
@@ -109,7 +105,10 @@ class ThumbSrv(object):
self.mutex = threading.Lock()
self.busy = {}
self.stopping = False
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
self.nthr = self.args.th_mt
if not self.nthr:
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
self.q = Queue(self.nthr * 4)
for n in range(self.nthr):
t = threading.Thread(
@@ -159,13 +158,10 @@ class ThumbSrv(object):
self.log("wait {}".format(tpath))
except:
thdir = os.path.dirname(tpath)
try:
os.makedirs(thdir)
except:
pass
bos.makedirs(thdir)
inf_path = os.path.join(thdir, "dir.txt")
if not os.path.exists(inf_path):
if not bos.path.exists(inf_path):
with open(inf_path, "wb") as f:
f.write(fsenc(os.path.dirname(abspath)))
@@ -185,7 +181,7 @@ class ThumbSrv(object):
cond.wait(3)
try:
st = os.stat(tpath)
st = bos.stat(tpath)
if st.st_size:
return tpath
except:
@@ -202,7 +198,7 @@ class ThumbSrv(object):
abspath, tpath = task
ext = abspath.split(".")[-1].lower()
fun = None
if not os.path.exists(tpath):
if not bos.path.exists(tpath):
if ext in FMT_PIL:
fun = self.conv_pil
elif ext in FMT_FF:
@@ -212,8 +208,8 @@ class ThumbSrv(object):
try:
fun(abspath, tpath)
except:
msg = "{} failed on {}\n{}"
self.log(msg.format(fun.__name__, abspath, min_ex()), 3)
msg = "{} could not create thumbnail of {}\n{}"
self.log(msg.format(fun.__name__, abspath, min_ex()), "1;30")
with open(tpath, "wb") as _:
pass
@@ -228,21 +224,38 @@ class ThumbSrv(object):
with self.mutex:
self.nthr -= 1
def fancy_pillow(self, im):
# exif_transpose is expensive (loads full image + unconditional copy)
r = max(*self.res) * 2
im.thumbnail((r, r), resample=Image.LANCZOS)
try:
k = next(k for k, v in ExifTags.TAGS.items() if v == "Orientation")
exif = im.getexif()
rot = int(exif[k])
del exif[k]
except:
rot = 1
rots = {8: Image.ROTATE_90, 3: Image.ROTATE_180, 6: Image.ROTATE_270}
if rot in rots:
im = im.transpose(rots[rot])
if self.args.th_no_crop:
im.thumbnail(self.res, resample=Image.LANCZOS)
else:
iw, ih = im.size
dw, dh = self.res
res = (min(iw, dw), min(ih, dh))
im = ImageOps.fit(im, res, method=Image.LANCZOS)
return im
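fancy_pillow above rotates by reading the Orientation tag directly because, as its first comment notes, exif_transpose decodes the full image and copies it unconditionally; for comparison, a sketch of the simpler path it replaces, assuming a recent Pillow:
# illustrative sketch only, not part of the diff; the slower equivalent
from PIL import Image, ImageOps

def simple_thumb(im, res):
    im = ImageOps.exif_transpose(im)            # full decode + unconditional copy
    im.thumbnail(res, resample=Image.LANCZOS)
    return im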
def conv_pil(self, abspath, tpath):
with Image.open(fsenc(abspath)) as im:
crop = not self.args.th_no_crop
res2 = self.res
if crop:
res2 = (res2[0] * 2, res2[1] * 2)
try:
im.thumbnail(res2, resample=Image.LANCZOS)
if crop:
iw, ih = im.size
dw, dh = self.res
res = (min(iw, dw), min(ih, dh))
im = ImageOps.fit(im, res, method=Image.LANCZOS)
except:
im = self.fancy_pillow(im)
except Exception as ex:
self.log("fancy_pillow {}".format(ex), "1;30")
im.thumbnail(self.res)
fmts = ["RGB", "L"]
@@ -257,13 +270,14 @@ class ThumbSrv(object):
fmts += ["RGBA", "LA"]
args["method"] = 6
else:
pass # default q = 75
# default q = 75
args["progressive"] = True
if im.mode not in fmts:
# print("conv {}".format(im.mode))
im = im.convert("RGB")
im.save(tpath, quality=40, method=6)
im.save(tpath, **args)
def conv_ffmpeg(self, abspath, tpath):
ret, _ = ffprobe(abspath)
@@ -293,8 +307,10 @@ class ThumbSrv(object):
cmd += seek
cmd += [
b"-i", fsenc(abspath),
b"-map", b"0:v:0",
b"-vf", scale,
b"-vframes", b"1",
b"-frames:v", b"1",
b"-metadata:s:v:0", b"rotate=0",
]
# fmt: on
@@ -312,11 +328,13 @@ class ThumbSrv(object):
]
cmd += [fsenc(tpath)]
# self.log((b" ".join(cmd)).decode("utf-8"))
ret, sout, serr = runcmd(*cmd)
ret, sout, serr = runcmd(cmd)
if ret != 0:
msg = ["ff: {}".format(x) for x in serr.split("\n")]
self.log("FFmpeg failed:\n" + "\n".join(msg), c="1;30")
m = "FFmpeg failed (probably a corrupt video file):\n"
m += "\n".join(["ff: {}".format(x) for x in serr.split("\n")])
self.log(m, c="1;30")
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
def poke(self, tdir):
@@ -328,7 +346,7 @@ class ThumbSrv(object):
p1 = os.path.dirname(tdir)
p2 = os.path.dirname(p1)
for dp in [tdir, p1, p2]:
os.utime(fsenc(dp), (ts, ts))
bos.utime(dp, (ts, ts))
except:
pass
@@ -355,7 +373,7 @@ class ThumbSrv(object):
prev_b64 = None
prev_fp = None
try:
ents = os.listdir(thumbpath)
ents = bos.listdir(thumbpath)
except:
return 0
@@ -366,7 +384,7 @@ class ThumbSrv(object):
# "top" or b64 prefix/full (a folder)
if len(f) <= 3 or len(f) == 24:
age = now - os.path.getmtime(fp)
age = now - bos.path.getmtime(fp)
if age > maxage:
with self.mutex:
safe = True
@@ -398,7 +416,7 @@ class ThumbSrv(object):
if b64 == prev_b64:
self.log("rm replaced [{}]".format(fp))
os.unlink(prev_fp)
bos.unlink(prev_fp)
prev_b64 = b64
prev_fp = fp


@@ -9,6 +9,7 @@ from datetime import datetime
from .__init__ import unicode
from .util import s3dec, Pebkac, min_ex
from .bos import bos
from .up2k import up2k_wark_from_hashlist
@@ -67,7 +68,7 @@ class U2idx(object):
histpath = self.asrv.vfs.histtab[ptop]
db_path = os.path.join(histpath, "up2k.db")
if not os.path.exists(db_path):
if not bos.path.exists(db_path):
return None
cur = sqlite3.connect(db_path, 2).cursor()
@@ -87,7 +88,7 @@ class U2idx(object):
is_date = False
kw_key = ["(", ")", "and ", "or ", "not "]
kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "]
ptn_mt = re.compile(r"^\.?[a-z]+$")
ptn_mt = re.compile(r"^\.?[a-z_-]+$")
mt_ctr = 0
mt_keycmp = "substr(up.w,1,16)"
mt_keycmp2 = None
@@ -243,7 +244,7 @@ class U2idx(object):
sret = []
c = cur.execute(q, v)
for hit in c:
w, ts, sz, rd, fn = hit
w, ts, sz, rd, fn, ip, at = hit
lim -= 1
if lim <= 0:
break
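
The widened ptn_mt pattern above now also accepts metadata keys containing underscores and dashes in search queries. A quick check of what the new pattern matches (plain Python; the key list is made up for illustration):

import re

ptn_mt = re.compile(r"^\.?[a-z_-]+$")
for k in ["artist", ".bpm", "vid_res", "ac-full", "Artist", "key2"]:
    print(k, bool(ptn_mt.match(k)))
# accepted: artist, .bpm, vid_res, ac-full
# rejected: Artist (uppercase), key2 (digits), exactly as before this change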


@@ -23,15 +23,20 @@ from .util import (
ProgressPrinter,
fsdec,
fsenc,
absreal,
sanitize_fn,
ren_open,
atomic_move,
vsplit,
s3enc,
s3dec,
rmdirs,
statdir,
s2hms,
min_ex,
)
from .bos import bos
from .authsrv import AuthSrv, LEELOO_DALLAS
from .mtag import MTag, MParser
try:
@@ -40,20 +45,13 @@ try:
except:
HAVE_SQLITE3 = False
DB_VER = 4
DB_VER = 5
class Up2k(object):
"""
TODO:
* documentation
* registry persistence
* ~/.config flatfiles for active jobs
"""
def __init__(self, hub):
self.hub = hub
self.asrv = hub.asrv
self.asrv = hub.asrv # type: AuthSrv
self.args = hub.args
self.log_func = hub.log
@@ -67,6 +65,7 @@ class Up2k(object):
self.n_hashq = 0
self.n_tagq = 0
self.volstate = {}
self.need_rescan = {}
self.registry = {}
self.entags = {}
self.flags = {}
@@ -101,17 +100,16 @@ class Up2k(object):
if self.args.no_fastboot:
self.deferred_init()
else:
t = threading.Thread(
target=self.deferred_init, name="up2k-deferred-init", args=(0.5,)
)
t.daemon = True
t.start()
def deferred_init(self, wait=0):
if wait:
time.sleep(wait)
def init_vols(self):
if self.args.no_fastboot:
return
t = threading.Thread(target=self.deferred_init, name="up2k-deferred-init")
t.daemon = True
t.start()
def deferred_init(self):
all_vols = self.asrv.vfs.all_vols
have_e2d = self.init_indexes(all_vols)
@@ -124,6 +122,10 @@ class Up2k(object):
thr.daemon = True
thr.start()
thr = threading.Thread(target=self._sched_rescan, name="up2k-rescan")
thr.daemon = True
thr.start()
if self.mtag:
thr = threading.Thread(target=self._tagger, name="up2k-tagger")
thr.daemon = True
@@ -173,6 +175,73 @@ class Up2k(object):
t.start()
return None
def _sched_rescan(self):
volage = {}
while True:
time.sleep(self.args.re_int)
now = time.time()
with self.mutex:
for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
maxage = vol.flags.get("scan")
if not maxage:
continue
if vp not in volage:
volage[vp] = now
if now - volage[vp] >= maxage:
self.need_rescan[vp] = 1
vols = list(sorted(self.need_rescan.keys()))
self.need_rescan = {}
if vols:
err = self.rescan(self.asrv.vfs.all_vols, vols)
if err:
for v in vols:
self.need_rescan[v] = True
continue
for v in vols:
volage[v] = now
if self.args.no_lifetime:
continue
for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
lifetime = vol.flags.get("lifetime")
if not lifetime:
continue
cur = self.cur.get(vol.realpath)
if not cur:
continue
nrm = 0
deadline = time.time() - int(lifetime)
q = "select rd, fn from up where at > 0 and at < ? limit 100"
while True:
with self.mutex:
hits = cur.execute(q, (deadline,)).fetchall()
if not hits:
break
for rd, fn in hits:
if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn)
fvp = "{}/{}".format(rd, fn).strip("/")
if vp:
fvp = "{}/{}".format(vp, fvp)
self._handle_rm(LEELOO_DALLAS, None, fvp)
nrm += 1
if nrm:
self.log("{} files graduated in {}".format(nrm, vp))
def _vis_job_progress(self, job):
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
path = os.path.join(job["ptop"], job["prel"], job["name"])
@@ -218,7 +287,7 @@ class Up2k(object):
# only need to protect register_vpath but all in one go feels right
for vol in vols:
try:
os.listdir(vol.realpath)
bos.listdir(vol.realpath)
except:
self.volstate[vol.vpath] = "OFFLINE (cannot access folder)"
self.log("cannot access " + vol.realpath, c=1)
@@ -356,14 +425,14 @@ class Up2k(object):
reg = {}
path = os.path.join(histpath, "up2k.snap")
if "e2d" in flags and os.path.exists(path):
if "e2d" in flags and bos.path.exists(path):
with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8")
reg2 = json.loads(j)
for k, job in reg2.items():
path = os.path.join(job["ptop"], job["prel"], job["name"])
if os.path.exists(fsenc(path)):
if bos.path.exists(path):
reg[k] = job
job["poke"] = time.time()
else:
@@ -378,10 +447,7 @@ class Up2k(object):
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
return None
try:
os.makedirs(histpath)
except:
pass
bos.makedirs(histpath)
try:
cur = self._open_db(db_path)
@@ -420,14 +486,7 @@ class Up2k(object):
return True, n_add or n_rm or do_vac
def _build_dir(self, dbw, top, excl, cdir, nohash, seen):
rcdir = cdir
if not ANYWIN:
try:
# a bit expensive but worth
rcdir = os.path.realpath(cdir)
except:
pass
rcdir = absreal(cdir) # a bit expensive but worth
if rcdir in seen:
m = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}"
self.log(m.format(seen[-1], rcdir, cdir), 3)
@@ -498,7 +557,7 @@ class Up2k(object):
wark = up2k_wark_from_hashlist(self.salt, sz, hashes)
self.db_add(dbw[0], wark, rd, fn, lmod, sz)
self.db_add(dbw[0], wark, rd, fn, lmod, sz, "", 0)
dbw[1] += 1
ret += 1
td = time.time() - dbw[2]
@@ -513,8 +572,8 @@ class Up2k(object):
rm = []
nchecked = 0
nfiles = next(cur.execute("select count(w) from up"))[0]
c = cur.execute("select * from up")
for dwark, dts, dsz, drd, dfn in c:
c = cur.execute("select rd, fn from up")
for drd, dfn in c:
nchecked += 1
if drd.startswith("//") or dfn.startswith("//"):
drd, dfn = s3dec(drd, dfn)
@@ -523,7 +582,7 @@ class Up2k(object):
# almost zero overhead dw
self.pp.msg = "b{} {}".format(nfiles - nchecked, abspath)
try:
if not os.path.exists(fsenc(abspath)):
if not bos.path.exists(abspath):
rm.append([drd, dfn])
except Exception as ex:
self.log("stat-rm: {} @ [{}]".format(repr(ex), abspath))
@@ -911,12 +970,21 @@ class Up2k(object):
# x.set_trace_callback(trace)
def _open_db(self, db_path):
existed = os.path.exists(db_path)
existed = bos.path.exists(db_path)
cur = self._orz(db_path)
ver = self._read_ver(cur)
if not existed and ver is None:
return self._create_db(db_path, cur)
if ver == 4:
try:
m = "creating backup before upgrade: "
cur = self._backup_db(db_path, cur, ver, m)
self._upgrade_v4(cur)
ver = 5
except:
self.log("WARN: failed to upgrade from v4", 3)
if ver == DB_VER:
try:
nfiles = next(cur.execute("select count(w) from up"))[0]
@@ -929,19 +997,38 @@ class Up2k(object):
m = "database is version {}, this copyparty only supports versions <= {}"
raise Exception(m.format(ver, DB_VER))
bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
db = cur.connection
cur.close()
db.close()
msg = "creating new DB (old is bad); backup: {}"
if ver:
msg = "creating new DB (too old to upgrade); backup: {}"
self.log(msg.format(bak))
os.rename(fsenc(db_path), fsenc(bak))
cur = self._backup_db(db_path, cur, ver, msg)
db = cur.connection
cur.close()
db.close()
bos.unlink(db_path)
return self._create_db(db_path, None)
def _backup_db(self, db_path, cur, ver, msg):
bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
self.log(msg + bak)
try:
c2 = sqlite3.connect(bak)
with c2:
cur.connection.backup(c2)
return cur
except:
m = "native sqlite3 backup failed; using fallback method:\n"
self.log(m + min_ex())
finally:
c2.close()
db = cur.connection
cur.close()
db.close()
shutil.copy2(fsenc(db_path), fsenc(bak))
return self._orz(db_path)
def _read_ver(self, cur):
for tab in ["ki", "kv"]:
try:
@@ -968,9 +1055,10 @@ class Up2k(object):
idx = r"create index up_w on up(w)"
for cmd in [
r"create table up (w text, mt int, sz int, rd text, fn text)",
r"create table up (w text, mt int, sz int, rd text, fn text, ip text, at int)",
r"create index up_rd on up(rd)",
r"create index up_fn on up(fn)",
r"create index up_ip on up(ip)",
idx,
r"create table mt (w text, k text, v int)",
r"create index mt_w on mt(w)",
@@ -985,6 +1073,17 @@ class Up2k(object):
self.log("created DB at {}".format(db_path))
return cur
def _upgrade_v4(self, cur):
for cmd in [
r"alter table up add column ip text",
r"alter table up add column at int",
r"create index up_ip on up(ip)",
r"update kv set v=5 where k='sver'",
]:
cur.execute(cmd)
cur.connection.commit()
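
_open_db now attempts an in-place v4-to-v5 upgrade after taking a backup, and _backup_db prefers the native sqlite3 online-backup API with a close-and-copy fallback. A condensed sketch of that pattern, assuming Python 3.7+ for Connection.backup and a kv table holding the schema version; backup_db and upgrade_v4_to_v5 are illustrative names, not the real methods:

import time
import shutil
import sqlite3

def backup_db(db_path, ver):
    bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
    src = sqlite3.connect(db_path)
    try:
        dst = sqlite3.connect(bak)
        with dst:
            src.backup(dst)  # native online backup, python 3.7+
        dst.close()
        return src
    except Exception:
        src.close()
        shutil.copy2(db_path, bak)  # fallback: copy the closed db file
        return sqlite3.connect(db_path)

def upgrade_v4_to_v5(db):
    # same statements as _upgrade_v4 above
    for cmd in [
        "alter table up add column ip text",
        "alter table up add column at int",
        "create index up_ip on up(ip)",
        "update kv set v=5 where k='sver'",
    ]:
        db.execute(cmd)
    db.commit()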
def handle_json(self, cj):
with self.mutex:
if not self.register_vpath(cj["ptop"], cj["vcfg"]):
@@ -1008,13 +1107,13 @@ class Up2k(object):
argv = (wark[:16], wark)
cur = cur.execute(q, argv)
for _, dtime, dsize, dp_dir, dp_fn in cur:
for _, dtime, dsize, dp_dir, dp_fn, ip, at in cur:
if dp_dir.startswith("//") or dp_fn.startswith("//"):
dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
# relying on path.exists to return false on broken symlinks
if os.path.exists(fsenc(dp_abs)):
if bos.path.exists(dp_abs):
job = {
"name": dp_fn,
"prel": dp_dir,
@@ -1022,6 +1121,8 @@ class Up2k(object):
"ptop": cj["ptop"],
"size": dsize,
"lmod": dtime,
"addr": ip,
"at": at,
"hash": [],
"need": [],
}
@@ -1038,7 +1139,7 @@ class Up2k(object):
for fn in names:
path = os.path.join(job["ptop"], job["prel"], fn)
try:
if os.path.getsize(fsenc(path)) > 0:
if bos.path.getsize(path) > 0:
# upload completed or both present
break
except:
@@ -1072,10 +1173,26 @@ class Up2k(object):
job["name"] = self._untaken(pdir, cj["name"], now, cj["addr"])
dst = os.path.join(job["ptop"], job["prel"], job["name"])
if not self.args.nw:
os.unlink(fsenc(dst)) # TODO ed pls
bos.unlink(dst) # TODO ed pls
self._symlink(src, dst)
if cur:
a = [cj[x] for x in "prel name lmod size addr".split()]
a += [cj.get("at") or time.time()]
self.db_add(cur, wark, *a)
cur.connection.commit()
if not job:
vfs = self.asrv.vfs.all_vols[cj["vtop"]]
if vfs.lim:
ap1 = os.path.join(cj["ptop"], cj["prel"])
ap2, cj["prel"] = vfs.lim.all(
cj["addr"], cj["prel"], cj["size"], ap1
)
bos.makedirs(ap2)
vfs.lim.nup(cj["addr"])
vfs.lim.bup(cj["addr"], cj["size"])
job = {
"wark": wark,
"t0": now,
@@ -1106,8 +1223,12 @@ class Up2k(object):
self._new_upload(job)
purl = "{}/{}".format(job["vtop"], job["prel"]).strip("/")
purl = "/{}/".format(purl) if purl else "/"
return {
"name": job["name"],
"purl": purl,
"size": job["size"],
"lmod": job["lmod"],
"hash": job["need"],
@@ -1124,17 +1245,18 @@ class Up2k(object):
with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
return f["orz"][1]
def _symlink(self, src, dst):
# TODO store this in linktab so we never delete src if there are links to it
self.log("linking dupe:\n {0}\n {1}".format(src, dst))
def _symlink(self, src, dst, verbose=True):
if verbose:
self.log("linking dupe:\n {0}\n {1}".format(src, dst))
if self.args.nw:
return
try:
lsrc = src
ldst = dst
fs1 = os.stat(fsenc(os.path.split(src)[0])).st_dev
fs2 = os.stat(fsenc(os.path.split(dst)[0])).st_dev
fs1 = bos.stat(os.path.dirname(src)).st_dev
fs2 = bos.stat(os.path.dirname(dst)).st_dev
if fs1 == 0:
# py2 on winxp or other unsupported combination
raise OSError()
@@ -1157,7 +1279,7 @@ class Up2k(object):
hops = len(ndst[nc:]) - 1
lsrc = "../" * hops + "/".join(lsrc)
os.symlink(fsenc(lsrc), fsenc(ldst))
except (AttributeError, OSError) as ex:
except Exception as ex:
self.log("cannot symlink; creating copy: " + repr(ex))
shutil.copy2(fsenc(src), fsenc(dst))
@@ -1217,27 +1339,21 @@ class Up2k(object):
a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
self.lastmod_q.put(a)
# legit api sware 2 me mum
if self.idx_wark(
job["ptop"],
job["wark"],
job["prel"],
job["name"],
job["lmod"],
job["size"],
):
a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
a += [job.get("at") or time.time()]
if self.idx_wark(*a):
del self.registry[ptop][wark]
# in-memory registry is reserved for unfinished uploads
return ret, dst
def idx_wark(self, ptop, wark, rd, fn, lmod, sz):
def idx_wark(self, ptop, wark, rd, fn, lmod, sz, ip, at):
cur = self.cur.get(ptop)
if not cur:
return False
self.db_rm(cur, rd, fn)
self.db_add(cur, wark, rd, fn, int(lmod), sz)
self.db_add(cur, wark, rd, fn, lmod, sz, ip, at)
cur.connection.commit()
if "e2t" in self.flags[ptop]:
@@ -1253,16 +1369,326 @@ class Up2k(object):
except:
db.execute(sql, s3enc(self.mem_cur, rd, fn))
def db_add(self, db, wark, rd, fn, ts, sz):
sql = "insert into up values (?,?,?,?,?)"
v = (wark, int(ts), sz, rd, fn)
def db_add(self, db, wark, rd, fn, ts, sz, ip, at):
sql = "insert into up values (?,?,?,?,?,?,?)"
v = (wark, int(ts), sz, rd, fn, ip or "", int(at or 0))
try:
db.execute(sql, v)
except:
rd, fn = s3enc(self.mem_cur, rd, fn)
v = (wark, ts, sz, rd, fn)
v = (wark, int(ts), sz, rd, fn, ip or "", int(at or 0))
db.execute(sql, v)
def handle_rm(self, uname, ip, vpaths):
n_files = 0
ok = {}
ng = {}
for vp in vpaths:
a, b, c = self._handle_rm(uname, ip, vp)
n_files += a
for k in b:
ok[k] = 1
for k in c:
ng[k] = 1
ng = {k: 1 for k in ng if k not in ok}
ok = len(ok)
ng = len(ng)
return "deleted {} files (and {}/{} folders)".format(n_files, ok, ok + ng)
def _handle_rm(self, uname, ip, vpath):
try:
permsets = [[True, False, False, True]]
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
unpost = False
except:
# unpost with missing permissions? try read+write and verify with db
if not self.args.unpost:
raise Pebkac(400, "the unpost feature is disabled in server config")
unpost = True
permsets = [[True, True]]
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
_, _, _, _, dip, dat = self._find_from_vpath(vn.realpath, rem)
m = "you cannot delete this: "
if not dip:
m += "file not found"
elif dip != ip:
m += "not uploaded by (You)"
elif dat < time.time() - self.args.unpost:
m += "uploaded too long ago"
else:
m = None
if m:
raise Pebkac(400, m)
ptop = vn.realpath
atop = vn.canonical(rem, False)
adir, fn = os.path.split(atop)
try:
st = bos.lstat(atop)
except:
raise Pebkac(400, "file not found on disk (already deleted?)")
scandir = not self.args.no_scandir
if stat.S_ISLNK(st.st_mode) or stat.S_ISREG(st.st_mode):
dbv, vrem = self.asrv.vfs.get(vpath, uname, *permsets[0])
dbv, vrem = dbv.get_dbv(vrem)
voldir = vsplit(vrem)[0]
vpath_dir = vsplit(vpath)[0]
g = [[dbv, voldir, vpath_dir, adir, [[fn, 0]], [], []]]
else:
g = vn.walk("", rem, [], uname, permsets, True, scandir, True)
if unpost:
raise Pebkac(400, "cannot unpost folders")
n_files = 0
for dbv, vrem, _, adir, files, rd, vd in g:
for fn in [x[0] for x in files]:
n_files += 1
abspath = os.path.join(adir, fn)
volpath = "{}/{}".format(vrem, fn).strip("/")
vpath = "{}/{}".format(dbv.vpath, volpath).strip("/")
self.log("rm {}\n {}".format(vpath, abspath))
_ = dbv.get(volpath, uname, *permsets[0])
with self.mutex:
cur = None
try:
ptop = dbv.realpath
cur, wark, _, _, _, _ = self._find_from_vpath(ptop, volpath)
self._forget_file(ptop, volpath, cur, wark, True)
finally:
if cur:
cur.connection.commit()
bos.unlink(abspath)
rm = rmdirs(self.log_func, scandir, True, atop)
return n_files, rm[0], rm[1]
def handle_mv(self, uname, svp, dvp):
svn, srem = self.asrv.vfs.get(svp, uname, True, False, True)
svn, srem = svn.get_dbv(srem)
sabs = svn.canonical(srem, False)
if not srem:
raise Pebkac(400, "mv: cannot move a mountpoint")
st = bos.lstat(sabs)
if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
with self.mutex:
return self._mv_file(uname, svp, dvp)
jail = svn.get_dbv(srem)[0]
permsets = [[True, False, True]]
scandir = not self.args.no_scandir
# following symlinks is too scary
g = svn.walk("", srem, [], uname, permsets, True, scandir, True)
for dbv, vrem, _, atop, files, rd, vd in g:
if dbv != jail:
# fail early (prevent partial moves)
raise Pebkac(400, "mv: source folder contains other volumes")
g = svn.walk("", srem, [], uname, permsets, True, scandir, True)
for dbv, vrem, _, atop, files, rd, vd in g:
if dbv != jail:
# the actual check (avoid toctou)
raise Pebkac(400, "mv: source folder contains other volumes")
for fn in files:
svpf = "/".join(x for x in [dbv.vpath, vrem, fn[0]] if x)
if not svpf.startswith(svp + "/"): # assert
raise Pebkac(500, "mv: bug at {}, top {}".format(svpf, svp))
dvpf = dvp + svpf[len(svp) :]
with self.mutex:
self._mv_file(uname, svpf, dvpf)
rmdirs(self.log_func, scandir, True, sabs)
return "k"
def _mv_file(self, uname, svp, dvp):
svn, srem = self.asrv.vfs.get(svp, uname, True, False, True)
svn, srem = svn.get_dbv(srem)
dvn, drem = self.asrv.vfs.get(dvp, uname, False, True)
dvn, drem = dvn.get_dbv(drem)
sabs = svn.canonical(srem, False)
dabs = dvn.canonical(drem)
drd, dfn = vsplit(drem)
n1 = svp.split("/")[-1]
n2 = dvp.split("/")[-1]
if n1.startswith(".") or n2.startswith("."):
if self.args.no_dot_mv:
raise Pebkac(400, "moving dotfiles is disabled in server config")
elif self.args.no_dot_ren and n1 != n2:
raise Pebkac(400, "renaming dotfiles is disabled in server config")
if bos.path.exists(dabs):
raise Pebkac(400, "mv2: target file exists")
bos.makedirs(os.path.dirname(dabs))
if bos.path.islink(sabs):
dlabs = absreal(sabs)
m = "moving symlink from [{}] to [{}], target [{}]"
self.log(m.format(sabs, dabs, dlabs))
os.unlink(sabs)
self._symlink(dlabs, dabs, False)
# folders are too scary, schedule rescan of both vols
self.need_rescan[svn.vpath] = 1
self.need_rescan[dvn.vpath] = 1
return "k"
c1, w, ftime, fsize, ip, at = self._find_from_vpath(svn.realpath, srem)
c2 = self.cur.get(dvn.realpath)
if ftime is None:
st = bos.stat(sabs)
ftime = st.st_mtime
fsize = st.st_size
if w:
if c2 and c2 != c1:
self._copy_tags(c1, c2, w)
self._forget_file(svn.realpath, srem, c1, w, c1 != c2)
self._relink(w, svn.realpath, srem, dabs)
c1.connection.commit()
if c2:
self.db_add(c2, w, drd, dfn, ftime, fsize, ip, at)
c2.connection.commit()
else:
self.log("not found in src db: [{}]".format(svp))
bos.rename(sabs, dabs)
return "k"
def _copy_tags(self, csrc, cdst, wark):
"""copy all tags for wark from src-db to dst-db"""
w = wark[:16]
if cdst.execute("select * from mt where w=? limit 1", (w,)).fetchone():
return # existing tags in dest db
for _, k, v in csrc.execute("select * from mt where w=?", (w,)):
cdst.execute("insert into mt values(?,?,?)", (w, k, v))
def _find_from_vpath(self, ptop, vrem):
cur = self.cur.get(ptop)
if not cur:
return [None] * 6
rd, fn = vsplit(vrem)
q = "select w, mt, sz, ip, at from up where rd=? and fn=? limit 1"
try:
c = cur.execute(q, (rd, fn))
except:
c = cur.execute(q, s3enc(self.mem_cur, rd, fn))
hit = c.fetchone()
if hit:
wark, ftime, fsize, ip, at = hit
return cur, wark, ftime, fsize, ip, at
return cur, None, None, None, None, None
def _forget_file(self, ptop, vrem, cur, wark, drop_tags):
"""forgets file in db, fixes symlinks, does not delete"""
srd, sfn = vsplit(vrem)
self.log("forgetting {}".format(vrem))
if wark:
self.log("found {} in db".format(wark))
if drop_tags:
if self._relink(wark, ptop, vrem, None):
drop_tags = False
if drop_tags:
q = "delete from mt where w=?"
cur.execute(q, (wark[:16],))
self.db_rm(cur, srd, sfn)
reg = self.registry.get(ptop)
if reg:
if not wark:
wark = [
x
for x, y in reg.items()
if fn in [y["name"], y.get("tnam")] and y["prel"] == vrem
]
if wark and wark in reg:
m = "forgetting partial upload {} ({})"
p = self._vis_job_progress(wark)
self.log(m.format(wark, p))
del reg[wark]
def _relink(self, wark, sptop, srem, dabs):
"""
update symlinks from file at svn/srem to dabs (rename),
or to first remaining full if no dabs (delete)
"""
dupes = []
sabs = os.path.join(sptop, srem)
q = "select rd, fn from up where substr(w,1,16)=? and w=?"
for ptop, cur in self.cur.items():
for rd, fn in cur.execute(q, (wark[:16], wark)):
if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn)
dvrem = "/".join([rd, fn]).strip("/")
if ptop != sptop or srem != dvrem:
dupes.append([ptop, dvrem])
self.log("found {} dupe: [{}] {}".format(wark, ptop, dvrem))
if not dupes:
return 0
full = {}
links = {}
for ptop, vp in dupes:
ap = os.path.join(ptop, vp)
try:
d = links if bos.path.islink(ap) else full
d[ap] = [ptop, vp]
except:
self.log("relink: not found: [{}]".format(ap))
if not dabs and not full and links:
# deleting final remaining full copy; swap it with a symlink
slabs = list(sorted(links.keys()))[0]
ptop, rem = links.pop(slabs)
self.log("linkswap [{}] and [{}]".format(sabs, slabs))
bos.unlink(slabs)
bos.rename(sabs, slabs)
self._symlink(slabs, sabs, False)
full[slabs] = [ptop, rem]
if not dabs:
dabs = list(sorted(full.keys()))[0]
for alink in links.keys():
try:
if alink != sabs and absreal(alink) != sabs:
continue
self.log("relinking [{}] to [{}]".format(alink, dabs))
bos.unlink(alink)
except:
pass
self._symlink(dabs, alink, False)
return len(full) + len(links)
def _get_wark(self, cj):
if len(cj["name"]) > 1024 or len(cj["hash"]) > 512 * 1024: # 16TiB
raise Pebkac(400, "name or numchunks not according to spec")
@@ -1284,7 +1710,7 @@ class Up2k(object):
def _hashlist_from_file(self, path):
pp = self.pp if hasattr(self, "pp") else None
fsz = os.path.getsize(fsenc(path))
fsz = bos.path.getsize(path)
csz = up2k_chunksize(fsz)
ret = []
with open(fsenc(path), "rb", 512 * 1024) as f:
@@ -1352,7 +1778,7 @@ class Up2k(object):
for path, sz, times in ready:
self.log("lmod: setting times {} on {}".format(times, path))
try:
os.utime(fsenc(path), times)
bos.utime(path, times)
except:
self.log("lmod: failed to utime ({}, {})".format(path, times))
@@ -1388,13 +1814,13 @@ class Up2k(object):
try:
# remove the filename reservation
path = os.path.join(job["ptop"], job["prel"], job["name"])
if os.path.getsize(fsenc(path)) == 0:
os.unlink(fsenc(path))
if bos.path.getsize(path) == 0:
bos.unlink(path)
if len(job["hash"]) == len(job["need"]):
# PARTIAL is empty, delete that too
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
os.unlink(fsenc(path))
bos.unlink(path)
except:
pass
@@ -1402,8 +1828,8 @@ class Up2k(object):
if not reg:
if ptop not in self.snap_prev or self.snap_prev[ptop] is not None:
self.snap_prev[ptop] = None
if os.path.exists(fsenc(path)):
os.unlink(fsenc(path))
if bos.path.exists(path):
bos.unlink(path)
return
newest = max(x["poke"] for _, x in reg.items()) if reg else 0
@@ -1411,10 +1837,7 @@ class Up2k(object):
if etag == self.snap_prev.get(ptop):
return
try:
os.makedirs(histpath)
except:
pass
bos.makedirs(histpath)
path2 = "{}.{}".format(path, os.getpid())
j = json.dumps(reg, indent=2, sort_keys=True).encode("utf-8")
@@ -1472,23 +1895,23 @@ class Up2k(object):
self.n_hashq -= 1
# self.log("hashq {}".format(self.n_hashq))
ptop, rd, fn = self.hashq.get()
ptop, rd, fn, ip, at = self.hashq.get()
# self.log("hashq {} pop {}/{}/{}".format(self.n_hashq, ptop, rd, fn))
if "e2d" not in self.flags[ptop]:
continue
abspath = os.path.join(ptop, rd, fn)
self.log("hashing " + abspath)
inf = os.stat(fsenc(abspath))
inf = bos.stat(abspath)
hashes = self._hashlist_from_file(abspath)
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
with self.mutex:
self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size)
self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size, ip, at)
def hash_file(self, ptop, flags, rd, fn):
def hash_file(self, ptop, flags, rd, fn, ip, at):
with self.mutex:
self.register_vpath(ptop, flags)
self.hashq.put([ptop, rd, fn])
self.hashq.put([ptop, rd, fn, ip, at])
self.n_hashq += 1
# self.log("hashq {} push {}/{}/{}".format(self.n_hashq, ptop, rd, fn))


@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import re
import os
import sys
import stat
import time
import base64
import select
@@ -18,7 +19,7 @@ import subprocess as sp # nosec
from datetime import datetime
from collections import Counter
from .__init__ import PY2, WINDOWS, ANYWIN
from .__init__ import PY2, WINDOWS, ANYWIN, VT100
from .stolen import surrogateescape
FAKE_MP = False
@@ -57,6 +58,9 @@ except:
return struct.unpack(f.decode("ascii"), *a, **ka)
ansi_re = re.compile("\033\\[[^mK]*[mK]")
surrogateescape.register_surrogateescape()
FS_ENCODING = sys.getfilesystemencoding()
if WINDOWS and PY2:
@@ -76,6 +80,7 @@ HTTPCODE = {
403: "Forbidden",
404: "Not Found",
405: "Method Not Allowed",
411: "Length Required",
413: "Payload Too Large",
416: "Requested Range Not Satisfiable",
422: "Unprocessable Entity",
@@ -164,7 +169,7 @@ class Cooldown(object):
return ret
class Unrecv(object):
class _Unrecv(object):
"""
undo any number of socket recv ops
"""
@@ -184,10 +189,68 @@ class Unrecv(object):
except:
return b""
def recv_ex(self, nbytes):
"""read an exact number of bytes"""
ret = self.recv(nbytes)
while ret and len(ret) < nbytes:
buf = self.recv(nbytes - len(ret))
if not buf:
break
ret += buf
return ret
def unrecv(self, buf):
self.buf = buf + self.buf
class _LUnrecv(object):
"""
with expensive debug logging
"""
def __init__(self, s):
self.s = s
self.buf = b""
def recv(self, nbytes):
if self.buf:
ret = self.buf[:nbytes]
self.buf = self.buf[nbytes:]
m = "\033[0;7mur:pop:\033[0;1;32m {}\n\033[0;7mur:rem:\033[0;1;35m {}\033[0m\n"
print(m.format(ret, self.buf), end="")
return ret
try:
ret = self.s.recv(nbytes)
m = "\033[0;7mur:recv\033[0;1;33m {}\033[0m\n"
print(m.format(ret), end="")
return ret
except:
return b""
def recv_ex(self, nbytes):
"""read an exact number of bytes"""
ret = self.recv(nbytes)
while ret and len(ret) < nbytes:
buf = self.recv(nbytes - len(ret))
if not buf:
break
ret += buf
return ret
def unrecv(self, buf):
self.buf = buf + self.buf
m = "\033[0;7mur:push\033[0;1;31m {}\n\033[0;7mur:rem:\033[0;1;35m {}\033[0m\n"
print(m.format(buf, self.buf), end="")
Unrecv = _Unrecv
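
recv_ex above loops until it has the exact byte count (or the peer closes), which the multipart parser now relies on when checking the two bytes after each boundary. The same loop against a bare socket, as a sketch; sock is assumed to be an already-connected socket object:

def recv_exact(sock, nbytes):
    # keep reading until we have exactly nbytes, or the peer closes early
    buf = b""
    while len(buf) < nbytes:
        part = sock.recv(nbytes - len(buf))
        if not part:
            break  # connection closed; caller must check len(buf)
        buf += part
    return buf

# e.g. the two bytes after a multipart boundary must be b"\r\n" or b"--"
# tail = recv_exact(sock, 2)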
class ProgressPrinter(threading.Thread):
"""
periodically print progress info without linefeeds
@@ -202,17 +265,22 @@ class ProgressPrinter(threading.Thread):
def run(self):
msg = None
fmt = " {}\033[K\r" if VT100 else " {} $\r"
while not self.end:
time.sleep(0.1)
if msg == self.msg or self.end:
continue
msg = self.msg
uprint(" {}\033[K\r".format(msg))
uprint(fmt.format(msg))
if PY2:
sys.stdout.flush()
print("\033[K", end="")
if VT100:
print("\033[K", end="")
elif msg:
print("------------------------")
sys.stdout.flush() # necessary on win10 even w/ stderr btw
@@ -339,6 +407,13 @@ def log_thrs(log, ival, name):
log(name, "\033[0m \033[33m".join(tv), 3)
def vol_san(vols, txt):
for vol in vols:
txt = txt.replace(vol.realpath.encode("utf-8"), vol.vpath.encode("utf-8"))
return txt
def min_ex():
et, ev, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
@@ -570,19 +645,21 @@ class MultipartParser(object):
yields [fieldname, unsanitized_filename, fieldvalue]
where fieldvalue yields chunks of data
"""
while True:
run = True
while run:
fieldname, filename = self._read_header()
yield [fieldname, filename, self._read_data()]
tail = self.sr.recv(2)
tail = self.sr.recv_ex(2)
if tail == b"--":
# EOF indicated by this immediately after final boundary
self.sr.recv(2)
return
tail = self.sr.recv_ex(2)
run = False
if tail != b"\r\n":
raise Pebkac(400, "protocol error after field value")
m = "protocol error after field value: want b'\\r\\n', got {!r}"
raise Pebkac(400, m.format(tail))
def _read_value(self, iterator, max_len):
ret = b""
@@ -683,6 +760,17 @@ def humansize(sz, terse=False):
return ret.replace("iB", "").replace(" ", "")
def unhumanize(sz):
try:
return float(sz)
except:
pass
mul = sz[-1:].lower()
mul = {"k": 1024, "m": 1024 * 1024, "g": 1024 * 1024 * 1024}.get(mul, 1)
return float(sz[:-1]) * mul
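
unhumanize accepts either a plain number or a single k/m/g suffix (case-insensitive) and returns the size in bytes as a float; a few example calls, assuming the function as defined above:

print(unhumanize("512"))   # 512.0
print(unhumanize("32m"))   # 33554432.0  (32 * 1024 * 1024)
print(unhumanize("1.5G"))  # 1610612736.0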
def get_spd(nbyte, t0, t=None):
if t is None:
t = time.time()
@@ -758,6 +846,19 @@ def sanitize_fn(fn, ok, bad):
return fn.strip()
def absreal(fpath):
try:
return fsdec(os.path.abspath(os.path.realpath(fsenc(fpath))))
except:
if not WINDOWS:
raise
# cpython bug introduced in 3.8, still exists in 3.9.1,
# some win7sp1 and win10:20H2 boxes cannot realpath a
# networked drive letter such as b"n:" or b"n:\\"
return os.path.abspath(os.path.realpath(fpath))
def u8safe(txt):
try:
return txt.encode("utf-8", "xmlcharrefreplace").decode("utf-8", "replace")
@@ -815,6 +916,13 @@ def unquotep(txt):
return w8dec(unq2)
def vsplit(vpath):
if "/" not in vpath:
return "", vpath
return vpath.rsplit("/", 1)
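
vsplit is the virtual-path counterpart of os.path.split: it splits on the last slash and returns an empty directory part for bare filenames. Note that the no-slash case returns a tuple while the split case returns a two-element list; callers unpack both the same way:

print(vsplit("music/cd1/track01.flac"))  # ['music/cd1', 'track01.flac']
print(vsplit("readme.md"))               # ('', 'readme.md')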
def w8dec(txt):
"""decodes filesystem-bytes to wtf8"""
if PY2:
@@ -937,8 +1045,12 @@ def read_socket_chunked(sr, log=None):
raise Pebkac(400, err)
if chunklen == 0:
sr.recv(2) # \r\n after final chunk
return
x = sr.recv_ex(2)
if x == b"\r\n":
return
m = "protocol error after final chunk: want b'\\r\\n', got {!r}"
raise Pebkac(400, m.format(x))
if log:
log("receiving {} byte chunk".format(chunklen))
@@ -946,7 +1058,10 @@ def read_socket_chunked(sr, log=None):
for chunk in read_socket(sr, chunklen):
yield chunk
sr.recv(2) # \r\n after each chunk too
x = sr.recv_ex(2)
if x != b"\r\n":
m = "protocol error in chunk separator: want b'\\r\\n', got {!r}"
raise Pebkac(400, m.format(x))
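
read_socket_chunked now validates the \r\n after every chunk and after the final zero-length chunk instead of skipping two bytes blindly. For reference, this is the framing it expects; a small sketch that builds a well-formed chunked body (encode_chunked is made up for illustration):

def encode_chunked(chunks):
    # hex length, CRLF, data, CRLF for each chunk; then the zero-length terminator
    out = b""
    for c in chunks:
        out += b"%x\r\n" % len(c) + c + b"\r\n"
    return out + b"0\r\n\r\n"

print(encode_chunked([b"hello ", b"world"]))
# b'6\r\nhello \r\n5\r\nworld\r\n0\r\n\r\n'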
def yieldfile(fn):
@@ -1014,6 +1129,9 @@ def sendfile_kern(lower, upper, f, s):
def statdir(logger, scandir, lstat, top):
if lstat and not os.supports_follow_symlinks:
scandir = False
try:
btop = fsenc(top)
if scandir and hasattr(os, "scandir"):
@@ -1038,6 +1156,29 @@ def statdir(logger, scandir, lstat, top):
logger(src, "{} @ {}".format(repr(ex), top), 1)
def rmdirs(logger, scandir, lstat, top):
if not os.path.exists(fsenc(top)) or not os.path.isdir(fsenc(top)):
top = os.path.dirname(top)
dirs = statdir(logger, scandir, lstat, top)
dirs = [x[0] for x in dirs if stat.S_ISDIR(x[1].st_mode)]
dirs = [os.path.join(top, x) for x in dirs]
ok = []
ng = []
for d in dirs[::-1]:
a, b = rmdirs(logger, scandir, lstat, d)
ok += a
ng += b
try:
os.rmdir(fsenc(top))
ok.append(top)
except:
ng.append(top)
return ok, ng
def unescape_cookie(orig):
# mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn
ret = ""
@@ -1081,7 +1222,7 @@ def guess_mime(url, fallback="application/octet-stream"):
return ret
def runcmd(*argv):
def runcmd(argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
stdout, stderr = p.communicate()
stdout = stdout.decode("utf-8", "replace")
@@ -1089,8 +1230,8 @@ def runcmd(*argv):
return [p.returncode, stdout, stderr]
def chkcmd(*argv):
ok, sout, serr = runcmd(*argv)
def chkcmd(argv):
ok, sout, serr = runcmd(argv)
if ok != 0:
raise Exception(serr)
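
runcmd and chkcmd now take the argv as a single list instead of unpacked positional arguments, matching how the FFmpeg command is assembled in the thumbnailer above. Example of the new call style, assuming ffmpeg is on PATH (the server itself passes fsenc()'d byte strings):

rc, out, err = runcmd(["ffmpeg", "-version"])
if rc != 0:
    raise RuntimeError(err)

chkcmd(["du", "-sh", "."])  # same thing, but raises for you on a nonzero exit code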


@@ -22,7 +22,7 @@ window.baguetteBox = (function () {
afterHide: null,
onChange: null,
},
overlay, slider, btnPrev, btnNext, btnHelp, btnVmode, btnClose,
overlay, slider, btnPrev, btnNext, btnHelp, btnRotL, btnRotR, btnSel, btnVmode, btnClose,
currentGallery = [],
currentIndex = 0,
isOverlayVisible = false,
@@ -49,7 +49,7 @@ window.baguetteBox = (function () {
};
var touchstartHandler = function (e) {
touch.count++;
touch.count = e.touches.length;
if (touch.count > 1)
touch.multitouch = true;
@@ -72,8 +72,11 @@ window.baguetteBox = (function () {
hideOverlay();
}
};
var touchendHandler = function () {
var touchendHandler = function (e) {
touch.count--;
if (e && e.touches)
touch.count = e.touches.length;
if (touch.count <= 0)
touch.multitouch = false;
@@ -175,6 +178,9 @@ window.baguetteBox = (function () {
'<button id="bbox-next" class="bbox-btn" type="button" aria-label="Next">&gt;</button>' +
'<div id="bbox-btns">' +
'<button id="bbox-help" type="button">?</button>' +
'<button id="bbox-rotl" type="button">↶</button>' +
'<button id="bbox-rotr" type="button">↷</button>' +
'<button id="bbox-tsel" type="button">sel</button>' +
'<button id="bbox-vmode" type="button" tt="a"></button>' +
'<button id="bbox-close" type="button" aria-label="Close">X</button>' +
'</div></div>'
@@ -187,6 +193,9 @@ window.baguetteBox = (function () {
btnPrev = ebi('bbox-prev');
btnNext = ebi('bbox-next');
btnHelp = ebi('bbox-help');
btnRotL = ebi('bbox-rotl');
btnRotR = ebi('bbox-rotr');
btnSel = ebi('bbox-tsel');
btnVmode = ebi('bbox-vmode');
btnClose = ebi('bbox-close');
bindEvents();
@@ -203,11 +212,13 @@ window.baguetteBox = (function () {
['right, L', 'next file'],
['home', 'first file'],
['end', 'last file'],
['R', 'rotate (shift=ccw)'],
['S', 'toggle file selection'],
['space, P, K', 'video: play / pause'],
['U', 'video: seek 10sec back'],
['P', 'video: seek 10sec ahead'],
['M', 'video: toggle mute'],
['R', 'video: toggle loop'],
['V', 'video: toggle loop'],
['C', 'video: toggle auto-next'],
['F', 'video: toggle fullscreen'],
],
@@ -249,7 +260,7 @@ window.baguetteBox = (function () {
v.muted = vmute = !vmute;
mp_ctl();
}
else if (k == "KeyR" && v) {
else if (k == "KeyV" && v) {
vloop = !vloop;
vnext = vnext && !vloop;
setVmode();
@@ -267,6 +278,10 @@ window.baguetteBox = (function () {
v.requestFullscreen();
}
catch (ex) { }
else if (k == "KeyS")
tglsel();
else if (k == "KeyR")
rotn(e.shiftKey ? -1 : 1);
}
function setVmode() {
@@ -279,7 +294,7 @@ window.baguetteBox = (function () {
if (vloop) {
lbl = 'Loop';
msg += 'repeat it';
tts = '$NHotkey: R';
tts = '$NHotkey: V';
}
else if (vnext) {
lbl = 'Cont';
@@ -314,6 +329,40 @@ window.baguetteBox = (function () {
tt.show.bind(this)();
}
function tglsel() {
var thumb = currentGallery[currentIndex].imageElement,
name = vsplit(thumb.href)[1],
files = msel.getall();
for (var a = 0; a < files.length; a++)
if (vsplit(files[a].vp)[1] == name)
clmod(ebi(files[a].id).closest('tr'), 'sel', 't');
msel.selui();
selbg();
}
function selbg() {
var img = vidimg(),
thumb = currentGallery[currentIndex].imageElement,
name = vsplit(thumb.href)[1],
files = msel.getsel(),
sel = false;
for (var a = 0; a < files.length; a++)
if (vsplit(files[a].vp)[1] == name)
sel = true;
ebi('bbox-overlay').style.background = sel ?
'rgba(153,34,85,0.7)' : '';
img.style.borderRadius = sel ? '1em' : '';
btnSel.style.color = sel ? '#fff' : '';
btnSel.style.background = sel ? '#d48' : '';
btnSel.style.textShadow = sel ? '1px 1px 0 #b38' : '';
btnSel.style.boxShadow = sel ? '.15em .15em 0 #502' : '';
}
function keyUpHandler(e) {
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
return;
@@ -348,6 +397,9 @@ window.baguetteBox = (function () {
bind(btnClose, 'click', hideOverlay);
bind(btnVmode, 'click', tglVmode);
bind(btnHelp, 'click', halp);
bind(btnRotL, 'click', rotl);
bind(btnRotR, 'click', rotr);
bind(btnSel, 'click', tglsel);
bind(slider, 'contextmenu', contextmenuHandler);
bind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
bind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
@@ -362,11 +414,15 @@ window.baguetteBox = (function () {
unbind(btnClose, 'click', hideOverlay);
unbind(btnVmode, 'click', tglVmode);
unbind(btnHelp, 'click', halp);
unbind(btnRotL, 'click', rotl);
unbind(btnRotR, 'click', rotr);
unbind(btnSel, 'click', tglsel);
unbind(slider, 'contextmenu', contextmenuHandler);
unbind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
unbind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
unbind(overlay, 'touchend', touchendHandler);
unbind(document, 'focus', trapFocusInsideOverlay, true);
timer.rm(rotn);
}
function prepareOverlay(gallery, userOptions) {
@@ -617,10 +673,91 @@ window.baguetteBox = (function () {
return true;
}
var prev_cw = 0, prev_ch = 0, unrot_timer = null;
function rotn(n) {
var el = vidimg(),
orot = parseInt(el.getAttribute('rot') || 0),
frot = orot + (n || 0) * 90;
if (!frot && !orot)
return; // reflow noop
var co = ebi('bbox-overlay'),
cw = co.clientWidth,
ch = co.clientHeight;
if (!n && prev_cw === cw && prev_ch === ch)
return; // reflow noop
prev_cw = cw;
prev_ch = ch;
var rot = frot,
iw = el.naturalWidth || el.videoWidth,
ih = el.naturalHeight || el.videoHeight,
magic = 4, // idk, works in enough browsers
dl = el.closest('div').querySelector('figcaption a'),
vw = cw,
vh = ch - dl.offsetHeight + magic,
pmag = Math.min(1, Math.min(vw / ih, vh / iw)),
wmag = Math.min(1, Math.min(vw / iw, vh / ih));
while (rot < 0) rot += 360;
while (rot >= 360) rot -= 360;
var q = rot == 90 || rot == 270 ? 1 : 0,
mag = q ? pmag : wmag;
el.style.cssText = 'max-width:none; max-height:none; position:absolute; display:block; margin:0';
if (!orot) {
el.style.width = iw * wmag + 'px';
el.style.height = ih * wmag + 'px';
el.style.left = (vw - iw * wmag) / 2 + 'px';
el.style.top = (vh - ih * wmag) / 2 - magic + 'px';
q = el.offsetHeight;
}
el.style.width = iw * mag + 'px';
el.style.height = ih * mag + 'px';
el.style.left = (vw - iw * mag) / 2 + 'px';
el.style.top = (vh - ih * mag) / 2 - magic + 'px';
el.style.transform = 'rotate(' + frot + 'deg)';
el.setAttribute('rot', frot);
timer.add(rotn);
if (!rot) {
clearTimeout(unrot_timer);
unrot_timer = setTimeout(unrot, 300);
}
}
function rotl() {
rotn(-1);
}
function rotr() {
rotn(1);
}
function unrot() {
var el = vidimg(),
orot = el.getAttribute('rot'),
rot = parseInt(orot || 0);
while (rot < 0) rot += 360;
while (rot >= 360) rot -= 360;
if (rot || orot === null)
return;
clmod(el, 'nt', 1);
el.removeAttribute('rot');
el.removeAttribute("style");
rot = el.offsetHeight;
clmod(el, 'nt');
timer.rm(rotn);
}
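
The new rotn handler keeps a rotated image inside the viewport by swapping the width/height constraints when the rotation lands on 90 or 270 degrees (pmag vs wmag above), and never upscales. The scale factor boils down to the following; this is a Python restatement of that math for illustration, not code from the gallery:

def fit_scale(img_w, img_h, view_w, view_h, rot_deg):
    # swap the constraints when the image is on its side; cap at 1 to avoid upscaling
    rot = rot_deg % 360
    if rot in (90, 270):
        return min(1, view_w / img_h, view_h / img_w)
    return min(1, view_w / img_w, view_h / img_h)

# a 4000x3000 photo rotated 90 degrees in a 1280x700 viewport:
print(fit_scale(4000, 3000, 1280, 700, 90))  # 0.175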
function vid() {
return imagesElements[currentIndex].querySelector('video');
}
function vidimg() {
return imagesElements[currentIndex].querySelector('img, video');
}
function playvid(play) {
if (vid())
vid()[play ? 'play' : 'pause']();
@@ -662,15 +799,21 @@ window.baguetteBox = (function () {
}
function updateOffset() {
var offset = -currentIndex * 100 + '%';
var offset = -currentIndex * 100 + '%',
xform = slider.style.perspective !== undefined;
if (options.animation === 'fadeIn') {
slider.style.opacity = 0;
setTimeout(function () {
slider.style.transform = 'translate3d(' + offset + ',0,0)';
xform ?
slider.style.transform = 'translate3d(' + offset + ',0,0)' :
slider.style.left = offset;
slider.style.opacity = 1;
}, 400);
} else {
slider.style.transform = 'translate3d(' + offset + ',0,0)';
xform ?
slider.style.transform = 'translate3d(' + offset + ',0,0)' :
slider.style.left = offset;
}
playvid(false);
var v = vid();
@@ -679,8 +822,21 @@ window.baguetteBox = (function () {
v.muted = vmute;
v.loop = vloop;
}
selbg();
mp_ctl();
setVmode();
var el = vidimg();
if (el.getAttribute('rot'))
timer.add(rotn);
else
timer.rm(rotn);
var prev = QS('.full-image.vis');
if (prev)
clmod(prev, 'vis');
clmod(el.closest('div'), 'vis', 1);
}
function preloadNext(index) {

File diff suppressed because it is too large


@@ -6,10 +6,10 @@
<title>⇆🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css?_={{ ts }}">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css?_={{ ts }}">
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
<link rel="stylesheet" media="screen" href="/.cpr/browser.css?_={{ ts }}">
{%- if css %}
<link rel="stylesheet" type="text/css" media="screen" href="{{ css }}?_={{ ts }}">
<link rel="stylesheet" media="screen" href="{{ css }}?_={{ ts }}">
{%- endif %}
</head>
@@ -59,12 +59,14 @@
</form>
</div>
<div id="op_unpost" class="opview opbox"></div>
<div id="op_up2k" class="opview"></div>
<div id="op_cfg" class="opview opbox opwide"></div>
<h1 id="path">
<a href="#" id="entree" tt="show directory tree$NHotkey: B">🌲</a>
<a href="#" id="entree" tt="show navpane (directory tree sidebar)$NHotkey: B">🌲</a>
{%- for n in vpnodes %}
<a href="/{{ n[0] }}">{{ n[1] }}</a>
{%- endfor %}
@@ -111,6 +113,8 @@
<div id="epi" class="logue">{{ logues[1] }}</div>
<h2><a href="/?h">control-panel</a></h2>
<a href="#" id="repl">π</a>
</div>
@@ -121,11 +125,16 @@
<div id="widget"></div>
<script>
var perms = {{ perms }},
tag_order_cfg = {{ tag_order }},
var acct = "{{ acct }}",
perms = {{ perms }},
def_hcols = {{ def_hcols|tojson }},
have_up2k_idx = {{ have_up2k_idx|tojson }},
have_tags_idx = {{ have_tags_idx|tojson }},
have_zip = {{ have_zip|tojson }};
have_mv = {{ have_mv|tojson }},
have_del = {{ have_del|tojson }},
have_unpost = {{ have_unpost|tojson }},
have_zip = {{ have_zip|tojson }},
readme = {{ readme|tojson }};
</script>
<script src="/.cpr/util.js?_={{ ts }}"></script>
<script src="/.cpr/browser.js?_={{ ts }}"></script>

File diff suppressed because it is too large


@@ -1,53 +1,16 @@
@font-face {
font-family: 'scp';
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
}
html, body {
color: #333;
background: #eee;
font-family: sans-serif;
line-height: 1.5em;
}
#tt {
position: fixed;
max-width: 34em;
background: #222;
border: 0 solid #777;
overflow: hidden;
margin-top: 1em;
padding: 0 1.3em;
height: 0;
opacity: .1;
transition: opacity 0.14s, height 0.14s, padding 0.14s;
box-shadow: 0 .2em .5em #222;
border-radius: .4em;
z-index: 9001;
}
#tt.b {
padding: 0 2em;
border-radius: .5em;
box-shadow: 0 .2em 1em #000;
}
#tt.show {
padding: 1em 1.3em;
border-width: .4em 0;
height: auto;
opacity: 1;
}
#tt.show.b {
padding: 1.5em 2em;
border-width: .5em 0;
}
#tt code {
background: #3c3c3c;
padding: .1em .3em;
border-top: 1px solid #777;
border-radius: .3em;
font-family: monospace, monospace;
line-height: 1.7em;
}
#tt em {
color: #f6a;
#repl {
position: absolute;
top: 0;
right: .5em;
border: none;
color: inherit;
background: none;
}
#mtw {
display: none;
@@ -56,122 +19,12 @@ html, body {
margin: 0 auto;
padding: 0 1.5em;
}
pre, code, a {
color: #480;
background: #f7f7f7;
border: .07em solid #ddd;
border-radius: .2em;
padding: .1em .3em;
margin: 0 .1em;
#toast {
bottom: auto;
top: 1.4em;
}
code {
font-size: .96em;
}
pre, code {
font-family: 'scp', monospace, monospace;
white-space: pre-wrap;
word-break: break-all;
}
pre {
counter-reset: precode;
}
pre code {
counter-increment: precode;
display: inline-block;
margin: 0 -.3em;
padding: .4em .5em;
border: none;
border-bottom: 1px solid #cdc;
min-width: calc(100% - .6em);
line-height: 1.1em;
}
pre code:last-child {
border-bottom: none;
}
pre code::before {
content: counter(precode);
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
display: inline-block;
text-align: right;
font-size: .75em;
color: #48a;
width: 4em;
padding-right: 1.5em;
margin-left: -5.5em;
}
pre code:hover {
background: #fec;
color: #360;
}
h1, h2 {
line-height: 1.5em;
}
h1 {
font-size: 1.7em;
text-align: center;
border: 1em solid #777;
border-width: .05em 0;
margin: 3em 0;
}
h2 {
font-size: 1.5em;
font-weight: normal;
background: #f7f7f7;
border-top: .07em solid #fff;
border-bottom: .07em solid #bbb;
border-radius: .5em .5em 0 0;
padding-left: .4em;
margin-top: 3em;
}
h3 {
border-bottom: .1em solid #999;
}
h1 a, h3 a, h5 a,
h2 a, h4 a, h6 a {
color: inherit;
display: block;
background: none;
border: none;
padding: 0;
margin: 0;
}
#mp ul,
#mp ol {
border-left: .3em solid #ddd;
}
#m>ul,
#m>ol {
border-color: #bbb;
}
#mp ul>li {
list-style-type: disc;
}
#mp ul>li,
#mp ol>li {
margin: .7em 0;
}
strong {
color: #000;
}
p>em,
li>em,
td>em {
color: #c50;
padding: .1em;
border-bottom: .1em solid #bbb;
}
blockquote {
font-family: serif;
background: #f7f7f7;
border: .07em dashed #ccc;
padding: 0 2em;
margin: 1em 0;
}
small {
opacity: .8;
a {
text-decoration: none;
}
#toc {
margin: 0 1em;
@@ -207,7 +60,7 @@ small {
z-index: 99;
position: relative;
display: inline-block;
font-family: monospace, monospace;
font-family: 'scp', monospace, monospace;
font-weight: bold;
font-size: 1.3em;
line-height: .1em;
@@ -219,14 +72,6 @@ small {
color: #6b3;
text-shadow: .02em 0 0 #6b3;
}
table {
border-collapse: collapse;
margin: 1em 0;
}
th, td {
padding: .2em .5em;
border: .12em solid #aaa;
}
blink {
animation: blinker .7s cubic-bezier(.9, 0, .1, 1) infinite;
}
@@ -239,6 +84,36 @@ blink {
}
}
.mdo pre {
counter-reset: precode;
}
.mdo pre code {
counter-increment: precode;
display: inline-block;
border: none;
border-bottom: 1px solid #cdc;
min-width: calc(100% - .6em);
}
.mdo pre code:last-child {
border-bottom: none;
}
.mdo pre code::before {
content: counter(precode);
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
display: inline-block;
text-align: right;
font-size: .75em;
color: #48a;
width: 4em;
padding-right: 1.5em;
margin-left: -5.5em;
}
@media screen {
html, body {
margin: 0;
@@ -255,34 +130,6 @@ blink {
#mp {
max-width: 52em;
margin-bottom: 6em;
word-break: break-word;
overflow-wrap: break-word;
word-wrap: break-word; /*ie*/
}
a {
color: #fff;
background: #39b;
text-decoration: none;
padding: 0 .3em;
border: none;
border-bottom: .07em solid #079;
}
h2 {
color: #fff;
background: #555;
margin-top: 2em;
border-bottom: .22em solid #999;
border-top: none;
}
h1 {
color: #fff;
background: #444;
font-weight: normal;
border-top: .4em solid #fb0;
border-bottom: .4em solid #777;
border-radius: 0 1em 0 1em;
margin: 3em 0 1em 0;
padding: .5em 0;
}
#mn {
padding: 1.3em 0 .7em 1em;
@@ -335,6 +182,8 @@ blink {
color: #444;
background: none;
text-decoration: underline;
margin: 0 .1em;
padding: 0 .3em;
border: none;
}
#mh a:hover {
@@ -383,55 +232,6 @@ blink {
html.dark #toc li {
border-width: 0;
}
html.dark #mp a {
background: #057;
}
html.dark #mp h1 a, html.dark #mp h4 a,
html.dark #mp h2 a, html.dark #mp h5 a,
html.dark #mp h3 a, html.dark #mp h6 a {
color: inherit;
background: none;
}
html.dark pre,
html.dark code {
color: #8c0;
background: #1a1a1a;
border: .07em solid #333;
}
html.dark #mp ul,
html.dark #mp ol {
border-color: #444;
}
html.dark #m>ul,
html.dark #m>ol {
border-color: #555;
}
html.dark strong {
color: #fff;
}
html.dark p>em,
html.dark li>em,
html.dark td>em {
color: #f94;
border-color: #666;
}
html.dark h1 {
background: #383838;
border-top: .4em solid #b80;
border-bottom: .4em solid #4c4c4c;
}
html.dark h2 {
background: #444;
border-bottom: .22em solid #555;
}
html.dark td,
html.dark th {
border-color: #444;
}
html.dark blockquote {
background: #282828;
border: .07em dashed #444;
}
html.dark #mn a:not(:last-child)::after {
border-color: rgba(255,255,255,0.3);
}
@@ -537,12 +337,15 @@ blink {
mso-footer-margin: .6in;
mso-paper-source: 0;
}
a {
.mdo a {
color: #079;
text-decoration: none;
border-bottom: .07em solid #4ac;
padding: 0 .3em;
}
#repl {
display: none;
}
#toc>ul {
border-left: .1em solid #84c4dd;
}
@@ -567,18 +370,20 @@ blink {
a[ctr]::before {
content: attr(ctr) '. ';
}
h1 {
.mdo h1 {
margin: 2em 0;
}
h2 {
.mdo h2 {
margin: 2em 0 0 0;
}
h1, h2, h3 {
.mdo h1,
.mdo h2,
.mdo h3 {
page-break-inside: avoid;
}
h1::after,
h2::after,
h3::after {
.mdo h1::after,
.mdo h2::after,
.mdo h3::after {
content: 'orz';
color: transparent;
display: block;
@@ -586,20 +391,20 @@ blink {
padding: 4em 0 0 0;
margin: 0 0 -5em 0;
}
p {
.mdo p {
page-break-inside: avoid;
}
table {
.mdo table {
page-break-inside: auto;
}
tr {
.mdo tr {
page-break-inside: avoid;
page-break-after: auto;
}
thead {
.mdo thead {
display: table-header-group;
}
tfoot {
.mdo tfoot {
display: table-footer-group;
}
#mp a.vis::after {
@@ -607,31 +412,32 @@ blink {
border-bottom: 1px solid #bbb;
color: #444;
}
blockquote {
.mdo blockquote {
border-color: #555;
}
code {
.mdo code {
border-color: #bbb;
}
pre, pre code {
.mdo pre,
.mdo pre code {
border-color: #999;
}
pre code::before {
.mdo pre code::before {
color: #058;
}
html.dark a {
html.dark .mdo a {
color: #000;
}
html.dark pre,
html.dark code {
html.dark .mdo pre,
html.dark .mdo code {
color: #240;
}
html.dark p>em,
html.dark li>em,
html.dark td>em {
html.dark .mdo p>em,
html.dark .mdo li>em,
html.dark .mdo td>em {
color: #940;
}
}


@@ -1,11 +1,12 @@
<!DOCTYPE html><html><head>
<meta charset="utf-8">
<title>📝🎉 {{ title }}</title> <!-- 📜 -->
<title>📝🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7">
<link href="/.cpr/md.css?_={{ ts }}" rel="stylesheet">
<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
<link rel="stylesheet" href="/.cpr/md.css?_={{ ts }}">
{%- if edit %}
<link href="/.cpr/md2.css?_={{ ts }}" rel="stylesheet">
<link rel="stylesheet" href="/.cpr/md2.css?_={{ ts }}">
{%- endif %}
</head>
<body>
@@ -42,8 +43,9 @@
if you're still reading this, check that javascript is allowed
</div>
</div>
<div id="mp"></div>
<div id="mp" class="mdo"></div>
</div>
<a href="#" id="repl">π</a>
{%- if edit %}
<div id="helpbox">
@@ -131,18 +133,18 @@ var md_opt = {
};
(function () {
var btn = document.getElementById("lightswitch");
var toggle = function (e) {
if (e) e.preventDefault();
var dark = !document.documentElement.getAttribute("class");
document.documentElement.setAttribute("class", dark ? "dark" : "");
btn.innerHTML = "go " + (dark ? "light" : "dark");
if (window.localStorage)
localStorage.setItem('lightmode', dark ? 0 : 1);
};
btn.onclick = toggle;
if (window.localStorage && localStorage.getItem('lightmode') != 1)
toggle();
var l = localStorage,
drk = l.getItem('lightmode') != 1,
btn = document.getElementById("lightswitch"),
f = function (e) {
if (e) { e.preventDefault(); drk = !drk; }
document.documentElement.setAttribute("class", drk? "dark":"light");
btn.innerHTML = "go " + (drk ? "light":"dark");
l.setItem('lightmode', drk? 0:1);
};
btn.onclick = f;
f();
})();
</script>


@@ -24,23 +24,6 @@ var dbg = function () { };
var md_plug = {};
function hesc(txt) {
return txt.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
}
function cls(dom, name, add) {
var re = new RegExp('(^| )' + name + '( |$)');
var lst = (dom.getAttribute('class') + '').replace(re, "$1$2").replace(/ /, "");
dom.setAttribute('class', lst + (add ? ' ' + name : ''));
}
function statify(obj) {
return JSON.parse(JSON.stringify(obj));
}
// dodge browser issues
(function () {
var ua = navigator.userAgent;
@@ -65,7 +48,7 @@ function statify(obj) {
if (a > 0)
loc.push(n[a]);
var dec = hesc(uricom_dec(n[a])[0]);
var dec = esc(uricom_dec(n[a])[0]);
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
}
@@ -73,6 +56,26 @@ function statify(obj) {
})();
// image load handler
var img_load = (function () {
var r = {};
r.callbacks = [];
function fire() {
for (var a = 0; a < r.callbacks.length; a++)
r.callbacks[a]();
}
var timeout = null;
r.done = function () {
clearTimeout(timeout);
timeout = setTimeout(fire, 500);
};
return r;
})();
// faster than replacing the entire html (chrome 1.8x, firefox 1.6x)
function copydom(src, dst, lv) {
var sc = src.childNodes,
@@ -176,7 +179,7 @@ function md_plug_err(ex, js) {
var lns = js.split('\n');
if (ln < lns.length) {
o = mknod('span');
o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block';
o.style.cssText = "color:#ac2;font-size:.9em;font-family:'scp',monospace,monospace;display:block";
o.textContent = lns[ln - 1];
}
}
@@ -185,7 +188,7 @@ function md_plug_err(ex, js) {
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
errbox.textContent = msg;
errbox.onclick = function () {
alert('' + ex.stack);
modal.alert('<pre>' + esc(ex.stack) + '</pre>');
};
if (o) {
errbox.appendChild(o);
@@ -356,6 +359,10 @@ function convert_markdown(md_text, dest_dom) {
copydom(md_dom, dest_dom, 0);
var imgs = dest_dom.getElementsByTagName('img');
for (var a = 0, aa = imgs.length; a < aa; a++)
imgs[a].onload = img_load.done;
if (ext && ext[0].render2)
try {
ext[0].render2(dest_dom);
@@ -490,6 +497,7 @@ function init_toc() {
// "main" :p
convert_markdown(dom_src.value, dom_pre);
var toc = init_toc();
img_load.callbacks = [toc.refresh];
// scroll handler


@@ -50,7 +50,7 @@
outline: none;
padding: 0;
margin: 0;
font-family: 'consolas', monospace, monospace;
font-family: 'scp', monospace, monospace;
white-space: pre-wrap;
word-break: break-word;
overflow-wrap: break-word;
@@ -84,13 +84,10 @@ html.dark #save.force-save {
#save.disabled {
opacity: .4;
}
#helpbox,
#toast {
#helpbox {
background: #f7f7f7;
border-radius: .4em;
z-index: 9001;
}
#helpbox {
display: none;
position: fixed;
padding: 2em;
@@ -107,19 +104,7 @@ html.dark #save.force-save {
}
html.dark #helpbox {
box-shadow: 0 .5em 2em #444;
}
html.dark #helpbox,
html.dark #toast {
background: #222;
border: 1px solid #079;
border-width: 1px 0;
}
#toast {
font-weight: bold;
text-align: center;
padding: .6em 0;
position: fixed;
top: 30%;
transition: opacity 0.2s ease-in-out;
opacity: 1;
}


@@ -98,7 +98,7 @@ var draw_md = (function () {
var src = dom_src.value;
convert_markdown(src, dom_pre);
var lines = hesc(src).replace(/\r/g, "").split('\n');
var lines = esc(src).replace(/\r/g, "").split('\n');
nlines = lines.length;
var html = [];
for (var a = 0; a < lines.length; a++)
@@ -108,7 +108,7 @@ var draw_md = (function () {
map_src = genmap(dom_ref, map_src);
map_pre = genmap(dom_pre, map_pre);
cls(ebi('save'), 'disabled', src == server_md);
clmod(ebi('save'), 'disabled', src == server_md);
var t1 = Date.now();
delay = t1 - t0 > 100 ? 25 : 1;
@@ -127,6 +127,12 @@ var draw_md = (function () {
})();
// discard TOC callback, just regen editor scroll map
img_load.callbacks = [function () {
map_pre = genmap(dom_pre, map_pre);
}];
// resize handler
redraw = (function () {
function onresize() {
@@ -136,7 +142,6 @@ redraw = (function () {
dom_ref.style.width = getComputedStyle(dom_src).offsetWidth + 'px';
map_src = genmap(dom_ref, map_src);
map_pre = genmap(dom_pre, map_pre);
dbg(document.body.clientWidth + 'x' + document.body.clientHeight);
}
function setsbs() {
dom_wrap.setAttribute('class', '');
@@ -236,7 +241,7 @@ function Modpoll() {
var skip = null;
if (ebi('toast'))
if (toast.visible)
skip = 'toast';
else if (this.skip_one)
@@ -285,16 +290,15 @@ function Modpoll() {
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
this.modpoll.disabled = true;
var msg = [
"The document has changed on the server.<br />" +
"The document has changed on the server.",
"The changes will NOT be loaded into your editor automatically.",
"Press F5 or CTRL-R to refresh the page,<br />" +
"",
"Press F5 or CTRL-R to refresh the page,",
"replacing your document with the server copy.",
"You can click this message to ignore and contnue."
"",
"You can close this message to ignore and contnue."
];
return toast(false, "box-shadow:0 1em 2em rgba(64,64,64,0.8);font-weight:normal",
36, "<p>" + msg.join('</p>\n<p>') + '</p>');
return toast.warn(0, msg.join('\n'));
}
console.log('modpoll eq');
@@ -323,57 +327,56 @@ function save(e) {
var save_btn = ebi("save"),
save_cls = save_btn.getAttribute('class') + '';
if (save_cls.indexOf('disabled') >= 0) {
toast(true, ";font-size:2em;color:#c90", 9, "no changes");
return;
}
if (save_cls.indexOf('disabled') >= 0)
return toast.inf(2, "no changes");
var force = (save_cls.indexOf('force-save') >= 0);
if (force && !confirm('confirm that you wish to lose the changes made on the server since you opened this document')) {
alert('ok, aborted');
return;
function save2() {
var txt = dom_src.value,
fd = new FormData();
fd.append("act", "tput");
fd.append("lastmod", (force ? -1 : last_modified));
fd.append("body", txt);
var url = (document.location + '').split('?')[0];
var xhr = new XMLHttpRequest();
xhr.open('POST', url, true);
xhr.responseType = 'text';
xhr.onreadystatechange = save_cb;
xhr.btn = save_btn;
xhr.txt = txt;
modpoll.skip_one = true; // skip one iteration while we save
xhr.send(fd);
}
var txt = dom_src.value;
var fd = new FormData();
fd.append("act", "tput");
fd.append("lastmod", (force ? -1 : last_modified));
fd.append("body", txt);
var url = (document.location + '').split('?')[0];
var xhr = new XMLHttpRequest();
xhr.open('POST', url, true);
xhr.responseType = 'text';
xhr.onreadystatechange = save_cb;
xhr.btn = save_btn;
xhr.txt = txt;
modpoll.skip_one = true; // skip one iteration while we save
xhr.send(fd);
if (!force)
save2();
else
modal.confirm('confirm that you wish to lose the changes made on the server since you opened this document', save2, function () {
toast.inf(3, 'aborted');
});
}
function save_cb() {
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.status !== 200) {
alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
return;
}
if (this.status !== 200)
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
var r;
try {
r = JSON.parse(this.responseText);
}
catch (ex) {
alert('Failed to parse reply from server:\n\n' + this.responseText);
return;
return toast.err(0, 'Failed to parse reply from server:\n\n' + this.responseText);
}
if (!r.ok) {
if (!this.btn.classList.contains('force-save')) {
this.btn.classList.add('force-save');
if (!clgot(this.btn, 'force-save')) {
clmod(this.btn, 'force-save', 1);
var msg = [
'This file has been modified since you started editing it!\n',
'if you really want to overwrite, press save again.\n',
@@ -383,15 +386,13 @@ function save_cb() {
r.lastmod + ' lastmod on the server now,',
r.now + ' server time now,\n',
];
alert(msg.join('\n'));
return toast.err(0, msg.join('\n'));
}
else {
alert('Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
}
return;
else
return toast.err(0, 'Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
}
this.btn.classList.remove('force-save');
clmod(this.btn, 'force-save');
//alert('save OK -- wrote ' + r.size + ' bytes.\n\nsha512: ' + r.sha512);
run_savechk(r.lastmod, this.txt, this.btn, 0);
@@ -415,10 +416,8 @@ function savechk_cb() {
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.status !== 200) {
alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
return;
}
if (this.status !== 200)
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
var doc1 = this.txt.replace(/\r\n/g, "\n");
var doc2 = this.responseText.replace(/\r\n/g, "\n");
@@ -431,58 +430,22 @@ function savechk_cb() {
}, 100);
return;
}
alert(
modal.alert(
'Error! The document on the server does not appear to have saved correctly (your editor contents and the server copy is not identical). Place the document on your clipboard for now and check the server logs for hints\n\n' +
'Length: yours=' + doc1.length + ', server=' + doc2.length
);
alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
modal.alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
modal.alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
return;
}
last_modified = this.lastmod;
server_md = this.txt;
draw_md();
toast(true, ";font-size:6em;font-family:serif;color:#9b4", 4,
'OK✔<span style="font-size:.2em;color:#999;position:absolute">' + this.ntry + '</span>');
toast.ok(2, 'save OK' + (this.ntry ? '\nattempt ' + this.ntry : ''));
modpoll.disabled = false;
}
function toast(autoclose, style, width, msg) {
var ok = ebi("toast");
if (ok)
ok.parentNode.removeChild(ok);
style = "width:" + width + "em;left:calc(50% - " + (width / 2) + "em);" + style;
ok = mknod('div');
ok.setAttribute('id', 'toast');
ok.setAttribute('style', style);
ok.innerHTML = msg;
var parent = ebi('m');
document.documentElement.appendChild(ok);
var hide = function (delay) {
delay = delay || 0;
setTimeout(function () {
ok.style.opacity = 0;
}, delay);
setTimeout(function () {
if (ok.parentNode)
ok.parentNode.removeChild(ok);
}, delay + 250);
}
ok.onclick = function () {
hide(0);
};
if (autoclose)
hide(500);
}
// firefox bug: initial selection offset isn't cleared properly through js
var ff_clearsel = (function () {
@@ -761,7 +724,7 @@ function fmt_table(e) {
var ind2 = tab[a].match(re_ind)[0];
if (ind != ind2 && a != 1) // the table can be a list entry or something, ignore [0]
return alert(err + 'indentation mismatch on row#2 and ' + row_name + ',\n' + tab[a]);
return toast.err(7, err + 'indentation mismatch on row#2 and ' + row_name + ',\n' + tab[a]);
var t = tab[a].slice(ind.length);
t = t.replace(re_lpipe, "");
@@ -771,7 +734,7 @@ function fmt_table(e) {
if (a == 0)
ncols = tab[a].length;
else if (ncols < tab[a].length)
return alert(err + 'num.columns(' + row_name + ') exceeding row#2; ' + ncols + ' < ' + tab[a].length);
return toast.err(7, err + 'num.columns(' + row_name + ') exceeding row#2; ' + ncols + ' < ' + tab[a].length);
// if row has less columns than row2, fill them in
while (tab[a].length < ncols)
@@ -788,7 +751,7 @@ function fmt_table(e) {
for (var col = 0; col < tab[1].length; col++) {
var m = tab[1][col].match(re_align);
if (!m)
return alert(err + 'invalid column specification, row#2, col ' + (col + 1) + ', [' + tab[1][col] + ']');
return toast.err(7, err + 'invalid column specification, row#2, col ' + (col + 1) + ', [' + tab[1][col] + ']');
if (m[2]) {
if (m[1])
@@ -876,10 +839,9 @@ function mark_uni(e) {
ptn = new RegExp('([^' + js_uni_whitelist + ']+)', 'g'),
mod = txt.replace(/\r/g, "").replace(ptn, "\u2588\u2770$1\u2771");
if (txt == mod) {
alert('no results; no modifications were made');
return;
}
if (txt == mod)
return toast.inf(5, 'no results; no modifications were made');
dom_src.value = mod;
}
@@ -893,10 +855,9 @@ function iter_uni(e) {
re = new RegExp('([^' + js_uni_whitelist + ']+)'),
m = re.exec(txt.slice(ofs));
if (!m) {
alert('no more hits from cursor onwards');
return;
}
if (!m)
return toast.inf(5, 'no more hits from cursor onwards');
ofs += m.index;
dom_src.setSelectionRange(ofs, ofs + m[0].length, "forward");
@@ -911,12 +872,10 @@ function iter_uni(e) {
function cfg_uni(e) {
if (e) e.preventDefault();
var reply = prompt("unicode whitelist", esc_uni_whitelist);
if (reply === null)
return;
esc_uni_whitelist = reply;
js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
modal.prompt("unicode whitelist", esc_uni_whitelist, function (reply) {
esc_uni_whitelist = reply;
js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
}, null);
}
@@ -1132,9 +1091,9 @@ action_stack = (function () {
ref = newtxt;
dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length);
if (hist.un.length > 0)
dbg(statify(hist.un.slice(-1)[0]));
dbg(jcp(hist.un.slice(-1)[0]));
if (hist.re.length > 0)
dbg(statify(hist.re.slice(-1)[0]));
dbg(jcp(hist.re.slice(-1)[0]));
}
return {

View File

@@ -7,6 +7,8 @@ html .editor-toolbar>button.active { border-color: rgba(0,0,0,0.4); background:
html .editor-toolbar>i.separator { border-left: 1px solid #ccc; }
html .editor-toolbar.disabled-for-preview>button:not(.no-disable) { opacity: .35 }
html {
line-height: 1.5em;
}
@@ -18,6 +20,22 @@ html, body {
background: #f7f7f7;
color: #333;
}
#toast {
bottom: auto;
top: 1.4em;
}
#repl {
position: absolute;
top: 0;
right: .5em;
border: none;
color: inherit;
background: none;
text-decoration: none;
}
#mn {
font-weight: normal;
margin: 1.3em 0 .7em 1em;
@@ -59,148 +77,12 @@ html .editor-toolbar>button.disabled {
html .editor-toolbar>button.save.force-save {
background: #f97;
}
/* copied from md.css for now */
.mdo pre,
.mdo code,
.mdo a {
color: #480;
background: #f7f7f7;
border: .07em solid #ddd;
border-radius: .2em;
padding: .1em .3em;
margin: 0 .1em;
}
.mdo code {
font-size: .96em;
}
.mdo pre,
.mdo code {
font-family: monospace, monospace;
white-space: pre-wrap;
word-break: break-all;
}
.mdo pre code {
display: block;
margin: 0 -.3em;
padding: .4em .5em;
line-height: 1.1em;
}
.mdo a {
color: #fff;
background: #39b;
text-decoration: none;
padding: 0 .3em;
border: none;
border-bottom: .07em solid #079;
}
.mdo h2 {
color: #fff;
background: #555;
margin-top: 2em;
border-bottom: .22em solid #999;
border-top: none;
}
.mdo h1 {
color: #fff;
background: #444;
font-weight: normal;
border-top: .4em solid #fb0;
border-bottom: .4em solid #777;
border-radius: 0 1em 0 1em;
margin: 3em 0 1em 0;
padding: .5em 0;
}
h1, h2 {
line-height: 1.5em;
}
h1 {
font-size: 1.7em;
text-align: center;
border: 1em solid #777;
border-width: .05em 0;
margin: 3em 0;
}
h2 {
font-size: 1.5em;
font-weight: normal;
background: #f7f7f7;
border-top: .07em solid #fff;
border-bottom: .07em solid #bbb;
border-radius: .5em .5em 0 0;
padding-left: .4em;
margin-top: 3em;
}
.mdo ul,
.mdo ol {
border-left: .3em solid #ddd;
}
.mdo>ul,
.mdo>ol {
border-color: #bbb;
}
.mdo ul>li {
list-style-type: disc;
}
.mdo ul>li,
.mdo ol>li {
margin: .7em 0;
}
strong {
color: #000;
}
p>em,
li>em,
td>em {
color: #c50;
padding: .1em;
border-bottom: .1em solid #bbb;
}
blockquote {
font-family: serif;
background: #f7f7f7;
border: .07em dashed #ccc;
padding: 0 2em;
margin: 1em 0;
}
small {
opacity: .8;
}
table {
border-collapse: collapse;
}
td {
padding: .2em .5em;
border: .12em solid #aaa;
}
th {
border: .12em solid #aaa;
}
/* mde support */
.mdo {
padding: 1em;
background: #f7f7f7;
}
html.dark .mdo {
background: #1c1c1c;
}
.CodeMirror {
background: #f7f7f7;
}
/* darkmode */
html.dark .mdo,
html.dark .CodeMirror {
@@ -224,55 +106,6 @@ html.dark .CodeMirror-selectedtext {
background: #246;
color: #fff;
}
html.dark .mdo a {
background: #057;
}
html.dark .mdo h1 a, html.dark .mdo h4 a,
html.dark .mdo h2 a, html.dark .mdo h5 a,
html.dark .mdo h3 a, html.dark .mdo h6 a {
color: inherit;
background: none;
}
html.dark pre,
html.dark code {
color: #8c0;
background: #1a1a1a;
border: .07em solid #333;
}
html.dark .mdo ul,
html.dark .mdo ol {
border-color: #444;
}
html.dark .mdo>ul,
html.dark .mdo>ol {
border-color: #555;
}
html.dark strong {
color: #fff;
}
html.dark p>em,
html.dark li>em,
html.dark td>em {
color: #f94;
border-color: #666;
}
html.dark h1 {
background: #383838;
border-top: .4em solid #b80;
border-bottom: .4em solid #4c4c4c;
}
html.dark h2 {
background: #444;
border-bottom: .22em solid #555;
}
html.dark td,
html.dark th {
border-color: #444;
}
html.dark blockquote {
background: #282828;
border: .07em dashed #444;
}
@@ -308,4 +141,15 @@ html.dark .editor-toolbar>button.active {
html.dark .editor-toolbar::after,
html.dark .editor-toolbar::before {
background: none;
}
}
/* ui.css overrides */
.mdo {
padding: 1em;
background: #f7f7f7;
}
html.dark .mdo {
background: #1c1c1c;
}

View File

@@ -3,9 +3,10 @@
<title>📝🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7">
<link href="/.cpr/mde.css?_={{ ts }}" rel="stylesheet">
<link href="/.cpr/deps/mini-fa.css?_={{ ts }}" rel="stylesheet">
<link href="/.cpr/deps/easymde.css?_={{ ts }}" rel="stylesheet">
<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
<link rel="stylesheet" href="/.cpr/mde.css?_={{ ts }}">
<link rel="stylesheet" href="/.cpr/deps/mini-fa.css?_={{ ts }}">
<link rel="stylesheet" href="/.cpr/deps/easymde.css?_={{ ts }}">
</head>
<body>
<div id="mw">
@@ -20,6 +21,7 @@
<textarea id="mt" style="display:none" autocomplete="off">{{ md }}</textarea>
</div>
</div>
<a href="#" id="repl">π</a>
<script>
var last_modified = {{ lastmod }};
@@ -30,16 +32,15 @@ var md_opt = {
};
var lightswitch = (function () {
var fun = function () {
var dark = !document.documentElement.getAttribute("class");
document.documentElement.setAttribute("class", dark ? "dark" : "");
if (window.localStorage)
localStorage.setItem('lightmode', dark ? 0 : 1);
};
if (window.localStorage && localStorage.getItem('lightmode') != 1)
fun();
return fun;
var l = localStorage,
drk = l.getItem('lightmode') != 1,
f = function (e) {
if (e) drk = !drk;
document.documentElement.setAttribute("class", drk? "dark":"light");
l.setItem('lightmode', drk? 0:1);
};
f();
return f;
})();
</script>

View File

@@ -75,7 +75,7 @@ function set_jumpto() {
}
function jumpto(ev) {
var tgt = ev.target || ev.srcElement;
var tgt = ev.target;
var ln = null;
while (tgt && !ln) {
ln = tgt.getAttribute('data-ln');
@@ -96,65 +96,61 @@ function md_changed(mde, on_srv) {
var md_now = mde.value();
var save_btn = QS('.editor-toolbar button.save');
if (md_now == window.md_saved)
save_btn.classList.add('disabled');
else
save_btn.classList.remove('disabled');
clmod(save_btn, 'disabled', md_now == window.md_saved);
set_jumpto();
}
function save(mde) {
var save_btn = QS('.editor-toolbar button.save');
if (save_btn.classList.contains('disabled')) {
alert('there is nothing to save');
return;
}
var force = save_btn.classList.contains('force-save');
if (force && !confirm('confirm that you wish to lose the changes made on the server since you opened this document')) {
alert('ok, aborted');
return;
if (clgot(save_btn, 'disabled'))
return toast.inf(2, 'no changes');
var force = clgot(save_btn, 'force-save');
function save2() {
var txt = mde.value();
var fd = new FormData();
fd.append("act", "tput");
fd.append("lastmod", (force ? -1 : last_modified));
fd.append("body", txt);
var url = (document.location + '').split('?')[0];
var xhr = new XMLHttpRequest();
xhr.open('POST', url, true);
xhr.responseType = 'text';
xhr.onreadystatechange = save_cb;
xhr.btn = save_btn;
xhr.mde = mde;
xhr.txt = txt;
xhr.send(fd);
}
var txt = mde.value();
var fd = new FormData();
fd.append("act", "tput");
fd.append("lastmod", (force ? -1 : last_modified));
fd.append("body", txt);
var url = (document.location + '').split('?')[0];
var xhr = new XMLHttpRequest();
xhr.open('POST', url, true);
xhr.responseType = 'text';
xhr.onreadystatechange = save_cb;
xhr.btn = save_btn;
xhr.mde = mde;
xhr.txt = txt;
xhr.send(fd);
if (!force)
save2();
else
modal.confirm('confirm that you wish to lose the changes made on the server since you opened this document', save2, function () {
toast.inf(3, 'aborted');
});
}
function save_cb() {
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.status !== 200) {
alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
return;
}
if (this.status !== 200)
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
var r;
try {
r = JSON.parse(this.responseText);
}
catch (ex) {
alert('Failed to parse reply from server:\n\n' + this.responseText);
return;
return toast.err(0, 'Failed to parse reply from server:\n\n' + this.responseText);
}
if (!r.ok) {
if (!this.btn.classList.contains('force-save')) {
this.btn.classList.add('force-save');
if (!clgot(this.btn, 'force-save')) {
clmod(this.btn, 'force-save', 1);
var msg = [
'This file has been modified since you started editing it!\n',
'if you really want to overwrite, press save again.\n',
@@ -164,15 +160,13 @@ function save_cb() {
r.lastmod + ' lastmod on the server now,',
r.now + ' server time now,\n',
];
alert(msg.join('\n'));
return toast.err(0, msg.join('\n'));
}
else {
alert('Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
}
return;
else
return toast.err(0, 'Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
}
this.btn.classList.remove('force-save');
clmod(this.btn, 'force-save');
//alert('save OK -- wrote ' + r.size + ' bytes.\n\nsha512: ' + r.sha512);
// download the saved doc from the server and compare
@@ -192,35 +186,23 @@ function save_chk() {
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.status !== 200) {
alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
return;
}
if (this.status !== 200)
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
var doc1 = this.txt.replace(/\r\n/g, "\n");
var doc2 = this.responseText.replace(/\r\n/g, "\n");
if (doc1 != doc2) {
alert(
modal.alert(
'Error! The document on the server does not appear to have saved correctly (your editor contents and the server copy is not identical). Place the document on your clipboard for now and check the server logs for hints\n\n' +
'Length: yours=' + doc1.length + ', server=' + doc2.length
);
alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
modal.alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
modal.alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
return;
}
last_modified = this.lastmod;
md_changed(this.mde, true);
var ok = mknod('div');
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
ok.innerHTML = 'OK✔';
var parent = ebi('m');
document.documentElement.appendChild(ok);
setTimeout(function () {
ok.style.opacity = 0;
}, 500);
setTimeout(function () {
ok.parentNode.removeChild(ok);
}, 750);
toast.ok(2, 'save OK' + (this.ntry ? '\nattempt ' + this.ntry : ''));
}

View File

@@ -11,14 +11,12 @@ html {
background: #333;
font-family: sans-serif;
text-shadow: 1px 1px 0px #000;
touch-action: manipulation;
}
html, body {
margin: 0;
padding: 0;
}
body {
padding-bottom: 5em;
}
#box {
padding: .5em 1em;
background: #2c2c2c;
@@ -28,4 +26,4 @@ pre {
}
a {
color: #fc5;
}
}

View File

@@ -6,7 +6,7 @@
<title>copyparty</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/msg.css?_={{ ts }}">
<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}">
</head>
<body>

View File

@@ -3,6 +3,9 @@ html, body, #wrap {
background: #f7f7f7;
font-family: sans-serif;
}
html {
touch-action: manipulation;
}
#wrap {
max-width: 40em;
margin: 2em auto;
@@ -26,6 +29,12 @@ a {
border-radius: .2em;
padding: .2em .8em;
}
#repl {
border: none;
background: none;
color: inherit;
padding: 0;
}
table {
border-collapse: collapse;
}
@@ -73,4 +82,4 @@ html.dark input {
}
html.dark .num {
border-color: #777;
}
}

View File

@@ -6,7 +6,8 @@
<title>copyparty</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/splash.css?_={{ ts }}">
<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
</head>
<body>
@@ -66,11 +67,13 @@
</form>
</ul>
</div>
<a href="#" id="repl">π</a>
<script>
if (window.localStorage && localStorage.getItem('lightmode') != 1)
if (localStorage.getItem('lightmode') != 1)
document.documentElement.setAttribute("class", "dark");
</script>
<script src="/.cpr/util.js?_={{ ts }}"></script>
</body>
</html>
</html>

copyparty/web/ui.css (new file, 442 lines)
View File

@@ -0,0 +1,442 @@
@font-face {
font-family: 'scp';
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
}
html {
touch-action: manipulation;
}
#tt, #toast {
position: fixed;
max-width: 34em;
max-width: min(34em, 90%);
max-width: min(34em, calc(100% - 7em));
background: #222;
border: 0 solid #777;
box-shadow: 0 .2em .5em #222;
border-radius: .4em;
z-index: 9001;
}
#tt {
max-width: min(34em, calc(100% - 3.3em));
overflow: hidden;
margin: .7em 0;
padding: 0 1.3em;
height: 0;
opacity: .1;
transition: opacity 0.14s, height 0.14s, padding 0.14s;
}
#toast {
bottom: 5em;
right: -1em;
line-height: 1.5em;
padding: 1em 1.3em;
margin-left: 3em;
border-width: .4em 0;
overflow-wrap: break-word;
transform: translateX(100%);
transition:
transform .4s cubic-bezier(.2, 1.2, .5, 1),
right .4s cubic-bezier(.2, 1.2, .5, 1);
text-shadow: 1px 1px 0 #000;
color: #fff;
}
#toast a {
color: inherit;
text-shadow: inherit;
background: rgba(0, 0, 0, 0.4);
border-radius: .3em;
padding: .2em .3em;
}
#toast a#toastc {
display: inline-block;
position: absolute;
overflow: hidden;
left: 0;
width: 0;
opacity: 0;
padding: .3em 0;
margin: -.3em 0 0 0;
line-height: 1.3em;
color: #000;
border: none;
outline: none;
text-shadow: none;
border-radius: .5em 0 0 .5em;
transition: left .3s, width .3s, padding .3s, opacity .3s;
}
#toastb {
max-height: 70vh;
overflow-y: auto;
}
#toast.scroll #toastb {
overflow-y: scroll;
margin-right: -1.2em;
padding-right: .7em;
}
#toast pre {
margin: 0;
}
#toast.vis {
right: 1.3em;
transform: unset;
}
#toast.vis #toastc {
left: -2em;
width: .4em;
padding: .3em .8em;
opacity: 1;
}
#toast.inf {
background: #07a;
border-color: #0be;
}
#toast.inf #toastc {
background: #0be;
}
#toast.ok {
background: #380;
border-color: #8e4;
}
#toast.ok #toastc {
background: #8e4;
}
#toast.warn {
background: #960;
border-color: #fc0;
}
#toast.warn #toastc {
background: #fc0;
}
#toast.err {
background: #900;
border-color: #d06;
}
#toast.err #toastc {
background: #d06;
}
#tt.b {
padding: 0 2em;
border-radius: .5em;
box-shadow: 0 .2em 1em #000;
}
#tt.show {
padding: 1em 1.3em;
border-width: .4em 0;
height: auto;
opacity: 1;
}
#tt.show.b {
padding: 1.5em 2em;
border-width: .5em 0;
}
#modalc code,
#tt code {
background: #3c3c3c;
padding: .1em .3em;
border-top: 1px solid #777;
border-radius: .3em;
line-height: 1.7em;
}
#tt em {
color: #f6a;
}
html.light #tt {
background: #fff;
border-color: #888 #000 #777 #000;
}
html.light #tt,
html.light #toast {
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
}
#modalc code,
html.light #tt code {
background: #060;
color: #fff;
}
html.light #tt em {
color: #d38;
}
#modal {
position: fixed;
overflow: auto;
top: 0;
left: 0;
right: 0;
bottom: 0;
width: 100%;
height: 100%;
z-index: 9001;
background: rgba(64,64,64,0.6);
}
#modal>table {
width: 100%;
height: 100%;
}
#modal td {
text-align: center;
}
#modalc {
position: relative;
display: inline-block;
background: #f7f7f7;
color: #333;
text-shadow: none;
text-align: left;
margin: 3em;
padding: 1em 1.1em;
border-radius: .6em;
box-shadow: 0 .3em 3em rgba(0,0,0,0.5);
max-width: 50em;
max-height: 30em;
overflow: auto;
}
@media (min-width: 40em) {
#modalc {
min-width: 30em;
}
}
#modalc li {
margin: 1em 0;
}
#modalc h6 {
font-size: 1.3em;
border-bottom: 1px solid #999;
margin: 0;
padding: .3em;
text-align: center;
}
#modalb {
position: sticky;
text-align: right;
padding-top: 1em;
bottom: 0;
right: 0;
}
#modalb a {
color: #000;
background: #ccc;
display: inline-block;
border-radius: .3em;
padding: .5em 1em;
outline: none;
border: none;
}
#modalb a:focus,
#modalb a:hover {
background: #06d;
color: #fff;
}
#modalb a+a {
margin-left: .5em;
}
#modali {
display: block;
background: #fff;
color: #000;
width: calc(100% - 1.25em);
margin: 1em -.1em 0 -.1em;
padding: .5em;
outline: none;
border: .25em solid #ccc;
border-radius: .4em;
}
#modali:focus {
border-color: #06d;
}
#repl_pre {
max-width: 24em;
}
.mdo pre,
.mdo code,
.mdo a {
color: #480;
background: #f7f7f7;
border: .07em solid #ddd;
border-radius: .2em;
padding: .1em .3em;
margin: 0 .1em;
}
.mdo pre,
.mdo code,
.mdo tt {
font-family: 'scp', monospace, monospace;
white-space: pre-wrap;
word-break: break-all;
}
.mdo code {
font-size: .96em;
}
.mdo h1,
.mdo h2 {
line-height: 1.5em;
}
.mdo h1 {
font-size: 1.7em;
text-align: center;
border: 1em solid #777;
border-width: .05em 0;
margin: 3em 0;
}
.mdo h2 {
font-size: 1.5em;
font-weight: normal;
background: #f7f7f7;
border-top: .07em solid #fff;
border-bottom: .07em solid #bbb;
border-radius: .5em .5em 0 0;
padding-left: .4em;
margin-top: 3em;
}
.mdo h3 {
border-bottom: .1em solid #999;
}
.mdo h1 a, .mdo h3 a, .mdo h5 a,
.mdo h2 a, .mdo h4 a, .mdo h6 a {
color: inherit;
display: block;
background: none;
border: none;
padding: 0;
margin: 0;
}
.mdo ul,
.mdo ol {
border-left: .3em solid #ddd;
}
.mdo ul>li,
.mdo ol>li {
margin: .7em 0;
list-style-type: disc;
}
.mdo strong {
color: #000;
}
.mdo p>em,
.mdo li>em,
.mdo td>em {
color: #c50;
padding: .1em;
border-bottom: .1em solid #bbb;
}
.mdo blockquote {
font-family: serif;
background: #f7f7f7;
border: .07em dashed #ccc;
padding: 0 2em;
margin: 1em 0;
}
.mdo small {
opacity: .8;
}
.mdo pre code {
display: block;
margin: 0 -.3em;
padding: .4em .5em;
line-height: 1.1em;
}
.mdo pre code:hover {
background: #fec;
color: #360;
}
.mdo table {
border-collapse: collapse;
margin: 1em 0;
}
.mdo th,
.mdo td {
padding: .2em .5em;
border: .12em solid #aaa;
}
@media screen {
.mdo {
word-break: break-word;
overflow-wrap: break-word;
word-wrap: break-word; /*ie*/
}
html.light .mdo a,
.mdo a {
color: #fff;
background: #39b;
text-decoration: none;
padding: 0 .3em;
border: none;
border-bottom: .07em solid #079;
}
.mdo h1 {
color: #fff;
background: #444;
font-weight: normal;
border-top: .4em solid #fb0;
border-bottom: .4em solid #777;
border-radius: 0 1em 0 1em;
margin: 3em 0 1em 0;
padding: .5em 0;
}
.mdo h2 {
color: #fff;
background: #555;
margin-top: 2em;
border-bottom: .22em solid #999;
border-top: none;
}
html.dark .mdo a {
background: #057;
}
html.dark .mdo h1 a, html.dark .mdo h4 a,
html.dark .mdo h2 a, html.dark .mdo h5 a,
html.dark .mdo h3 a, html.dark .mdo h6 a {
color: inherit;
background: none;
}
html.dark .mdo pre,
html.dark .mdo code {
color: #8c0;
background: #1a1a1a;
border: .07em solid #333;
}
html.dark .mdo ul,
html.dark .mdo ol {
border-color: #444;
}
html.dark .mdo strong {
color: #fff;
}
html.dark .mdo p>em,
html.dark .mdo li>em,
html.dark .mdo td>em {
color: #f94;
border-color: #666;
}
html.dark .mdo h1 {
background: #383838;
border-top: .4em solid #b80;
border-bottom: .4em solid #4c4c4c;
}
html.dark .mdo h2 {
background: #444;
border-bottom: .22em solid #555;
}
html.dark .mdo td,
html.dark .mdo th {
border-color: #444;
}
html.dark .mdo blockquote {
background: #282828;
border: .07em dashed #444;
}
}

File diff suppressed because it is too large

View File

@@ -1,301 +0,0 @@
#op_up2k {
padding: 0 1em 1em 1em;
}
#u2form {
position: absolute;
top: 0;
left: 0;
width: 2px;
height: 2px;
overflow: hidden;
}
#u2form input {
background: #444;
border: 0px solid #444;
outline: none;
}
#u2err.err {
color: #f87;
padding: .5em;
}
#u2err.msg {
color: #999;
padding: .5em;
font-size: .9em;
}
#u2btn {
color: #eee;
background: #555;
background: -moz-linear-gradient(top, #367 0%, #489 50%, #38788a 51%, #367 100%);
background: -webkit-linear-gradient(top, #367 0%, #489 50%, #38788a 51%, #367 100%);
background: linear-gradient(to bottom, #367 0%, #489 50%, #38788a 51%, #367 100%);
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#489', endColorstr='#38788a', GradientType=0);
text-decoration: none;
line-height: 1.3em;
border: 1px solid #222;
border-radius: .4em;
text-align: center;
font-size: 1.5em;
margin: .5em auto;
padding: .8em 0;
width: 16em;
cursor: pointer;
box-shadow: .4em .4em 0 #111;
}
#op_up2k.srch #u2btn {
background: linear-gradient(to bottom, #ca3 0%, #fd8 50%, #fc6 51%, #b92 100%);
text-shadow: 1px 1px 1px #fc6;
color: #333;
}
#u2conf #u2btn {
margin: -1.5em 0;
padding: .8em 0;
width: 100%;
max-width: 12em;
display: inline-block;
}
#u2conf #u2btn_cw {
text-align: right;
}
#u2notbtn {
display: none;
text-align: center;
background: #333;
padding-top: 1em;
}
#u2notbtn * {
line-height: 1.3em;
}
#u2tab {
margin: 3em auto;
width: calc(100% - 2em);
max-width: 100em;
}
#op_up2k.srch #u2tab {
max-width: none;
}
#u2tab td {
border: 1px solid #ccc;
border-width: 0 0px 1px 0;
padding: .1em .3em;
}
#u2tab td:nth-child(2) {
width: 5em;
white-space: nowrap;
}
#u2tab td:nth-child(3) {
width: 40%;
}
#op_up2k.srch #u2tab td:nth-child(3) {
font-family: sans-serif;
width: auto;
}
#u2tab tbody tr:hover td {
background: #222;
}
#u2cards {
padding: 1em 0 .3em 1em;
margin: 1.5em auto -2.5em auto;
white-space: nowrap;
text-align: center;
overflow: hidden;
}
#u2cards.w {
width: 45em;
text-align: left;
}
#u2cards a {
padding: .2em 1em;
border: 1px solid #777;
border-width: 0 0 1px 0;
background: linear-gradient(to bottom, #333, #222);
}
#u2cards a:first-child {
border-radius: .4em 0 0 0;
}
#u2cards a:last-child {
border-radius: 0 .4em 0 0;
}
#u2cards a.act {
padding-bottom: .5em;
border-width: 1px 1px .1em 1px;
border-radius: .3em .3em 0 0;
margin-left: -1px;
background: linear-gradient(to bottom, #464, #333 80%);
box-shadow: 0 -.17em .67em #280;
border-color: #7c5 #583 #333 #583;
position: relative;
color: #fd7;
}
#u2cards span {
color: #fff;
}
#u2conf {
margin: 1em auto;
width: 30em;
}
#u2conf.has_btn {
width: 48em;
}
#u2conf * {
text-align: center;
line-height: 1em;
margin: 0;
padding: 0;
border: none;
outline: none;
}
#u2conf .txtbox {
width: 3em;
color: #fff;
background: #444;
border: 1px solid #777;
font-size: 1.2em;
padding: .15em 0;
height: 1.05em;
}
#u2conf .txtbox.err {
background: #922;
}
#u2conf a {
color: #fff;
background: #c38;
text-decoration: none;
border-radius: .1em;
font-size: 1.5em;
padding: .1em 0;
margin: 0 -1px;
width: 1.5em;
height: 1em;
display: inline-block;
position: relative;
bottom: -0.08em;
}
#u2conf input+a {
background: #d80;
}
#u2conf label {
font-size: 1.6em;
width: 2em;
height: 1em;
padding: .4em 0;
display: block;
border-radius: .25em;
}
#u2conf input[type="checkbox"] {
position: relative;
opacity: .02;
top: 2em;
}
#u2conf input[type="checkbox"]+label {
position: relative;
background: #603;
border-bottom: .2em solid #a16;
box-shadow: 0 .1em .3em #a00 inset;
}
#u2conf input[type="checkbox"]:checked+label {
background: #6a1;
border-bottom: .2em solid #efa;
box-shadow: 0 .1em .5em #0c0;
}
#u2conf input[type="checkbox"]+label:hover {
box-shadow: 0 .1em .3em #fb0;
border-color: #fb0;
}
#op_up2k.srch #u2conf td:nth-child(1)>*,
#op_up2k.srch #u2conf td:nth-child(2)>*,
#op_up2k.srch #u2conf td:nth-child(3)>* {
background: #777;
border-color: #ccc;
box-shadow: none;
opacity: .2;
}
#u2foot {
color: #fff;
font-style: italic;
}
#u2foot .warn {
font-size: 1.3em;
padding: .5em .8em;
margin: 1em -.6em;
color: #f74;
background: #322;
border: 1px solid #633;
border-width: .1em 0;
text-align: center;
}
#u2foot .warn span {
color: #f86;
}
html.light #u2foot .warn {
color: #b00;
background: #fca;
border-color: #f70;
}
html.light #u2foot .warn span {
color: #930;
}
#u2foot span {
color: #999;
font-size: .9em;
font-weight: normal;
}
#u2footfoot {
margin-bottom: -1em;
}
.prog {
font-family: monospace;
}
#u2tab a>span {
font-weight: bold;
font-style: italic;
color: #fff;
padding-left: .2em;
}
#u2cleanup {
float: right;
margin-bottom: -.3em;
}
.fsearch_explain {
padding-left: .7em;
font-size: 1.1em;
line-height: 0;
}
html.light #u2btn {
box-shadow: .4em .4em 0 #ccc;
}
html.light #u2cards span {
color: #000;
}
html.light #u2cards a {
background: linear-gradient(to bottom, #eee, #fff);
}
html.light #u2cards a.act {
color: #037;
background: inherit;
box-shadow: 0 -.17em .67em #0ad;
border-color: #09c #05a #eee #05a;
}
html.light #u2conf .txtbox {
background: #fff;
color: #444;
}
html.light #u2conf .txtbox.err {
background: #f96;
color: #300;
}
html.light #op_up2k.srch #u2btn {
border-color: #a80;
}
html.light #u2foot {
color: #000;
}
html.light #u2tab tbody tr:hover td {
background: #fff;
}

View File

@@ -7,7 +7,15 @@ if (!window['console'])
var is_touch = 'ontouchstart' in window,
ANDROID = /(android)/i.test(navigator.userAgent);
IPHONE = /iPhone|iPad|iPod/i.test(navigator.userAgent),
ANDROID = /android/i.test(navigator.userAgent),
WINDOWS = navigator.platform ? navigator.platform == 'Win32' : /Windows/.test(navigator.userAgent);
var ebi = document.getElementById.bind(document),
QS = document.querySelector.bind(document),
QSA = document.querySelectorAll.bind(document),
mknod = document.createElement.bind(document);
// error handler for mobile devices
@@ -21,36 +29,135 @@ function esc(txt) {
}[c];
});
}
window.onunhandledrejection = function (e) {
console.log("REJ: " + e.reason);
};
try {
console.hist = [];
var hook = function (t) {
var orig = console[t].bind(console),
cfun = function () {
console.hist.push(Date.now() + ' ' + t + ': ' + Array.from(arguments).join(', '));
if (console.hist.length > 100)
console.hist = console.hist.slice(50);
orig.apply(console, arguments);
};
console['std' + t] = orig;
console[t] = cfun;
};
hook('log');
console.log('log-capture ok');
hook('debug');
hook('warn');
hook('error');
}
catch (ex) {
if (console.stdlog)
console.log = console.stdlog;
console.log(ex);
}
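// usage sketch (illustrative): the hook keeps the most recent console lines
// in console.hist for the crashpage, and the originals stay reachable:
console.log('probing');            // printed as usual, and captured into console.hist
console.hist.slice(-3);            // latest entries, "<timestamp> log: probing" style
console.stdlog('skip capture');    // the unhooked console.log, if the hook installed ok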
var crashed = false, ignexd = {};
function vis_exh(msg, url, lineNo, columnNo, error) {
if (!window.onerror)
if ((msg + '').indexOf('ResizeObserver') !== -1)
return; // chrome issue 809574 (benign, from <video>)
var ekey = url + '\n' + lineNo + '\n' + msg;
if (ignexd[ekey] || crashed)
return;
crashed = true;
window.onerror = undefined;
window['vis_exh'] = null;
var html = ['<h1>you hit a bug!</h1><p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();" style="text-decoration:underline;color:#fc0">reset copyparty settings</a> if you are stuck here</p><p>please send me a screenshot arigathanks gozaimuch: <code>ed/irc.rizon.net</code> or <code>ed#2644</code><br />&nbsp; (and if you can, press F12 and include the "Console" tab in the screenshot too)</p><p>',
esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)) + '</p>'];
var html = [
'<h1>you hit a bug!</h1>',
'<p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a></p>',
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">github issue</a> or <code>ed#2644</code></p>',
'<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)) + '</p>',
'<p><b>UA:</b> ' + esc(navigator.userAgent + '')
];
if (error) {
var find = ['desc', 'stack', 'trace'];
for (var a = 0; a < find.length; a++)
if (String(error[find[a]]) !== 'undefined')
html.push('<h3>' + find[a] + '</h3>' +
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
try {
var ua = '',
ad = navigator.userAgentData,
adb = ad.brands;
for (var a = 0; a < adb.length; a++)
if (!/Not.*A.*Brand/.exec(adb[a].brand))
ua += adb[a].brand + '/' + adb[a].version + ', ';
ua += ad.platform;
html.push('<br /><b>UAD:</b> ' + esc(ua.slice(0, 100)));
}
document.body.innerHTML = html.join('\n');
catch (e) { }
html.push('</p>');
var s = mknod('style');
s.innerHTML = 'body{background:#333;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em} h1{margin:.5em 1em 0 0;padding:0} h3{border-top:1px solid #999;margin:0} code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} *{line-height:1.5em}';
document.head.appendChild(s);
try {
if (error) {
var find = ['desc', 'stack', 'trace'];
for (var a = 0; a < find.length; a++)
if (String(error[find[a]]) !== 'undefined')
html.push('<p class="b"><b>' + find[a] + ':</b><br />' +
esc(String(error[find[a]])).replace(/\n/g, '<br />\n') + '</p>');
}
ignexd[ekey] = true;
var ls = jcp(localStorage);
if (ls.fman_clip)
ls.fman_clip = ls.fman_clip.length + ' items';
var lsk = Object.keys(ls);
lsk.sort();
html.push('<p class="b">');
for (var a = 0; a < lsk.length; a++)
html.push(' <b>' + esc(lsk[a]) + '</b> <code>' + esc(ls[lsk[a]]) + '</code> ');
html.push('</p>');
}
catch (e) { }
if (console.hist.length) {
html.push('<p class="b"><b>console:</b><ul><li>' + Date.now() + ' @</li>');
for (var a = console.hist.length - 1, aa = Math.max(0, console.hist.length - 20); a >= aa; a--)
html.push('<li>' + esc(console.hist[a]) + '</li>');
html.push('</ul>')
}
try {
var exbox = ebi('exbox');
if (!exbox) {
exbox = mknod('div');
exbox.setAttribute('id', 'exbox');
document.body.appendChild(exbox);
var s = mknod('style');
s.innerHTML = (
'#exbox{background:#333;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%;overflow:auto;width:calc(100% - 2em)} ' +
'#exbox,#exbox *{line-height:1.5em;overflow-wrap:break-word} ' +
'#exbox code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} ' +
'#exbox a{text-decoration:underline;color:#fc0} ' +
'#exbox h1{margin:.5em 1em 0 0;padding:0} ' +
'#exbox p.b{border-top:1px solid #999;margin:1em 0 0 0;font-size:1em} ' +
'#exbox ul, #exbox li {margin:0 0 0 .5em;padding:0} ' +
'#exbox b{color:#fff}'
);
document.head.appendChild(s);
}
exbox.innerHTML = html.join('\n').replace(/https?:\/\/[^ \/]+\//g, '/').replace(/js\?_=[a-zA-Z]{4}/g, 'js').replace(/<ghi>/, 'https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md');
exbox.style.display = 'block';
}
catch (e) {
document.body.innerHTML = html.join('\n');
}
throw 'fatal_err';
}
var ebi = document.getElementById.bind(document),
QS = document.querySelector.bind(document),
QSA = document.querySelectorAll.bind(document),
mknod = document.createElement.bind(document);
function ignex(all) {
var o = ebi('exbox');
o.style.display = 'none';
o.innerHTML = '';
crashed = false;
if (!all)
window.onerror = vis_exh;
}
function ctrl(e) {
@@ -92,6 +199,22 @@ if (!String.startsWith) {
return this.substring(i, i + s.length) === s;
};
}
if (!Element.prototype.matches) {
Element.prototype.matches =
Element.prototype.oMatchesSelector ||
Element.prototype.msMatchesSelector ||
Element.prototype.mozMatchesSelector ||
Element.prototype.webkitMatchesSelector;
}
if (!Element.prototype.closest) {
Element.prototype.closest = function (s) {
var el = this;
do {
if (el.matches(s)) return el;
el = el.parentElement || el.parentNode;
} while (el !== null && el.nodeType === 1);
}
}
// https://stackoverflow.com/a/950146
@@ -100,10 +223,10 @@ function import_js(url, cb) {
var script = mknod('script');
script.type = 'text/javascript';
script.src = url;
script.onreadystatechange = cb;
script.onload = cb;
script.onerror = function () {
toast.err(0, 'Failed to load module:\n' + url);
};
head.appendChild(script);
}
@@ -130,90 +253,37 @@ function crc32(str) {
}
function clmod(obj, cls, add) {
var re = new RegExp('\\s*\\b' + cls + '\\s*\\b', 'g');
if (add == 't')
add = !re.test(obj.className);
function clmod(el, cls, add) {
if (el.classList) {
var have = el.classList.contains(cls);
if (add == 't')
add = !have;
obj.className = obj.className.replace(re, ' ') + (add ? ' ' + cls : '');
if (add != have)
el.classList[add ? 'add' : 'remove'](cls);
return;
}
var re = new RegExp('\\s*\\b' + cls + '\\s*\\b', 'g'),
n1 = el.className;
if (add == 't')
add = !re.test(n1);
var n2 = n1.replace(re, ' ') + (add ? ' ' + cls : '');
if (n1 != n2)
el.className = n2;
}
function sortfiles(nodes) {
var sopts = jread('fsort', [["href", 1, ""]]);
function clgot(el, cls) {
if (el.classList)
return el.classList.contains(cls);
try {
var is_srch = false;
if (nodes[0]['rp']) {
is_srch = true;
for (var b = 0, bb = nodes.length; b < bb; b++)
nodes[b].ext = nodes[b].rp.split('.').pop();
for (var b = 0; b < sopts.length; b++)
if (sopts[b][0] == 'href')
sopts[b][0] = 'rp';
}
for (var a = sopts.length - 1; a >= 0; a--) {
var name = sopts[a][0], rev = sopts[a][1], typ = sopts[a][2];
if (!name)
continue;
if (name == 'ts')
typ = 'int';
if (name.indexOf('tags/') === 0) {
name = name.slice(5);
for (var b = 0, bb = nodes.length; b < bb; b++)
nodes[b]._sv = nodes[b].tags[name];
}
else {
for (var b = 0, bb = nodes.length; b < bb; b++) {
var v = nodes[b][name];
if ((v + '').indexOf('<a ') === 0)
v = v.split('>')[1];
else if (name == "href" && v) {
if (v.slice(-1) == '/')
v = '\t' + v;
v = uricom_dec(v)[0]
}
nodes[b]._sv = v;
}
}
var onodes = nodes.map(function (x) { return x; });
nodes.sort(function (n1, n2) {
var v1 = n1._sv,
v2 = n2._sv;
if (v1 === undefined) {
if (v2 === undefined) {
return onodes.indexOf(n1) - onodes.indexOf(n2);
}
return -1 * rev;
}
if (v2 === undefined) return 1 * rev;
var ret = rev * (typ == 'int' ? (v1 - v2) : (v1.localeCompare(v2)));
if (ret === 0)
ret = onodes.indexOf(n1) - onodes.indexOf(n2);
return ret;
});
}
for (var b = 0, bb = nodes.length; b < bb; b++) {
delete nodes[b]._sv;
if (is_srch)
delete nodes[b].ext;
}
}
catch (ex) {
console.log("failed to apply sort config: " + ex);
console.log("resetting fsort " + sread('fsort'))
localStorage.removeItem('fsort');
}
return nodes;
var lst = (el.getAttribute('class') + '').split(/ /g);
return has(lst, cls);
}
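// usage sketch (illustrative element and classes): add=1 adds, add='t' toggles,
// omitting add removes; clgot just reports whether the class is set
var el = ebi('save');
clmod(el, 'disabled', 1);             // add
clmod(el, 'disabled', 't');           // toggle
clmod(el, 'disabled');                // remove
var armed = clgot(el, 'force-save');  // true/false, with or without el.classList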
@@ -321,6 +391,18 @@ function linksplit(rp) {
}
function vsplit(vp) {
if (vp.endsWith('/'))
vp = vp.slice(0, -1);
var ofs = vp.lastIndexOf('/') + 1,
base = vp.slice(0, ofs),
fn = vp.slice(ofs);
return [base, fn];
}
function uricom_enc(txt, do_fb_enc) {
try {
return encodeURIComponent(txt);
@@ -334,6 +416,16 @@ function uricom_enc(txt, do_fb_enc) {
}
}
function url_enc(txt) {
var parts = txt.split('/'),
ret = [];
for (var a = 0; a < parts.length; a++)
ret.push(uricom_enc(parts[a]));
return ret.join('/');
}
function uricom_dec(txt) {
try {
@@ -346,6 +438,17 @@ function uricom_dec(txt) {
}
function uricom_adec(arr, li) {
var ret = [];
for (var a = 0; a < arr.length; a++) {
var txt = uricom_dec(arr[a])[0];
ret.push(li ? '<li>' + esc(txt) + '</li>' : txt);
}
return ret;
}
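// usage sketch (illustrative paths): vsplit separates folder and filename,
// url_enc escapes each path segment, uricom_dec returns the text at index 0
vsplit('/music/song.flac');          // ["/music/", "song.flac"]
vsplit('/music/albums/');            // ["/music/", "albums"]
url_enc('/a b/c&d.txt');             // "/a%20b/c%26d.txt"
uricom_dec('a%20b')[0];              // "a b"
uricom_adec(['a%20b', 'c'], false);  // ["a b", "c"]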
function get_evpath() {
var ret = document.location.pathname;
@@ -385,6 +488,41 @@ function s2ms(s) {
}
function f2f(val, nd) {
// 10.toFixed(1) returns 10.00 for certain values of 10
val = (val * Math.pow(10, nd)).toFixed(0).split('.')[0];
return nd ? (val.slice(0, -nd) || '0') + '.' + val.slice(-nd) : val;
}
function humansize(b, terse) {
var i = 0, u = terse ? ['B', 'K', 'M', 'G'] : ['B', 'KB', 'MB', 'GB'];
while (b >= 1000 && i < u.length) {
b /= 1024;
i += 1;
}
return f2f(b, b >= 100 ? 0 : b >= 10 ? 1 : 2) + ' ' + u[i];
}
function humantime(v) {
if (v >= 60 * 60 * 24)
return v;
try {
return /.*(..:..:..).*/.exec(new Date(v * 1000).toUTCString())[1];
}
catch (ex) {
return v;
}
}
function clamp(v, a, b) {
return Math.min(Math.max(v, a), b);
}
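// usage sketch (illustrative numbers):
f2f(3.14159, 2);   // "3.14"
humansize(1536);   // "1.50 KB"  (terse=true would give "1.50 K")
humantime(3723);   // "01:02:03" -- values of 24h or more are returned as-is
clamp(5, 0, 3);    // 3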
function has(haystack, needle) {
for (var a = 0; a < haystack.length; a++)
if (haystack[a] == needle)
@@ -407,19 +545,14 @@ function jcp(obj) {
function sread(key) {
if (window.localStorage)
return localStorage.getItem(key);
return null;
return localStorage.getItem(key);
}
function swrite(key, val) {
if (window.localStorage) {
if (val === undefined || val === null)
localStorage.removeItem(key);
else
localStorage.setItem(key, val);
}
if (val === undefined || val === null)
localStorage.removeItem(key);
else
localStorage.setItem(key, val);
}
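// usage sketch (illustrative key): plain string storage;
// writing null/undefined deletes the key instead
swrite('lightmode', 1);     // stored as the string "1"
sread('lightmode');         // "1", or null if unset
swrite('lightmode', null);  // remove the key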
function jread(key, fb) {
@@ -499,6 +632,42 @@ function hist_replace(url) {
}
var timer = (function () {
var r = {};
r.q = [];
r.last = 0;
r.add = function (fun, run) {
r.rm(fun);
r.q.push(fun);
if (run)
fun();
};
r.rm = function (fun) {
apop(r.q, fun);
};
function doevents() {
if (crashed)
return;
if (Date.now() - r.last < 69)
return;
var q = r.q.slice(0);
for (var a = 0; a < q.length; a++)
q[a]();
r.last = Date.now();
}
setInterval(doevents, 100);
return r;
})();
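// usage sketch (illustrative callback): one shared ~100ms interval
// instead of many small timers; add() replaces any previous registration
function blink() { /* periodic ui work */ }
timer.add(blink, true);   // register and run once immediately
timer.rm(blink);          // unregister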
var tt = (function () {
var r = {
"tt": mknod("div"),
@@ -515,6 +684,8 @@ var tt = (function () {
r.skip = false;
return;
}
if (QS('body.bbox-open'))
return;
var cfg = sread('tooltips');
if (cfg !== null && cfg != '1')
@@ -527,32 +698,66 @@ var tt = (function () {
r.el = this;
var pos = this.getBoundingClientRect(),
dir = this.getAttribute('ttd') || '',
left = pos.left < window.innerWidth / 2,
margin = parseFloat(this.getAttribute('ttm') || 0),
top = pos.top < window.innerHeight / 2,
big = this.className.indexOf(' ttb') !== -1;
if (dir.indexOf('u') + 1) top = false;
if (dir.indexOf('d') + 1) top = true;
if (dir.indexOf('l') + 1) left = false;
if (dir.indexOf('r') + 1) left = true;
clmod(r.tt, 'b', big);
r.tt.style.top = top ? pos.bottom + 'px' : 'auto';
r.tt.style.bottom = top ? 'auto' : (window.innerHeight - pos.top) + 'px';
r.tt.style.left = left ? pos.left + 'px' : 'auto';
r.tt.style.right = left ? 'auto' : (window.innerWidth - pos.right) + 'px';
r.tt.style.left = '0';
r.tt.style.top = '0';
r.tt.innerHTML = msg.replace(/\$N/g, "<br />");
r.el.addEventListener('mouseleave', r.hide);
window.addEventListener('scroll', r.hide);
clmod(r.tt, 'show', 1);
var tw = r.tt.offsetWidth,
x = pos.left + (pos.right - pos.left) / 2 - tw / 2;
if (x + tw >= window.innerWidth - 24)
x = window.innerWidth - tw - 24;
if (x < 0)
x = 12;
r.tt.style.left = x + 'px';
r.tt.style.top = top ? (margin + pos.bottom) + 'px' : 'auto';
r.tt.style.bottom = top ? 'auto' : (margin + window.innerHeight - pos.top) + 'px';
};
r.hide = function () {
r.hide = function (e) {
ev(e);
window.removeEventListener('scroll', r.hide);
clmod(r.tt, 'show');
if (r.el)
r.el.removeEventListener('mouseleave', r.hide);
};
if (is_touch && IPHONE) {
var f1 = r.show,
f2 = r.hide,
q = [];
// if an onclick-handler creates a new timer,
// iOS 13.1.2 delays the entire handler by up to 401ms,
// win by using a shared timer instead
timer.add(function () {
while (q.length && Date.now() >= q[0][0])
q.shift()[1]();
});
r.show = function () {
q.push([Date.now() + 100, f1.bind(this)]);
};
r.hide = function () {
q.push([Date.now() + 100, f2.bind(this)]);
};
}
r.tt.onclick = r.hide;
r.att = function (ctr) {
@@ -585,3 +790,317 @@ var tt = (function () {
return r;
})();
function lf2br(txt) {
var html = '', hp = txt.split(/(?=<.?pre>)/i);
for (var a = 0; a < hp.length; a++)
html += hp[a].startsWith('<pre>') ? hp[a] :
hp[a].replace(/<br ?.?>\n/g, '\n').replace(/\n<br ?.?>/g, '\n').replace(/\n/g, '<br />\n');
return html;
}
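// usage sketch (illustrative input): newlines become <br /> except inside <pre> blocks
lf2br('one\ntwo');            // "one<br />\ntwo"
lf2br('<pre>a\nb</pre>\nc');  // keeps the pre-chunk verbatim, converts the rest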
var toast = (function () {
var r = {},
te = null,
scrolling = false,
obj = mknod('div');
obj.setAttribute('id', 'toast');
document.body.appendChild(obj);
r.visible = false;
r.txt = null;
function scrollchk() {
if (scrolling)
return;
var tb = ebi('toastb'),
vis = tb.offsetHeight,
all = tb.scrollHeight;
if (8 + vis >= all)
return;
clmod(obj, 'scroll', 1);
scrolling = true;
}
function unscroll() {
timer.rm(scrollchk);
clmod(obj, 'scroll');
scrolling = false;
}
r.hide = function (e) {
ev(e);
unscroll();
clearTimeout(te);
clmod(obj, 'vis');
r.visible = false;
};
r.show = function (cl, sec, txt) {
clearTimeout(te);
if (sec)
te = setTimeout(r.hide, sec * 1000);
obj.innerHTML = '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>';
obj.className = cl;
sec += obj.offsetWidth;
obj.className += ' vis';
ebi('toastc').onclick = r.hide;
timer.add(scrollchk);
r.visible = true;
r.txt = txt;
};
r.ok = function (sec, txt) {
r.show('ok', sec, txt);
};
r.inf = function (sec, txt) {
r.show('inf', sec, txt);
};
r.warn = function (sec, txt) {
r.show('warn', sec, txt);
};
r.err = function (sec, txt) {
r.show('err', sec, txt);
};
return r;
})();
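// usage sketch (illustrative messages): sec=0 keeps the toast up until it is
// clicked away; txt is rendered as html and newlines become <br />
toast.ok(2, 'save OK');
var why = 'disk full';                        // illustrative
toast.err(0, 'upload failed:\n' + esc(why));  // esc() untrusted text first
toast.hide();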
var modal = (function () {
var r = {},
q = [],
o = null,
cb_up = null,
cb_ok = null,
cb_ng = null,
prim = '<a href="#" id="modal-ok">OK</a>',
sec = '<a href="#" id="modal-ng">Cancel</a>',
ok_cancel = WINDOWS ? prim + sec : sec + prim;
r.busy = false;
r.show = function (html) {
o = mknod('div');
o.setAttribute('id', 'modal');
o.innerHTML = '<table><tr><td><div id="modalc">' + html + '</div></td></tr></table>';
document.body.appendChild(o);
document.addEventListener('keydown', onkey);
r.busy = true;
var a = ebi('modal-ng');
if (a)
a.onclick = ng;
a = ebi('modal-ok');
a.onclick = ok;
var inp = ebi('modali');
(inp || a).focus();
if (inp)
setTimeout(function () {
inp.setSelectionRange(0, inp.value.length, "forward");
}, 0);
document.addEventListener('focus', onfocus);
timer.add(onfocus);
if (cb_up)
setTimeout(cb_up, 1);
};
r.hide = function () {
timer.rm(onfocus);
document.removeEventListener('focus', onfocus);
document.removeEventListener('keydown', onkey);
o.parentNode.removeChild(o);
r.busy = false;
setTimeout(next, 50);
};
function ok(e) {
ev(e);
var v = ebi('modali');
v = v ? v.value : true;
r.hide();
if (cb_ok)
cb_ok(v);
}
function ng(e) {
ev(e);
r.hide();
if (cb_ng)
cb_ng(null);
}
function onfocus(e) {
var ctr = ebi('modalc');
if (!ctr || !ctr.contains || !document.activeElement || ctr.contains(document.activeElement))
return;
setTimeout(function () {
ebi('modal-ok').focus();
}, 20);
ev(e);
}
function onkey(e) {
if (e.code == 'Enter') {
var a = ebi('modal-ng');
if (a && document.activeElement == a)
return ng();
return ok();
}
if (e.code == 'Escape')
return ng();
}
function next() {
if (!r.busy && q.length)
q.shift()();
}
r.alert = function (html, cb, fun) {
q.push(function () {
_alert(lf2br(html), cb, fun);
});
next();
};
function _alert(html, cb, fun) {
cb_ok = cb_ng = cb;
cb_up = fun;
html += '<div id="modalb"><a href="#" id="modal-ok">OK</a></div>';
r.show(html);
}
r.confirm = function (html, cok, cng, fun) {
q.push(function () {
_confirm(lf2br(html), cok, cng, fun);
});
next();
}
function _confirm(html, cok, cng, fun) {
cb_ok = cok;
cb_ng = cng === undefined ? cok : null;
cb_up = fun;
html += '<div id="modalb">' + ok_cancel + '</div>';
r.show(html);
}
r.prompt = function (html, v, cok, cng, fun) {
q.push(function () {
_prompt(lf2br(html), v, cok, cng, fun);
});
next();
}
function _prompt(html, v, cok, cng, fun) {
cb_ok = cok;
cb_ng = cng === undefined ? cok : null;
cb_up = fun;
html += '<input id="modali" type="text" /><div id="modalb">' + ok_cancel + '</div>';
r.show(html);
ebi('modali').value = v || '';
}
return r;
})();
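// usage sketch, mirroring the call patterns above (illustrative text and callbacks):
modal.confirm('discard server changes?', function () {
    toast.ok(2, 'overwriting');
}, function () {
    toast.inf(3, 'aborted');
});
modal.prompt('new name', 'old.txt', function (name) {
    toast.inf(3, 'renaming to ' + esc(name));
}, null);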
function winpopup(txt) {
fetch(get_evpath(), {
method: 'POST',
headers: {
'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8'
},
body: 'msg=' + uricom_enc(Date.now() + ', ' + txt)
});
}
var last_repl = null;
function repl_load() {
var ipre = ebi('repl_pre'),
tb = ebi('modali');
function getpres() {
var o, ret = jread("repl_pre", []);
if (!ret.length)
ret = [
'var v=Object.keys(localStorage); v.sort(); JSON.stringify(v)',
'console.hist.slice(-10).join("\\n")'
];
ipre.innerHTML = '<option value=""></option>';
for (var a = 0; a < ret.length; a++) {
o = mknod('option');
o.setAttribute('value', ret[a]);
o.textContent = ret[a];
ipre.appendChild(o);
}
last_repl = ipre.value = (last_repl || (ret.length ? ret.slice(-1)[0] : ''));
return ret;
}
ebi('repl_pdel').onclick = function (e) {
var val = ipre.value,
pres = getpres();
apop(pres, val);
jwrite('repl_pre', pres);
getpres();
};
ebi('repl_pnew').onclick = function (e) {
var val = tb.value,
pres = getpres();
apop(pres, ipre.value);
pres.push(val);
jwrite('repl_pre', pres);
getpres();
ipre.value = val;
};
ipre.oninput = ipre.onchange = function () {
tb.value = last_repl = ipre.value;
};
tb.oninput = function () {
last_repl = this.value;
};
getpres();
tb.value = last_repl;
setTimeout(function () {
tb.setSelectionRange(0, tb.value.length, "forward");
}, 10);
}
function repl(e) {
ev(e);
var html = [
'<p>js repl (prefix with <code>,</code> to allow raise)</p>',
'<p><select id="repl_pre"></select>',
' &nbsp; <button id="repl_pdel">❌ del</button>',
' &nbsp; <button id="repl_pnew">💾 SAVE</button></p>'
];
modal.prompt(html.join(''), '', function (cmd) {
if (!cmd)
return toast.inf(3, 'eval aborted');
if (cmd.startsWith(','))
return modal.alert(esc(eval(cmd.slice(1)) + ''))
try {
modal.alert(esc(eval(cmd) + ''));
}
catch (ex) {
modal.alert('<h6>exception</h6>' + esc(ex + ''));
}
}, undefined, repl_load);
}
if (ebi('repl'))
ebi('repl').onclick = repl;

View File

@@ -1,11 +1,21 @@
# example `.epilogue.html`
**NOTE:** there's more stuff (sharex config, service scripts, nginx configs, ...) in [`/contrib/`](/contrib/)
# example resource files
can be provided to copyparty to tweak things
## example `.epilogue.html`
save one of these as `.epilogue.html` inside a folder to customize it:
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
# example browser-css
## example browser-css
point `--css-browser` to one of these by URL:
* [`browser.css`](browser.css) changes the background
@@ -19,4 +29,23 @@ point `--css-browser` to one of these by URL:
* notes on using rclone as a fuse client/server
## [`example.conf`](example.conf)
* example config file for `-c` which never really happened
* example config file for `-c` (supports accounts, volumes, and volume-flags)
# junk
alphabetical list of the remaining files
| what | why |
| -- | -- |
| [biquad.html](biquad.html) | bruteforce calibrator for the audio equalizer since im not that good at maths |
| [design.txt](design.txt) | initial brainstorming of the copyparty design, unmaintained, incorrect, sentimental value only |
| [hls.html](hls.html) | experimenting with hls playback using `hls.js`, works p well, almost became a thing |
| [music-analysis.sh](music-analysis.sh) | testing various bpm/key detection libraries before settling on the ones used in [`/bin/mtag/`](/bin/mtag/) |
| [notes.sh](notes.sh) | notepad, just scraps really |
| [nuitka.txt](nuitka.txt) | how to build a copyparty exe using nuitka (not maintained) |
| [pretend-youre-qnap.patch](pretend-youre-qnap.patch) | simulate a NAS which keeps returning old cached data even though you just modified the file yourself |
| [tcp-debug.sh](tcp-debug.sh) | looks like this was to debug stuck tcp connections? |
| [unirange.py](unirange.py) | uhh |
| [up2k.txt](up2k.txt) | initial ideas for how up2k should work, another unmaintained sentimental-value-only thing |

View File

@@ -3,6 +3,24 @@
setTimeout(location.reload.bind(location), 700);
document.documentElement.scrollLeft = 0;
var cali = (function() {
var ac = new AudioContext(),
fi = ac.createBiquadFilter(),
freqs = new Float32Array(1),
mag = new Float32Array(1),
phase = new Float32Array(1);
freqs[0] = 14000;
fi.type = 'peaking';
fi.frequency.value = 18000;
fi.Q.value = 0.8;
fi.gain.value = 1;
fi.getFrequencyResponse(freqs, mag, phase);
return mag[0]; // 1.0407 good, 1.0563 bad
})(),
mp = cali < 1.05;
var can = document.createElement('canvas'),
cc = can.getContext('2d'),
w = 2048,
@@ -28,12 +46,12 @@ var cfg = [ // hz, q, g
[1000, 0.9, 1.1],
[2000, 0.9, 1.105],
[4000, 0.88, 1.05],
[8000 * 1.006, 0.73, 1.24],
[8000 * 1.006, 0.73, mp ? 1.24 : 1.2],
//[16000 * 1.00, 0.5, 1.75], // peak.v1
//[16000 * 1.19, 0, 1.8] // shelf.v1
[16000 * 0.89, 0.7, 1.26], // peak
[16000 * 1.13, 0.82, 1.09], // peak
[16000 * 1.205, 0, 1.9] // shelf
[16000 * 0.89, 0.7, mp ? 1.26 : 1.2], // peak
[16000 * 1.13, 0.82, mp ? 1.09 : 0.75], // peak
[16000 * 1.205, 0, mp ? 1.9 : 1.85] // shelf
];
var freqs = new Float32Array(22000),

View File

@@ -1,37 +1,7 @@
/* put filetype icons inline with text
#ggrid>a>span:before,
#ggrid>a>span.dir:before {
display: inline;
line-height: 0;
font-size: 1.7em;
margin: -.7em .1em -.5em -.6em;
}
*/
/* video, alternative 1:
top-left icon, just like the other formats
=======================================================================
/* move folder icons top-left */
#ggrid>a>span.dir:before {
content: initial;
}
#ggrid>a[href$="/"]:before {
content: '📂';
}
/* put filetype icons top-left */
#ggrid>a:before {
display: block;
position: absolute;
padding: .3em 0;
margin: -.4em;
text-shadow: 0 0 .1em #000;
background: linear-gradient(135deg,rgba(255,255,255,0) 50%,rgba(255,255,255,0.2));
border-radius: .3em;
font-size: 2em;
}
/* video */
#ggrid>a:is(
[href$=".mkv"i],
[href$=".mp4"i],
@@ -39,6 +9,40 @@
):before {
content: '📺';
}
*/
/* video, alternative 2:
play-icon in the middle of the thumbnail
=======================================================================
*/
#ggrid>a:is(
[href$=".mkv"i],
[href$=".mp4"i],
[href$=".webm"i],
) {
position: relative;
overflow: hidden;
}
#ggrid>a:is(
[href$=".mkv"i],
[href$=".mp4"i],
[href$=".webm"i],
):before {
content: '▶';
opacity: .8;
margin: 0;
padding: 1em .5em 1em .7em;
border-radius: 9em;
line-height: 0;
color: #fff;
text-shadow: none;
background: rgba(0, 0, 0, 0.7);
left: calc(50% - 1em);
top: calc(50% - 1.4em);
}
/* audio */
@@ -54,6 +58,7 @@
}
/* image */
#ggrid>a:is(
[href$=".jpg"i],

View File

@@ -10,19 +10,25 @@ u k:k
# share "." (the current directory)
# as "/" (the webroot) for the following users:
# "r" grants read-access for anyone
# "a ed" grants read-write to ed
# "rw ed" grants read-write to ed
.
/
r
a ed
rw ed
# custom permissions for the "priv" folder:
# user "k" can see/read the contents
# and "ed" gets read-write access
# user "k" can only see/read the contents
# user "ed" gets read-write access
./priv
/priv
r k
a ed
rw ed
# this does the same thing:
./priv
/priv
r ed k
w ed
# share /home/ed/Music/ as /music and let anyone read it
# (this will replace any folder called "music" in the webroot)
@@ -41,5 +47,5 @@ c e2d
c nodupe
# this entire config file can be replaced with these arguments:
# -u ed:123 -u k:k -v .::r:aed -v priv:priv:rk:aed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w
# -u ed:123 -u k:k -v .::r:a,ed -v priv:priv:r,k:rw,ed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w:c,e2d:c,nodupe
# but note that the config file always wins in case of conflicts

View File

@@ -11,7 +11,9 @@
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
#u2cards /* and the upload progress tabs */
#srch_dz, #srch_zd, /* the filesearch dropzone */
#u2cards, #u2etaw /* and the upload progress tabs */
{display: none !important} /* do it! */
@@ -19,7 +21,7 @@
/* add some margins because now it's weird */
.opview {margin-top: 2.5em}
#op_up2k {margin-top: 3em}
#op_up2k {margin-top: 6em}
/* and embiggen the upload button */
#u2conf #u2btn, #u2btn {padding:1.5em 0}
@@ -27,6 +29,9 @@
/* adjust the button area a bit */
#u2conf.has_btn {width: 35em !important; margin: 5em auto}
/* a */
#op_up2k {min-height: 0}
</style>
<a href="#" onclick="this.parentNode.innerHTML='';">show advanced options</a>

View File

@@ -1,26 +0,0 @@
method = self.s.recv(4)
self.s.unrecv(method)
print("xxx unrecv'd [{}]".format(method))
# jython used to do this, they stopped since it's broken
# but reimplementing sendall is out of scope for now
if not getattr(self.s.s, "sendall", None):
self.s.s.sendall = self.s.s.send
# TODO this is also pretty bad
have = dir(self.s)
for k in self.s.s.__dict__:
if k not in have and not k.startswith("__"):
if k == "recv":
raise Exception("wait what")
self.s.__dict__[k] = self.s.s.__dict__[k]
have = dir(self.s)
for k in dir(self.s.s):
if k not in have and not k.startswith("__"):
if k == "recv":
raise Exception("wait what")
setattr(self.s, k, getattr(self.s.s, k))

View File

@@ -44,7 +44,7 @@ avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} c
dirs=("$HOME/vfs/ほげ" "$HOME/vfs/ほげ/ぴよ" "$HOME/vfs/$(printf \\xed\\x91)" "$HOME/vfs/$(printf \\xed\\x91/\\xed\\x92)")
mkdir -p "${dirs[@]}"
for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qwe,rty;asd fgh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd&nbsp;fgh'; do echo "$dir" > "$dir/$fn.html"; done; done
# qw er+ty%20ui%%20op<as>df&gh&amp;jk#zx'cv"bn`m=qw*er^ty?ui@op,as.df-gh_jk
##
## upload mojibake
@@ -79,6 +79,10 @@ command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (ti
# get all up2k search result URLs
var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n"));
# rename all selected songs to <leading-track-number> + <Title> + <extension>
var sel=msel.getsel(), ci=find_file_col('Title')[0], re=[]; for (var a=0; a<sel.length; a++) { var url=sel[a].vp, tag=ebi(sel[a].id).closest('tr').querySelectorAll('td')[ci].textContent, name=uricom_dec(vsplit(url)[1])[0], m=/^([0-9]+[\. -]+)?.*(\.[^\.]+$)/.exec(name), name2=(m[1]||'')+tag+m[2], url2=vsplit(url)[0]+uricom_enc(name2,false); if (url!=url2) re.push([url, url2]); }
console.log(JSON.stringify(re, null, ' '));
function f() { if (!re.length) return treectl.goto(get_evpath()); var [u1,u2] = re.shift(); fetch(u1+'?move='+u2).then((rsp) => {if (rsp.ok) f(); }); }; f();
##
## bash oneliners
@@ -122,6 +126,13 @@ e=6; s=10; d=~/dev/copyparty/srv/aus; n=1; p=0; e=$((e*60)); rm -rf $d; mkdir $d
-v srv/aus:aus:r:ce2dsa:ce2ts:cmtp=fgsfds=bin/mtag/sleep.py
sqlite3 .hist/up2k.db 'select * from mt where k="fgsfds" or k="t:mtp"' | tee /dev/stderr | wc -l
# generate the sine meme
for ((f=420;f<1200;f++)); do sz=$(ffmpeg -y -f lavfi -i sine=frequency=$f:duration=2 -vf volume=0.1 -ac 1 -ar 44100 -f s16le /dev/shm/a.wav 2>/dev/null; base64 -w0 </dev/shm/a.wav | gzip -c | wc -c); printf '%d %d\n' $f $sz; done | tee /dev/stderr | sort -nrk2,2
ffmpeg -y -f lavfi -i sine=frequency=1050:duration=2 -vf volume=0.1 -ac 1 -ar 44100 /dev/shm/a.wav
# play icon calibration pics
for w in 150 170 190 210 230 250; do for h in 130 150 170 190 210; do /c/Program\ Files/ImageMagick-7.0.11-Q16-HDRI/magick.exe convert -size ${w}x${h} xc:brown -fill orange -draw "circle $((w/2)),$((h/2)) $((w/2)),$((h/3))" $w-$h.png; done; done
##
## vscode
@@ -153,7 +164,7 @@ brew install python@2
pip install virtualenv
# readme toc
cat README.md | awk '!/^#/{next} {lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab)} {printf "%" ((lv-1)*4+1) "s [%s](#%s)\n", "*",$0,bab}'
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md
# fix firefox phantom breakpoints,
# suggestions from bugtracker, doesn't work (debugger is not attachable)
@@ -166,7 +177,10 @@ dbg.asyncStore.pendingBreakpoints = {}
about:config >> devtools.debugger.prefs-schema-version = -1
# determine server version
git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
# download all sfx versions
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="copyparty $v $t.py"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
##

View File

@@ -3,7 +3,7 @@ WORKDIR /z
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_hashwasm=4.7.0 \
ver_marked=1.1.0 \
ver_ogvjs=1.8.0 \
ver_ogvjs=1.8.4 \
ver_mde=2.14.0 \
ver_codemirror=5.59.3 \
ver_fontawesome=5.13.0 \
@@ -74,23 +74,16 @@ RUN cd hash-wasm \
# build ogvjs
RUN cd ogvjs-$ver_ogvjs \
&& cp -pv \
ogv.js \
ogv-worker-audio.js \
ogv-demuxer-ogg-wasm.js \
ogv-demuxer-ogg-wasm.wasm \
ogv-demuxer-webm-wasm.js \
ogv-demuxer-webm-wasm.wasm \
ogv-decoder-audio-opus-wasm.js \
ogv-decoder-audio-opus-wasm.wasm \
ogv-decoder-audio-vorbis-wasm.js \
ogv-decoder-audio-vorbis-wasm.wasm \
/z/dist
# ogv-demuxer-ogg.js \
# ogv-demuxer-webm.js \
# ogv-decoder-audio-opus.js \
# ogv-decoder-audio-vorbis.js \
# dynamicaudio.swf \
/z/dist \
&& cp -pv \
ogv-es2017.js /z/dist/ogv.js
# build marked

View File

@@ -2,7 +2,7 @@ all: $(addsuffix .gz, $(wildcard *.*))
%.gz: %
#brotli -q 11 $<
pigz -11 -J 34 -I 573 $<
pigz -11 -I 573 $<
# pigz -11 -J 34 -I 100 -F < $< > $@.first

View File

@@ -2,6 +2,7 @@
set -e
echo
help() { exec cat <<'EOF'
# optional args:
#
@@ -15,12 +16,19 @@ echo
#
# `no-sh` makes just the python sfx, skips the sh/unix sfx
#
# `no-ogv` saves ~500k by removing the opus/vorbis audio codecs
# `no-ogv` saves ~192k by removing the opus/vorbis audio codecs
# (only affects apple devices; everything else has native support)
#
# `no-cm` saves ~90k by removing easymde/codemirror
# `no-cm` saves ~92k by removing easymde/codemirror
# (the fancy markdown editor)
#
# `no-fnt` saves ~9k by removing the source-code-pro font
# (browsers will try to use 'Consolas' instead)
#
# `no-dd` saves ~2k by removing the mouse cursor
EOF
}
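To illustrate the options documented in the help text above, a hedged usage sketch; the scripts/make-sfx.sh path is an assumption about the repo layout:
# hypothetical repack that drops the optional payloads listed above
./scripts/make-sfx.sh re no-ogv no-cm no-fnt no-dd
# hypothetical quicker build using fewer compression iterations (the fast arg)
./scripts/make-sfx.sh fast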
# port install gnutar findutils gsed coreutils
gtar=$(command -v gtar || command -v gnutar) || true
@@ -29,6 +37,9 @@ gtar=$(command -v gtar || command -v gnutar) || true
sed() { gsed "$@"; }
find() { gfind "$@"; }
sort() { gsort "$@"; }
shuf() { gshuf "$@"; }
nproc() { gnproc; }
sha1sum() { shasum "$@"; }
unexpand() { gunexpand "$@"; }
command -v grealpath >/dev/null &&
realpath() { grealpath "$@"; }
@@ -56,15 +67,22 @@ pybin=$(command -v python3 || command -v python) || {
use_gz=
do_sh=1
do_py=1
zopf=2560
while [ ! -z "$1" ]; do
[ "$1" = clean ] && clean=1 && shift && continue
[ "$1" = re ] && repack=1 && shift && continue
[ "$1" = gz ] && use_gz=1 && shift && continue
[ "$1" = no-ogv ] && no_ogv=1 && shift && continue
[ "$1" = no-cm ] && no_cm=1 && shift && continue
[ "$1" = no-sh ] && do_sh= && shift && continue
[ "$1" = no-py ] && do_py= && shift && continue
break
case $1 in
clean) clean=1 ; ;;
re) repack=1 ; ;;
gz) use_gz=1 ; ;;
no-ogv) no_ogv=1 ; ;;
no-fnt) no_fnt=1 ; ;;
no-dd) no_dd=1 ; ;;
no-cm) no_cm=1 ; ;;
no-sh) do_sh= ; ;;
no-py) do_py= ; ;;
fast) zopf=100 ; ;;
*) help ; ;;
esac
shift
done
tmv() {
@@ -72,16 +90,23 @@ tmv() {
mv t "$1"
}
stamp=$(
for d in copyparty scripts; do
find $d -type f -printf '%TY-%Tm-%Td %TH:%TM:%TS %p\n'
done | sort | tail -n 1 | sha1sum | cut -c-16
)
rm -rf sfx/*
mkdir -p sfx build
cd sfx
[ $repack ] && {
old="$(
printf '%s\n' "$TMPDIR" /tmp |
awk '/./ {print; exit}'
)/pe-copyparty"
tmpdir="$(
printf '%s\n' "$TMPDIR" /tmp |
awk '/./ {print; exit}'
)"
[ $repack ] && {
old="$tmpdir/pe-copyparty"
echo "repack of files in $old"
cp -pR "$old/"*{dep-j2,copyparty} .
}
@@ -113,7 +138,7 @@ cd sfx
# msys2 tar is bad, make the best of it
echo collecting source
[ $clean ] && {
(cd .. && git archive master >tar) && tar -xf ../tar copyparty
(cd .. && git archive hovudstraum >tar) && tar -xf ../tar copyparty
(cd .. && tar -cf tar copyparty/web/deps) && tar -xf ../tar
}
[ $clean ] || {
@@ -123,6 +148,7 @@ cd sfx
}
ver=
[ -z "$repack" ] &&
git describe --tags >/dev/null 2>/dev/null && {
git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//')";
@@ -154,7 +180,7 @@ git describe --tags >/dev/null 2>/dev/null && {
[ -z "$ver" ] &&
ver="$(awk '/^VERSION *= \(/ {
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"
gsub(/[^0-9,a-g-]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"
ts=$(date -u +%s)
hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)
@@ -163,12 +189,12 @@ mkdir -p ../dist
sfx_out=../dist/copyparty-sfx
echo cleanup
find .. -name '*.pyc' -delete
find .. -name __pycache__ -delete
find -name '*.pyc' -delete
find -name __pycache__ -delete
# especially prevent osx from leaking your lan ip (wtf apple)
find .. -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
find .. -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
find -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
find -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
echo use smol web deps
rm -f copyparty/web/deps/*.full.* copyparty/web/dbg-* copyparty/web/Makefile
@@ -187,7 +213,24 @@ done
rm -rf copyparty/web/mde.* copyparty/web/deps/easymde*
echo h > copyparty/web/mde.html
f=copyparty/web/md.html
sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f"
sed -r '/edit2">edit \(fancy/d' <$f >t
tmv "$f"
}
[ $no_fnt ] && {
rm -f copyparty/web/deps/scp.woff2
f=copyparty/web/ui.css
gzip -d "$f.gz" || true
sed -r "s/src:.*scp.*\)/src:local('Consolas')/" <$f >t
tmv "$f"
}
[ $no_dd ] && {
rm -rf copyparty/web/dd
f=copyparty/web/browser.css
gzip -d "$f.gz" || true
sed -r 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; /[0-9]+% \{cursor:/d; /animation: ?cursor/d' <$f >t
tmv "$f"
}
[ $repack ] ||
@@ -200,8 +243,15 @@ f=dep-j2/jinja2/constants.py
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
tmv "$f"
grep -rLE '^#[^a-z]*coding: utf-8' dep-j2 |
while IFS= read -r f; do
(echo "# coding: utf-8"; cat "$f") >t
tmv "$f"
done
# up2k goes from 28k to 22k laff
echo entabbening
awk 'BEGIN{gensub(//,"",1)}' </dev/null &&
echo entabbening &&
find | grep -E '\.css$' | while IFS= read -r f; do
awk '{
sub(/^[ \t]+/,"");
@@ -215,25 +265,61 @@ find | grep -E '\.css$' | while IFS= read -r f; do
' <$f | sed 's/;\}$/}/' >t
tmv "$f"
done
unexpand -h 2>/dev/null &&
find | grep -E '\.(js|html)$' | while IFS= read -r f; do
unexpand -t 4 --first-only <"$f" >t
tmv "$f"
done
gzres() {
command -v pigz &&
pk='pigz -11 -J 34 -I 100' ||
pk='gzip'
command -v pigz &&
pk="pigz -11 -I $zopf" ||
pk='gzip'
echo "$pk"
find | grep -E '\.(js|css)$' | grep -vF /deps/ | while IFS= read -r f; do
echo -n .
$pk "$f"
done
echo
np=$(nproc)
echo "$pk #$np"
while IFS=' ' read -r _ f; do
while true; do
na=$(ps auxwww | grep -F "$pk" | wc -l)
[ $na -le $np ] && break
sleep 0.2
done
echo -n .
$pk "$f" &
done < <(
find -printf '%s %p\n' |
grep -E '\.(js|css)$' |
grep -vF /deps/ |
sort -nr
)
wait
echo
}
zdir="$tmpdir/cpp-mksfx"
[ -e "$zdir/$stamp" ] || rm -rf "$zdir"
mkdir -p "$zdir"
echo a > "$zdir/$stamp"
nf=$(ls -1 "$zdir"/arc.* | wc -l)
[ $nf -ge 2 ] && [ ! $repack ] && use_zdir=1 || use_zdir=
[ $use_zdir ] || {
echo "$nf alts += 1"
gzres
[ $repack ] ||
tar -cf "$zdir/arc.$(date +%s)" copyparty/web/*.gz
}
[ $use_zdir ] && {
arcs=("$zdir"/arc.*)
arc="${arcs[$RANDOM % ${#arcs[@]} ] }"
echo "using $arc"
tar -xf "$arc"
for f in copyparty/web/*.gz; do
rm "${f%.*}"
done
}
gzres
echo gen tarlist
@@ -241,7 +327,7 @@ for d in copyparty dep-j2; do find $d -type f; done |
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1) >list || true
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | shuf) >list || true
echo creating tar
args=(--owner=1000 --group=1000)
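The rewritten gzres() above throttles its background pigz jobs by polling the process table until the worker count drops back to the core count; a standalone sketch of that pattern, with hypothetical file selection and assuming GNU coreutils plus pigz:
# wait while more pigz workers are alive than there are cores, then
# launch the next one in the background (the script above additionally
# orders candidates largest-first)
np=$(nproc)
while IFS= read -r f; do
  while [ "$(ps auxwww | grep -F 'pigz -11' | grep -cv grep)" -gt "$np" ]; do
    sleep 0.2
  done
  pigz -11 "$f" &
done < <(find . -name '*.js' -o -name '*.css')
wait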

View File

@@ -61,7 +61,7 @@ rls_dir="$tmp/copyparty-$ver"
mkdir "$rls_dir"
echo ">>> export from git"
git archive master | tar -xC "$rls_dir"
git archive hovudstraum | tar -xC "$rls_dir"
echo ">>> export untracked deps"
tar -c copyparty/web/deps | tar -xC "$rls_dir"
@@ -122,5 +122,5 @@ echo " $zip_path"
echo " $tgz_path"
echo
# function alr() { ls -alR copyparty-$1 | sed -r "s/copyparty-$1/copyparty/" | sed -r 's/[A-Z][a-z]{2} [0-9 ]{2} [0-9]{2}:[0-9]{2}//' > $1; }; for x in master rls src ; do alr $x; done
# function alr() { ls -alR copyparty-$1 | sed -r "s/copyparty-$1/copyparty/" | sed -r 's/[A-Z][a-z]{2} [0-9 ]{2} [0-9]{2}:[0-9]{2}//' > $1; }; for x in hovudstraum rls src ; do alr $x; done

View File

@@ -364,7 +364,7 @@ def confirm(rv):
except:
pass
sys.exit(rv)
sys.exit(rv or 1)
def run(tmp, j2):

View File

@@ -108,6 +108,7 @@ def tc1():
pdirs = [x.replace("\\", "/") for x in pdirs]
udirs = [x.split("/", 2)[2] for x in pdirs]
perms = [x.rstrip("j/")[-1] for x in pdirs]
perms = ["rw" if x == "a" else x for x in perms]
for pd, ud, p in zip(pdirs, udirs, perms):
if ud[-1] == "j":
continue
@@ -124,7 +125,7 @@ def tc1():
arg = "{}:{}:{}".format(pd, ud, p, hp)
if hp:
arg += ":chist=" + hp
arg += ":c,hist=" + hp
args += ["-v", arg]
@@ -147,14 +148,14 @@ def tc1():
u = "{}{}/a.h264".format(ub, d)
r = requests.get(u)
ok = bool(r)
if ok != (p in ["a"]):
if ok != (p in ["rw"]):
raise Exception("get {} with perm {} at {}".format(ok, p, u))
# stat filesystem
for d, p in zip(pdirs, perms):
u = "{}/a.h264".format(d)
ok = os.path.exists(u)
if ok != (p in ["a", "w"]):
if ok != (p in ["rw", "w"]):
raise Exception("stat {} with perm {} at {}".format(ok, p, u))
# GET thumbnail, verify contents
@@ -162,7 +163,7 @@ def tc1():
u = "{}{}/a.h264?th=j".format(ub, d)
r = requests.get(u)
ok = bool(r and r.content[:3] == b"\xff\xd8\xff")
if ok != (p in ["a"]):
if ok != (p in ["rw"]):
raise Exception("thumb {} with perm {} at {}".format(ok, p, u))
# check tags
@@ -179,10 +180,10 @@ def tc1():
r_ok = bool(j)
w_ok = bool(r_ok and j.get("files"))
if not r_ok or w_ok != (p in ["a"]):
if not r_ok or w_ok != (p in ["rw"]):
raise Exception("ls {} with perm {} at {}".format(ok, p, u))
if (tag and p != "a") or (not tag and p == "a"):
if (tag and p != "rw") or (not tag and p == "rw"):
raise Exception("tag {} with perm {} at {}".format(tag, p, u))
if tag is not None and tag != "48x32":

View File

@@ -65,9 +65,9 @@ def uncomment(fpath):
def main():
print("uncommenting", end="")
print("uncommenting", end="", flush=True)
for f in sys.argv[1:]:
print(".", end="")
print(".", end="", flush=True)
uncomment(f)
print("k")

View File

@@ -61,7 +61,7 @@ class clean2(Command):
pass
nuke = []
for (dirpath, dirnames, filenames) in os.walk("."):
for (dirpath, _, filenames) in os.walk("."):
for fn in filenames:
if (
fn.startswith("MANIFEST")
@@ -86,7 +86,7 @@ args = {
"url": "https://github.com/9001/copyparty",
"license": "MIT",
"classifiers": [
"Development Status :: 4 - Beta",
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
@@ -99,7 +99,9 @@ args = {
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: Jython",
"Programming Language :: Python :: Implementation :: PyPy",
"Environment :: Console",
"Environment :: No Input/Output (Daemon)",

View File

@@ -31,14 +31,22 @@ class Cfg(Namespace):
rproxy=0,
ed=False,
nw=False,
unpost=600,
no_mv=False,
no_del=False,
no_zip=False,
no_voldump=True,
no_scandir=False,
no_sendfile=True,
no_rescan=True,
no_logues=False,
no_readme=False,
re_maxage=0,
ihead=False,
nih=True,
mtp=[],
mte="a",
mth="",
hist=None,
no_hash=False,
css_browser=None,
@@ -90,7 +98,7 @@ class TestHttpCli(unittest.TestCase):
if not vol.startswith(top):
continue
mode = vol[-2]
mode = vol[-2].replace("a", "rwmd")
usr = vol[-1]
if usr == "a":
usr = ""
@@ -99,7 +107,7 @@ class TestHttpCli(unittest.TestCase):
vol += "/"
top, sub = vol.split("/", 1)
vcfg.append("{0}/{1}:{1}:{2}{3}".format(top, sub, mode, usr))
vcfg.append("{0}/{1}:{1}:{2},{3}".format(top, sub, mode, usr))
pprint.pprint(vcfg)
@@ -145,6 +153,7 @@ class TestHttpCli(unittest.TestCase):
tar = tarfile.open(fileobj=io.BytesIO(b)).getnames()
except:
tar = []
tar = [x[4:] if x.startswith("top/") else x for x in tar]
tar = ["/".join([y for y in [top, durl, x] if y]) for x in tar]
tar = [[x] + self.can_rw(x) for x in tar]
tar_ok = [x[0] for x in tar if x[1]]

View File

@@ -21,9 +21,14 @@ class Cfg(Namespace):
ex2 = {
"mtp": [],
"mte": "a",
"mth": "",
"hist": None,
"no_hash": False,
"css_browser": None,
"no_voldump": True,
"no_logues": False,
"no_readme": False,
"re_maxage": 0,
"rproxy": 0,
}
ex.update(ex2)
@@ -57,8 +62,8 @@ class TestVFS(unittest.TestCase):
# type: (VFS, str, str) -> tuple[str, str, str]
"""helper for resolving and listing a folder"""
vn, rem = vfs.get(vpath, uname, True, False)
r1 = vn.ls(rem, uname, False)
r2 = vn.ls(rem, uname, False)
r1 = vn.ls(rem, uname, False, [[True]])
r2 = vn.ls(rem, uname, False, [[True]])
self.assertEqual(r1, r2)
fsdir, real, virt = r1
@@ -68,6 +73,11 @@ class TestVFS(unittest.TestCase):
def log(self, src, msg, c=0):
pass
def assertAxs(self, dct, lst):
t1 = list(sorted(dct.keys()))
t2 = list(sorted(lst))
self.assertEqual(t1, t2)
def test(self):
td = os.path.join(self.td, "vfs")
os.mkdir(td)
@@ -88,53 +98,53 @@ class TestVFS(unittest.TestCase):
self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td)
self.assertEqual(vfs.uread, ["*"])
self.assertEqual(vfs.uwrite, ["*"])
self.assertAxs(vfs.axs.uread, ["*"])
self.assertAxs(vfs.axs.uwrite, ["*"])
# single read-only rootfs (relative path)
vfs = AuthSrv(Cfg(v=["a/ab/::r"]), self.log).vfs
self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab"))
self.assertEqual(vfs.uread, ["*"])
self.assertEqual(vfs.uwrite, [])
self.assertAxs(vfs.axs.uread, ["*"])
self.assertAxs(vfs.axs.uwrite, [])
# single read-only rootfs (absolute path)
vfs = AuthSrv(Cfg(v=[td + "//a/ac/../aa//::r"]), self.log).vfs
self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa"))
self.assertEqual(vfs.uread, ["*"])
self.assertEqual(vfs.uwrite, [])
self.assertAxs(vfs.axs.uread, ["*"])
self.assertAxs(vfs.axs.uwrite, [])
# read-only rootfs with write-only subdirectory (read-write for k)
vfs = AuthSrv(
Cfg(a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]),
Cfg(a=["k:k"], v=[".::r:rw,k", "a/ac/acb:a/ac/acb:w:rw,k"]),
self.log,
).vfs
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td)
self.assertEqual(vfs.uread, ["*", "k"])
self.assertEqual(vfs.uwrite, ["k"])
self.assertAxs(vfs.axs.uread, ["*", "k"])
self.assertAxs(vfs.axs.uwrite, ["k"])
n = vfs.nodes["a"]
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a")
self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertEqual(n.uread, ["*", "k"])
self.assertEqual(n.uwrite, ["k"])
self.assertAxs(n.axs.uread, ["*", "k"])
self.assertAxs(n.axs.uwrite, ["k"])
n = n.nodes["ac"]
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a/ac")
self.assertEqual(n.realpath, os.path.join(td, "a", "ac"))
self.assertEqual(n.uread, ["*", "k"])
self.assertEqual(n.uwrite, ["k"])
self.assertAxs(n.axs.uread, ["*", "k"])
self.assertAxs(n.axs.uwrite, ["k"])
n = n.nodes["acb"]
self.assertEqual(n.nodes, {})
self.assertEqual(n.vpath, "a/ac/acb")
self.assertEqual(n.realpath, os.path.join(td, "a", "ac", "acb"))
self.assertEqual(n.uread, ["k"])
self.assertEqual(n.uwrite, ["*", "k"])
self.assertAxs(n.axs.uread, ["k"])
self.assertAxs(n.axs.uwrite, ["*", "k"])
# something funky about the windows path normalization,
# doesn't really matter but makes the test messy, TODO?
@@ -173,24 +183,24 @@ class TestVFS(unittest.TestCase):
# admin-only rootfs with all-read-only subfolder
vfs = AuthSrv(
Cfg(a=["k:k"], v=[".::ak", "a:a:r"]),
Cfg(a=["k:k"], v=[".::rw,k", "a:a:r"]),
self.log,
).vfs
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td)
self.assertEqual(vfs.uread, ["k"])
self.assertEqual(vfs.uwrite, ["k"])
self.assertAxs(vfs.axs.uread, ["k"])
self.assertAxs(vfs.axs.uwrite, ["k"])
n = vfs.nodes["a"]
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a")
self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertEqual(n.uread, ["*"])
self.assertEqual(n.uwrite, [])
self.assertEqual(vfs.can_access("/", "*"), [False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True])
self.assertEqual(vfs.can_access("/a", "*"), [True, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False])
self.assertAxs(n.axs.uread, ["*"])
self.assertAxs(n.axs.uwrite, [])
self.assertEqual(vfs.can_access("/", "*"), [False, False, False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True, False, False])
self.assertEqual(vfs.can_access("/a", "*"), [True, False, False, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False, False, False])
# breadth-first construction
vfs = AuthSrv(
@@ -247,26 +257,26 @@ class TestVFS(unittest.TestCase):
./src
/dst
r a
a asd
rw asd
"""
).encode("utf-8")
)
au = AuthSrv(Cfg(c=[cfg_path]), self.log)
self.assertEqual(au.user["a"], "123")
self.assertEqual(au.user["asd"], "fgh:jkl")
self.assertEqual(au.acct["a"], "123")
self.assertEqual(au.acct["asd"], "fgh:jkl")
n = au.vfs
# root was not defined, so PWD with no access to anyone
self.assertEqual(n.vpath, "")
self.assertEqual(n.realpath, None)
self.assertEqual(n.uread, [])
self.assertEqual(n.uwrite, [])
self.assertAxs(n.axs.uread, [])
self.assertAxs(n.axs.uwrite, [])
self.assertEqual(len(n.nodes), 1)
n = n.nodes["dst"]
self.assertEqual(n.vpath, "dst")
self.assertEqual(n.realpath, os.path.join(td, "src"))
self.assertEqual(n.uread, ["a", "asd"])
self.assertEqual(n.uwrite, ["asd"])
self.assertAxs(n.axs.uread, ["a", "asd"])
self.assertAxs(n.axs.uwrite, ["asd"])
self.assertEqual(len(n.nodes), 0)
os.unlink(cfg_path)

View File

@@ -31,7 +31,7 @@ if MACOS:
from copyparty.util import Unrecv
def runcmd(*argv):
def runcmd(argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
stdout, stderr = p.communicate()
stdout = stdout.decode("utf-8")
@@ -39,8 +39,8 @@ def runcmd(*argv):
return [p.returncode, stdout, stderr]
def chkcmd(*argv):
ok, sout, serr = runcmd(*argv)
def chkcmd(argv):
ok, sout, serr = runcmd(argv)
if ok != 0:
raise Exception(serr)
@@ -60,12 +60,12 @@ def get_ramdisk():
if os.path.exists("/Volumes"):
# hdiutil eject /Volumes/cptd/
devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://131072")
devname, _ = chkcmd("hdiutil attach -nomount ram://131072".split())
devname = devname.strip()
print("devname: [{}]".format(devname))
for _ in range(10):
try:
_, _ = chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
_, _ = chkcmd(["diskutil", "eraseVolume", "HFS+", "cptd", devname])
with open("/Volumes/cptd/.metadata_never_index", "w") as f:
f.write("orz")