Compare commits

...

129 Commits

Author SHA1 Message Date
ed
edbedcdad3 v1.1.3 2021-11-20 02:27:09 +01:00
ed
e4ae5f74e6 add tooltip indicator 2021-11-20 01:47:16 +01:00
ed
2c7ffe08d7 include sha512 as both hex and b64 in responses 2021-11-20 01:03:32 +01:00
ed
3ca46bae46 good oneliner 2021-11-20 00:20:34 +01:00
ed
7e82aaf843 simplify/improve up2k ui debounce 2021-11-20 00:03:15 +01:00
ed
315bd71adf limit turbo runahead 2021-11-20 00:01:14 +01:00
ed
2c612c9aeb ux 2021-11-19 21:31:05 +01:00
ed
36aee085f7 add timeouts to FFmpeg things 2021-11-16 22:22:09 +01:00
ed
d01bb69a9c u2cli: option to ignore inaccessible files 2021-11-16 21:53:00 +01:00
ed
c9b1c48c72 sizelimit registry + persist without e2d 2021-11-16 21:31:24 +01:00
ed
aea3843cf2 this is just noise 2021-11-16 21:28:50 +01:00
ed
131b6f4b9a workaround chrome rendering bug 2021-11-16 21:28:36 +01:00
ed
6efb8b735a better handling of python builds without sqlite3 2021-11-16 01:13:04 +01:00
ed
223b7af2ce more iOS jank 2021-11-16 00:05:35 +01:00
ed
e72c2a6982 add fastpath for using the eq as a pure gain control 2021-11-15 23:19:43 +01:00
ed
dd9b93970e autoenable aac transcoding when codec missing 2021-11-15 23:18:52 +01:00
ed
e4c7cd81a9 update readme 2021-11-15 20:28:53 +01:00
ed
12b3a62586 fix dumb mistakes 2021-11-15 20:13:16 +01:00
ed
2da3bdcd47 delay tooltips, fix #6 2021-11-15 03:56:17 +01:00
ed
c1dccbe0ba trick iphones into preloading natively 2021-11-15 03:01:11 +01:00
ed
9629fcde68 optionally enable seeking through os controls 2021-11-15 02:47:42 +01:00
ed
cae436b566 add client-option to disconnect on HTTP 304 2021-11-15 02:45:18 +01:00
ed
01714700ae more gapless fixes 2021-11-14 20:25:28 +01:00
ed
51e6c4852b retire ogvjs 2021-11-14 19:28:44 +01:00
ed
b206c5d64e handle multiple simultaneous uploads of the same file 2021-11-14 15:03:11 +01:00
ed
62c3272351 add option to simulate latency 2021-11-14 15:01:20 +01:00
ed
c5d822c70a v1.1.2 2021-11-12 23:08:24 +01:00
ed
9c09b4061a prefer fpool on linux as well 2021-11-12 22:57:36 +01:00
ed
c26fb43ced more cleanup 2021-11-12 22:30:23 +01:00
ed
deb8f20db6 misc cleanup/unjank 2021-11-12 20:48:26 +01:00
ed
50e18ed8ff fix up2k layout in readonly folders 2021-11-12 19:18:52 +01:00
ed
31f3895f40 close misc views on escape 2021-11-12 19:18:29 +01:00
ed
615929268a cache monet 2021-11-12 02:00:44 +01:00
ed
b8b15814cf add traffic shaping, bump speeds on https/windows 2021-11-12 01:34:56 +01:00
ed
7766fffe83 mostly fix ogvjs preloading 2021-11-12 01:09:01 +01:00
ed
2a16c150d1 general preload improvements 2021-11-12 01:04:31 +01:00
ed
418c2166cc add cursed doubleclick-handler in gridsel mode 2021-11-11 01:03:14 +01:00
ed
a4dd44f648 textviewer initiable through hotkeys 2021-11-11 00:18:34 +01:00
ed
5352f7cda7 fix ctrl-a fencing in codeblocks 2021-11-11 00:11:29 +01:00
ed
5533b47099 handle crc collisions 2021-11-10 23:59:07 +01:00
ed
e9b14464ee terminate preloader if it can't finish in time 2021-11-10 22:53:02 +01:00
ed
4e986e5cd1 xhr preload is not gapless 2021-11-10 22:00:24 +01:00
ed
8a59b40c53 better clientside upload dedup 2021-11-10 20:57:45 +01:00
ed
391caca043 v1.1.1 2021-11-08 22:39:00 +01:00
ed
171ce348d6 improve swr 2021-11-08 22:25:35 +01:00
ed
c2cc729135 update sfx sizes 2021-11-08 21:11:10 +01:00
ed
e7e71b76f0 add alternative preloader for spotty connections 2021-11-08 20:46:40 +01:00
ed
a2af61cf6f fix clipboard sharing on recent firefox versions 2021-11-08 20:43:26 +01:00
ed
e111edd5e4 v1.1.0 2021-11-06 23:27:48 +01:00
ed
3375377371 update tests 2021-11-06 23:27:21 +01:00
ed
0ced020c67 update readme 2021-11-06 22:15:37 +01:00
ed
c0d7aa9e4a add file selection from text viewer 2021-11-06 22:02:43 +01:00
ed
e5b3d2a312 dont hilight huge files 2021-11-06 20:56:23 +01:00
ed
7b4a794981 systemd-service: add reload 2021-11-06 20:33:15 +01:00
ed
86a859de17 navpane default on if 60em viewport 2021-11-06 20:32:43 +01:00
ed
b3aaa7bd0f fence ctrl-a within documents and codeblocks 2021-11-06 19:37:19 +01:00
ed
a90586e6a8 add reload api 2021-11-06 19:05:58 +01:00
ed
807f272895 missed one 2021-11-06 18:33:32 +01:00
ed
f050647b43 rescan volumes on sigusr1 2021-11-06 18:20:31 +01:00
ed
73baebbd16 initial sigusr1 acc/vol reload 2021-11-06 07:15:04 +01:00
ed
f327f698b9 finally drop the -e2s compat 2021-11-06 03:19:57 +01:00
ed
8164910fe8 support setting argv from config files 2021-11-06 03:11:21 +01:00
ed
3498644055 fix permission parser so it matches the documentation 2021-11-06 03:09:03 +01:00
ed
d31116b54c spaghetti unraveling 2021-11-06 02:07:13 +01:00
ed
aced110cdf bump preload window wrt opus transcoding 2021-11-06 01:02:22 +01:00
ed
e9ab6aec77 allow full mime override 2021-11-06 00:50:20 +01:00
ed
15b261c861 help windows a little 2021-11-06 00:45:42 +01:00
ed
970badce66 positioning + optimization 2021-11-06 00:06:14 +01:00
ed
64304a9d65 make it optional 2021-11-06 00:06:05 +01:00
ed
d1983553d2 add click handlers 2021-11-06 00:04:45 +01:00
ed
6b15df3bcd fix wordwrap not being set initially 2021-11-06 00:00:35 +01:00
ed
730b1fff71 hilight parents of current folder 2021-11-06 00:00:04 +01:00
ed
c3add751e5 oh 2021-11-05 02:12:25 +01:00
ed
9da2dbdc1c rough attempt at docked navpane context 2021-11-05 02:03:35 +01:00
ed
977f09c470 .txt.gz is not actually .txt 2021-11-05 00:29:25 +01:00
ed
4d0c6a8802 ensure selected item visible when toggling navpane mode 2021-11-05 00:13:09 +01:00
ed
5345565037 a 2021-11-04 23:34:00 +01:00
ed
be38c27c64 thxci 2021-11-04 22:33:10 +01:00
ed
82a0401099 at some point firefox became case-sensitive 2021-11-04 22:10:45 +01:00
ed
33bea1b663 navpane mode-toggle button and hotkey 2021-11-04 22:04:32 +01:00
ed
f083acd46d let client force plaintext response content-type 2021-11-04 22:02:39 +01:00
ed
5aacd15272 ux 2021-11-04 03:38:09 +01:00
ed
cb7674b091 make prism optional 2021-11-04 03:10:13 +01:00
ed
3899c7ad56 golfimize 2021-11-04 02:36:21 +01:00
ed
d2debced09 navigation history support 2021-11-04 02:29:24 +01:00
ed
b86c0ddc48 optimize 2021-11-04 02:06:55 +01:00
ed
ba36f33bd8 add textfile viewer 2021-11-04 01:40:03 +01:00
ed
49368a10ba navpane enabled by default on non-touch devices 2021-11-04 01:35:05 +01:00
ed
ac1568cacf golf elm removal 2021-11-04 01:33:40 +01:00
ed
862ca3439d proactive opus cache expiration 2021-11-02 20:39:08 +01:00
ed
fdd4f9f2aa dirlist alignment 2021-11-02 18:59:34 +01:00
ed
aa2dc49ebe trailing newline for plaintext folder listings 2021-11-02 18:48:32 +01:00
ed
cc23b7ee74 better user-feedback when transcoding is unavailable 2021-11-02 03:22:39 +01:00
ed
f6f9fc5a45 add audio transcoder 2021-11-02 02:59:37 +01:00
ed
26c8589399 Merge branch 'hovudstraum' of github.com:9001/copyparty into hovudstraum 2021-11-02 00:26:54 +01:00
ed
c2469935cb add audio spectrogram thumbnails 2021-11-02 00:26:51 +01:00
kipukun
5e7c20955e contrib: describe rc script 2021-10-31 19:25:22 +01:00
kipukun
967fa38108 contrib: add freebsd rc script 2021-10-31 19:25:22 +01:00
ed
280fe8e36b document some of the api 2021-10-31 15:30:09 +01:00
ed
03ca96ccc3 performance tips 2021-10-31 06:24:11 +01:00
ed
b5b8a2c9d5 why are there https warnings when https checking is disabled 2021-10-31 03:37:31 +01:00
ed
0008832730 update repacker 2021-10-31 02:22:14 +02:00
ed
c9b385db4b v1.0.14 2021-10-30 00:37:46 +02:00
ed
c951b66ae0 less messy startup messages 2021-10-29 23:43:09 +02:00
ed
de735f3a45 list successful binds only 2021-10-29 23:03:36 +02:00
ed
19161425f3 if no args, try to bind 80 and 443 as well 2021-10-29 23:01:07 +02:00
ed
c69e8d5bf4 filesearch donut accuracy 2021-10-29 21:07:46 +02:00
ed
3d3bce2788 less fancy but better 2021-10-29 11:02:20 +02:00
ed
1cb0dc7f8e colorcoded favicon donut 2021-10-29 02:40:17 +02:00
ed
cd5c56e601 u2cli: orz 2021-10-29 01:49:40 +02:00
ed
8c979905e4 mention fedora things 2021-10-29 01:07:58 +02:00
ed
4d69f15f48 fix empty files blocking successive uploads 2021-10-29 01:04:38 +02:00
ed
083f6572f7 ie11 support 2021-10-29 01:04:09 +02:00
ed
4e7dd75266 add upload donut 2021-10-29 01:01:32 +02:00
ed
3eb83f449b truncate ridiculous extensions 2021-10-27 23:42:28 +02:00
ed
d31f69117b better plaintext and vt100 folder listings 2021-10-27 23:04:59 +02:00
ed
f5f9e3ac97 reduce rescan/lifetime wakeups 2021-10-27 22:23:03 +02:00
ed
598d6c598c reduce wakeups in httpsrv 2021-10-27 22:20:21 +02:00
ed
744727087a better rmtree semantics 2021-10-27 09:40:20 +02:00
ed
f93212a665 add logout button to contrl panel 2021-10-27 01:27:59 +02:00
ed
6dade82d2c run tag scrapers in parallel on new uploads 2021-10-27 00:47:50 +02:00
ed
6b737bf1d7 abort tagging if the file has poofed 2021-10-27 00:11:58 +02:00
ed
94dbd70677 plaintext folder listing with ?ls=t 2021-10-27 00:00:12 +02:00
ed
527ae0348e locale-aware sorting of the navpane too 2021-10-26 23:59:21 +02:00
ed
79629c430a add refresh button on volumes listing 2021-10-26 23:58:10 +02:00
ed
908dd61be5 add cheatcode for turning links into downloads 2021-10-26 01:11:07 +02:00
ed
88f77b8cca spacebar as actionkey when ok/cancel focused 2021-10-25 21:31:27 +02:00
ed
1e846657d1 more css nitpicks 2021-10-25 21:31:12 +02:00
ed
ce70f62a88 catch shady vfs configs 2021-10-25 21:13:51 +02:00
46 changed files with 2572 additions and 871 deletions

172
README.md
View File

@@ -46,7 +46,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [markdown viewer](#markdown-viewer) - and there are *two* editors
* [other tricks](#other-tricks)
* [searching](#searching) - search by size, date, path/name, mp3-tags, ...
* [server config](#server-config)
* [server config](#server-config) - using arguments or config files, or a mix of both
* [file indexing](#file-indexing)
* [upload rules](#upload-rules) - set upload rules using volume flags
* [compress uploads](#compress-uploads) - files can be autocompressed on upload
@@ -60,11 +60,17 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [up2k](#up2k) - quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
* [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
* [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
* [client-side](#client-side) - when uploading files
* [security](#security) - some notes on hardening
* [gotchas](#gotchas) - behavior that might be unexpected
* [recovering from crashes](#recovering-from-crashes)
* [client crashes](#client-crashes)
* [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads
* [HTTP API](#HTTP-API)
* [read](#read)
* [write](#write)
* [admin](#admin)
* [general](#general)
* [dependencies](#dependencies) - mandatory deps
* [optional dependencies](#optional-dependencies) - install these to enable bonus features
* [install recommended deps](#install-recommended-deps)
@@ -72,6 +78,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [sfx](#sfx) - there are two self-contained "binaries"
* [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features
* [install on android](#install-on-android)
* [reporting bugs](#reporting-bugs) - ideas for context to include in bug reports
* [building](#building)
* [dev env setup](#dev-env-setup)
* [just the sfx](#just-the-sfx)
@@ -153,11 +160,13 @@ feature summary
* browser
* ☑ [navpane](#navpane) (directory tree sidebar)
* ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
* ☑ audio player (with OS media controls)
* ☑ audio player (with OS media controls and opus transcoding)
* ☑ image gallery with webm player
* ☑ textfile browser with syntax hilighting
* ☑ [thumbnails](#thumbnails)
* ☑ ...of images using Pillow
* ☑ ...of videos using FFmpeg
* ☑ ...of audio (spectrograms) using FFmpeg
* ☑ cache eviction (max-age; maybe max-size eventually)
* ☑ SPA (browse while uploading)
* if you use the navpane to navigate, not folders in the file list
@@ -218,6 +227,7 @@ some improvement ideas
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* Windows: python 2.7 cannot handle filenames with mojibake
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
* `--th-ff-swr` may fix audio thumbnails on some FFmpeg versions
## general bugs
@@ -226,6 +236,10 @@ some improvement ideas
## not my bugs
* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
* *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
* "future" because `AudioContext` is broken in the current iOS version (15.1), maybe one day...
* Windows: folders cannot be accessed if the name ends with `.`
* python or windows bug
@@ -242,6 +256,7 @@ some improvement ideas
* is it possible to block read-access to folders unless you know the exact URL for a particular file inside?
* yes, using the [`g` permission](#accounts-and-volumes), see the examples there
* you can also do this with linux filesystem permissions; `chmod 111 music` will make it possible to access files and folders inside the `music` folder but not list the immediate contents -- also works with other software, not just copyparty
* can I make copyparty download a file to my server if I give it a URL?
* not officially, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible
@@ -249,7 +264,10 @@ some improvement ideas
# accounts and volumes
per-folder, per-user permissions
per-folder, per-user permissions - if your setup is getting complex, consider making a [config file](./docs/example.conf) instead of using arguments
* much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if logged in as admin)
configuring accounts/volumes with arguments:
* `-a usr:pwd` adds account `usr` with password `pwd`
* `-v .::r` adds current-folder `.` as the webroot, `r`eadable by anyone
* the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set
@@ -304,6 +322,7 @@ the browser has the following hotkeys (always qwerty)
* `B` toggle breadcrumbs / [navpane](#navpane)
* `I/K` prev/next folder
* `M` parent folder (or unexpand current)
* `V` toggle folders / textfiles in the navpane
* `G` toggle list / [grid view](#thumbnails)
* `T` toggle thumbnails / icons
* `ctrl-X` cut selected files/folders
@@ -315,6 +334,10 @@ the browser has the following hotkeys (always qwerty)
* ctrl+`Up/Down` move cursor and scroll viewport
* `Space` toggle file selection
* `Ctrl-A` toggle select all
* when a textfile is open:
* `I/K` prev/next textfile
* `S` toggle selection of open file
* `M` close textfile
* when playing audio:
* `J/L` prev/next song
* `U/O` skip 10sec back/forward
@@ -366,6 +389,8 @@ press `g` to toggle grid-view instead of the file listing, and `t` toggles icon
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)
images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
@@ -421,6 +446,8 @@ see [up2k](#up2k) for details on how it works
**protip:** you can avoid scaring away users with [docs/minimal-up2k.html](docs/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
**protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress
the up2k UI is the epitome of polished inutitive experiences:
* "parallel uploads" specifies how many chunks to upload at the same time
* `[🏃]` analysis of other files should continue while one is uploading
@@ -457,8 +484,6 @@ the files will be hashed on the client-side, and each hash is sent to the server
files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much
adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files (or just refresh the page)
### unpost
@@ -546,6 +571,8 @@ and there are *two* editors
* you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`
* get a plaintext file listing by adding `?ls=t` to a URL, or a compact colored one with `?ls=v` (for unix terminals)
* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
* click the bottom-left `π` to open a javascript prompt for debugging
@@ -574,6 +601,12 @@ add the argument `-e2ts` to also scan/index tags from music files, which brings
# server config
using arguments or config files, or a mix of both:
* config files (`-c some.conf`) can set additional commandline arguments; see [./docs/example.conf](docs/example.conf)
* `kill -s USR1` (same as `systemctl reload copyparty`) to reload accounts and volumes from config files without restarting
* or click the `[reload cfg]` button in the control-panel when logged in as admin
## file indexing
file indexing relies on two database tables, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`), stored in `.hist/up2k.db`. Configuration can be done through arguments, volume flags, or a mix of both.
@@ -610,7 +643,7 @@ if you set `--no-hash [...]` globally, you can enable hashing for specific volum
set upload rules using volume flags, some examples:
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: b, k, m, g)
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: `b`, `k`, `m`, `g`)
* `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`:
* `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1)
* `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format
@@ -716,7 +749,7 @@ that'll run the command `notify-send` with the path to the uploaded file as the
note that it will only trigger on new unique files, not dupes
and it will occupy the parsing threads, so fork anything expensive, or if you want to intentionally queue/singlethread you can combine it with `--no-mtag-mt`
and it will occupy the parsing threads, so fork anything expensive, or if you want to intentionally queue/singlethread you can combine it with `--mtag-mt 1`
if this becomes popular maybe there should be a less janky way to do it actually
@@ -759,7 +792,7 @@ TLDR: yes
* internet explorer 6 to 8 behave the same
* firefox 52 and chrome 49 are the final winxp versions
* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
* `*3` using a wasm decoder which consumes a bit more power
* `*3` iOS 11 and newer, opus only, and requires FFmpeg on the server
quick summary of more eccentric web-browsers trying to view a directory index:
@@ -779,8 +812,8 @@ quick summary of more eccentric web-browsers trying to view a directory index:
interact with copyparty using non-browser clients
* javascript: dump some state into a file (two separate examples)
* `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
* `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
* `await fetch('//127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
* `var xhr = new XMLHttpRequest(); xhr.open('POST', '//127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
* curl/wget: upload some files (post=file, chunk=stdin)
* `post(){ curl -b cppwd=wark -F act=bput -F f=@"$1" http://127.0.0.1:3923/;}`
@@ -845,8 +878,6 @@ hashwasm would solve the streaming issue but reduces hashing speed for sha512 (x
defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
you can ignore the `cannot efficiently use multiple CPU cores` message, very unlikely to be a problem
below are some tweaks roughly ordered by usefulness:
* `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
@@ -860,6 +891,21 @@ below are some tweaks roughly ordered by usefulness:
...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u
## client-side
when uploading files,
* chrome is recommended, at least compared to firefox:
* up to 90% faster when hashing, especially on SSDs
* up to 40% faster when uploading over extremely fast internets
* but [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) can be 40% faster than chrome again
* if you're cpu-bottlenecked, or the browser is maxing a cpu core:
* up to 30% faster uploads if you hide the upload status list by switching away from the `[🚀]` up2k ui-tab (or closing it)
* switching to another browser-tab also works, the favicon will update every 10 seconds in that case
* unlikely to be a problem, but can happen when uploading many small files, or your internet is too fast, or PC too slow
# security
some notes on hardening
@@ -903,6 +949,84 @@ however you can hit `F12` in the up2k tab and use the devtools to see how far yo
`await fetch('/inc', {method:'PUT', body:JSON.stringify(ng,null,1)})`
# HTTP API
* table-column `params` = URL parameters; `?foo=bar&qux=...`
* table-column `body` = POST payload
* method `jPOST` = json post
* method `mPOST` = multipart post
* method `uPOST` = url-encoded post
* `FILE` = conventional HTTP file upload entry (rfc1867 et al, filename in `Content-Disposition`)
authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
## read
| method | params | result |
|--|--|--|
| GET | `?ls` | list files/folders at URL as JSON |
| GET | `?ls&dots` | list files/folders at URL as JSON, including dotfiles |
| GET | `?ls=t` | list files/folders at URL as plaintext |
| GET | `?ls=v` | list files/folders at URL, terminal-formatted |
| GET | `?b` | list files/folders at URL as simplified HTML |
| GET | `?tree=.` | list one level of subdirectories inside URL |
| GET | `?tree` | list one level of subdirectories for each level until URL |
| GET | `?tar` | download everything below URL as a tar file |
| GET | `?zip=utf-8` | download everything below URL as a zip file |
| GET | `?ups` | show recent uploads from your IP |
| GET | `?ups&filter=f` | ...where URL contains `f` |
| GET | `?mime=foo` | specify return mimetype `foo` |
| GET | `?raw` | get markdown file at URL as plaintext |
| GET | `?txt` | get file at URL as plaintext |
| GET | `?txt=iso-8859-1` | ...with specific charset |
| GET | `?th` | get image/video at URL as thumbnail |
| GET | `?th=opus` | convert audio file to 128kbps opus |
| GET | `?th=caf` | ...in the iOS-proprietary container |
| method | body | result |
|--|--|--|
| jPOST | `{"q":"foo"}` | do a server-wide search; see the `[🔎]` search tab `raw` field for syntax |
| method | params | body | result |
|--|--|--|--|
| jPOST | `?tar` | `["foo","bar"]` | download folders `foo` and `bar` inside URL as a tar file |
## write
| method | params | result |
|--|--|--|
| GET | `?move=/foo/bar` | move/rename the file/folder at URL to /foo/bar |
| method | params | body | result |
|--|--|--|--|
| PUT | | (binary data) | upload into file at URL |
| PUT | `?gz` | (binary data) | compress with gzip and write into file at URL |
| PUT | `?xz` | (binary data) | compress with xz and write into file at URL |
| mPOST | | `act=bput`, `f=FILE` | upload `FILE` into the folder at URL |
| mPOST | `?j` | `act=bput`, `f=FILE` | ...and reply with json |
| mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL |
| GET | `?delete` | | delete URL recursively |
| jPOST | `?delete` | `["/foo","/bar"]` | delete `/foo` and `/bar` recursively |
| uPOST | | `msg=foo` | send message `foo` into server log |
| mPOST | | `act=tput`, `body=TEXT` | overwrite markdown document at URL |
server behavior of `msg` can be reconfigured with `--urlform`
## admin
| method | params | result |
|--|--|--|
| GET | `?reload=cfg` | reload config files and rescan volumes |
| GET | `?scan` | initiate a rescan of the volume which provides URL |
| GET | `?stack` | show a stacktrace of all threads |
## general
| method | params | result |
|--|--|--|
| GET | `?pw=x` | logout |
# dependencies
mandatory deps:
@@ -919,7 +1043,7 @@ enable music tags:
enable [thumbnails](#thumbnails) of...
* **images:** `Pillow` (requires py2.7 or py3.5+)
* **videos:** `ffmpeg` and `ffprobe` somewhere in `$PATH`
* **videos/audio:** `ffmpeg` and `ffprobe` somewhere in `$PATH`
* **HEIF pictures:** `pyheif-pillow-opener` (requires Linux or a C compiler)
* **AVIF pictures:** `pillow-avif-plugin`
@@ -953,19 +1077,19 @@ pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `
reduce the size of an sfx by removing features
if you don't need all the features, you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except if you're on windows then you need msys2 or WSL)
* `525k` size of original sfx.py as of v0.11.30
* `315k` after `./scripts/make-sfx.sh re no-ogv`
* `223k` after `./scripts/make-sfx.sh re no-ogv no-cm`
* `393k` size of original sfx.py as of v1.1.3
* `310k` after `./scripts/make-sfx.sh re no-cm`
* `269k` after `./scripts/make-sfx.sh re no-cm no-hl`
the features you can opt to drop are
* `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files, saves ~192k
* `cm`/easymde, the "fancy" markdown editor, saves ~92k
* `cm`/easymde, the "fancy" markdown editor, saves ~82k
* `hl`, prism, the syntax hilighter, saves ~41k
* `fnt`, source-code-pro, the monospace font, saves ~9k
* `dd`, the custom mouse cursor for the media player tray tab, saves ~2k
for the `re`pack to work, first run one of the sfx'es once to unpack it
**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a `no-ogv no-cm` repack; works on linux/macos (and windows with msys2 or WSL)
**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a few repacks; works on linux/macos (and windows with msys2 or WSL)
# install on android
@@ -979,6 +1103,16 @@ echo $?
after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux
# reporting bugs
ideas for context to include in bug reports
if something broke during an upload (replacing FILENAME with a part of the filename that broke):
```
journalctl -aS '48 hour ago' -u copyparty | grep -C10 FILENAME | tee bug.log
```
# building
## dev env setup

View File

@@ -2,7 +2,7 @@
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
* file uploads, file-search, autoresume of aborted/broken uploads
* faster than browsers
* early beta, if something breaks just restart it
* if something breaks just restart it

View File

@@ -3,7 +3,7 @@ from __future__ import print_function, unicode_literals
"""
up2k.py: upload to copyparty
2021-10-12, v0.9, ed <irc.rizon.net>, MIT-Licensed
2021-11-16, v0.12, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
- dependencies: requests
@@ -224,38 +224,57 @@ class CTermsize(object):
ss = CTermsize()
def statdir(top):
def _scd(err, top):
"""non-recursive listing of directory contents, along with stat() info"""
if hasattr(os, "scandir"):
with os.scandir(top) as dh:
for fh in dh:
yield [os.path.join(top, fh.name), fh.stat()]
else:
for name in os.listdir(top):
abspath = os.path.join(top, name)
with os.scandir(top) as dh:
for fh in dh:
abspath = os.path.join(top, fh.name)
try:
yield [abspath, fh.stat()]
except:
err.append(abspath)
def _lsd(err, top):
"""non-recursive listing of directory contents, along with stat() info"""
for name in os.listdir(top):
abspath = os.path.join(top, name)
try:
yield [abspath, os.stat(abspath)]
except:
err.append(abspath)
def walkdir(top):
if hasattr(os, "scandir"):
statdir = _scd
else:
statdir = _lsd
def walkdir(err, top):
"""recursive statdir"""
for ap, inf in sorted(statdir(top)):
for ap, inf in sorted(statdir(err, top)):
if stat.S_ISDIR(inf.st_mode):
for x in walkdir(ap):
yield x
try:
for x in walkdir(err, ap):
yield x
except:
err.append(ap)
else:
yield ap, inf
def walkdirs(tops):
def walkdirs(err, tops):
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
sep = "{0}".format(os.sep).encode("ascii")
for top in tops:
stop = top
if top[-1:] == sep:
stop = os.path.dirname(top.rstrip(sep))
stop = top.rstrip(sep)
else:
stop = os.path.dirname(top)
if os.path.isdir(top):
for ap, inf in walkdir(top):
for ap, inf in walkdir(err, top):
yield stop, ap[len(stop) :].lstrip(sep), inf
else:
d, n = top.rsplit(sep, 1)
@@ -445,20 +464,32 @@ class Ctl(object):
nfiles = 0
nbytes = 0
for _, _, inf in walkdirs(ar.files):
err = []
for _, _, inf in walkdirs(err, ar.files):
nfiles += 1
nbytes += inf.st_size
if err:
eprint("\n# failed to access {} paths:\n".format(len(err)))
for x in err:
eprint(x.decode("utf-8", "replace") + "\n")
eprint("^ failed to access those {} paths ^\n\n".format(len(err)))
if not ar.ok:
eprint("aborting because --ok is not set\n")
return
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
self.nfiles = nfiles
self.nbytes = nbytes
if ar.td:
requests.packages.urllib3.disable_warnings()
req_ses.verify = False
if ar.te:
req_ses.verify = ar.te
self.filegen = walkdirs(ar.files)
self.filegen = walkdirs([], ar.files)
if ar.safe:
self.safe()
else:
@@ -781,6 +812,7 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
ap.add_argument("-a", metavar="PASSWORD", help="password")
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
ap = app.add_argument_group("performance tweaks")
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")

View File

@@ -30,6 +30,7 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
# OS integration
init-scripts to start copyparty as a service
* [`systemd/copyparty.service`](systemd/copyparty.service) runs the sfx normally
* [`rc/copyparty`](rc/copyparty) runs sfx normally on freebsd, create a `copyparty` user
* [`systemd/prisonparty.service`](systemd/prisonparty.service) runs the sfx in a chroot
* [`openrc/copyparty`](openrc/copyparty)

31
contrib/rc/copyparty Normal file
View File

@@ -0,0 +1,31 @@
#!/bin/sh
#
# PROVIDE: copyparty
# REQUIRE: networking
# KEYWORD:

# FreeBSD rc-script: supervises copyparty with daemon(8);
# enable with `sysrc copyparty_enable=YES`

. /etc/rc.subr

name="copyparty"
rcvar="copyparty_enable"

copyparty_user="copyparty"
copyparty_args="-e2dsa -v /storage:/storage:r" # change as you see fit
copyparty_command="/usr/local/bin/python3.8 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
pidfile="/var/run/copyparty/${name}.pid"
command="/usr/sbin/daemon"
command_args="-P ${pidfile} -r -f ${copyparty_command}"

stop_postcmd="copyparty_shutdown"

# terminate the supervising daemon(8); it will in turn stop copyparty
copyparty_shutdown()
{
	if [ -e "${pidfile}" ]; then
		echo "Stopping supervising daemon."
		# quoted $(...) instead of bare backticks: safe if the
		# pidfile path or contents ever contain whitespace
		kill -s TERM "$(cat "${pidfile}")"
	fi
}

load_rc_config $name
: ${copyparty_enable:=no}
run_rc_command "$1"

View File

@@ -3,10 +3,15 @@
#
# installation:
# cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
# restorecon -vr /etc/systemd/system/copyparty.service
# firewall-cmd --permanent --add-port={80,443,3923}/tcp
# firewall-cmd --reload
#
# you may want to:
# change '/usr/bin/python' to another interpreter
# change '/usr/bin/python3' to another interpreter
# change '/mnt::rw' to another location or permission-set
# remove '-p 80,443,3923' to only listen on port 3923
# add '-i 127.0.0.1' to only allow local connections
#
# with `Type=notify`, copyparty will signal systemd when it is ready to
# accept connections; correctly delaying units depending on copyparty.
@@ -14,11 +19,8 @@
# python disabling line-buffering, so messages are out-of-order:
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
#
# enable line-buffering for realtime logging (slight performance cost):
# modify ExecStart and prefix it with `/usr/bin/stdbuf -oL` like so:
# ExecStart=/usr/bin/stdbuf -oL /usr/bin/python3 [...]
# but some systemd versions require this instead (higher performance cost):
# inside the [Service] block, add the following line:
# if you remove -q to enable logging, you may also want to remove the
# following line to enable buffering (slightly better performance):
# Environment=PYTHONUNBUFFERED=x
[Unit]
@@ -27,8 +29,10 @@ Description=copyparty file server
[Service]
Type=notify
SyslogIdentifier=copyparty
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
Environment=PYTHONUNBUFFERED=x
ExecReload=/bin/kill -s USR1 $MAINPID
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -p 80,443,3923 -v /mnt::rw
[Install]
WantedBy=multi-user.target

View File

@@ -20,7 +20,7 @@ import threading
import traceback
from textwrap import dedent
from .__init__ import E, WINDOWS, VT100, PY2, unicode
from .__init__ import E, WINDOWS, ANYWIN, VT100, PY2, unicode
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
from .svchub import SvcHub
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re
@@ -186,6 +186,32 @@ def configure_ssl_ciphers(al):
sys.exit(0)
def args_from_cfg(cfg_path):
    """Translate a config file into a list of argv-style tokens.

    Only lines starting with "-" are collected; blank lines and
    "#"-comments are ignored (volume/account definitions are handled
    by AuthSrv, not here).  Each option line is split once on the
    first space, so "-v /a:/b" yields ["-v", "/a:/b"].

    :param cfg_path: path to the utf-8 config file
    :returns: list of str tokens to append to the commandline
    """
    ret = []
    with open(cfg_path, "rb") as f:
        for ln in [x.decode("utf-8").strip() for x in f]:
            if not ln or ln.startswith("#") or not ln.startswith("-"):
                continue

            # str.split never raises, and a line without spaces
            # simply yields itself, so no fallback is needed
            ret.extend(ln.split(" ", 1))

    return ret
def sighandler(sig=None, frame=None):
msg = [""] * 5
for th in threading.enumerate():
@@ -208,6 +234,8 @@ def run_argparse(argv, formatter):
except:
fk_salt = "hunter2"
cores = os.cpu_count() if hasattr(os, "cpu_count") else 4
sects = [
[
"accounts",
@@ -333,7 +361,7 @@ def run_argparse(argv, formatter):
ap2 = ap.add_argument_group('general options')
ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file")
ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all")
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark")
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
ap2.add_argument("-ed", action="store_true", help="enable ?dots")
@@ -348,12 +376,16 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload")
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without")
ap2.add_argument("--no-symlink", action="store_true", help="duplicate file contents instead")
ap2.add_argument("--reg-cap", metavar="N", type=int, default=9000, help="max number of uploads to keep in memory when running without -e2d")
ap2 = ap.add_argument_group('network options')
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="socket write delay in seconds")
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="response delay in seconds")
ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
@@ -395,23 +427,31 @@ def run_argparse(argv, formatter):
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
ap2 = ap.add_argument_group('admin panel options')
ap2.add_argument("--no-reload", action="store_true", help="disable ?reload=cfg (reload users/volumes/volflags from config file)")
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
ap2 = ap.add_argument_group('thumbnail options')
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
ap2.add_argument("--no-athumb", action="store_true", help="disable audio thumbnails (spectrograms)")
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=0, help="max num cpu cores to use, 0=all")
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for generating thumbnails")
ap2.add_argument("--th-convt", metavar="SEC", type=int, default=60, help="conversion timeout in seconds")
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs")
ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs")
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for")
ap2 = ap.add_argument_group('transcoding options')
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete transcode output after SEC seconds")
ap2 = ap.add_argument_group('general db options')
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
@@ -419,7 +459,6 @@ def run_argparse(argv, formatter):
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans")
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans")
ap2.add_argument("--re-int", metavar="SEC", type=int, default=30, help="disk rescan check interval")
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
@@ -428,8 +467,8 @@ def run_argparse(argv, formatter):
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead")
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader")
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for tag scanning")
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash")
@@ -440,6 +479,7 @@ def run_argparse(argv, formatter):
ap2 = ap.add_argument_group('ui options')
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
ap2 = ap.add_argument_group('debug options')
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
@@ -482,7 +522,12 @@ def main(argv=None):
if HAVE_SSL:
ensure_cert()
deprecated = [["-e2s", "-e2ds"]]
for k, v in zip(argv[1:], argv[2:]):
if k == "-c":
supp = args_from_cfg(v)
argv.extend(supp)
deprecated = []
for dk, nk in deprecated:
try:
idx = argv.index(dk)
@@ -494,6 +539,12 @@ def main(argv=None):
argv[idx] = nk
time.sleep(2)
try:
if len(argv) == 1 and (ANYWIN or not os.geteuid()):
argv.extend(["-p80,443,3923", "--ign-ebind"])
except:
pass
try:
al = run_argparse(argv, RiceFormatter)
except AssertionError:

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (1, 0, 13)
CODENAME = "sufficient"
BUILD_DT = (2021, 10, 24)
VERSION = (1, 1, 3)
CODENAME = "opus"
BUILD_DT = (2021, 11, 20)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -526,8 +526,27 @@ class AuthSrv(object):
yield prev, True
def _map_volume(self, src, dst, mount, daxs, mflags):
    """Register one volume mapping (src fs-path at dst vpath).

    Refuses a second mount at an already-taken vpath (fatal),
    and warns when the same fs-path is mounted at several vpaths.
    """
    # two different fs-paths at the same vpath is a config error
    if dst in mount:
        m = "multiple filesystem-paths mounted at [/{}]:\n [{}]\n [{}]"
        self.log(m.format(dst, mount[dst], src), c=1)
        raise Exception("invalid config")

    # same fs-path at several vpaths is legal but suspicious; warn
    if src in mount.values():
        dupes = [k for k, v in mount.items() if v == src]
        dupes.append(dst)
        m = "warning: filesystem-path [{}] mounted in multiple locations:".format(src)
        for vp in dupes:
            m += "\n /{}".format(vp)
        self.log(m, c=3)

    mount[dst] = src
    daxs[dst] = AXS()
    mflags[dst] = {}
def _parse_config_file(self, fd, acct, daxs, mflags, mount):
# type: (any, str, dict[str, AXS], any, str) -> None
skip = False
vol_src = None
vol_dst = None
self.line_ctr = 0
@@ -537,6 +556,11 @@ class AuthSrv(object):
vol_src = None
vol_dst = None
if skip:
if not ln:
skip = False
continue
if not ln or ln.startswith("#"):
continue
@@ -544,6 +568,8 @@ class AuthSrv(object):
if ln.startswith("u "):
u, p = ln[2:].split(":", 1)
acct[u] = p
elif ln.startswith("-"):
skip = True # argv
else:
vol_src = ln
continue
@@ -556,9 +582,7 @@ class AuthSrv(object):
# cfg files override arguments and previous files
vol_src = bos.path.abspath(vol_src)
vol_dst = vol_dst.strip("/")
mount[vol_dst] = vol_src
daxs[vol_dst] = AXS()
mflags[vol_dst] = {}
self._map_volume(vol_src, vol_dst, mount, daxs, mflags)
continue
try:
@@ -597,7 +621,7 @@ class AuthSrv(object):
if uname == "":
uname = "*"
for un in uname.split(","):
for un in uname.replace(",", " ").strip().split():
if "r" in lvl:
axs.uread[un] = 1
@@ -663,9 +687,7 @@ class AuthSrv(object):
# print("\n".join([src, dst, perms]))
src = bos.path.abspath(src)
dst = dst.strip("/")
mount[dst] = src
daxs[dst] = AXS()
mflags[dst] = {}
self._map_volume(src, dst, mount, daxs, mflags)
for x in perms.split(":"):
lvl, uname = x.split(",", 1) if "," in x else [x, ""]
@@ -986,7 +1008,7 @@ class AuthSrv(object):
v, _ = vfs.get("/", "*", False, True)
if self.warn_anonwrite and os.getcwd() == v.realpath:
self.warn_anonwrite = False
msg = "anyone can read/write the current directory: {}"
msg = "anyone can read/write the current directory: {}\n"
self.log(msg.format(v.realpath), c=1)
except Pebkac:
self.warn_anonwrite = True

View File

@@ -21,6 +21,10 @@ def getsize(p):
return os.path.getsize(fsenc(p))
def isfile(p):
    """os.path.isfile for a unicode path, encoded for the OS by fsenc."""
    return os.path.isfile(fsenc(p))

def isdir(p):
    """os.path.isdir for a unicode path, encoded for the OS by fsenc."""
    return os.path.isdir(fsenc(p))

View File

@@ -62,6 +62,11 @@ class BrokerMp(object):
procs.pop()
def reload(self):
    """Ask every worker process to reload its config (users/volumes)
    by queueing a "reload" message on each process pipe."""
    self.log("broker", "reloading")
    # plain iteration; the previous enumerate() discarded its index
    for proc in self.procs:
        proc.q_pend.put([0, "reload", []])
def collector(self, proc):
"""receive message from hub in other process"""
while True:

View File

@@ -29,7 +29,7 @@ class MpWorker(object):
# we inherited signal_handler from parent,
# replace it with something harmless
if not FAKE_MP:
for sig in [signal.SIGINT, signal.SIGTERM]:
for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGUSR1]:
signal.signal(sig, self.signal_handler)
# starting to look like a good idea
@@ -69,6 +69,11 @@ class MpWorker(object):
sys.exit(0)
return
elif dest == "reload":
self.logw("mpw.asrv reloading")
self.asrv.reload()
self.logw("mpw.asrv reloaded")
elif dest == "listen":
self.httpsrv.listen(args[0], args[1])

View File

@@ -21,10 +21,13 @@ class BrokerThr(object):
# instantiate all services here (TODO: inheritance?)
self.httpsrv = HttpSrv(self, None)
self.reload = self.noop
def shutdown(self):
    """Stop the http server; single-process, so nothing else to notify."""
    # self.log("broker", "shutting down")
    self.httpsrv.shutdown()

def noop(self):
    """Do nothing; stands in for broker ops (e.g. reload) that are
    handled elsewhere in the single-process configuration."""
    pass
def put(self, want_retval, dest, *args):

View File

@@ -126,7 +126,8 @@ class HttpCli(object):
self.loud_reply(unicode(ex), status=ex.code, volsan=True)
return self.keepalive
# time.sleep(0.4)
if self.args.rsp_slp:
time.sleep(self.args.rsp_slp)
# normalize incoming headers to lowercase;
# outgoing headers however are Correct-Case
@@ -227,8 +228,8 @@ class HttpCli(object):
if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"):
self.out_headers["Set-Cookie"] = self.get_pwd_cookie(pwd)[0]
ua = self.headers.get("user-agent", "")
self.is_rclone = ua.startswith("rclone/")
self.ua = self.headers.get("user-agent", "")
self.is_rclone = self.ua.startswith("rclone/")
if self.is_rclone:
uparam["raw"] = False
uparam["dots"] = False
@@ -283,12 +284,19 @@ class HttpCli(object):
n = "604800" if cache == "i" else cache or "69"
self.out_headers["Cache-Control"] = "max-age=" + n
def k304(self):
    """Whether HTTP 304 replies should drop the connection: an explicit
    "y" cookie forces it, and with no cookie set it defaults to on for
    Trident-based browsers (presumably old IE -- verify)."""
    pref = self.cookies.get("k304", "")
    if pref == "y":
        return True
    return not pref and "; Trident/" in self.ua
def send_headers(self, length, status=200, mime=None, headers=None):
response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])]
if length is not None:
response.append("Content-Length: " + unicode(length))
if status == 304 and self.k304():
self.keepalive = False
# close if unknown length, otherwise take client's preference
response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close"))
@@ -298,7 +306,7 @@ class HttpCli(object):
# default to utf8 html if no content-type is set
if not mime:
mime = self.out_headers.get("Content-Type", "text/html; charset=UTF-8")
mime = self.out_headers.get("Content-Type", "text/html; charset=utf-8")
self.out_headers["Content-Type"] = mime
@@ -419,12 +427,18 @@ class HttpCli(object):
return self.scanvol()
if not self.vpath:
if "reload" in self.uparam:
return self.handle_reload()
if "stack" in self.uparam:
return self.tx_stack()
if "ups" in self.uparam:
return self.tx_ups()
if "k304" in self.uparam:
return self.set_k304()
if "h" in self.uparam:
return self.tx_mounts()
@@ -502,7 +516,7 @@ class HttpCli(object):
return self.handle_stash()
if "save" in opt:
post_sz, _, _, path = self.dump_to_file()
post_sz, _, _, _, path = self.dump_to_file()
self.log("urlform: {} bytes, {}".format(post_sz, path))
elif "print" in opt:
reader, _ = self.get_body_reader()
@@ -529,11 +543,11 @@ class HttpCli(object):
raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
def get_body_reader(self):
chunked = "chunked" in self.headers.get("transfer-encoding", "").lower()
if "chunked" in self.headers.get("transfer-encoding", "").lower():
return read_socket_chunked(self.sr), -1
remains = int(self.headers.get("content-length", -1))
if chunked:
return read_socket_chunked(self.sr), remains
elif remains == -1:
if remains == -1:
self.keepalive = False
return read_socket_unbounded(self.sr), remains
else:
@@ -618,7 +632,7 @@ class HttpCli(object):
with ren_open(fn, *open_a, **params) as f:
f, fn = f["orz"]
path = os.path.join(fdir, fn)
post_sz, _, sha_b64 = hashcopy(reader, f)
post_sz, sha_hex, sha_b64 = hashcopy(reader, f)
if lim:
lim.nup(self.ip)
@@ -642,13 +656,14 @@ class HttpCli(object):
time.time(),
)
return post_sz, sha_b64, remains, path
return post_sz, sha_hex, sha_b64, remains, path
def handle_stash(self):
post_sz, sha_b64, remains, path = self.dump_to_file()
post_sz, sha_hex, sha_b64, remains, path = self.dump_to_file()
spd = self._spd(post_sz)
self.log("{} wrote {}/{} bytes to {}".format(spd, post_sz, remains, path))
self.reply("{}\n{}\n".format(post_sz, sha_b64).encode("utf-8"))
m = "{}\n{}\n{}\n".format(post_sz, sha_b64, sha_hex[:56])
self.reply(m.encode("utf-8"))
return True
def _spd(self, nbytes, add=True):
@@ -780,6 +795,10 @@ class HttpCli(object):
return True
def handle_search(self, body):
idx = self.conn.get_u2idx()
if not hasattr(idx, "p_end"):
raise Pebkac(500, "sqlite3 is not available on the server; cannot search")
vols = []
seen = {}
for vtop in self.rvol:
@@ -791,7 +810,6 @@ class HttpCli(object):
seen[vfs] = True
vols.append([vfs.vpath, vfs.realpath, vfs.flags])
idx = self.conn.get_u2idx()
t0 = time.time()
if idx.p_end:
penalty = 0.7
@@ -851,63 +869,63 @@ class HttpCli(object):
response = x.get()
chunksize, cstart, path, lastmod = response
if self.args.nw:
path = os.devnull
if remains > chunksize:
raise Pebkac(400, "your chunk is too big to fit")
self.log("writing {} #{} @{} len {}".format(path, chash, cstart, remains))
reader = read_socket(self.sr, remains)
f = None
fpool = not self.args.no_fpool
if fpool:
with self.mutex:
try:
f = self.u2fh.pop(path)
except:
pass
f = f or open(fsenc(path), "rb+", 512 * 1024)
try:
f.seek(cstart[0])
post_sz, _, sha_b64 = hashcopy(reader, f)
if self.args.nw:
path = os.devnull
if sha_b64 != chash:
raise Pebkac(
400,
"your chunk got corrupted somehow (received {} bytes); expected vs received hash:\n{}\n{}".format(
post_sz, chash, sha_b64
),
)
if remains > chunksize:
raise Pebkac(400, "your chunk is too big to fit")
if len(cstart) > 1 and path != os.devnull:
self.log(
"clone {} to {}".format(
cstart[0], " & ".join(unicode(x) for x in cstart[1:])
)
)
ofs = 0
while ofs < chunksize:
bufsz = min(chunksize - ofs, 4 * 1024 * 1024)
f.seek(cstart[0] + ofs)
buf = f.read(bufsz)
for wofs in cstart[1:]:
f.seek(wofs + ofs)
f.write(buf)
self.log("writing {} #{} @{} len {}".format(path, chash, cstart, remains))
ofs += len(buf)
reader = read_socket(self.sr, remains)
self.log("clone {} done".format(cstart[0]))
finally:
if not fpool:
f.close()
else:
f = None
fpool = not self.args.no_fpool
if fpool:
with self.mutex:
self.u2fh.put(path, f)
try:
f = self.u2fh.pop(path)
except:
pass
f = f or open(fsenc(path), "rb+", 512 * 1024)
try:
f.seek(cstart[0])
post_sz, _, sha_b64 = hashcopy(reader, f)
if sha_b64 != chash:
m = "your chunk got corrupted somehow (received {} bytes); expected vs received hash:\n{}\n{}"
raise Pebkac(400, m.format(post_sz, chash, sha_b64))
if len(cstart) > 1 and path != os.devnull:
self.log(
"clone {} to {}".format(
cstart[0], " & ".join(unicode(x) for x in cstart[1:])
)
)
ofs = 0
while ofs < chunksize:
bufsz = min(chunksize - ofs, 4 * 1024 * 1024)
f.seek(cstart[0] + ofs)
buf = f.read(bufsz)
for wofs in cstart[1:]:
f.seek(wofs + ofs)
f.write(buf)
ofs += len(buf)
self.log("clone {} done".format(cstart[0]))
finally:
if not fpool:
f.close()
else:
with self.mutex:
self.u2fh.put(path, f)
finally:
x = self.conn.hsrv.broker.put(True, "up2k.release_chunk", ptop, wark, chash)
x.get() # block client until released
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
x = x.get()
@@ -954,15 +972,13 @@ class HttpCli(object):
def get_pwd_cookie(self, pwd):
if pwd in self.asrv.iacct:
msg = "login ok"
dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365)
exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
dur = 60 * 60 * 24 * 365
else:
msg = "naw dude"
pwd = "x" # nosec
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
dur = None
ck = "cppwd={}; Path=/; Expires={}; SameSite=Lax".format(pwd, exp)
return [ck, msg]
return [gencookie("cppwd", pwd, dur), msg]
def handle_mkdir(self):
new_dir = self.parser.require("name", 512)
@@ -1070,7 +1086,7 @@ class HttpCli(object):
f, fname = f["orz"]
abspath = os.path.join(fdir, fname)
self.log("writing to {}".format(abspath))
sz, sha512_hex, _ = hashcopy(p_data, f)
sz, sha_hex, sha_b64 = hashcopy(p_data, f)
if sz == 0:
raise Pebkac(400, "empty files in post")
@@ -1083,7 +1099,7 @@ class HttpCli(object):
bos.unlink(abspath)
raise
files.append([sz, sha512_hex, p_file, fname, abspath])
files.append([sz, sha_hex, sha_b64, p_file, fname, abspath])
dbv, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put(
False,
@@ -1135,7 +1151,7 @@ class HttpCli(object):
jmsg["error"] = errmsg
errmsg = "ERROR: " + errmsg
for sz, sha512, ofn, lfn, ap in files:
for sz, sha_hex, sha_b64, ofn, lfn, ap in files:
vsuf = ""
if self.can_read and "fk" in vfs.flags:
vsuf = "?k=" + gen_filekey(
@@ -1146,8 +1162,13 @@ class HttpCli(object):
)[: vfs.flags["fk"]]
vpath = "{}/{}".format(upload_vpath, lfn).strip("/")
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a> {}\n'.format(
sha512[:56], sz, quotep(vpath) + vsuf, html_escape(ofn, crlf=True), vsuf
msg += 'sha512: {} // {} // {} bytes // <a href="/{}">{}</a> {}\n'.format(
sha_hex[:56],
sha_b64,
sz,
quotep(vpath) + vsuf,
html_escape(ofn, crlf=True),
vsuf,
)
# truncated SHA-512 prevents length extension attacks;
# using SHA-512/224, optionally SHA-512/256 = :64
@@ -1157,7 +1178,8 @@ class HttpCli(object):
self.headers.get("host", "copyparty"),
vpath + vsuf,
),
"sha512": sha512[:56],
"sha512": sha_hex[:56],
"sha_b64": sha_b64,
"sz": sz,
"fn": lfn,
"fn_orig": ofn,
@@ -1375,8 +1397,7 @@ class HttpCli(object):
if "gzip" not in supported_editions:
decompress = True
else:
ua = self.headers.get("user-agent", "")
if re.match(r"MSIE [4-6]\.", ua) and " SV1" not in ua:
if re.match(r"MSIE [4-6]\.", self.ua) and " SV1" not in self.ua:
decompress = True
if not decompress:
@@ -1461,12 +1482,15 @@ class HttpCli(object):
else:
self.permit_caching()
if "txt" in self.uparam:
mime = "text/plain; charset={}".format(self.uparam["txt"] or "utf-8")
elif "mime" in self.uparam:
mime = self.uparam.get("mime")
else:
mime = guess_mime(req_path)
self.out_headers["Accept-Ranges"] = "bytes"
self.send_headers(
length=upper - lower,
status=status,
mime=guess_mime(req_path),
)
self.send_headers(length=upper - lower, status=status, mime=mime)
logmsg += unicode(status) + logtail
@@ -1478,10 +1502,10 @@ class HttpCli(object):
ret = True
with open_func(*open_args) as f:
if use_sendfile:
remains = sendfile_kern(lower, upper, f, self.s)
else:
remains = sendfile_py(lower, upper, f, self.s)
sendfun = sendfile_kern if use_sendfile else sendfile_py
remains = sendfun(
lower, upper, f, self.s, self.args.s_wr_sz, self.args.s_wr_slp
)
if remains > 0:
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
@@ -1686,10 +1710,16 @@ class HttpCli(object):
tagq=vs["tagq"],
mtpq=vs["mtpq"],
url_suf=suf,
k304=self.k304(),
)
self.reply(html.encode("utf-8"))
return True
def set_k304(self):
    """Persist the ?k304= preference as a year-long cookie and
    redirect the client back to the control panel."""
    one_year = 60 * 60 * 24 * 365
    self.out_headers["Set-Cookie"] = gencookie("k304", self.uparam["k304"], one_year)
    self.redirect("", "?h#cc")
def tx_404(self, is_403=False):
if self.args.vague_403:
m = '<h1>404 not found &nbsp;┐( ´ -`)┌</h1><p>or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>'
@@ -1711,7 +1741,7 @@ class HttpCli(object):
vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True)
args = [self.asrv.vfs.all_vols, [vn.vpath]]
args = [self.asrv.vfs.all_vols, [vn.vpath], False]
x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args)
x = x.get()
@@ -1721,6 +1751,20 @@ class HttpCli(object):
raise Pebkac(500, x)
def handle_reload(self):
    """Handle ?reload=cfg: ask the broker to re-read the server config.

    Requires read+write access on at least one volume, and is
    rejected entirely when --no-reload is set.
    """
    if self.uparam.get("reload") != "cfg":
        raise Pebkac(400, "only config files ('cfg') can be reloaded rn")

    # user must have both read and write on some volume
    if not any(v in self.rvol for v in self.wvol):
        raise Pebkac(403, "not allowed for user " + self.uname)

    if self.args.no_reload:
        raise Pebkac(403, "the reload feature is disabled in server config")

    x = self.conn.hsrv.broker.put(True, "reload")
    return self.redirect("", "?h", x.get(), "return to", False)
def tx_stack(self):
if not [x for x in self.wvol if x in self.rvol]:
raise Pebkac(403, "not allowed for user " + self.uname)
@@ -1792,13 +1836,16 @@ class HttpCli(object):
if not self.args.unpost:
raise Pebkac(400, "the unpost feature is disabled in server config")
idx = self.conn.get_u2idx()
if not hasattr(idx, "p_end"):
raise Pebkac(500, "sqlite3 is not available on the server; cannot unpost")
filt = self.uparam.get("filter")
lm = "ups [{}]".format(filt)
self.log(lm)
ret = []
t0 = time.time()
idx = self.conn.get_u2idx()
lim = time.time() - self.args.unpost
for vol in self.asrv.vfs.all_vols.values():
cur = idx.get_cur(vol.realpath)
@@ -1856,6 +1903,64 @@ class HttpCli(object):
)
self.loud_reply(x.get())
def tx_ls(self, ls):
    """Send a directory listing in the format selected by ?ls=...

    ?ls=v renders an ANSI-colored terminal listing, ?ls=t / ?ls=txt a
    plain text table; anything else replies with the dict as JSON.
    Mutates the "dt"/"sz" fields of the entries in `ls` in-place when
    colorizing.  Returns True (reply has been sent).
    """
    dirs = ls["dirs"]
    files = ls["files"]
    arg = self.uparam["ls"]
    if arg in ["v", "t", "txt"]:
        # widest size value decides the column width below
        try:
            biggest = max(ls["files"] + ls["dirs"], key=itemgetter("sz"))["sz"]
        except:
            biggest = 0

        if arg == "v":
            # colored output: inverse-video date, right-aligned size
            fmt = "\033[0;7;36m{{}} {{:>{}}}\033[0m {{}}"
            nfmt = "{}"
            biggest = 0
            # f2 joins the 6 date/time fields (Y M D h m s) with
            # alternating ANSI attributes for readability
            f2 = "".join(
                "{}{{}}".format(x)
                for x in [
                    "\033[7m",
                    "\033[27m",
                    "",
                    "\033[0;1m",
                    "\033[0;36m",
                    "\033[0m",
                ]
            )
            # color index per size suffix (B/K/M/G); 0 = no match
            ctab = {"B": 6, "K": 5, "M": 1, "G": 3}
            for lst in [dirs, files]:
                for x in lst:
                    # "YYYY-MM-DD HH:MM:SS" -> six fields for f2
                    a = x["dt"].replace("-", " ").replace(":", " ").split(" ")
                    x["dt"] = f2.format(*list(a))
                    sz = humansize(x["sz"], True)
                    x["sz"] = "\033[0;3{}m{:>5}".format(ctab.get(sz[-1:], 0), sz)
        else:
            # plain text: thousands-separated byte counts
            fmt = "{{}} {{:{},}} {{}}"
            nfmt = "{:,}"

        # bake the column width into the row format
        fmt = fmt.format(len(nfmt.format(biggest)))
        # header lines for whichever metadata keys are present
        ret = [
            "# {}: {}".format(x, ls[x])
            for x in ["acct", "perms", "srvinf"]
            if x in ls
        ]
        # directories first, then files
        ret += [
            fmt.format(x["dt"], x["sz"], x["name"])
            for y in [dirs, files]
            for x in y
        ]
        ret = "\n".join(ret)
        mime = "text/plain; charset=utf-8"
    else:
        # JSON reply; drop display-only fields first
        [x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]

        ret = json.dumps(ls)
        mime = "application/json"

    self.reply(ret.encode("utf-8", "replace") + b"\n", mime=mime)
    return True
def tx_browser(self):
vpath = ""
vpnodes = [["", "/"]]
@@ -2014,6 +2119,7 @@ class HttpCli(object):
"def_hcols": [],
"have_up2k_idx": ("e2d" in vn.flags),
"have_tags_idx": ("e2t" in vn.flags),
"have_acode": (not self.args.no_acode),
"have_mv": (not self.args.no_mv),
"have_del": (not self.args.no_del),
"have_zip": (not self.args.no_zip),
@@ -2027,9 +2133,7 @@ class HttpCli(object):
}
if not self.can_read:
if is_ls:
ret = json.dumps(ls_ret)
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
return True
return self.tx_ls(ls_ret)
if not stat.S_ISDIR(st.st_mode):
return self.tx_404(True)
@@ -2126,6 +2230,8 @@ class HttpCli(object):
try:
ext = "---" if is_dir else fn.rsplit(".", 1)[1]
if len(ext) > 16:
ext = ext[:16]
except:
ext = "%"
@@ -2204,13 +2310,26 @@ class HttpCli(object):
f["tags"] = {}
if is_ls:
[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
ls_ret["dirs"] = dirs
ls_ret["files"] = files
ls_ret["taglist"] = taglist
ret = json.dumps(ls_ret)
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
return True
return self.tx_ls(ls_ret)
doc = self.uparam.get("doc") if self.can_read else None
if doc:
doc = unquotep(doc.replace("+", " "))
j2a["docname"] = doc
if next((x for x in files if x["name"] == doc), None):
with open(os.path.join(abspath, doc), "rb") as f:
doc = f.read().decode("utf-8", "replace")
else:
self.log("doc 404: [{}]".format(doc), c=6)
doc = "( textfile not found )"
j2a["doc"] = doc
if not self.conn.hsrv.prism:
j2a["no_prism"] = True
for d in dirs:
d["name"] += "/"
@@ -2220,6 +2339,7 @@ class HttpCli(object):
j2a["files"] = dirs + files
j2a["logues"] = logues
j2a["taglist"] = taglist
j2a["txt_ext"] = self.args.textfiles.replace(",", " ")
if "mth" in vn.flags:
j2a["def_hcols"] = vn.flags["mth"].split(",")

View File

@@ -39,7 +39,7 @@ class HttpConn(object):
self.u2fh = hsrv.u2fh
enth = HAVE_PIL and not self.args.no_thumb
self.thumbcli = ThumbCli(hsrv.broker) if enth else None
self.thumbcli = ThumbCli(hsrv) if enth else None
self.ico = Ico(self.args)
self.t0 = time.time()

View File

@@ -50,10 +50,9 @@ class HttpSrv(object):
self.log = broker.log
self.asrv = broker.asrv
nsuf = "-{}".format(nid) if nid else ""
nsuf2 = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
self.name = "hsrv" + nsuf2
self.name = "hsrv" + nsuf
self.mutex = threading.Lock()
self.stopping = False
@@ -61,6 +60,7 @@ class HttpSrv(object):
self.tp_ncli = 0 # fading
self.tp_time = None # latest worker collect
self.tp_q = None if self.args.no_htp else queue.LifoQueue()
self.t_periodic = None
self.u2fh = FHC()
self.srvs = []
@@ -76,6 +76,7 @@ class HttpSrv(object):
x: env.get_template(x + ".html")
for x in ["splash", "browser", "browser2", "msg", "md", "mde"]
}
self.prism = os.path.exists(os.path.join(E.mod, "web", "deps", "prism.js.gz"))
cert_path = os.path.join(E.cfg, "cert.pem")
if bos.path.exists(cert_path):
@@ -93,10 +94,6 @@ class HttpSrv(object):
if self.args.log_thrs:
start_log_thrs(self.log, self.args.log_thrs, nid)
t = threading.Thread(target=self.periodic, name="hsrv-pt" + nsuf)
t.daemon = True
t.start()
def start_threads(self, n):
self.tp_nthr += n
if self.args.log_htp:
@@ -120,7 +117,7 @@ class HttpSrv(object):
def periodic(self):
while True:
time.sleep(2 if self.tp_ncli else 10)
time.sleep(2 if self.tp_ncli or self.ncli else 10)
with self.mutex:
self.u2fh.clean()
if self.tp_q:
@@ -128,6 +125,10 @@ class HttpSrv(object):
if self.tp_nthr > self.tp_ncli + 8:
self.stop_threads(4)
if not self.ncli and not self.u2fh.cache and self.tp_nthr <= 8:
self.t_periodic = None
return
def listen(self, sck, nlisteners):
ip, port = sck.getsockname()
self.srvs.append(sck)
@@ -146,7 +147,12 @@ class HttpSrv(object):
fno = srv_sck.fileno()
msg = "subscribed @ {}:{} f{}".format(ip, port, fno)
self.log(self.name, msg)
self.broker.put(False, "cb_httpsrv_up")
def fun():
self.broker.put(False, "cb_httpsrv_up")
threading.Thread(target=fun).start()
while not self.stopping:
if self.args.log_conn:
self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30")
@@ -186,6 +192,16 @@ class HttpSrv(object):
with self.mutex:
self.ncli += 1
if not self.t_periodic:
name = "hsrv-pt"
if self.nid:
name += "-{}".format(self.nid)
t = threading.Thread(target=self.periodic, name=name)
self.t_periodic = t
t.daemon = True
t.start()
if self.tp_q:
self.tp_time = self.tp_time or now
self.tp_ncli = max(self.tp_ncli, self.ncli)

View File

@@ -8,7 +8,7 @@ import shutil
import subprocess as sp
from .__init__ import PY2, WINDOWS, unicode
from .util import fsenc, fsdec, uncyg, REKOBO_LKEY
from .util import fsenc, fsdec, uncyg, runcmd, REKOBO_LKEY
from .bos import bos
@@ -73,7 +73,7 @@ class MParser(object):
raise Exception()
def ffprobe(abspath):
def ffprobe(abspath, timeout=10):
cmd = [
b"ffprobe",
b"-hide_banner",
@@ -82,10 +82,8 @@ def ffprobe(abspath):
b"--",
fsenc(abspath),
]
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
r = p.communicate()
txt = r[0].decode("utf-8", "replace")
return parse_ffprobe(txt)
rc = runcmd(cmd, timeout=timeout)
return parse_ffprobe(rc[1])
def parse_ffprobe(txt):
@@ -413,6 +411,9 @@ class MTag(object):
return r1
def get_mutagen(self, abspath):
if not bos.path.isfile(abspath):
return {}
import mutagen
try:
@@ -458,10 +459,16 @@ class MTag(object):
return self.normalize_tags(ret, md)
def get_ffprobe(self, abspath):
if not bos.path.isfile(abspath):
return {}
ret, md = ffprobe(abspath)
return self.normalize_tags(ret, md)
def get_bin(self, parsers, abspath):
if not bos.path.isfile(abspath):
return {}
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
pypath = [str(pypath)] + [str(x) for x in sys.path if x]
pypath = str(os.pathsep.join(pypath))

View File

@@ -18,6 +18,7 @@ from .authsrv import AuthSrv
from .tcpsrv import TcpSrv
from .up2k import Up2k
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
class SvcHub(object):
@@ -36,7 +37,9 @@ class SvcHub(object):
self.argv = argv
self.logf = None
self.stop_req = False
self.reload_req = False
self.stopping = False
self.reloading = False
self.stop_cond = threading.Condition()
self.retcode = 0
self.httpsrv_up = 0
@@ -54,8 +57,10 @@ class SvcHub(object):
if args.log_thrs:
start_log_thrs(self.log, args.log_thrs, 0)
if not ANYWIN and not args.use_fpool:
if not args.use_fpool and args.j != 1:
args.no_fpool = True
m = "multithreading enabled with -j {}, so disabling fpool -- this can reduce upload performance on some filesystems"
self.log("root", m.format(args.j))
if not args.no_fpool and args.j != 1:
m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
@@ -88,11 +93,22 @@ class SvcHub(object):
"thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3
)
if not args.no_acode and args.no_thumb:
msg = "setting --no-acode because --no-thumb (sorry)"
self.log("thumb", msg, c=6)
args.no_acode = True
if not args.no_acode and (not HAVE_FFMPEG or not HAVE_FFPROBE):
msg = "setting --no-acode because either FFmpeg or FFprobe is not available"
self.log("thumb", msg, c=6)
args.no_acode = True
args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)
# decide which worker impl to use
if self.check_mp_enable():
from .broker_mp import BrokerMp as Broker
else:
self.log("root", "cannot efficiently use multiple CPU cores")
from .broker_thr import BrokerThr as Broker
self.broker = Broker(self)
@@ -104,16 +120,16 @@ class SvcHub(object):
if not failed:
return
m = "{}/{} workers failed to start"
m = m.format(failed, expected)
self.log("root", m, 1)
if self.args.ign_ebind_all:
return
if self.args.ign_ebind and self.tcpsrv.srv:
return
m = "{}/{} workers failed to start"
m = m.format(failed, expected)
self.log("root", m, 1)
self.retcode = 1
os.kill(os.getpid(), signal.SIGTERM)
@@ -122,6 +138,7 @@ class SvcHub(object):
if self.httpsrv_up != self.broker.num_workers:
return
time.sleep(0.1) # purely cosmetic dw
self.log("root", "workers OK\n")
self.up2k.init_vols()
@@ -182,7 +199,11 @@ class SvcHub(object):
thr.daemon = True
thr.start()
for sig in [signal.SIGINT, signal.SIGTERM]:
sigs = [signal.SIGINT, signal.SIGTERM]
if not ANYWIN:
sigs.append(signal.SIGUSR1)
for sig in sigs:
signal.signal(sig, self.signal_handler)
# macos hangs after shutdown on sigterm with while-sleep,
@@ -206,18 +227,45 @@ class SvcHub(object):
else:
self.stop_thr()
def reload(self):
if self.reloading:
return "cannot reload; already in progress"
self.reloading = True
t = threading.Thread(target=self._reload)
t.daemon = True
t.start()
return "reload initiated"
def _reload(self):
self.log("root", "reload scheduled")
with self.up2k.mutex:
self.asrv.reload()
self.up2k.reload()
self.broker.reload()
self.reloading = False
def stop_thr(self):
while not self.stop_req:
with self.stop_cond:
self.stop_cond.wait(9001)
if self.reload_req:
self.reload_req = False
self.reload()
self.shutdown()
def signal_handler(self, sig, frame):
if self.stopping:
return
self.stop_req = True
if sig == signal.SIGUSR1:
self.reload_req = True
else:
self.stop_req = True
with self.stop_cond:
self.stop_cond.notify_all()
@@ -349,10 +397,10 @@ class SvcHub(object):
def check_mp_enable(self):
if self.args.j == 1:
self.log("root", "multiprocessing disabled by argument -j 1;")
return False
if mp.cpu_count() <= 1:
self.log("svchub", "only one CPU detected; multiprocessing disabled")
return False
try:
@@ -367,6 +415,7 @@ class SvcHub(object):
return True
else:
self.log("svchub", err)
self.log("svchub", "cannot efficiently use multiple CPU cores")
return False
def sd_notify(self):

View File

@@ -21,6 +21,29 @@ class TcpSrv(object):
self.stopping = False
self.srv = []
self.nsrv = 0
ok = {}
for ip in self.args.i:
ok[ip] = []
for port in self.args.p:
self.nsrv += 1
try:
self._listen(ip, port)
ok[ip].append(port)
except Exception as ex:
if self.args.ign_ebind or self.args.ign_ebind_all:
m = "could not listen on {}:{}: {}"
self.log("tcpsrv", m.format(ip, port, ex), c=3)
else:
raise
if not self.srv and not self.args.ign_ebind_all:
raise Exception("could not listen on any of the given interfaces")
if self.nsrv != len(self.srv):
self.log("tcpsrv", "")
ip = "127.0.0.1"
eps = {ip: "local only"}
nonlocals = [x for x in self.args.i if x != ip]
@@ -34,6 +57,9 @@ class TcpSrv(object):
m = "available @ http://{}:{}/ (\033[33m{}\033[0m)"
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
for port in sorted(self.args.p):
if port not in ok.get(ip, ok.get("0.0.0.0", [])):
continue
msgs.append(m.format(ip, port, desc))
if msgs:
@@ -41,23 +67,6 @@ class TcpSrv(object):
for m in msgs:
self.log("tcpsrv", m)
self.srv = []
self.nsrv = 0
for ip in self.args.i:
for port in self.args.p:
self.nsrv += 1
try:
self._listen(ip, port)
except Exception as ex:
if self.args.ign_ebind or self.args.ign_ebind_all:
m = "could not listen on {}:{}: {}"
self.log("tcpsrv", m.format(ip, port, ex), c=1)
else:
raise
if not self.srv and not self.args.ign_ebind_all:
raise Exception("could not listen on any of the given interfaces")
def _listen(self, ip, port):
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

View File

@@ -4,28 +4,44 @@ from __future__ import print_function, unicode_literals
import os
from .util import Cooldown
from .th_srv import thumb_path, THUMBABLE, FMT_FF
from .th_srv import thumb_path, THUMBABLE, FMT_FFV, FMT_FFA
from .bos import bos
class ThumbCli(object):
def __init__(self, broker):
self.broker = broker
self.args = broker.args
self.asrv = broker.asrv
def __init__(self, hsrv):
self.broker = hsrv.broker
self.log_func = hsrv.log
self.args = hsrv.args
self.asrv = hsrv.asrv
# cache on both sides for less broker spam
self.cooldown = Cooldown(self.args.th_poke)
def log(self, msg, c=0):
self.log_func("thumbcli", msg, c)
def get(self, ptop, rem, mtime, fmt):
ext = rem.rsplit(".")[-1].lower()
if ext not in THUMBABLE:
return None
is_vid = ext in FMT_FF
is_vid = ext in FMT_FFV
if is_vid and self.args.no_vthumb:
return None
want_opus = fmt in ("opus", "caf")
is_au = ext in FMT_FFA
if is_au:
if want_opus:
if self.args.no_acode:
return None
else:
if self.args.no_athumb:
return None
elif want_opus:
return None
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg"]:
return os.path.join(ptop, rem)
@@ -33,10 +49,14 @@ class ThumbCli(object):
fmt = "w"
if fmt == "w":
if self.args.th_no_webp or (is_vid and self.args.th_ff_jpg):
if self.args.th_no_webp or ((is_vid or is_au) and self.args.th_ff_jpg):
fmt = "j"
histpath = self.asrv.vfs.histtab[ptop]
histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath:
self.log("no histpath for [{}]".format(ptop))
return None
tpath = thumb_path(histpath, rem, mtime, fmt)
ret = None
try:
@@ -53,6 +73,11 @@ class ThumbCli(object):
if self.cooldown.poke(tdir):
self.broker.put(False, "thumbsrv.poke", tdir)
if want_opus:
# audio files expire individually
if self.cooldown.poke(tpath):
self.broker.put(False, "thumbsrv.poke", tpath)
return ret
x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt)

View File

@@ -10,7 +10,7 @@ import threading
import subprocess as sp
from .__init__ import PY2, unicode
from .util import fsenc, vsplit, runcmd, Queue, Cooldown, BytesIO, min_ex
from .util import fsenc, vsplit, statdir, runcmd, Queue, Cooldown, BytesIO, min_ex
from .bos import bos
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
@@ -50,7 +50,8 @@ except:
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# ffmpeg -formats
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
FMT_FFV = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
FMT_FFA = "aac m4a ogg opus flac alac mp3 mp2 ac3 dts wma ra wav aif aiff au alaw ulaw mulaw amr gsm ape tak tta wv"
if HAVE_HEIF:
FMT_PIL += " heif heifs heic heics"
@@ -58,7 +59,9 @@ if HAVE_HEIF:
if HAVE_AVIF:
FMT_PIL += " avif avifs"
FMT_PIL, FMT_FF = [{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FF]]
FMT_PIL, FMT_FFV, FMT_FFA = [
{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FFV, FMT_FFA]
]
THUMBABLE = {}
@@ -67,7 +70,8 @@ if HAVE_PIL:
THUMBABLE.update(FMT_PIL)
if HAVE_FFMPEG and HAVE_FFPROBE:
THUMBABLE.update(FMT_FF)
THUMBABLE.update(FMT_FFV)
THUMBABLE.update(FMT_FFA)
def thumb_path(histpath, rem, mtime, fmt):
@@ -86,9 +90,13 @@ def thumb_path(histpath, rem, mtime, fmt):
h = hashlib.sha512(fsenc(fn)).digest()
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
return "{}/th/{}/{}.{:x}.{}".format(
histpath, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
)
if fmt in ("opus", "caf"):
cat = "ac"
else:
fmt = "webp" if fmt == "w" else "jpg"
cat = "th"
return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt)
class ThumbSrv(object):
@@ -105,9 +113,7 @@ class ThumbSrv(object):
self.mutex = threading.Lock()
self.busy = {}
self.stopping = False
self.nthr = self.args.th_mt
if not self.nthr:
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
self.nthr = max(1, self.args.th_mt)
self.q = Queue(self.nthr * 4)
for n in range(self.nthr):
@@ -117,7 +123,8 @@ class ThumbSrv(object):
t.daemon = True
t.start()
if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE):
want_ff = not self.args.no_vthumb or not self.args.no_athumb
if want_ff and (not HAVE_FFMPEG or not HAVE_FFPROBE):
missing = []
if not HAVE_FFMPEG:
missing.append("FFmpeg")
@@ -125,12 +132,12 @@ class ThumbSrv(object):
if not HAVE_FFPROBE:
missing.append("FFprobe")
msg = "cannot create video thumbnails because some of the required programs are not available: "
msg = "cannot create audio/video thumbnails because some of the required programs are not available: "
msg += ", ".join(missing)
self.log(msg, c=3)
if self.args.th_clean:
t = threading.Thread(target=self.cleaner, name="thumb-cleaner")
t = threading.Thread(target=self.cleaner, name="thumb.cln")
t.daemon = True
t.start()
@@ -147,7 +154,11 @@ class ThumbSrv(object):
return not self.nthr
def get(self, ptop, rem, mtime, fmt):
histpath = self.asrv.vfs.histtab[ptop]
histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath:
self.log("no histpath for [{}]".format(ptop))
return None
tpath = thumb_path(histpath, rem, mtime, fmt)
abspath = os.path.join(ptop, rem)
cond = threading.Condition(self.mutex)
@@ -183,6 +194,7 @@ class ThumbSrv(object):
try:
st = bos.stat(tpath)
if st.st_size:
self.poke(tpath)
return tpath
except:
pass
@@ -201,8 +213,13 @@ class ThumbSrv(object):
if not bos.path.exists(tpath):
if ext in FMT_PIL:
fun = self.conv_pil
elif ext in FMT_FF:
elif ext in FMT_FFV:
fun = self.conv_ffmpeg
elif ext in FMT_FFA:
if tpath.endswith(".opus") or tpath.endswith(".caf"):
fun = self.conv_opus
else:
fun = self.conv_spec
if fun:
try:
@@ -328,25 +345,116 @@ class ThumbSrv(object):
]
cmd += [fsenc(tpath)]
# self.log((b" ".join(cmd)).decode("utf-8"))
self._run_ff(cmd)
ret, sout, serr = runcmd(cmd)
def _run_ff(self, cmd):
# self.log((b" ".join(cmd)).decode("utf-8"))
ret, sout, serr = runcmd(cmd, timeout=self.args.th_convt)
if ret != 0:
m = "FFmpeg failed (probably a corrupt video file):\n"
m += "\n".join(["ff: {}".format(x) for x in serr.split("\n")])
self.log(m, c="1;30")
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
def conv_spec(self, abspath, tpath):
ret, _ = ffprobe(abspath)
if "ac" not in ret:
raise Exception("not audio")
fc = "[0:a:0]aresample=48000{},showspectrumpic=s=640x512,crop=780:544:70:50[o]"
if self.args.th_ff_swr:
fco = ":filter_size=128:cutoff=0.877"
else:
fco = ":resampler=soxr"
fc = fc.format(fco)
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath),
b"-filter_complex", fc.encode("utf-8"),
b"-map", b"[o]"
]
# fmt: on
if tpath.endswith(".jpg"):
cmd += [
b"-q:v",
b"6", # default=??
]
else:
cmd += [
b"-q:v",
b"50", # default=75
b"-compression_level:v",
b"6", # default=4, 0=fast, 6=max
]
cmd += [fsenc(tpath)]
self._run_ff(cmd)
def conv_opus(self, abspath, tpath):
if self.args.no_acode:
raise Exception("disabled in server config")
ret, _ = ffprobe(abspath)
if "ac" not in ret:
raise Exception("not audio")
src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
want_caf = tpath.endswith(".caf")
tmp_opus = tpath
if want_caf:
tmp_opus = tpath.rsplit(".", 1)[0] + ".opus"
if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)):
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath),
b"-map_metadata", b"-1",
b"-map", b"0:a:0",
b"-c:a", b"libopus",
b"-b:a", b"128k",
fsenc(tmp_opus)
]
# fmt: on
self._run_ff(cmd)
if want_caf:
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath if src_opus else tmp_opus),
b"-map_metadata", b"-1",
b"-map", b"0:a:0",
b"-c:a", b"copy",
b"-f", b"caf",
fsenc(tpath)
]
# fmt: on
self._run_ff(cmd)
def poke(self, tdir):
if not self.poke_cd.poke(tdir):
return
ts = int(time.time())
try:
p1 = os.path.dirname(tdir)
p2 = os.path.dirname(p1)
for dp in [tdir, p1, p2]:
bos.utime(dp, (ts, ts))
for _ in range(4):
bos.utime(tdir, (ts, ts))
tdir = os.path.dirname(tdir)
except:
pass
@@ -366,25 +474,36 @@ class ThumbSrv(object):
self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
def clean(self, histpath):
thumbpath = os.path.join(histpath, "th")
ret = 0
for cat in ["th", "ac"]:
ret += self._clean(histpath, cat, None)
return ret
def _clean(self, histpath, cat, thumbpath):
if not thumbpath:
thumbpath = os.path.join(histpath, cat)
# self.log("cln {}".format(thumbpath))
maxage = self.args.th_maxage
exts = ["jpg", "webp"] if cat == "th" else ["opus", "caf"]
maxage = getattr(self.args, cat + "_maxage")
now = time.time()
prev_b64 = None
prev_fp = None
try:
ents = bos.listdir(thumbpath)
ents = statdir(self.log, not self.args.no_scandir, False, thumbpath)
ents = sorted(list(ents))
except:
return 0
ndirs = 0
for f in sorted(ents):
for f, inf in ents:
fp = os.path.join(thumbpath, f)
cmp = fp.lower().replace("\\", "/")
# "top" or b64 prefix/full (a folder)
if len(f) <= 3 or len(f) == 24:
age = now - bos.path.getmtime(fp)
age = now - inf.st_mtime
if age > maxage:
with self.mutex:
safe = True
@@ -398,16 +517,15 @@ class ThumbSrv(object):
self.log("rm -rf [{}]".format(fp))
shutil.rmtree(fp, ignore_errors=True)
else:
ndirs += self.clean(fp)
self._clean(histpath, cat, fp)
continue
# thumb file
try:
b64, ts, ext = f.split(".")
if len(b64) != 24 or len(ts) != 8 or ext not in ["jpg", "webp"]:
if len(b64) != 24 or len(ts) != 8 or ext not in exts:
raise Exception()
ts = int(ts, 16)
except:
if f != "dir.txt":
self.log("foreign file in thumbs dir: [{}]".format(fp), 1)
@@ -418,6 +536,10 @@ class ThumbSrv(object):
self.log("rm replaced [{}]".format(fp))
bos.unlink(prev_fp)
if cat != "th" and inf.st_mtime + maxage < now:
self.log("rm expired [{}]".format(fp))
bos.unlink(fp)
prev_b64 = b64
prev_fp = fp

View File

@@ -67,7 +67,11 @@ class U2idx(object):
if cur:
return cur
histpath = self.asrv.vfs.histtab[ptop]
histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath:
self.log("no histpath for [{}]".format(ptop))
return None
db_path = os.path.join(histpath, "up2k.db")
if not bos.path.exists(db_path):
return None

View File

@@ -63,14 +63,17 @@ class Up2k(object):
# state
self.mutex = threading.Lock()
self.rescan_cond = threading.Condition()
self.hashq = Queue()
self.tagq = Queue()
self.n_hashq = 0
self.n_tagq = 0
self.gid = 0
self.volstate = {}
self.need_rescan = {}
self.dupesched = {}
self.registry = {}
self.droppable = {}
self.entags = {}
self.flags = {}
self.cur = {}
@@ -113,15 +116,21 @@ class Up2k(object):
t.daemon = True
t.start()
def reload(self):
self.gid += 1
self.log("reload #{} initiated".format(self.gid))
all_vols = self.asrv.vfs.all_vols
self.rescan(all_vols, list(all_vols.keys()), True)
def deferred_init(self):
all_vols = self.asrv.vfs.all_vols
have_e2d = self.init_indexes(all_vols)
if have_e2d:
thr = threading.Thread(target=self._snapshot, name="up2k-snapshot")
thr.daemon = True
thr.start()
thr = threading.Thread(target=self._snapshot, name="up2k-snapshot")
thr.daemon = True
thr.start()
if have_e2d:
thr = threading.Thread(target=self._hasher, name="up2k-hasher")
thr.daemon = True
thr.start()
@@ -131,9 +140,11 @@ class Up2k(object):
thr.start()
if self.mtag:
thr = threading.Thread(target=self._tagger, name="up2k-tagger")
thr.daemon = True
thr.start()
for n in range(max(1, self.args.mtag_mt)):
name = "tagger-{}".format(n)
thr = threading.Thread(target=self._tagger, name=name)
thr.daemon = True
thr.start()
thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-init")
thr.daemon = True
@@ -165,15 +176,15 @@ class Up2k(object):
}
return json.dumps(ret, indent=4)
def rescan(self, all_vols, scan_vols):
if hasattr(self, "pp"):
def rescan(self, all_vols, scan_vols, wait):
if not wait and hasattr(self, "pp"):
return "cannot initiate; scan is already in progress"
args = (all_vols, scan_vols)
t = threading.Thread(
target=self.init_indexes,
args=args,
name="up2k-rescan-{}".format(scan_vols[0]),
name="up2k-rescan-{}".format(scan_vols[0] if scan_vols else "all"),
)
t.daemon = True
t.start()
@@ -181,9 +192,23 @@ class Up2k(object):
def _sched_rescan(self):
volage = {}
cooldown = 0
timeout = time.time() + 3
while True:
time.sleep(self.args.re_int)
timeout = max(timeout, cooldown)
wait = max(0.1, timeout + 0.1 - time.time())
with self.rescan_cond:
self.rescan_cond.wait(wait)
now = time.time()
if now < cooldown:
continue
if hasattr(self, "pp"):
cooldown = now + 5
continue
timeout = now + 9001
with self.mutex:
for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
maxage = vol.flags.get("scan")
@@ -193,14 +218,18 @@ class Up2k(object):
if vp not in volage:
volage[vp] = now
if now - volage[vp] >= maxage:
deadline = volage[vp] + maxage
if deadline <= now:
self.need_rescan[vp] = 1
timeout = min(timeout, deadline)
vols = list(sorted(self.need_rescan.keys()))
self.need_rescan = {}
if vols:
err = self.rescan(self.asrv.vfs.all_vols, vols)
cooldown = now + 10
err = self.rescan(self.asrv.vfs.all_vols, vols, False)
if err:
for v in vols:
self.need_rescan[v] = True
@@ -222,8 +251,11 @@ class Up2k(object):
if not cur:
continue
lifetime = int(lifetime)
timeout = min(timeout, now + lifetime)
nrm = 0
deadline = time.time() - int(lifetime)
deadline = time.time() - lifetime
q = "select rd, fn from up where at > 0 and at < ? limit 100"
while True:
with self.mutex:
@@ -240,12 +272,22 @@ class Up2k(object):
if vp:
fvp = "{}/{}".format(vp, fvp)
self._handle_rm(LEELOO_DALLAS, None, fvp, True)
self._handle_rm(LEELOO_DALLAS, None, fvp)
nrm += 1
if nrm:
self.log("{} files graduated in {}".format(nrm, vp))
if timeout < 10:
continue
q = "select at from up where at > 0 order by at limit 1"
with self.mutex:
hits = cur.execute(q).fetchone()
if hits:
timeout = min(timeout, now + lifetime - (now - hits[0]))
def _vis_job_progress(self, job):
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
path = os.path.join(job["ptop"], job["prel"], job["name"])
@@ -254,7 +296,8 @@ class Up2k(object):
def _vis_reg_progress(self, reg):
ret = []
for _, job in reg.items():
ret.append(self._vis_job_progress(job))
if job["need"]:
ret.append(self._vis_job_progress(job))
return ret
@@ -269,6 +312,16 @@ class Up2k(object):
return True, ret
def init_indexes(self, all_vols, scan_vols=None):
gid = self.gid
while hasattr(self, "pp") and gid == self.gid:
time.sleep(0.1)
if gid != self.gid:
return
if gid:
self.log("reload #{} running".format(self.gid))
self.pp = ProgressPrinter()
vols = all_vols.values()
t0 = time.time()
@@ -399,7 +452,11 @@ class Up2k(object):
return have_e2d
def register_vpath(self, ptop, flags):
histpath = self.asrv.vfs.histtab[ptop]
histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath:
self.log("no histpath for [{}]".format(ptop))
return None
db_path = os.path.join(histpath, "up2k.db")
if ptop in self.registry:
try:
@@ -428,26 +485,41 @@ class Up2k(object):
self.log("/{} {}".format(vpath, " ".join(sorted(a))), "35")
reg = {}
drp = None
path = os.path.join(histpath, "up2k.snap")
if "e2d" in flags and bos.path.exists(path):
if bos.path.exists(path):
with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8")
reg2 = json.loads(j)
try:
drp = reg2["droppable"]
reg2 = reg2["registry"]
except:
pass
for k, job in reg2.items():
path = os.path.join(job["ptop"], job["prel"], job["name"])
if bos.path.exists(path):
reg[k] = job
job["poke"] = time.time()
job["busy"] = {}
else:
self.log("ign deleted file in snap: [{}]".format(path))
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
if drp is None:
drp = [k for k, v in reg.items() if not v.get("need", [])]
else:
drp = [x for x in drp if x in reg]
m = "loaded snap {} |{}| ({})".format(path, len(reg.keys()), len(drp or []))
m = [m] + self._vis_reg_progress(reg)
self.log("\n".join(m))
self.flags[ptop] = flags
self.registry[ptop] = reg
self.droppable[ptop] = drp or []
self.regdrop(ptop, None)
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
return None
@@ -700,7 +772,7 @@ class Up2k(object):
return n_add, n_rm, False
mpool = False
if self.mtag.prefer_mt and not self.args.no_mtag_mt:
if self.mtag.prefer_mt and self.args.mtag_mt > 1:
mpool = self._start_mpool()
conn = sqlite3.connect(db_path, timeout=15)
@@ -767,10 +839,11 @@ class Up2k(object):
return ret
def _run_all_mtp(self):
gid = self.gid
t0 = time.time()
for ptop, flags in self.flags.items():
if "mtp" in flags:
self._run_one_mtp(ptop)
self._run_one_mtp(ptop, gid)
td = time.time() - t0
msg = "mtp finished in {:.2f} sec ({})"
@@ -781,7 +854,10 @@ class Up2k(object):
if "OFFLINE" not in self.volstate[k]:
self.volstate[k] = "online, idle"
def _run_one_mtp(self, ptop):
def _run_one_mtp(self, ptop, gid):
if gid != self.gid:
return
entags = self.entags[ptop]
parsers = {}
@@ -814,6 +890,9 @@ class Up2k(object):
in_progress = {}
while True:
with self.mutex:
if gid != self.gid:
break
q = "select w from mt where k = 't:mtp' limit ?"
warks = cur.execute(q, (batch_sz,)).fetchall()
warks = [x[0] for x in warks]
@@ -933,9 +1012,7 @@ class Up2k(object):
def _start_mpool(self):
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
# both do crazy runahead so lets reinvent another wheel
nw = os.cpu_count() if hasattr(os, "cpu_count") else 4
if self.args.no_mtag_mt:
nw = 1
nw = max(1, self.args.mtag_mt)
if self.pending_tags is None:
self.log("using {}x {}".format(nw, self.mtag.backend))
@@ -998,7 +1075,10 @@ class Up2k(object):
except Exception as ex:
msg = "failed to read tags from {}:\n{}"
self.log(msg.format(abspath, ex), c=3)
return
return 0
if not bos.path.isfile(abspath):
return 0
if entags:
tags = {k: v for k, v in tags.items() if k in entags}
@@ -1193,6 +1273,7 @@ class Up2k(object):
"at": at,
"hash": [],
"need": [],
"busy": {},
}
if job and wark in reg:
@@ -1275,6 +1356,7 @@ class Up2k(object):
"t0": now,
"hash": deepcopy(cj["hash"]),
"need": [],
"busy": {},
}
# client-provided, sanitized by _get_wark: name, size, lmod
for k in [
@@ -1381,6 +1463,14 @@ class Up2k(object):
if not nchunk:
raise Pebkac(400, "unknown chunk")
if chash in job["busy"]:
nh = len(job["hash"])
idx = job["hash"].index(chash)
m = "that chunk is already being written to:\n {}\n {} {}/{}\n {}"
raise Pebkac(400, m.format(wark, chash, idx, nh, job["name"]))
job["busy"][chash] = 1
job["poke"] = time.time()
chunksize = up2k_chunksize(job["size"])
@@ -1390,6 +1480,14 @@ class Up2k(object):
return [chunksize, ofs, path, job["lmod"]]
def release_chunk(self, ptop, wark, chash):
with self.mutex:
job = self.registry[ptop].get(wark)
if job:
job["busy"].pop(chash, None)
return [True]
def confirm_chunk(self, ptop, wark, chash):
with self.mutex:
try:
@@ -1400,6 +1498,8 @@ class Up2k(object):
except Exception as ex:
return "confirm_chunk, wark, " + repr(ex)
job["busy"].pop(chash, None)
try:
job["need"].remove(chash)
except Exception as ex:
@@ -1410,7 +1510,7 @@ class Up2k(object):
return ret, src
if self.args.nw:
# del self.registry[ptop][wark]
self.regdrop(ptop, wark)
return ret, dst
# windows cant rename open files
@@ -1442,9 +1542,9 @@ class Up2k(object):
a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
a += [job.get("at") or time.time()]
if self.idx_wark(*a):
# self.log("pop " + wark + " " + dst + " finish_upload idx_wark", 4)
del self.registry[ptop][wark]
# in-memory registry is reserved for unfinished uploads
else:
self.regdrop(ptop, wark)
dupes = self.dupesched.pop(dst, [])
if not dupes:
@@ -1464,6 +1564,21 @@ class Up2k(object):
if cur:
cur.connection.commit()
def regdrop(self, ptop, wark):
t = self.droppable[ptop]
if wark:
t.append(wark)
if len(t) <= self.args.reg_cap:
return
n = len(t) - int(self.args.reg_cap / 2)
m = "up2k-registry [{}] has {} droppables; discarding {}"
self.log(m.format(ptop, len(t), n))
for k in t[:n]:
self.registry[ptop].pop(k, None)
self.droppable[ptop] = t[n:]
def idx_wark(self, ptop, wark, rd, fn, lmod, sz, ip, at):
cur = self.cur.get(ptop)
if not cur:
@@ -1501,7 +1616,7 @@ class Up2k(object):
ok = {}
ng = {}
for vp in vpaths:
a, b, c = self._handle_rm(uname, ip, vp, False)
a, b, c = self._handle_rm(uname, ip, vp)
n_files += a
for k in b:
ok[k] = 1
@@ -1514,7 +1629,7 @@ class Up2k(object):
return "deleted {} files (and {}/{} folders)".format(n_files, ok, ok + ng)
def _handle_rm(self, uname, ip, vpath, rm_topdir):
def _handle_rm(self, uname, ip, vpath):
try:
permsets = [[True, False, False, True]]
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
@@ -1585,7 +1700,7 @@ class Up2k(object):
bos.unlink(abspath)
rm = rmdirs(self.log_func, scandir, True, atop, 1 if rm_topdir else 0)
rm = rmdirs(self.log_func, scandir, True, atop, 1)
return n_files, rm[0], rm[1]
def handle_mv(self, uname, svp, dvp):
@@ -1664,6 +1779,9 @@ class Up2k(object):
# folders are too scary, schedule rescan of both vols
self.need_rescan[svn.vpath] = 1
self.need_rescan[dvn.vpath] = 1
with self.rescan_cond:
self.rescan_cond.notify_all()
return "k"
c1, w, ftime, fsize, ip, at = self._find_from_vpath(svn.realpath, srem)
@@ -1926,7 +2044,8 @@ class Up2k(object):
self.snap_prev = {}
while True:
time.sleep(self.snap_persist_interval)
self.do_snapshot()
if not hasattr(self, "pp"):
self.do_snapshot()
def do_snapshot(self):
with self.mutex:
@@ -1935,7 +2054,10 @@ class Up2k(object):
def _snap_reg(self, ptop, reg):
now = time.time()
histpath = self.asrv.vfs.histtab[ptop]
histpath = self.asrv.vfs.histtab.get(ptop)
if not histpath:
return
rm = [x for x in reg.values() if now - x["poke"] > self.snap_discard_interval]
if rm:
m = "dropping {} abandoned uploads in {}".format(len(rm), ptop)
@@ -1972,7 +2094,8 @@ class Up2k(object):
bos.makedirs(histpath)
path2 = "{}.{}".format(path, os.getpid())
j = json.dumps(reg, indent=2, sort_keys=True).encode("utf-8")
body = {"droppable": self.droppable[ptop], "registry": reg}
j = json.dumps(body, indent=2, sort_keys=True).encode("utf-8")
with gzip.GzipFile(path2, "wb") as f:
f.write(j)

View File

@@ -100,10 +100,25 @@ IMPLICATIONS = [
MIMES = {
"md": "text/plain; charset=UTF-8",
"md": "text/plain",
"txt": "text/plain",
"js": "text/javascript",
"opus": "audio/ogg; codecs=opus",
"webp": "image/webp",
"caf": "audio/x-caf",
"mp3": "audio/mpeg",
"m4a": "audio/mp4",
"jpg": "image/jpeg",
}
for ln in """text css html csv
application json wasm xml pdf rtf zip
image webp jpeg png gif bmp
audio aac ogg wav
video webm mp4 mpeg
font woff woff2 otf ttf
""".splitlines():
k, vs = ln.split(" ", 1)
for v in vs.strip().split():
MIMES[v] = "{}/{}".format(k, v)
REKOBO_KEY = {
@@ -445,7 +460,7 @@ def log_thrs(log, ival, name):
tv = [x.name for x in threading.enumerate()]
tv = [
x.split("-")[0]
if x.startswith("httpconn-") or x.startswith("thumb-")
if x.split("-")[0] in ["httpconn", "thumb", "tagger"]
else "listen"
if "-listen-" in x
else x
@@ -807,6 +822,17 @@ def gen_filekey(salt, fspath, fsize, inode):
).decode("ascii")
def gencookie(k, v, dur):
v = v.replace(";", "")
if dur:
dt = datetime.utcfromtimestamp(time.time() + dur)
exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
else:
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
return "{}={}; Path=/; Expires={}; SameSite=Lax".format(k, v, exp)
def humansize(sz, terse=False):
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
if sz < 1024:
@@ -1150,12 +1176,14 @@ def hashcopy(fin, fout):
return tlen, hashobj.hexdigest(), digest_b64
def sendfile_py(lower, upper, f, s):
def sendfile_py(lower, upper, f, s, bufsz, slp):
remains = upper - lower
f.seek(lower)
while remains > 0:
# time.sleep(0.01)
buf = f.read(min(1024 * 32, remains))
if slp:
time.sleep(slp)
buf = f.read(min(bufsz, remains))
if not buf:
return remains
@@ -1168,7 +1196,7 @@ def sendfile_py(lower, upper, f, s):
return 0
def sendfile_kern(lower, upper, f, s):
def sendfile_kern(lower, upper, f, s, bufsz, slp):
out_fd = s.fileno()
in_fd = f.fileno()
ofs = lower
@@ -1224,6 +1252,7 @@ def statdir(logger, scandir, lstat, top):
def rmdirs(logger, scandir, lstat, top, depth):
if not os.path.exists(fsenc(top)) or not os.path.isdir(fsenc(top)):
top = os.path.dirname(top)
depth -= 1
dirs = statdir(logger, scandir, lstat, top)
dirs = [x[0] for x in dirs if stat.S_ISDIR(x[1].st_mode)]
@@ -1279,18 +1308,33 @@ def guess_mime(url, fallback="application/octet-stream"):
except:
return fallback
ret = MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback
ret = MIMES.get(ext)
if not ret:
x = mimetypes.guess_type(url)
ret = "application/{}".format(x[1]) if x[1] else x[0]
if not ret:
ret = fallback
if ";" not in ret:
if ret.startswith("text/") or ret.endswith("/javascript"):
ret += "; charset=UTF-8"
ret += "; charset=utf-8"
return ret
def runcmd(argv, timeout=None):
    """Run *argv* as a subprocess and wait for it to finish.

    Returns ``[returncode, stdout, stderr]`` with both streams decoded
    as UTF-8 (undecodable bytes replaced).

    If *timeout* (seconds) is given and the process outlives it, the
    process is killed and whatever output it produced so far is
    returned.  On python2 the timeout is ignored since
    Popen.communicate() does not support it there.
    """
    # NOTE(review): this span contained diff residue -- the pre-change
    # "def runcmd(argv):" and its unconditional communicate() were
    # interleaved with the new body; reconstructed the post-change form.
    p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
    if not timeout or PY2:
        stdout, stderr = p.communicate()
    else:
        try:
            stdout, stderr = p.communicate(timeout=timeout)
        except sp.TimeoutExpired:
            p.kill()
            stdout, stderr = p.communicate()

    stdout = stdout.decode("utf-8", "replace")
    stderr = stderr.decode("utf-8", "replace")
    return [p.returncode, stdout, stderr]

View File

@@ -237,7 +237,7 @@ window.baguetteBox = (function () {
}
function keyDownHandler(e) {
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing || modal.busy)
return;
var k = e.code + '', v = vid();
@@ -331,7 +331,7 @@ window.baguetteBox = (function () {
function tglsel() {
var thumb = currentGallery[currentIndex].imageElement,
name = vsplit(thumb.href)[1],
name = vsplit(thumb.href)[1].split('?')[0],
files = msel.getall();
for (var a = 0; a < files.length; a++)
@@ -345,7 +345,7 @@ window.baguetteBox = (function () {
function selbg() {
var img = vidimg(),
thumb = currentGallery[currentIndex].imageElement,
name = vsplit(thumb.href)[1],
name = vsplit(thumb.href)[1].split('?')[0],
files = msel.getsel(),
sel = false;
@@ -530,9 +530,7 @@ window.baguetteBox = (function () {
if (options.bodyClass && document.body.classList)
document.body.classList.remove(options.bodyClass);
var h = ebi('bbox-halp');
if (h)
h.parentNode.removeChild(h);
qsr('#bbox-halp');
if (options.afterHide)
options.afterHide();
@@ -590,8 +588,7 @@ window.baguetteBox = (function () {
image.addEventListener(is_vid ? 'loadedmetadata' : 'load', function () {
// Remove loader element
var spinner = QS('#baguette-img-' + index + ' .bbox-spinner');
figure.removeChild(spinner);
qsr('#baguette-img-' + index + ' .bbox-spinner');
if (!options.async && callback)
callback();
});

View File

@@ -23,7 +23,7 @@ html, body {
margin: 0;
padding: 0;
}
pre, code, tt {
pre, code, tt, #doc, #doc>code {
font-family: 'scp', monospace, monospace;
}
#path,
@@ -31,9 +31,8 @@ pre, code, tt {
font-size: 1em;
}
#path {
color: #aca;
color: #ccc;
text-shadow: 1px 1px 0 #000;
font-variant: small-caps;
font-weight: normal;
display: inline-block;
padding: .35em .5em .2em .5em;
@@ -45,8 +44,9 @@ pre, code, tt {
margin-left: -.7em;
}
#files {
border-spacing: 0;
z-index: 1;
top: -.3em;
border-spacing: 0;
position: relative;
}
#files tbody a {
@@ -73,7 +73,7 @@ a, #files tbody div a:last-child {
}
#files thead {
position: sticky;
top: 0;
top: -1px;
}
#files thead a {
color: #999;
@@ -83,7 +83,7 @@ a, #files tbody div a:last-child {
background: #1c1c1c;
}
#files thead th {
padding: 0 .3em .3em .3em;
padding: .3em;
border-bottom: 1px solid #444;
cursor: pointer;
}
@@ -205,6 +205,12 @@ a, #files tbody div a:last-child {
#repl {
padding: .33em;
}
#files a.doc {
color: #666;
}
#files a.doc.bri {
color: #f5a;
}
#files tbody a.play {
color: #e70;
padding: .2em;
@@ -507,6 +513,11 @@ html.light #wfm a:not(.en) {
box-shadow: 0 -.15em .2em #000 inset;
padding-bottom: .3em;
}
#ops a svg {
width: 1.75em;
height: 1.75em;
margin: -.5em -.3em;
}
#ops {
margin: 1.7em 1.5em 0 1.5em;
padding: .3em .6em;
@@ -549,6 +560,11 @@ input[type="radio"]:checked+label,
input[type="checkbox"]:checked+label {
color: #fc5;
}
.opwide div>span>input+label {
padding: .3em 0 .3em .3em;
margin: 0 0 0 -.3em;
cursor: pointer;
}
.opview input.i {
width: calc(100% - 16.2em);
}
@@ -663,6 +679,17 @@ input.eq_gain {
border-bottom: 1px solid #111;
overflow: hidden;
}
#treepar {
z-index: 1;
position: fixed;
left: -.75em;
width: calc(var(--nav-sz) - 0.5em);
border-bottom: 1px solid #444;
overflow: hidden;
}
#treepar.off {
display: none;
}
#tree, #treeh {
border-radius: 0 .3em 0 0;
}
@@ -727,37 +754,46 @@ input.eq_gain {
#tree li:last-child {
border-bottom: none;
}
#treeul a.hl {
#tree ul a.sel {
background: #111;
box-shadow: -.8em 0 0 #c37 inset;
color: #fff;
}
#tree ul a.hl {
color: #400;
background: #fc4;
text-shadow: none;
}
#treeul a {
#tree ul a.par {
color: #fff;
}
#tree ul a {
border-radius: .3em;
display: inline-block;
}
#treeul a+a {
.ntree a+a {
width: calc(100% - 2em);
line-height: 1em;
}
#tree.nowrap #treeul li {
#tree.nowrap li {
min-height: 1.4em;
white-space: nowrap;
}
#tree.nowrap #treeul a+a:hover {
#tree.nowrap .ntree a+a:hover {
background: rgba(16, 16, 16, 0.67);
min-width: calc(var(--nav-sz) - 2em);
width: auto;
}
html.light #tree.nowrap #treeul a+a:hover {
html.light #tree.nowrap .ntree a+a:hover {
background: rgba(255, 255, 255, 0.67);
color: #000;
}
#treeul a+a:hover {
#docul a:hover,
#tree .ntree a+a:hover {
background: #181818;
color: #fff;
}
#treeul a:first-child {
.ntree a:first-child {
font-family: 'scp', monospace, monospace;
font-size: 1.2em;
line-height: 0;
@@ -840,42 +876,47 @@ html.light #tree.nowrap #treeul a+a:hover {
border-bottom: 1px solid #555;
}
#thumbs,
#au_fullpre,
#au_os_seek,
#au_osd_cv,
#u2tdate {
opacity: .3;
}
#griden.on+#thumbs,
#au_os_ctl.on+#au_osd_cv,
#au_preload.on+#au_fullpre,
#au_os_ctl.on+#au_os_seek,
#au_os_ctl.on+#au_os_seek+#au_osd_cv,
#u2turbo.on+#u2tdate {
opacity: 1;
}
#wraptree.on+#hovertree {
display: none;
}
#ghead {
.ghead {
border-radius: .3em;
padding: .2em .5em;
line-height: 2.3em;
margin-bottom: 1em;
margin-bottom: 1.5em;
}
#ghead {
position: sticky;
top: -.3em;
z-index: 1;
}
html.light #ghead {
html.light .ghead {
background: #f7f7f7;
border-color: #ddd;
}
#ghead .btn {
.ghead .btn {
position: relative;
top: 0;
}
#ghead>span {
.ghead>span {
white-space: pre;
padding-left: .3em;
}
#ggrid {
padding-top: .5em;
margin: 0 -.5em;
margin: -.2em -.5em;
}
#ggrid>a>span {
overflow: hidden;
@@ -984,6 +1025,53 @@ html.light #rui {
padding: 0;
font-size: 1.5em;
}
#doc {
background: none;
overflow: visible;
margin: -1em 0 .5em 0;
padding: 1em 0 1em 0;
}
#docul {
position: relative;
}
#docul li.bn {
text-align: center;
padding: .5em;
}
#doc.prism {
padding-left: 3em;
}
#doc>code {
background: none;
box-shadow: none;
z-index: 1;
}
#doc.mdo {
white-space: normal;
font-family: sans-serif;
}
#doc.prism * {
line-height: 1.5em;
}
#doc .line-highlight {
border-radius: .3em;
box-shadow: 0 0 .5em #333;
background: linear-gradient(90deg, #111, #222);
}
html.light #doc .line-highlight {
box-shadow: 0 0 .5em #ccc;
background: linear-gradient(90deg, #fff, #eee);
}
#docul li {
margin: 0;
}
#tree #docul a {
display: block;
}
#seldoc.sel {
color: #fff;
background: #925;
}
#pvol,
#barbuf,
#barpos,
@@ -1038,7 +1126,7 @@ html,
.opbox,
#path,
#srch_form,
#ghead {
.ghead {
background: #2b2b2b;
border: 1px solid #333;
box-shadow: 0 0 .3em #111;
@@ -1047,7 +1135,8 @@ html,
background: #282828;
}
#tree,
#treeh {
#treeh,
#treepar {
background: #2b2b2b;
}
#wtoggle,
@@ -1105,18 +1194,24 @@ html.light #ops,
html.light .opbox,
html.light #path,
html.light #srch_form,
html.light #ghead,
html.light .ghead,
html.light #u2etas {
background: #f7f7f7;
box-shadow: 0 0 .3em #ccc;
border-color: #f7f7f7;
}
html.light #wrap.doc {
background: #f7f7f7;
}
html.light #ops a.act {
box-shadow: 0 .2em .2em #ccc;
background: #fff;
border-color: #07a;
padding-top: .4em;
}
html.light #ops svg circle {
stroke: black;
}
html.light #op_cfg h3 {
border-color: #ccc;
}
@@ -1146,21 +1241,25 @@ html.light #acc_info {
html.light #srv_info span {
color: #777;
}
html.light #treeul a+a {
html.light #tree .ntree a+a {
background: inherit;
color: #06a;
}
html.light #treeul a.hl {
html.light #tree ul a.hl {
background: #07a;
color: #fff;
}
html.light #treeul a.hl:hover {
html.light #tree ul a.par {
color: #000;
}
html.light #tree ul a.hl:hover {
background: #059;
}
html.light #tree li {
html.light #tree li,
html.light #tree #treepar {
border-color: #f7f7f7 #fff #ddd #fff;
}
html.light #treeul a:hover {
html.light #tree ul a:hover {
background: #fff;
}
html.light #tree ul {
@@ -1208,6 +1307,12 @@ html.light #files tbody a.play {
html.light #files tbody a.play.act {
color: #90c;
}
html.light #files a.doc {
color: #bbb;
}
html.light #files a.doc.bri {
color: #d38;
}
html.light #files tr.play td {
background: #fc5;
border-color: #eb1;
@@ -1275,6 +1380,7 @@ html.light #files td div span {
color: #000;
}
html.light #path {
color: #777;
background: #f7f7f7;
text-shadow: none;
box-shadow: 0 0 .3em #bbb;
@@ -1292,13 +1398,15 @@ html.light #path a:hover {
html.light #files tbody div a {
color: #d38;
}
html.light #docul a:hover,
html.light #files a:hover,
html.light #files tr.sel a:hover {
color: #000;
background: #fff;
text-decoration: underline;
}
html.light #treeh {
html.light #treeh,
html.light #treepar {
background: #f7f7f7;
border-color: #ddd;
}
@@ -1558,8 +1666,6 @@ html.light #bbox-overlay figcaption a {
#op_up2k {
padding: 0 1em 1em 1em;
min-height: 0;
transition: min-height .2s;
}
#drops {
display: none;
@@ -1696,8 +1802,9 @@ html.light #u2err.err {
box-shadow: .4em .4em 0 #111;
}
#u2conf.ww #u2btn {
font-size: 1.3em;
margin-right: .5em;
line-height: 1em;
padding: .5em 0;
margin: -1.5em .5em -3em 0;
}
#op_up2k.srch #u2btn {
background: linear-gradient(to bottom, #ca3 0%, #fd8 50%, #fc6 51%, #b92 100%);
@@ -1705,8 +1812,9 @@ html.light #u2err.err {
color: #333;
}
#u2conf #u2btn {
margin: -2.4em 0;
padding: .8em 0;
padding: .6em 0;
margin: -2em 0;
font-size: 1.25em;
width: 100%;
max-width: 12em;
display: inline-block;
@@ -1722,13 +1830,17 @@ html.light #u2err.err {
#u2notbtn * {
line-height: 1.3em;
}
#u2tabw {
min-height: 0;
transition: min-height .2s;
margin: 3em auto;
}
#u2tab {
border-collapse: collapse;
margin: 3em auto;
width: calc(100% - 2em);
max-width: 100em;
}
#op_up2k.srch #u2tab {
#op_up2k.srch #u2tabf {
max-width: none;
}
#u2tab td {
@@ -1771,6 +1883,7 @@ html.light #u2err.err {
display: none;
}
#u2etas.o .o {
display: inherit;
display: unset;
}
#u2etaw {
@@ -1830,6 +1943,11 @@ html.light #u2err.err {
#u2cards span {
color: #fff;
}
#u2cards > a:nth-child(4) > span {
display: inline-block;
text-align: center;
min-width: 1.3em;
}
#u2conf {
margin: 1em auto;
width: 30em;
@@ -1988,10 +2106,6 @@ html.light #u2foot .warn span {
color: #fff;
padding-left: .2em;
}
#u2cleanup {
float: right;
margin-bottom: -.3em;
}
.fsearch_explain {
padding-left: .7em;
font-size: 1.1em;

View File

@@ -76,6 +76,12 @@
<div id="wrap">
{%- if doc %}
<div id="bdoc"><pre>{{ doc|e }}</pre></div>
{%- else %}
<div id="bdoc"></div>
{%- endif %}
<div id="pro" class="logue">{{ logues[0] }}</div>
<table id="files">
@@ -130,10 +136,13 @@
def_hcols = {{ def_hcols|tojson }},
have_up2k_idx = {{ have_up2k_idx|tojson }},
have_tags_idx = {{ have_tags_idx|tojson }},
have_acode = {{ have_acode|tojson }},
have_mv = {{ have_mv|tojson }},
have_del = {{ have_del|tojson }},
have_unpost = {{ have_unpost|tojson }},
have_zip = {{ have_zip|tojson }},
txt_ext = "{{ txt_ext }}",
{% if no_prism %}no_prism = 1,{% endif %}
readme = {{ readme|tojson }};
document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark");

File diff suppressed because it is too large Load Diff

View File

@@ -164,10 +164,7 @@ function copydom(src, dst, lv) {
function md_plug_err(ex, js) {
var errbox = ebi('md_errbox');
if (errbox)
errbox.parentNode.removeChild(errbox);
qsr('#md_errbox');
if (!ex)
return;
@@ -183,7 +180,7 @@ function md_plug_err(ex, js) {
o.textContent = lns[ln - 1];
}
}
errbox = mknod('div');
var errbox = mknod('div');
errbox.setAttribute('id', 'md_errbox');
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
errbox.textContent = msg;
@@ -381,8 +378,7 @@ function convert_markdown(md_text, dest_dom) {
function init_toc() {
var loader = ebi('ml');
loader.parentNode.removeChild(loader);
qsr('#ml');
var anchors = []; // list of toc entries, complex objects
var anchor = null; // current toc node

View File

@@ -65,8 +65,7 @@ var mde = (function () {
mde.codemirror.on("change", function () {
md_changed(mde);
});
var loader = ebi('ml');
loader.parentNode.removeChild(loader);
qsr('#ml');
return mde;
})();

View File

@@ -25,10 +25,24 @@ a {
color: #047;
background: #fff;
text-decoration: none;
border-bottom: 1px solid #aaa;
border-bottom: 1px solid #8ab;
border-radius: .2em;
padding: .2em .8em;
}
a+a {
margin-left: .5em;
}
.refresh,
.logout {
float: right;
margin: -.2em 0 0 .5em;
}
.logout,
.btns a,
a.r {
color: #c04;
border-color: #c7a;
}
#repl {
border: none;
background: none;
@@ -42,6 +56,7 @@ table {
.vols th {
padding: .3em .6em;
text-align: left;
white-space: nowrap;
}
.num {
border-right: 1px solid #bbb;
@@ -65,6 +80,11 @@ table {
margin-top: .3em;
text-align: right;
}
blockquote {
margin: 0 0 0 .6em;
padding: .7em 1em;
border-left: .3em solid rgba(128,128,128,0.5);
}
html.dark,
@@ -81,6 +101,12 @@ html.dark a {
background: #057;
border-color: #37a;
}
html.dark .logout,
html.dark .btns a,
html.dark a.r {
background: #804;
border-color: #c28;
}
html.dark input {
color: #fff;
background: #626;

View File

@@ -12,9 +12,12 @@
<body>
<div id="wrap">
<a href="/?h" class="refresh">refresh</a>
{%- if this.uname == '*' %}
<p>howdy stranger &nbsp; <small>(you're not logged in)</small></p>
{%- else %}
<a href="/?pw=x" class="logout">logout</a>
<p>welcome back, <strong>{{ this.uname }}</strong></p>
{%- endif %}
@@ -46,7 +49,8 @@
</table>
</td></tr></table>
<div class="btns">
<a href="/?stack">dump stack</a>
<a href="/?stack" tt="shows the state of all active threads">dump stack</a>
<a href="/?reload=cfg" tt="reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes">reload cfg</a>
</div>
{%- endif %}
@@ -68,6 +72,16 @@
</ul>
{%- endif %}
<h1 id="cc">client config:</h1>
<ul>
{% if k304 %}
<li><a href="/?k304=n" class="r">disable k304</a> (currently enabled)
{%- else %}
<li><a href="/?k304=y">enable k304</a> (currently disabled)
{% endif %}
<blockquote>enabling this will disconnect your client on every HTTP 304, which can prevent some buggy browsers/proxies from getting stuck (suddenly not being able to load pages), <em>but</em> it will also make things slower in general</blockquote></li>
</ul>
<h1>login for more:</h1>
<ul>
<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
@@ -80,10 +94,10 @@
<a href="#" id="repl">π</a>
<script>
if (localStorage.lightmode != 1)
document.documentElement.setAttribute("class", "dark");
document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark");
</script>
<script src="/.cpr/util.js?_={{ ts }}"></script>
<script>tt.init();</script>
</body>
</html>

View File

@@ -79,7 +79,8 @@ html {
}
#toast.vis {
right: 1.3em;
transform: unset;
transform: inherit;
transform: initial;
}
#toast.vis #toastc {
left: -2em;
@@ -115,6 +116,19 @@ html {
#toast.err #toastc {
background: #d06;
}
#tth {
color: #fff;
background: #111;
font-size: .9em;
padding: 0 .26em;
line-height: .97em;
border-radius: 1em;
position: absolute;
display: none;
}
#tth.act {
display: block;
}
#tt.b {
padding: 0 2em;
border-radius: .5em;
@@ -132,7 +146,8 @@ html {
}
#modalc code,
#tt code {
background: #3c3c3c;
color: #eee;
background: #444;
padding: .1em .3em;
border-top: 1px solid #777;
border-radius: .3em;
@@ -157,6 +172,10 @@ html.light #tt code {
html.light #tt em {
color: #d38;
}
html.light #tth {
color: #000;
background: #fff;
}
#modal {
position: fixed;
overflow: auto;

View File

@@ -30,7 +30,10 @@ catch (ex) {
try {
up2k = up2k_init(false);
}
catch (ex) { }
catch (ex) {
console.log('up2k init failed:', ex);
toast.err(10, 'could not initialze up2k\n\n' + basenames(ex));
}
}
treectl.onscroll();
@@ -210,14 +213,14 @@ function U2pvis(act, btns) {
};
r.setat = function (nfile, blocktab) {
r.tab[nfile].cb = blocktab;
var fo = r.tab[nfile], bd = 0;
var bd = 0;
for (var a = 0; a < blocktab.length; a++)
bd += blocktab[a];
r.tab[nfile].bd = bd;
r.tab[nfile].bd0 = bd;
fo.bd = bd;
fo.bd0 = bd;
fo.cb = blocktab;
};
r.perc = function (bd, bd0, sz, t0) {
@@ -329,8 +332,7 @@ function U2pvis(act, btns) {
r.head++;
if (!bz_act) {
var tr = ebi("f" + nfile);
tr.parentNode.removeChild(tr);
qsr("#f" + nfile);
}
}
else return;
@@ -349,9 +351,7 @@ function U2pvis(act, btns) {
last = parseInt(last.getAttribute('id').slice(1));
while (r.head - first > r.wsz) {
var obj = ebi('f' + (first++));
if (obj)
obj.parentNode.removeChild(obj);
qsr('#f' + (first++));
}
while (last - r.tail < r.wsz && last < r.tab.length - 2) {
var obj = ebi('f' + (++last));
@@ -477,14 +477,94 @@ function U2pvis(act, btns) {
}
// renders upload progress as an svg donut (and an ETA countdown) into
// the up2k tab button, and mirrors it into the favicon via favico.upd;
// uc = upload config, st = upload state (presumably shared with up2k_init
// -- TODO confirm against caller)
function Donut(uc, st) {
    var r = this,
        el = null,     // the live <circle class="donut"> currently in the tab, if any
        psvg = null,   // last svg string pushed to the favicon (dedupe)
        o = 20 * 2 * Math.PI,  // circumference of the r=20 donut circle
        optab = QS('#ops a[data-dest="up2k"]');

    // stash the tab's original label so r.on(false) can restore it
    optab.setAttribute('ico', optab.textContent);

    // build the svg markup; when v is given it is the dashoffset for an
    // icon-style (boxed) rendering, otherwise a bare donut is produced
    function svg(v) {
        var ico = v !== undefined,
            bg = ico ? '#333' : 'transparent',
            fg = '#fff',
            fsz = 52,
            rc = 32;

        // drop the ETA display when it is far off or the transfer just started
        if (r.eta && (r.eta > 99 || (uc.fsearch ? st.time.hashing : st.time.uploading) < 20))
            r.eta = null;

        if (r.eta) {
            // almost done: bigger, orange countdown text
            if (r.eta < 10) {
                fg = '#fa0';
                fsz = 72;
            }
            rc = 8;
        }

        return (
            '<svg version="1.1" viewBox="0 0 64 64" xmlns="http://www.w3.org/2000/svg">\n' +
            (ico ? '<rect width="100%" height="100%" rx="' + rc + '" fill="#333" />\n' :
                '<circle stroke="white" stroke-width="6" r="3" cx="32" cy="32" />\n') +
            (r.eta ? (
                '<text x="55%" y="58%" dominant-baseline="middle" text-anchor="middle"' +
                ' font-family="sans-serif" font-weight="bold" font-size="' + fsz + 'px"' +
                ' fill="' + fg + '">' + r.eta + '</text></svg>'
            ) : (
                '<circle class="donut" stroke="white" fill="' + bg +
                '" stroke-dashoffset="' + (ico ? v : o) + '" stroke-dasharray="' + o + ' ' + o +
                '" transform="rotate(270 32 32)" stroke-width="12" r="20" cx="32" cy="32" /></svg>'
            ))
        );
    }

    // current progress in bytes; file-search counts hashing too
    function pos() {
        return uc.fsearch ? Math.max(st.bytes.hashed, st.bytes.finished) : st.bytes.finished;
    }

    // ya truthy = transfers started (show donut), falsy = done (restore
    // the original tab label and favicon)
    r.on = function (ya) {
        r.fc = 99;      // frame counter; 99 forces an immediate favicon refresh
        r.eta = null;
        r.base = pos(); // progress made before this batch, subtracted below
        optab.innerHTML = ya ? svg() : optab.getAttribute('ico');
        el = QS('#ops a .donut');
        if (!ya)
            favico.upd();
    };

    // periodic tick: advance the donut, occasionally refresh the favicon
    r.do = function () {
        if (!el)
            return;

        var t = st.bytes.total - r.base,
            v = pos() - r.base,
            ofs = el.style.strokeDashoffset = o - o * v / t;

        if (favico.txt) {
            // throttle favicon updates while the ETA is large
            if (++r.fc < 10 && r.eta && r.eta > 99)
                return;

            var s = svg(ofs);
            if (s == psvg || (r.eta === null && r.fc < 10))
                return;

            favico.upd('', s);
            psvg = s;
            r.fc = 0;
        }
    };
}
// explains to the user why their drop/upload did nothing;
// n truthy = the folder is read-only for the current account.
// NOTE(review): this span contained diff residue -- the two pre-change
// file-search-mode messages were interleaved with their replacements;
// reconstructed the post-change form.
function fsearch_explain(n) {
    if (n)
        return toast.inf(60, 'your access to this folder is Read-Only\n\n' + (acct == '*' ? 'you are currently not logged in' : 'you are currently logged in as "' + acct + '"'));

    if (bcfg_get('fsearch', false))
        return toast.inf(60, 'refresh the page and try again, it should work now');

    return toast.inf(60, 'try again, it should work now');
}
@@ -589,6 +669,7 @@ function up2k_init(subtle) {
var st = {
"files": [],
"seen": {},
"todo": {
"head": [],
"hash": [],
@@ -623,7 +704,8 @@ function up2k_init(subtle) {
});
}
var pvis = new U2pvis("bz", '#u2cards');
var pvis = new U2pvis("bz", '#u2cards'),
donut = new Donut(uc, st);
var bobslice = null;
if (window.File)
@@ -913,13 +995,9 @@ function up2k_init(subtle) {
}
function up_them(good_files) {
var seen = {},
evpath = get_evpath(),
var evpath = get_evpath(),
draw_each = good_files.length < 50;
for (var a = 0; a < st.files.length; a++)
seen[st.files[a].name + '\n' + st.files[a].size] = 1;
for (var a = 0; a < good_files.length; a++) {
var fobj = good_files[a][0],
name = good_files[a][1],
@@ -942,17 +1020,23 @@ function up2k_init(subtle) {
"lmod": lmod / 1000,
"purl": fdir,
"done": false,
"bytes_uploaded": 0,
"hash": []
},
key = entry.name + '\n' + entry.size;
key = name + '\n' + entry.size + '\n' + lmod + '\n' + uc.fsearch;
if (uc.fsearch)
entry.srch = 1;
if (seen[key])
continue;
try {
if (st.seen[fdir][key])
continue;
}
catch (ex) {
st.seen[fdir] = {};
}
seen[key] = 1;
st.seen[fdir][key] = 1;
pvis.addfile([
uc.fsearch ? esc(entry.name) : linksplit(
@@ -985,23 +1069,7 @@ function up2k_init(subtle) {
}
more_one_file();
function u2cleanup(e) {
ev(e);
for (var a = 0; a < st.files.length; a++) {
var t = st.files[a];
if (t.done && t.name) {
var tr = ebi('f' + t.n);
if (!tr)
continue;
tr.parentNode.removeChild(tr);
t.name = undefined;
}
}
}
ebi('u2cleanup').onclick = u2cleanup;
var etaref = 0, etaskip = 0, op_minh = 0;
var etaref = 0, etaskip = 0, utw_minh = 0;
function etafun() {
var nhash = st.busy.head.length + st.busy.hash.length + st.todo.head.length + st.todo.hash.length,
nsend = st.busy.upload.length + st.todo.upload.length,
@@ -1014,13 +1082,10 @@ function up2k_init(subtle) {
//ebi('acc_info').innerHTML = humantime(st.time.busy) + ' ' + f2f(now / 1000, 1);
var op = ebi('op_up2k'),
uff = ebi('u2footfoot'),
minh = QS('#op_up2k.act') ? Math.max(op_minh, uff.offsetTop + uff.offsetHeight - op.offsetTop + 32) : 0;
if (minh > op_minh || !op_minh) {
op_minh = minh;
op.style.minHeight = op_minh + 'px';
var minh = QS('#op_up2k.act') && st.is_busy ? Math.max(utw_minh, ebi('u2tab').offsetHeight + 32) : 0;
if (utw_minh < minh || !utw_minh) {
utw_minh = minh;
ebi('u2tabw').style.minHeight = utw_minh + 'px';
}
if (!nhash)
@@ -1066,6 +1131,7 @@ function up2k_init(subtle) {
continue;
}
donut.eta = eta;
if (etaskip)
continue;
@@ -1142,15 +1208,16 @@ function up2k_init(subtle) {
running = true;
while (true) {
var now = Date.now(),
is_busy = 0 !=
st.todo.head.length +
st.todo.hash.length +
st.todo.handshake.length +
st.todo.upload.length +
st.busy.head.length +
st.busy.hash.length +
st.busy.handshake.length +
st.busy.upload.length;
oldest_active = Math.min( // gzip take the wheel
st.todo.head.length ? st.todo.head[0].n : st.files.length,
st.todo.hash.length ? st.todo.hash[0].n : st.files.length,
st.todo.upload.length ? st.todo.upload[0].nfile : st.files.length,
st.todo.handshake.length ? st.todo.handshake[0].n : st.files.length,
st.busy.head.length ? st.busy.head[0].n : st.files.length,
st.busy.hash.length ? st.busy.hash[0].n : st.files.length,
st.busy.upload.length ? st.busy.upload[0].nfile : st.files.length,
st.busy.handshake.length ? st.busy.handshake[0].n : st.files.length),
is_busy = oldest_active < st.files.length;
if (was_busy && !is_busy) {
for (var a = 0; a < st.files.length; a++) {
@@ -1170,11 +1237,13 @@ function up2k_init(subtle) {
}
if (was_busy != is_busy) {
was_busy = is_busy;
st.is_busy = was_busy = is_busy;
window[(is_busy ? "add" : "remove") +
"EventListener"]("beforeunload", warn_uploader_busy);
donut.on(is_busy);
if (!is_busy) {
var k = uc.fsearch ? 'searches' : 'uploads',
ks = uc.fsearch ? 'Search' : 'Upload',
@@ -1196,9 +1265,11 @@ function up2k_init(subtle) {
toast.err(t, '{0} {1}'.format(ks, tng));
timer.rm(etafun);
op_minh = 0;
timer.rm(donut.do);
utw_minh = 0;
}
else {
timer.add(donut.do);
timer.add(etafun, false);
ebi('u2etas').style.textAlign = 'left';
}
@@ -1247,7 +1318,8 @@ function up2k_init(subtle) {
}
if (st.todo.head.length &&
st.busy.head.length < parallel_uploads) {
st.busy.head.length < parallel_uploads &&
(!is_busy || st.todo.head[0].n - oldest_active < parallel_uploads * 2)) {
exec_head();
mou_ikkai = true;
}
@@ -1346,7 +1418,6 @@ function up2k_init(subtle) {
function exec_hash() {
var t = st.todo.hash.shift();
st.busy.hash.push(t);
t.bytes_uploaded = 0;
var bpend = 0,
nchunk = 0,
@@ -1771,7 +1842,8 @@ function up2k_init(subtle) {
st.bytes.uploaded += cdr - car;
t.bytes_uploaded += cdr - car;
}
else if (txt.indexOf('already got that') !== -1) {
else if (txt.indexOf('already got that') + 1 ||
txt.indexOf('already being written') + 1) {
console.log("ignoring dupe-segment error", t);
}
else {
@@ -1779,6 +1851,9 @@ function up2k_init(subtle) {
xhr.status, t.name) + (txt || "no further information"));
return;
}
orz2(xhr);
}
function orz2(xhr) {
apop(st.busy.upload, upt);
apop(t.postlist, npart);
if (!t.postlist.length) {
@@ -1800,9 +1875,11 @@ function up2k_init(subtle) {
if (crashed)
return;
toast.err(9.98, "failed to upload a chunk,\n" + tries + " retries so far -- retrying in 10sec\n\n" + t.name);
if (!toast.visible)
toast.warn(9.98, "failed to upload a chunk;\nprobably harmless, continuing\n\n" + t.name);
console.log('chunkpit onerror,', ++tries, t);
setTimeout(do_send, 10 * 1000);
orz2(xhr);
};
xhr.open('POST', t.purl, true);
xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
@@ -1827,8 +1904,8 @@ function up2k_init(subtle) {
wpx = window.innerWidth,
fpx = parseInt(getComputedStyle(bar)['font-size']),
wem = wpx * 1.0 / fpx,
wide = wem > 54 ? 'w' : '',
write = has(perms, 'write'),
wide = write && wem > 54 ? 'w' : '',
parent = ebi(wide && write ? 'u2btn_cw' : 'u2btn_ct'),
btn = ebi('u2btn');
@@ -1840,7 +1917,7 @@ function up2k_init(subtle) {
ebi('u2etaw').setAttribute('class', wide);
}
wide = wem > 78 ? 'ww' : wide;
wide = write && wem > 78 ? 'ww' : wide;
parent = ebi(wide == 'ww' && write ? 'u2c3w' : 'u2c3t');
var its = [ebi('u2etaw'), ebi('u2cards')];
if (its[0].parentNode !== parent) {

View File

@@ -7,8 +7,7 @@ if (!window['console'])
var is_touch = 'ontouchstart' in window,
IPHONE = /iPhone|iPad|iPod/i.test(navigator.userAgent),
ANDROID = /android/i.test(navigator.userAgent),
IPHONE = is_touch && /iPhone|iPad|iPod/i.test(navigator.userAgent),
WINDOWS = navigator.platform ? navigator.platform == 'Win32' : /Windows/.test(navigator.userAgent);
@@ -18,6 +17,15 @@ var ebi = document.getElementById.bind(document),
mknod = document.createElement.bind(document);
// remove the first element matching the css selector *sel* from the
// document, if present; returns the (detached) element or null
function qsr(sel) {
    var node = QS(sel);
    if (!node)
        return node;

    node.parentNode.removeChild(node);
    return node;
}
// error handler for mobile devices
function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) {
@@ -71,7 +79,7 @@ try {
catch (ex) {
if (console.stdlog)
console.log = console.stdlog;
console.log(ex);
console.log('console capture failed', ex);
}
var crashed = false, ignexd = {};
function vis_exh(msg, url, lineNo, columnNo, error) {
@@ -163,7 +171,6 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
catch (e) {
document.body.innerHTML = html.join('\n');
}
throw 'fatal_err';
}
function ignex(all) {
var o = ebi('exbox');
@@ -173,6 +180,7 @@ function ignex(all) {
if (!all)
window.onerror = vis_exh;
}
window.onerror = vis_exh;
function noop() { }
@@ -278,15 +286,19 @@ function crc32(str) {
function clmod(el, cls, add) {
if (!el)
return false;
if (el.classList) {
var have = el.classList.contains(cls);
if (add == 't')
add = !have;
if (add != have)
el.classList[add ? 'add' : 'remove'](cls);
if (!add == !have)
return false;
return;
el.classList[add ? 'add' : 'remove'](cls);
return true;
}
var re = new RegExp('\\s*\\b' + cls + '\\s*\\b', 'g'),
@@ -297,12 +309,18 @@ function clmod(el, cls, add) {
var n2 = n1.replace(re, ' ') + (add ? ' ' + cls : '');
if (n1 != n2)
el.className = n2;
if (!n1 == !n2)
return false;
el.className = n2;
return true;
}
function clgot(el, cls) {
if (!el)
return;
if (el.classList)
return el.classList.contains(cls);
@@ -727,7 +745,7 @@ function hist_replace(url) {
function sethash(hv) {
if (window.history && history.replaceState) {
hist_replace(document.location.pathname + '#' + hv);
hist_replace(document.location.pathname + document.location.search + '#' + hv);
}
else {
document.location.hash = hv;
@@ -774,13 +792,18 @@ var timer = (function () {
var tt = (function () {
var r = {
"tt": mknod("div"),
"th": mknod("div"),
"en": true,
"el": null,
"skip": false
"skip": false,
"lvis": 0
};
r.th.innerHTML = '?';
r.tt.setAttribute('id', 'tt');
r.th.setAttribute('id', 'tth');
document.body.appendChild(r.tt);
document.body.appendChild(r.th);
var prev = null;
r.cshow = function () {
@@ -790,11 +813,25 @@ var tt = (function () {
prev = this;
};
r.show = function () {
if (r.skip) {
r.skip = false;
var tev;
r.dshow = function (e) {
clearTimeout(tev);
if (!r.getmsg(this))
return;
}
if (Date.now() - r.lvis < 400)
return r.show.bind(this)();
tev = setTimeout(r.show.bind(this), 800);
if (is_touch)
return;
this.addEventListener('mousemove', r.move);
clmod(r.th, 'act', 1);
r.move(e);
};
r.getmsg = function (el) {
if (QS('body.bbox-open'))
return;
@@ -802,7 +839,16 @@ var tt = (function () {
if (cfg !== null && cfg != '1')
return;
var msg = this.getAttribute('tt');
return el.getAttribute('tt');
};
r.show = function () {
clearTimeout(tev);
if (r.skip) {
r.skip = false;
return;
}
var msg = r.getmsg(this);
if (!msg)
return;
@@ -816,6 +862,7 @@ var tt = (function () {
if (dir.indexOf('u') + 1) top = false;
if (dir.indexOf('d') + 1) top = true;
clmod(r.th, 'act');
clmod(r.tt, 'b', big);
r.tt.style.left = '0';
r.tt.style.top = '0';
@@ -841,14 +888,27 @@ var tt = (function () {
r.hide = function (e) {
ev(e);
clearTimeout(tev);
window.removeEventListener('scroll', r.hide);
clmod(r.tt, 'show');
clmod(r.tt, 'b');
clmod(r.th, 'act');
if (clmod(r.tt, 'show'))
r.lvis = Date.now();
if (r.el)
r.el.removeEventListener('mouseleave', r.hide);
if (e && e.target)
e.target.removeEventListener('mousemove', r.move);
};
if (is_touch && IPHONE) {
r.move = function (e) {
r.th.style.left = (e.pageX + 12) + 'px';
r.th.style.top = (e.pageY + 12) + 'px';
};
if (IPHONE) {
var f1 = r.show,
f2 = r.hide,
q = [];
@@ -874,14 +934,14 @@ var tt = (function () {
r.att = function (ctr) {
var _cshow = r.en ? r.cshow : null,
_show = r.en ? r.show : null,
_dshow = r.en ? r.dshow : null,
_hide = r.en ? r.hide : null,
o = ctr.querySelectorAll('*[tt]');
for (var a = o.length - 1; a >= 0; a--) {
o[a].onfocus = _cshow;
o[a].onblur = _hide;
o[a].onmouseenter = _show;
o[a].onmouseenter = _dshow;
o[a].onmouseleave = _hide;
}
r.hide();
@@ -1057,15 +1117,22 @@ var modal = (function () {
}
// modal keyboard handler: Enter confirms (or cancels when the cancel
// button is focused), Escape cancels, Space activates a focused button.
// NOTE(review): this span contained diff residue -- the pre-change
// e.code checks were interleaved with the new k-based logic;
// reconstructed the post-change form.
function onkey(e) {
    var k = e.code,
        eok = ebi('modal-ok'),
        eng = ebi('modal-ng'),
        ae = document.activeElement;

    // space on a focused ok/cancel button acts like enter on it
    if (k == 'Space' && ae && (ae === eok || ae === eng))
        k = 'Enter';

    if (k == 'Enter') {
        if (ae && ae == eng)
            return ng();

        return ok();
    }
    if (k == 'Escape')
        return ng();
}
@@ -1142,6 +1209,7 @@ function repl_load() {
if (!ret.length)
ret = [
'var v=Object.keys(localStorage); v.sort(); JSON.stringify(v)',
"for (var a of QSA('#files a[id]')) a.setAttribute('download','')",
'console.hist.slice(-10).join("\\n")'
];
@@ -1213,28 +1281,31 @@ if (ebi('repl'))
ebi('repl').onclick = repl;
var svg_decl = '<?xml version="1.0" encoding="UTF-8"?>\n';
var favico = (function () {
var r = {};
r.en = true;
r.tag = null;
function gx(txt) {
return (
'<?xml version="1.0" encoding="UTF-8"?>\n' +
'<svg version="1.1" viewBox="0 0 64 64" xmlns="http://www.w3.org/2000/svg"><g>\n' +
return (svg_decl +
'<svg version="1.1" viewBox="0 0 64 64" xmlns="http://www.w3.org/2000/svg">\n' +
(r.bg ? '<rect width="100%" height="100%" rx="16" fill="#' + r.bg + '" />\n' : '') +
'<text x="50%" y="55%" dominant-baseline="middle" text-anchor="middle"' +
' font-family="sans-serif" font-weight="bold" font-size="64px"' +
' fill="#' + r.fg + '">' + txt + '</text></g></svg>'
' fill="#' + r.fg + '">' + txt + '</text></svg>'
);
}
r.upd = function () {
var i = QS('link[rel="icon"]'), b64;
r.upd = function (txt, svg) {
if (!r.txt)
return;
var b64;
try {
b64 = btoa(gx(r.txt));
b64 = btoa(svg ? svg_decl + svg : gx(r.txt));
}
catch (ex) {
b64 = encodeURIComponent(r.txt).replace(/%([0-9A-F]{2})/g,
@@ -1243,12 +1314,12 @@ var favico = (function () {
b64 = btoa(gx(unescape(encodeURIComponent(r.txt))));
}
if (!i) {
i = mknod('link');
i.rel = 'icon';
document.head.appendChild(i);
if (!r.tag) {
r.tag = mknod('link');
r.tag.rel = 'icon';
document.head.appendChild(r.tag);
}
i.href = 'data:image/svg+xml;base64,' + b64;
r.tag.href = 'data:image/svg+xml;base64,' + b64;
};
r.init = function () {

View File

@@ -29,7 +29,7 @@ point `--css-browser` to one of these by URL:
* notes on using rclone as a fuse client/server
## [`example.conf`](example.conf)
* example config file for `-c` (supports accounts, volumes, and volume-flags)
* example config file for `-c`

View File

@@ -1,3 +1,10 @@
# append some arguments to the commandline;
# the first space in a line counts as a separator,
# any additional spaces are part of the value
-e2dsa
-e2ts
-i 127.0.0.1
# create users:
# u username:password
u ed:123
@@ -24,7 +31,8 @@ rw ed
r k
rw ed
# this does the same thing:
# this does the same thing,
# and will cause an error on startup since /priv is already taken:
./priv
/priv
r ed k

View File

@@ -9,7 +9,7 @@
#ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
#u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
#srch_dz, #srch_zd, /* the filesearch dropzone */

View File

@@ -38,6 +38,13 @@ para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}
avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} csz=$1;sum=0;nsmp=0} {sub(/\r$/,"")} /^[0-9]+$/ {pr($1);next} / MiB/ {sub(/ MiB.*/,"");sub(/.* /,"");sum+=$1;nsmp++} END {pr(0)}' "$1"; }
##
## time between first and last upload
python3 -um copyparty -nw -v srv::rw -i 127.0.0.1 2>&1 | tee log
cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+$2)+$3} !a{a=t;sa=s} {b=t;sb=s} END {print b-a,sa,sb}'
##
## bad filenames
@@ -73,6 +80,12 @@ shab64() { sp=$1; f="$2"; v=0; sz=$(stat -c%s "$f"); while true; do w=$((v+sp*10
command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (time wget http://127.0.0.1:3923/?ls -qO- | jq -C '.files[]|{sz:.sz,ta:.tags.artist,tb:.tags.".bpm"}|del(.[]|select(.==null))' | awk -F\" '/"/{t[$2]++} END {for (k in t){v=t[k];p=sprintf("%" (v+1) "s",v);gsub(/ /,"#",p);printf "\033[36m%s\033[33m%s ",k,p}}') 2>&1 | awk -v ts=$t 'NR==1{t1=$0} NR==2{sub(/.*0m/,"");sub(/s$/,"");t2=$0;c=2; if(t2>0.3){c=3} if(t2>0.8){c=1} } END{sub(/[0-9]{6}$/,"",ts);printf "%s \033[3%dm%s %s\033[0m\n",ts,c,t2,t1}'; sleep 0.1 || break; done
##
## track an up2k upload and print all chunks in file-order
grep '"name": "2021-07-18 02-17-59.mkv"' fug.log | head -n 1 | sed -r 's/.*"hash": \[//; s/\].*//' | tr '"' '\n' | grep -E '^[a-zA-Z0-9_-]{44}$' | while IFS= read -r cid; do cat -n fug.log | grep -vF '"purl": "' | grep -- "$cid"; echo; done | stdbuf -oL tr '\t' ' ' | while IFS=' ' read -r ln _ _ _ _ _ ts ip port msg; do [ -z "$msg" ] && echo && continue; printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; read -r ln _ _ _ _ _ ts ip port msg < <(cat -n fug.log | tail -n +$((ln+1)) | grep -F "$ip $port" | head -n 1); printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; done
##
## js oneliners
@@ -162,7 +175,7 @@ brew install python@2
pip install virtualenv
# readme toc
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#|]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
# fix firefox phantom breakpoints,
# suggestions from bugtracker, doesnt work (debugger is not attachable)
@@ -178,7 +191,7 @@ about:config >> devtools.debugger.prefs-schema-version = -1
git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
# download all sfx versions
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="copyparty $v $t.py"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="$(printf '%s\n' "copyparty $v $t.py" | tr / -)"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
##

View File

@@ -10,14 +10,41 @@ set -e
# (and those are usually linux so bash is good inaff)
# (but that said this even has macos support)
#
# bundle will look like:
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty
# -rw-r--r-- 0 ed ed 491318 Nov 19 00:40 copyparty-extras/copyparty-0.5.4.tar.gz
# -rwxr-xr-x 0 ed ed 30254 Nov 17 23:58 copyparty-extras/copyparty-fuse.py
# -rwxr-xr-x 0 ed ed 481403 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.sh
# -rwxr-xr-x 0 ed ed 506043 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.py
# -rwxr-xr-x 0 ed ed 167699 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.sh
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py
# output summary (filesizes and contents):
#
# 535672 copyparty-extras/sfx-full/copyparty-sfx.sh
# 550760 copyparty-extras/sfx-full/copyparty-sfx.py
# `- original unmodified sfx from github
#
# 572923 copyparty-extras/sfx-full/copyparty-sfx-gz.py
# `- unmodified but recompressed from bzip2 to gzip
#
# 341792 copyparty-extras/sfx-ent/copyparty-sfx.sh
# 353975 copyparty-extras/sfx-ent/copyparty-sfx.py
# 376934 copyparty-extras/sfx-ent/copyparty-sfx-gz.py
# `- removed iOS ogg/opus/vorbis audio decoder,
# removed the audio tray mouse cursor,
# "enterprise edition"
#
# 259288 copyparty-extras/sfx-lite/copyparty-sfx.sh
# 270004 copyparty-extras/sfx-lite/copyparty-sfx.py
# 293159 copyparty-extras/sfx-lite/copyparty-sfx-gz.py
# `- also removed the codemirror markdown editor
# and the text-viewer syntax hilighting,
# only essential features remaining
#
# 646297 copyparty-extras/copyparty-1.0.14.tar.gz
# 4823 copyparty-extras/copyparty-repack.sh
# `- source files from github
#
# 23663 copyparty-extras/up2k.py
# `- standalone utility to upload or search for files
#
# 32280 copyparty-extras/copyparty-fuse.py
# `- standalone to mount a URL as a local read-only filesystem
#
# 270004 copyparty
# `- minimal binary, same as sfx-lite/copyparty-sfx.py
command -v gnutar && tar() { gnutar "$@"; }
@@ -54,6 +81,7 @@ cache="$od/.copyparty-repack.cache"
# fallback to awk (sorry)
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
) |
grep -E '(sfx\.(sh|py)|tar\.gz)$' |
tee /dev/stderr |
tr -d '\r' | tr '\n' '\0' |
xargs -0 bash -c 'dl_files "$@"' _
@@ -64,7 +92,7 @@ cache="$od/.copyparty-repack.cache"
# move src into copyparty-extras/,
# move sfx into copyparty-extras/sfx-full/
mkdir -p copyparty-extras/sfx-{full,lite}
mkdir -p copyparty-extras/sfx-{full,ent,lite}
mv copyparty-sfx.* copyparty-extras/sfx-full/
mv copyparty-*.tar.gz copyparty-extras/
@@ -112,14 +140,17 @@ repack() {
}
repack sfx-full "re gz no-sh"
repack sfx-lite "re no-ogv no-cm"
repack sfx-lite "re no-ogv no-cm gz no-sh"
repack sfx-ent "re no-dd"
repack sfx-ent "re no-dd gz no-sh"
repack sfx-lite "re no-dd no-cm no-hl"
repack sfx-lite "re no-dd no-cm no-hl gz no-sh"
# move fuse client into copyparty-extras/,
# move fuse and up2k clients into copyparty-extras/,
# copy lite-sfx.py to ./copyparty,
# delete extracted source code
( cd copyparty-extras/
mv copyparty-*/bin/up2k.py .
mv copyparty-*/bin/copyparty-fuse.py .
cp -pv sfx-lite/copyparty-sfx.py ../copyparty
rm -rf copyparty-{0..9}*.*.*{0..9}

View File

@@ -3,7 +3,6 @@ WORKDIR /z
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_hashwasm=4.9.0 \
ver_marked=3.0.4 \
ver_ogvjs=1.8.4 \
ver_mde=2.15.0 \
ver_codemirror=5.62.3 \
ver_fontawesome=5.13.0 \
@@ -15,7 +14,6 @@ ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
RUN mkdir -p /z/dist/no-pk \
&& wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \
&& apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \
&& wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \
&& wget https://github.com/openpgpjs/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
&& wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \
@@ -23,7 +21,6 @@ RUN mkdir -p /z/dist/no-pk \
&& wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
&& wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
&& wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \
&& unzip ogvjs.zip \
&& (mkdir hash-wasm \
&& cd hash-wasm \
&& unzip ../hash-wasm.zip) \
@@ -45,6 +42,12 @@ RUN mkdir -p /z/dist/no-pk \
&& tar -xf zopfli.tgz
# todo
# https://cdn.jsdelivr.net/gh/highlightjs/cdn-release/build/highlight.min.js
# https://cdn.jsdelivr.net/gh/highlightjs/cdn-release/build/styles/default.min.css
# https://prismjs.com/download.html#themes=prism-funky&languages=markup+css+clike+javascript+autohotkey+bash+basic+batch+c+csharp+cpp+cmake+diff+docker+go+ini+java+json+kotlin+latex+less+lisp+lua+makefile+objectivec+perl+powershell+python+r+jsx+ruby+rust+sass+scss+sql+swift+systemd+toml+typescript+vbnet+verilog+vhdl+yaml&plugins=line-highlight+line-numbers+autolinker
# build fonttools (which needs zopfli)
RUN tar -xf zopfli.tgz \
&& cd zopfli* \
@@ -71,21 +74,6 @@ RUN cd hash-wasm \
&& mv sha512.umd.min.js /z/dist/sha512.hw.js
# build ogvjs
RUN cd ogvjs-$ver_ogvjs \
&& cp -pv \
ogv-worker-audio.js \
ogv-demuxer-ogg-wasm.js \
ogv-demuxer-ogg-wasm.wasm \
ogv-decoder-audio-opus-wasm.js \
ogv-decoder-audio-opus-wasm.wasm \
ogv-decoder-audio-vorbis-wasm.js \
ogv-decoder-audio-vorbis-wasm.wasm \
/z/dist \
&& cp -pv \
ogv-es2017.js /z/dist/ogv.js
# build marked
COPY marked.patch /z/
COPY marked-ln.patch /z/

View File

@@ -16,12 +16,11 @@ help() { exec cat <<'EOF'
#
# `no-sh` makes just the python sfx, skips the sh/unix sfx
#
# `no-ogv` saves ~192k by removing the opus/vorbis audio codecs
# (only affects apple devices; everything else has native support)
#
# `no-cm` saves ~92k by removing easymde/codemirror
# `no-cm` saves ~82k by removing easymde/codemirror
# (the fancy markdown editor)
#
# `no-hl` saves ~41k by removing syntax hilighting in the text viewer
#
# `no-fnt` saves ~9k by removing the source-code-pro font
# (browsers will try to use 'Consolas' instead)
#
@@ -73,8 +72,8 @@ while [ ! -z "$1" ]; do
clean) clean=1 ; ;;
re) repack=1 ; ;;
gz) use_gz=1 ; ;;
no-ogv) no_ogv=1 ; ;;
no-fnt) no_fnt=1 ; ;;
no-hl) no_hl=1 ; ;;
no-dd) no_dd=1 ; ;;
no-cm) no_cm=1 ; ;;
no-sh) do_sh= ; ;;
@@ -215,9 +214,6 @@ cat have | while IFS= read -r x; do
done
rm have
[ $no_ogv ] &&
rm -rf copyparty/web/deps/{dynamicaudio,ogv}*
[ $no_cm ] && {
rm -rf copyparty/web/mde.* copyparty/web/deps/easymde*
echo h > copyparty/web/mde.html
@@ -226,6 +222,9 @@ rm have
tmv "$f"
}
[ $no_hl ] &&
rm -rf copyparty/web/deps/prism*
[ $no_fnt ] && {
rm -f copyparty/web/deps/scp.woff2
f=copyparty/web/ui.css

View File

@@ -49,14 +49,9 @@ copyparty/web/deps/easymde.js,
copyparty/web/deps/marked.js,
copyparty/web/deps/mini-fa.css,
copyparty/web/deps/mini-fa.woff,
copyparty/web/deps/ogv-decoder-audio-opus-wasm.js,
copyparty/web/deps/ogv-decoder-audio-opus-wasm.wasm,
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.js,
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.wasm,
copyparty/web/deps/ogv-demuxer-ogg-wasm.js,
copyparty/web/deps/ogv-demuxer-ogg-wasm.wasm,
copyparty/web/deps/ogv-worker-audio.js,
copyparty/web/deps/ogv.js,
copyparty/web/deps/prism.js,
copyparty/web/deps/prism.css,
copyparty/web/deps/prismd.css,
copyparty/web/deps/scp.woff2,
copyparty/web/deps/sha512.ac.js,
copyparty/web/deps/sha512.hw.js,

View File

@@ -47,12 +47,13 @@ class Cfg(Namespace):
mtp=[],
mte="a",
mth="",
textfiles="",
hist=None,
no_idx=None,
no_hash=None,
js_browser=None,
css_browser=None,
**{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
**{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr no_acode".split()}
)

View File

@@ -17,7 +17,8 @@ from copyparty import util
class Cfg(Namespace):
def __init__(self, a=None, v=None, c=None):
ex = {k: False for k in "nw e2d e2ds e2dsa e2t e2ts e2tsr".split()}
ex = "nw e2d e2ds e2dsa e2t e2ts e2tsr no_logues no_readme no_acode"
ex = {k: False for k in ex.split()}
ex2 = {
"mtp": [],
"mte": "a",
@@ -28,8 +29,6 @@ class Cfg(Namespace):
"js_browser": None,
"css_browser": None,
"no_voldump": True,
"no_logues": False,
"no_readme": False,
"re_maxage": 0,
"rproxy": 0,
}

View File

@@ -113,6 +113,7 @@ class VSock(object):
class VHttpSrv(object):
def __init__(self):
self.broker = NullBroker()
self.prism = None
aliases = ["splash", "browser", "browser2", "msg", "md", "mde"]
self.j2 = {x: J2_FILES for x in aliases}