Compare commits

...

118 Commits

Author SHA1 Message Date
ed  eb5aaddba4  v0.11.0  2021-05-29 15:03:32 +02:00
ed  d8fd82bcb5  ffthumb only gets one shot  2021-05-29 12:32:51 +02:00
ed  97be495861  another chrome bug: navigating somewhere and back can return a REALLY OLD copy of the page  2021-05-29 12:31:06 +02:00
ed  8b53c159fc  dodge chrome bug  2021-05-29 10:58:21 +02:00
ed  81e281f703  add opus mimetype  2021-05-29 10:17:24 +02:00
ed  3948214050  drop deleted files from snap  2021-05-29 09:03:18 +02:00
ed  c5e9a643e7  more accurate url escaping  2021-05-29 09:02:42 +02:00
ed  d25881d5c3  mojibake fixes  2021-05-29 09:01:59 +02:00
ed  38d8d9733f  fix bugs  2021-05-29 05:50:41 +02:00
ed  118ebf668d  fix bugs  2021-05-29 05:43:09 +02:00
ed  a86f09fa46  mtp: file extension filtering  2021-05-29 04:18:57 +02:00
ed  dd4fb35c8f  nit  2021-05-29 03:45:02 +02:00
ed  621eb4cf95  add multitag example  2021-05-29 03:43:30 +02:00
ed  deea66ad0b  support multiple tags from mtp helpers  2021-05-29 03:43:14 +02:00
ed  bf99445377  groking the ffprobe tarot cards  2021-05-28 06:25:44 +02:00
ed  7b54a63396  icon fix  2021-05-28 06:25:00 +02:00
ed  0fcb015f9a  minor fixes  2021-05-28 05:16:28 +02:00
ed  0a22b1ffb6  dont log thumbnail GETs by default  2021-05-28 05:16:01 +02:00
ed  68cecc52ab  dont grow thumbs  2021-05-28 05:01:25 +02:00
ed  53657ccfff  add avif read support  2021-05-28 05:01:12 +02:00
ed  96223fda01  detect missing webp support  2021-05-28 05:00:08 +02:00
ed  374ff3433e  gj  2021-05-28 02:52:03 +02:00
ed  5d63949e98  create webp thumbnails by default  2021-05-28 02:44:13 +02:00
ed  6b065d507d  crop thumbs for AESTHETICS  2021-05-28 01:46:27 +02:00
ed  e79997498a  a  2021-05-27 01:42:22 +02:00
ed  f7ee02ec35  ux fixes  2021-05-27 01:41:50 +02:00
ed  69dc433e1c  ffprobe parser less bad  2021-05-27 01:41:12 +02:00
ed  c880cd848c  gridview lightmode  2021-05-26 22:53:40 +02:00
ed  5752b6db48  hook up the multiselect ui  2021-05-26 00:47:43 +02:00
ed  b36f905eab  sort folders first + tweak thumbs ui  2021-05-25 21:15:54 +02:00
ed  483dd527c6  add cache eviction  2021-05-25 19:46:35 +02:00
ed  e55678e28f  fix thumb/ico bugs  2021-05-25 17:36:31 +02:00
ed  3f4a8b9d6f  fixes  2021-05-25 06:35:12 +02:00
ed  02a856ecb4  create video thumbnails  2021-05-25 06:14:25 +02:00
ed  4dff726310  initial thumbnail and icon stuff  2021-05-25 03:37:01 +02:00
ed  cbc449036f  readme: todo  2021-05-23 02:43:40 +02:00
ed  8f53152220  todays mistake  2021-05-21 02:30:45 +02:00
ed  bbb1e165d6  v0.10.22  2021-05-18 04:10:37 +02:00
ed  fed8d94885  handle unsupported codecs better  2021-05-18 03:44:30 +02:00
ed  58040cc0ed  fix the treesize off-by-one (*finally*)  2021-05-18 03:21:53 +02:00
ed  03d692db66  add now-playing clipboard meme  2021-05-18 02:54:52 +02:00
ed  903f8e8453  logging  2021-05-17 18:45:15 +02:00
ed  405ae1308e  v0.10.21  2021-05-16 20:22:33 +02:00
ed  8a0f583d71  oh no  2021-05-16 11:01:32 +02:00
ed  b6d7017491  readme  2021-05-16 09:05:40 +02:00
ed  0f0217d203  readme  2021-05-16 08:52:22 +02:00
ed  a203e33347  v0.10.20  2021-05-16 07:51:39 +02:00
ed  3b8f697dd4  include links in bup summary  2021-05-16 07:51:22 +02:00
ed  78ba16f722  log filtering by url regex  2021-05-16 07:29:34 +02:00
ed  0fcfe79994  general-purpose file parsing  2021-05-16 07:04:18 +02:00
ed  c0e6df4b63  let it gooo  2021-05-16 05:27:04 +02:00
ed  322abdcb43  more dino support  2021-05-16 05:04:44 +02:00
ed  31100787ce  ahh whatever  2021-05-16 03:21:49 +02:00
ed  c57d721be4  ie11 doesnt support sha512  2021-05-16 03:11:37 +02:00
ed  3b5a03e977  this too  2021-05-16 02:34:36 +02:00
ed  ed807ee43e  native sha512 on old iphones  2021-05-16 02:25:00 +02:00
ed  073c130ae6  respect tooltip pref in up2k  2021-05-16 02:18:54 +02:00
ed  8810e0be13  add option to log headers  2021-05-16 02:11:09 +02:00
ed  f93016ab85  dont suggest bup if no write-access  2021-05-16 00:30:32 +02:00
ed  b19cf260c2  drop the control-panel link too  2021-05-14 20:07:48 +02:00
ed  db03e1e7eb  readme  2021-05-14 16:38:07 +02:00
ed  e0d975e36a  v0.10.19  2021-05-14 00:00:15 +02:00
ed  cfeb15259f  not careful enough  2021-05-13 23:29:15 +02:00
ed  3b3f8fc8fb  careful rice  2021-05-13 23:00:51 +02:00
ed  88bd2c084c  misc  2021-05-13 22:58:36 +02:00
ed  bd367389b0  broke windows  2021-05-13 22:58:23 +02:00
ed  58ba71a76f  option to hide incomplete uploads  2021-05-13 22:56:52 +02:00
ed  d03e34d55d  v0.10.18  2021-05-13 17:42:06 +02:00
ed  24f239a46c  ui tweaks  2021-05-13 17:41:14 +02:00
ed  2c0826f85a  conditional sections in volume listing  2021-05-13 17:24:37 +02:00
ed  c061461d01  fix md perm reqs + dyn up2k modeset  2021-05-13 17:22:31 +02:00
ed  e7982a04fe  explicit redirect to single non-roots  2021-05-13 16:54:31 +02:00
ed  33b91a7513  set password cookie expiration  2021-05-13 16:23:28 +02:00
ed  9bb1323e44  rclone faster + query params correctness  2021-05-13 16:02:30 +02:00
ed  e62bb807a5  better  2021-05-13 01:36:14 +02:00
ed  3fc0d2cc4a  better  2021-05-13 00:43:25 +02:00
ed  0c786b0766  v0.10.17  2021-05-12 23:39:54 +02:00
ed  68c7528911  yes good  2021-05-12 23:26:30 +02:00
ed  26e18ae800  disallow uploading logues  2021-05-12 23:22:43 +02:00
ed  c30dc0b546  write-only QoL mostly  2021-05-12 23:06:13 +02:00
ed  f94aa46a11  open write-only folders from tree  2021-05-12 21:50:32 +02:00
ed  403261a293  support pyinstaller  2021-05-12 21:21:07 +02:00
ed  c7d9cbb11f  show logues in write-only folders  2021-05-12 21:20:59 +02:00
ed  57e1c53cbb  mention volume flags in the cfg-file example  2021-05-02 09:48:19 +02:00
ed  0754b553dd  v0.10.16  2021-05-02 09:18:19 +02:00
ed  50661d941b  cfg-parser: fix wildcard permissions  2021-05-02 09:16:14 +02:00
ed  c5db7c1a0c  pickle needs this ;_;  2021-04-29 22:41:57 +02:00
ed  2cef5365f7  readme again  2021-04-27 09:26:14 +02:00
ed  fbc4e94007  readme (realized this was confusing)  2021-04-27 09:24:50 +02:00
ed  037ed5a2ad  readme  2021-04-26 04:02:22 +02:00
ed  69dfa55705  readme  2021-04-26 04:01:47 +02:00
ed  a79a5c4e3e  readme + ui tweaks  2021-04-25 22:44:50 +02:00
ed  7e80eabfe6  readme  2021-04-25 21:42:45 +02:00
ed  375b72770d  readme  2021-04-25 04:34:06 +02:00
ed  e2dd683def  does this look better  2021-04-25 03:04:24 +02:00
ed  9eba50c6e4  readme  2021-04-25 03:00:47 +02:00
ed  5a579dba52  sfx: help bzip2 make smaller archives  2021-04-24 22:07:09 +02:00
ed  e86c719575  sfx: cooperate better with other instances  2021-04-24 22:06:50 +02:00
ed  0e87f35547  ui tweaks  2021-04-24 22:06:21 +02:00
ed  b6d3d791a5  shave  2021-04-24 20:08:07 +02:00
ed  c9c3302664  a  2021-04-24 19:22:15 +02:00
ed  c3e4d65b80  v0.10.15  2021-04-24 04:05:57 +02:00
ed  27a03510c5  quick upload test too  2021-04-24 03:35:58 +02:00
ed  ed7727f7cb  fix write-only volumes + add regression test  2021-04-24 02:48:41 +02:00
ed  127ec10c0d  js cleanup + minor tweaks  2021-04-23 20:04:17 +02:00
ed  5a9c0ad225  ui tweaks  2021-04-22 09:10:32 +02:00
ed  7e8daf650e  v0.10.14  2021-04-21 22:04:21 +02:00
ed  0cf737b4ce  404 rather than redirect home if 404 or 403  2021-04-21 21:51:27 +02:00
ed  74635e0113  phew  2021-04-21 21:42:37 +02:00
ed  e5c4f49901  ok ok  2021-04-21 21:26:55 +02:00
ed  e4654ee7f1  uhh  2021-04-21 21:13:16 +02:00
ed  e5d05c05ed  up2k ui tweaks  2021-04-21 20:50:10 +02:00
ed  73c4f99687  add markdown streaming  2021-04-21 20:28:50 +02:00
ed  28c12ef3bf  cleanup  2021-04-21 18:48:23 +02:00
ed  eed82dbb54  remove dead code  2021-04-21 18:44:47 +02:00
ed  2c4b4ab928  up2k-cli: cond. readahead  2021-04-21 18:39:55 +02:00
ed  505a8fc6f6  up2k: sparse alloc on windows  2021-04-21 18:32:21 +02:00
ed  e4801d9b06  support msys2-python  2021-04-21 18:28:44 +02:00
49 changed files with 3426 additions and 1390 deletions

2
.vscode/launch.py vendored

@@ -12,7 +12,7 @@ sys.path.insert(0, os.getcwd())
import jstyleson
from copyparty.__main__ import main as copyparty
with open(".vscode/launch.json", "r") as f:
with open(".vscode/launch.json", "r", encoding="utf-8") as f:
tj = f.read()
oj = jstyleson.loads(tj)

243
README.md

@@ -9,9 +9,12 @@
turn your phone or raspi into a portable file server with resumable uploads/downloads using IE6 or any other browser
* server runs on anything with `py2.7` or `py3.3+`
* *resumable* uploads need `firefox 12+` / `chrome 6+` / `safari 6+` / `IE 10+`
* browse/upload with IE4 / netscape4.0 on win3.11 (heh)
* *resumable* uploads need `firefox 34+` / `chrome 41+` / `safari 7+` for full speed
* code standard: `black`
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [thumbnails](#thumbnails) // [md-viewer](#markdown-viewer) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [ie4](#browser-support)
## readme toc
@@ -20,8 +23,18 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [notes](#notes)
* [status](#status)
* [bugs](#bugs)
* [usage](#usage)
* [general bugs](#general-bugs)
* [not my bugs](#not-my-bugs)
* [the browser](#the-browser)
* [tabs](#tabs)
* [hotkeys](#hotkeys)
* [tree-mode](#tree-mode)
* [thumbnails](#thumbnails)
* [zip downloads](#zip-downloads)
* [uploading](#uploading)
* [file-search](#file-search)
* [markdown viewer](#markdown-viewer)
* [other tricks](#other-tricks)
* [searching](#searching)
* [search configuration](#search-configuration)
* [metadata from audio files](#metadata-from-audio-files)
@@ -29,7 +42,10 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [complete examples](#complete-examples)
* [browser support](#browser-support)
* [client examples](#client-examples)
* [up2k](#up2k)
* [dependencies](#dependencies)
* [optional dependencies](#optional-dependencies)
* [install recommended deps](#install-recommended-deps)
* [optional gpl stuff](#optional-gpl-stuff)
* [sfx](#sfx)
* [sfx repack](#sfx-repack)
@@ -43,25 +59,27 @@ turn your phone or raspi into a portable file server with resumable uploads/down
download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!
running the sfx without arguments (for example doubleclicking it on Windows) will let anyone access the current folder; see `-h` for help if you want accounts and volumes etc
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone full access to the current folder; see `-h` for help if you want accounts and volumes etc
you may also want these, especially on servers:
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for legit https)
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)
## notes
* iPhone/iPad: use Firefox to download files
* Android-Chrome: set max "parallel uploads" for 200% upload speed (android bug)
* Android-Firefox: takes a while to select files (in order to avoid the above android-chrome issue)
* Desktop-Firefox: may use gigabytes of RAM if your connection is great and your files are massive
* Android-Chrome: increase "parallel uploads" for higher speed (android bug)
* Android-Firefox: takes a while to select files (their fix for ☝️)
* Desktop-Firefox: ~~may use gigabytes of RAM if your files are massive~~ *seems to be OK now*
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
* because no browsers currently implement the media-query to do this properly orz
## status
summary: all planned features work! now please enjoy the bloatening
* backend stuff
* ☑ sanic multipart parser
* ☑ load balancer (multiprocessing)
@@ -79,9 +97,12 @@ you may also want these, especially on servers:
* browser
* ☑ tree-view
* ☑ media player
* thumbnails
* ✖ SPA (browse while uploading)
* currently safe using the file-tree on the left only, not folders in the file list
* thumbnails
* ☑ images using Pillow
* ☑ videos using FFmpeg
* ☑ cache eviction (max-age; maybe max-size eventually)
* ☑ SPA (browse while uploading)
* if you use the file-tree on the left only, not folders in the file list
* server indexing
* ☑ locate files by contents
* ☑ search by name/path/date/size
@@ -90,28 +111,70 @@ you may also want these, especially on servers:
* ☑ viewer
* ☑ editor (sure why not)
summary: it works! you can use it! (but technically not even close to beta)
# bugs
* Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* Windows: python 2.7 cannot handle filenames with mojibake
## general bugs
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
* cannot mount something at `/d1/d2/d3` unless `d2` exists inside `d1`
* hiding the contents at url `/d1/d2/d3` using `-v :d1/d2/d3:cd2d` has the side-effect of creating databases (for files/tags) inside folders d1 and d2, and those databases take precedence over the main db at the top of the vfs - this means all files in d2 and below will be reindexed unless you already had a vfs entry at or below d2
* probably more, pls let me know
## not my bugs
# usage
* Windows: msys2-python 3.8.6 occasionally throws "RuntimeError: release unlocked lock" when leaving a scoped mutex in up2k
* this is an msys2 bug, the regular windows edition of python is fine
# the browser
![copyparty-browser-fs8](https://user-images.githubusercontent.com/241032/115978054-65106380-a57d-11eb-98f8-59e3dee73557.png)
## tabs
* `[🔎]` search by size, date, path/name, mp3-tags ... see [searching](#searching)
* `[🚀]` and `[🎈]` are the uploaders, see [uploading](#uploading)
* `[📂]` mkdir, create directories
* `[📝]` new-md, create a new markdown document
* `[📟]` send-msg, either to server-log or into textfiles if `--urlform save`
* `[⚙️]` client configuration options
## hotkeys
the browser has the following hotkeys
* `0..9` jump to 10%..90%
* `U/O` skip 10sec back/forward
* `J/L` prev/next song
* `I/K` prev/next folder
* `P` parent folder
* `G` toggle list / grid view
* `T` toggle thumbnails / icons
* when playing audio:
* `0..9` jump to 10%..90%
* `U/O` skip 10sec back/forward
* `J/L` prev/next song
* `J` also starts playing the folder
* in the grid view:
* `S` toggle multiselect
* `A/D` zoom
you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&1:20` after the `.../#af-c8960dab`
## tree-mode
by default there's a breadcrumbs path; you can replace this with a tree-browser sidebar thing by clicking the 🌲
click `[-]` and `[+]` to adjust the size, and the `[a]` toggles if the tree should widen dynamically as you go deeper or stay fixed-size
## thumbnails
![copyparty-thumbs-fs8](https://user-images.githubusercontent.com/241032/120070302-10836b00-c08a-11eb-8eb4-82004a34c342.png)
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how destructive your users are
## zip downloads
@@ -130,12 +193,80 @@ the `zip` link next to folders can produce various types of zip/tar files using
* `zip_crc` will take longer to download since the server has to read each file twice
* please let me know if you find a program old enough to actually need this
you can also zip a selection of files or folders by clicking them in the browser, that brings up a selection editor and zip button in the bottom right
![copyparty-zipsel-fs8](https://user-images.githubusercontent.com/241032/116008321-372a2e00-a614-11eb-9a4a-4a1fd9074224.png)
## uploading
two upload methods are available in the html client:
* `🎈 bup`, the basic uploader, supports almost every browser since netscape 4.0
* `🚀 up2k`, the fancy one
up2k has several advantages:
* you can drop folders into the browser (files are added recursively)
* files are processed in chunks, and each chunk is checksummed
* uploads resume if they are interrupted (for example by a reboot)
* server detects any corruption; the client reuploads affected chunks
* the client doesn't upload anything that already exists on the server
* the last-modified timestamp of the file is preserved
see [up2k](#up2k) for details on how it works
![copyparty-upload-fs8](https://user-images.githubusercontent.com/241032/115978061-680b5400-a57d-11eb-9ef6-cbb5f60aeccc.png)
**protip:** you can avoid scaring away users with [docs/minimal-up2k.html](docs/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
the up2k UI is the epitome of polished intuitive experiences:
* "parallel uploads" specifies how many chunks to upload at the same time
* `[🏃]` analysis of other files should continue while one is uploading
* `[💭]` ask for confirmation before files are added to the list
* `[💤]` sync uploading between other copyparty tabs so only one is active
* `[🔎]` switch between upload and file-search mode
and then there's the tabs below it,
* `[ok]` is uploads which completed successfully
* `[ng]` is the uploads which failed / got rejected (already exists, ...)
* `[done]` shows a combined list of `[ok]` and `[ng]`, chronological order
* `[busy]` files which are currently hashing, pending-upload, or uploading
* plus up to 3 entries each from `[done]` and `[que]` for context
* `[que]` is all the files that are still queued
### file-search
![copyparty-fsearch-fs8](https://user-images.githubusercontent.com/241032/116008320-36919780-a614-11eb-803f-04162326a700.png)
in the `[🚀 up2k]` tab, after toggling the `[🔎]` switch green, any files/folders you drop onto the dropzone will be hashed on the client-side. Each hash is sent to the server which checks if that file exists somewhere already
files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much
adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files
note that since up2k has to read the file twice, `[🎈 bup]` can be up to 2x faster in extreme cases (if your internet connection is faster than the read-speed of your HDD)
up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well thanks to tls also functioning as an integrity check
## markdown viewer
![copyparty-md-read-fs8](https://user-images.githubusercontent.com/241032/115978057-66419080-a57d-11eb-8539-d2be843991aa.png)
* the document preview has a max-width which is the same as an A4 paper when printed
## other tricks
* you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`
# searching
![copyparty-search-fs8](https://user-images.githubusercontent.com/241032/115978060-6772bd80-a57d-11eb-81d3-174e869b72c3.png)
when started with `-e2dsa` copyparty will scan/index all your files. This avoids duplicates on upload, and also makes the volumes searchable through the web-ui:
* make search queries by `size`/`date`/`directory-path`/`filename`, or...
* drag/drop a local file to see if the same contents exist somewhere on the server (you get the URL if it does)
* drag/drop a local file to see if the same contents exist somewhere on the server, see [file-search](#file-search)
path/name queries are space-separated, AND'ed together, and words are negated with a `-` prefix, so for example:
* path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
@@ -180,6 +311,7 @@ see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copy
`--no-mutagen` disables mutagen and uses ffprobe instead, which...
* is about 20x slower than mutagen
* catches a few tags that mutagen doesn't
* melodic key, video resolution, framerate, pixfmt
* avoids pulling any GPL code into copyparty
* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve
@@ -192,6 +324,11 @@ copyparty can invoke external programs to collect additional metadata for files
* `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
* `-v ~/music::r:cmtp=.bpm=~/bin/audio-bpm.py:cmtp=key=f,t5,~/bin/audio-key.py` both as a per-volume config wow this is getting ugly
*but wait, there's more!* `-mtp` can be used for non-audio files as well using the `a` flag: `ay` only do audio files, `an` only do non-audio files, or `ad` do all files (d as in dontcare)
* `-mtp ext=an,~/bin/file-ext.py` runs `~/bin/file-ext.py` to get the `ext` tag only if file is not audio (`an`)
* `-mtp arch,built,ver,orig=an,eexe,edll,~/bin/exe.py` runs `~/bin/exe.py` to get properties about windows-binaries only if file is not audio (`an`) and file extension is exe or dll
## complete examples
@@ -201,6 +338,8 @@ copyparty can invoke external programs to collect additional metadata for files
# browser support
![copyparty-ie4-fs8](https://user-images.githubusercontent.com/241032/118192791-fb31fe00-b446-11eb-9647-898ea8efc1f7.png)
`ie` = internet-explorer, `ff` = firefox, `c` = chrome, `iOS` = iPhone/iPad, `Andr` = Android
| feature | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
@@ -225,14 +364,18 @@ copyparty can invoke external programs to collect additional metadata for files
* `*2` using a wasm decoder which can sometimes get stuck and consumes a bit more power
quick summary of more eccentric web-browsers trying to view a directory index:
* safari (14.0.3/macos) is chrome with janky wasm, so playing opus can deadlock the javascript engine
* safari (14.0.1/iOS) same as macos, except it recovers from the deadlocks if you poke it a bit
* links (2.21/macports) can browse, login, upload/mkdir/msg
* lynx (2.8.9/macports) can browse, login, upload/mkdir/msg
* w3m (0.5.3/macports) can browse, login, upload at 100kB/s, mkdir/msg
* netsurf (3.10/arch) is basically ie6 with much better css (javascript has almost no effect)
* netscape 4.0 and 4.5 can browse (text is yellow on white), upload with `?b=u`
* SerenityOS (22d13d8) hits a page fault, works with `?b=u`, file input not-impl, url params are multiplying
| browser | will it blend |
| ------- | ------------- |
| **safari** (14.0.3/macos) | is chrome with janky wasm, so playing opus can deadlock the javascript engine |
| **safari** (14.0.1/iOS) | same as macos, except it recovers from the deadlocks if you poke it a bit |
| **links** (2.21/macports) | can browse, login, upload/mkdir/msg |
| **lynx** (2.8.9/macports) | can browse, login, upload/mkdir/msg |
| **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
| **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
| **ie4** and **netscape** 4.0 | can browse (text is yellow on white), upload with `?b=u` |
| **SerenityOS** (22d13d8) | hits a page fault, works with `?b=u`, file input not-impl, url params are multiplying |
# client examples
@@ -258,30 +401,64 @@ copyparty returns a truncated sha512sum of your PUT/POST as base64; you can gene
b512 <movie.mkv
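for reference, here's a rough python equivalent of the `b512` helper (a sketch, not code from this repo; the url-safe base64 alphabet and the 44-character cutoff are assumptions, so trim to whatever length the server actually replies with):

```python
# hypothetical local checksum helper: sha512 the file, base64-encode
# the digest, keep a truncated prefix for comparison with the server reply
import base64
import hashlib
import sys

def b512(path, length=44):  # length is an assumption, match it to the server reply
    h = hashlib.sha512()
    with open(path, "rb") as f:
        for buf in iter(lambda: f.read(512 * 1024), b""):
            h.update(buf)
    return base64.urlsafe_b64encode(h.digest()).decode("ascii")[:length]

if __name__ == "__main__":
    print(b512(sys.argv[1]))  # e.g.  python b512.py movie.mkv
```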
# up2k
quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
* the up2k client splits a file into an "optimal" number of chunks
* 1 MiB each, unless that becomes more than 256 chunks
* tries 1.5M, 2M, 3, 4, 6, ... until <= 256 chunks or size >= 32M
* client posts the list of hashes, filename, size, last-modified
* server creates the `wark`, an identifier for this upload
* `sha512( salt + filesize + chunk_hashes )`
* and a sparse file is created for the chunks to drop into
* client uploads each chunk
* header entries for the chunk-hash and wark
* server writes chunks into place based on the hash
* client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload
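to make the outline above a bit more concrete, here's a small python sketch of the chunk-size selection and the wark derivation as described (an illustration of the listed steps, not the actual up2k client/server code; the multiplier progression and the hash/serialization details are assumptions):

```python
# sketch of the up2k outline: pick a chunk size, hash each chunk,
# derive the wark from salt + filesize + chunk hashes
import hashlib

def pick_chunksize(filesize):
    # 1 MiB unless that means more than 256 chunks; then try 1.5M, 2M, 3M, 4M, 6M ...
    # until <= 256 chunks or the chunk size reaches 32 MiB (progression is assumed)
    for mib in [1, 1.5, 2, 3, 4, 6, 8, 12, 16, 24, 32]:
        chunksize = int(mib * 1024 * 1024)
        if filesize <= chunksize * 256 or chunksize >= 32 * 1024 * 1024:
            return chunksize

def hash_chunks(path, chunksize):
    # one sha512 per chunk (the real client does this browser-side)
    hashes = []
    with open(path, "rb") as f:
        while True:
            buf = f.read(chunksize)
            if not buf:
                break
            hashes.append(hashlib.sha512(buf).hexdigest())
    return hashes

def wark(salt, filesize, chunk_hashes):
    # wark = sha512( salt + filesize + chunk_hashes ) per the outline;
    # the exact encoding/concatenation is an assumption
    h = hashlib.sha512()
    h.update(salt.encode("utf-8"))
    h.update(str(filesize).encode("utf-8"))
    for ch in chunk_hashes:
        h.update(ch.encode("utf-8"))
    return h.hexdigest()
```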
# dependencies
* `jinja2` (is built into the SFX)
**optional,** enables music tags:
## optional dependencies
enable music tags:
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
* or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
**optional,** will eventually enable thumbnails:
enable image thumbnails:
* `Pillow` (requires py2.7 or py3.5+)
enable video thumbnails:
* `ffmpeg` and `ffprobe` somewhere in `$PATH`
enable reading HEIF pictures:
* `pyheif-pillow-opener` (requires Linux or a C compiler)
enable reading AVIF pictures:
* `pillow-avif-plugin`
## install recommended deps
```
python -m pip install --user -U jinja2 mutagen Pillow
```
## optional gpl stuff
some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag)
these are standalone and will never be imported / evaluated by copyparty
these are standalone programs and will never be imported / evaluated by copyparty
# sfx
currently there are two self-contained binaries:
* [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere
* [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos
currently there are two self-contained "binaries":
* [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere, **recommended**
* [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos, kinda deprecated
launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course
@@ -348,7 +525,7 @@ roughly sorted by priority
* start from a chunk index and just go
* terminate client on bad data
* `os.copy_file_range` for up2k cloning
* support pillow-simd
* single sha512 across all up2k chunks? maybe
* figure out the deal with pixel3a not being connectable as hotspot
* pixel3a having unpredictable 3sec latency in general :||||

96
bin/mtag/exe.py Normal file

@@ -0,0 +1,96 @@
#!/usr/bin/env python
import sys
import time
import json
import pefile
"""
retrieve exe info,
example for multivalue providers
"""
def unk(v):
return "unk({:04x})".format(v)
class PE2(pefile.PE):
def __init__(self, *a, **ka):
for k in [
# -- parse_data_directories:
"parse_import_directory",
"parse_export_directory",
# "parse_resources_directory",
"parse_debug_directory",
"parse_relocations_directory",
"parse_directory_tls",
"parse_directory_load_config",
"parse_delay_import_directory",
"parse_directory_bound_imports",
# -- full_load:
"parse_rich_header",
]:
setattr(self, k, self.noop)
super(PE2, self).__init__(*a, **ka)
def noop(*a, **ka):
pass
try:
pe = PE2(sys.argv[1], fast_load=False)
except:
sys.exit(0)
arch = pe.FILE_HEADER.Machine
if arch == 0x14C:
arch = "x86"
elif arch == 0x8664:
arch = "x64"
else:
arch = unk(arch)
try:
buildtime = time.gmtime(pe.FILE_HEADER.TimeDateStamp)
buildtime = time.strftime("%Y-%m-%d_%H:%M:%S", buildtime)
except:
buildtime = "invalid"
ui = pe.OPTIONAL_HEADER.Subsystem
if ui == 2:
ui = "GUI"
elif ui == 3:
ui = "cmdline"
else:
ui = unk(ui)
extra = {}
if hasattr(pe, "FileInfo"):
for v1 in pe.FileInfo:
for v2 in v1:
if v2.name != "StringFileInfo":
continue
for v3 in v2.StringTable:
for k, v in v3.entries.items():
v = v.decode("utf-8", "replace").strip()
if not v:
continue
if k in [b"FileVersion", b"ProductVersion"]:
extra["ver"] = v
if k in [b"OriginalFilename", b"InternalName"]:
extra["orig"] = v
r = {
"arch": arch,
"built": buildtime,
"ui": ui,
"cksum": "{:08x}".format(pe.OPTIONAL_HEADER.CheckSum),
}
r.update(extra)
print(json.dumps(r, indent=4))

9
bin/mtag/file-ext.py Normal file

@@ -0,0 +1,9 @@
#!/usr/bin/env python
import sys
"""
example that just prints the file extension
"""
print(sys.argv[1].split(".")[-1])


@@ -2,6 +2,7 @@
from __future__ import print_function, unicode_literals
import platform
import time
import sys
import os
@@ -16,12 +17,18 @@ if platform.system() == "Windows":
VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393]
# introduced in anniversary update
ANYWIN = WINDOWS or sys.platform in ["msys"]
MACOS = platform.system() == "Darwin"
class EnvParams(object):
def __init__(self):
self.t0 = time.time()
self.mod = os.path.dirname(os.path.realpath(__file__))
if self.mod.endswith("__init__"):
self.mod = os.path.dirname(self.mod)
if sys.platform == "win32":
self.cfg = os.path.normpath(os.environ["APPDATA"] + "/copyparty")
elif sys.platform == "darwin":


@@ -237,19 +237,29 @@ def run_argparse(argv, formatter):
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
ap.add_argument("-q", action="store_true", help="quiet")
ap.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
ap.add_argument("-ed", action="store_true", help="enable ?dots")
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap.add_argument("-nih", action="store_true", help="no info hostname")
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
ap.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
ap.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile (for debugging)")
ap.add_argument("--no-scandir", action="store_true", help="disable scandir (for debugging)")
ap.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms")
ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
ap2 = ap.add_argument_group('thumbnail options')
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=1800, help="cleanup interval")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
ap2 = ap.add_argument_group('database options')
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
@@ -261,7 +271,7 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q")
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,ac,vc,res,.fps")
ap2.add_argument("-mtp", metavar="M=[f,]bin", action="append", type=str, help="read tag M using bin")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
@@ -272,6 +282,13 @@ def run_argparse(argv, formatter):
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
ap2 = ap.add_argument_group('debug options')
ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
ap2.add_argument("--ihead", metavar="HEADER", action='append', help="dump incoming header")
ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
return ap.parse_args(args=argv[1:])
# fmt: on


@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (0, 10, 13)
CODENAME = "zip it"
BUILD_DT = (2021, 4, 20)
VERSION = (0, 11, 0)
CODENAME = "the grid"
BUILD_DT = (2021, 5, 29)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)


@@ -141,7 +141,12 @@ class VFS(object):
real.sort()
if not rem:
for name, vn2 in sorted(self.nodes.items()):
if uname in vn2.uread or "*" in vn2.uread:
if (
uname in vn2.uread
or "*" in vn2.uread
or uname in vn2.uwrite
or "*" in vn2.uwrite
):
virt_vis[name] = vn2
# no vfs nodes in the list of real inodes
@@ -241,6 +246,7 @@ class AuthSrv(object):
self.args = args
self.log_func = log_func
self.warn_anonwrite = warn_anonwrite
self.line_ctr = 0
if WINDOWS:
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
@@ -266,7 +272,9 @@ class AuthSrv(object):
def _parse_config_file(self, fd, user, mread, mwrite, mflags, mount):
vol_src = None
vol_dst = None
self.line_ctr = 0
for ln in [x.decode("utf-8").strip() for x in fd]:
self.line_ctr += 1
if not ln and vol_src is not None:
vol_src = None
vol_dst = None
@@ -296,7 +304,12 @@ class AuthSrv(object):
mflags[vol_dst] = {}
continue
lvl, uname = ln.split(" ")
if len(ln) > 1:
lvl, uname = ln.split(" ")
else:
lvl = ln
uname = "*"
self._read_vol_str(
lvl, uname, mread[vol_dst], mwrite[vol_dst], mflags[vol_dst]
)
@@ -374,7 +387,12 @@ class AuthSrv(object):
if self.args.c:
for cfg_fn in self.args.c:
with open(cfg_fn, "rb") as f:
self._parse_config_file(f, user, mread, mwrite, mflags, mount)
try:
self._parse_config_file(f, user, mread, mwrite, mflags, mount)
except:
m = "\n\033[1;31m\nerror in config file {} on line {}:\n\033[0m"
print(m.format(cfg_fn, self.line_ctr))
raise
if not mount:
# -h says our defaults are CWD at root and read/write for everyone
@@ -457,8 +475,10 @@ class AuthSrv(object):
# verify tags mentioned by -mt[mp] are used by -mte
local_mtp = {}
local_only_mtp = {}
for a in vol.flags.get("mtp", []) + vol.flags.get("mtm", []):
a = a.split("=")[0]
tags = vol.flags.get("mtp", []) + vol.flags.get("mtm", [])
tags = [x.split("=")[0] for x in tags]
tags = [y for x in tags for y in x.split(",")]
for a in tags:
local_mtp[a] = True
local = True
for b in self.args.mtp or []:
@@ -487,8 +507,10 @@ class AuthSrv(object):
self.log(m.format(vol.vpath, mtp), 1)
errors = True
for mtp in self.args.mtp or []:
mtp = mtp.split("=")[0]
tags = self.args.mtp or []
tags = [x.split("=")[0] for x in tags]
tags = [y for x in tags for y in x.split(",")]
for mtp in tags:
if mtp not in all_mte:
m = 'metadata tag "{}" is defined by "-mtm" or "-mtp", but is not used by "-mte" (or by any "cmte" volume-flag)'
self.log(m.format(mtp), 1)


@@ -13,7 +13,7 @@ import ctypes
from datetime import datetime
import calendar
from .__init__ import E, PY2, WINDOWS
from .__init__ import E, PY2, WINDOWS, ANYWIN
from .util import * # noqa # pylint: disable=unused-wildcard-import
from .szip import StreamZip
from .star import StreamTar
@@ -22,6 +22,10 @@ if not PY2:
unicode = str
NO_CACHE = {"Cache-Control": "no-cache"}
NO_STORE = {"Cache-Control": "no-store; max-age=0"}
class HttpCli(object):
"""
Spawned by HttpConn to process one http transaction
@@ -36,6 +40,8 @@ class HttpCli(object):
self.addr = conn.addr
self.args = conn.args
self.auth = conn.auth
self.ico = conn.ico
self.thumbcli = conn.thumbcli
self.log_func = conn.log_func
self.log_src = conn.log_src
self.tls = hasattr(self.s, "cipher")
@@ -100,6 +106,16 @@ class HttpCli(object):
self.ip = v.split(",")[0]
self.log_src = self.conn.set_rproxy(self.ip)
if self.args.ihead:
keys = self.args.ihead
if "*" in keys:
keys = list(sorted(self.headers.keys()))
for k in keys:
v = self.headers.get(k)
if v is not None:
self.log("[H] {}: \033[33m[{}]".format(k, v), 6)
# split req into vpath + uparam
uparam = {}
if "?" not in self.req:
@@ -120,29 +136,35 @@ class HttpCli(object):
else:
uparam[k.lower()] = False
self.ouparam = {k: v for k, v in uparam.items()}
cookies = self.headers.get("cookie") or {}
if cookies:
cookies = [x.split("=", 1) for x in cookies.split(";") if "=" in x]
cookies = {k.strip(): unescape_cookie(v) for k, v in cookies}
for kc, ku in [["cppwd", "pw"], ["b", "b"]]:
if kc in cookies and ku not in uparam:
uparam[ku] = cookies[kc]
self.uparam = uparam
self.cookies = cookies
self.vpath = unquotep(vpath)
pwd = None
if "cookie" in self.headers:
cookies = self.headers["cookie"].split(";")
for k, v in [x.split("=", 1) for x in cookies]:
if k.strip() != "cppwd":
continue
pwd = unescape_cookie(v)
break
pwd = uparam.get("pw", pwd)
pwd = uparam.get("pw")
self.uname = self.auth.iuser.get(pwd, "*")
if self.uname:
self.rvol = self.auth.vfs.user_tree(self.uname, readable=True)
self.wvol = self.auth.vfs.user_tree(self.uname, writable=True)
ua = self.headers.get("user-agent", "")
if ua.startswith("rclone/"):
self.is_rclone = ua.startswith("rclone/")
if self.is_rclone:
uparam["raw"] = False
uparam["dots"] = False
uparam["b"] = False
cookies["b"] = False
self.do_log = not self.conn.lf_url or not self.conn.lf_url.search(self.req)
try:
if self.mode in ["GET", "HEAD"]:
@@ -182,10 +204,8 @@ class HttpCli(object):
self.out_headers.update(headers)
# default to utf8 html if no content-type is set
try:
mime = mime or self.out_headers["Content-Type"]
except KeyError:
mime = "text/html; charset=UTF-8"
if not mime:
mime = self.out_headers.get("Content-Type", "text/html; charset=UTF-8")
self.out_headers["Content-Type"] = mime
@@ -220,7 +240,14 @@ class HttpCli(object):
removing anything in rm, adding pairs in add
"""
kv = {k: v for k, v in self.uparam.items() if k not in rm}
if self.is_rclone:
return ""
kv = {
k: v
for k, v in self.uparam.items()
if k not in rm and self.cookies.get(k) != v
}
kv.update(add)
if not kv:
return ""
@@ -228,21 +255,43 @@ class HttpCli(object):
r = ["{}={}".format(k, quotep(v)) if v else k for k, v in kv.items()]
return "?" + "&amp;".join(r)
def redirect(
self, vpath, suf="", msg="aight", flavor="go to", click=True, use302=False
):
html = self.j2(
"msg",
h2='<a href="/{}">{} /{}</a>'.format(
quotep(vpath) + suf, flavor, html_escape(vpath, crlf=True) + suf
),
pre=msg,
click=click,
).encode("utf-8", "replace")
if use302:
h = {"Location": "/" + vpath, "Cache-Control": "no-cache"}
self.reply(html, status=302, headers=h)
else:
self.reply(html)
def handle_get(self):
logmsg = "{:4} {}".format(self.mode, self.req)
if self.do_log:
logmsg = "{:4} {}".format(self.mode, self.req)
if "range" in self.headers:
try:
rval = self.headers["range"].split("=", 1)[1]
except:
rval = self.headers["range"]
if "range" in self.headers:
try:
rval = self.headers["range"].split("=", 1)[1]
except:
rval = self.headers["range"]
logmsg += " [\033[36m" + rval + "\033[0m]"
logmsg += " [\033[36m" + rval + "\033[0m]"
self.log(logmsg)
self.log(logmsg)
# "embedded" resources
if self.vpath.startswith(".cpr"):
if self.vpath.startswith(".cpr/ico/"):
return self.tx_ico(self.vpath.split("/")[-1], exact=True)
static_path = os.path.join(E.mod, "web/", self.vpath[5:])
return self.tx_file(static_path)
@@ -250,23 +299,27 @@ class HttpCli(object):
return self.tx_tree()
# conditional redirect to single volumes
if self.vpath == "" and not self.uparam:
if self.vpath == "" and not self.ouparam:
nread = len(self.rvol)
nwrite = len(self.wvol)
if nread + nwrite == 1 or (self.rvol == self.wvol and nread == 1):
if nread == 1:
self.vpath = self.rvol[0]
vpath = self.rvol[0]
else:
self.vpath = self.wvol[0]
vpath = self.wvol[0]
self.absolute_urls = True
if self.vpath != vpath:
self.redirect(vpath, flavor="redirecting to", use302=True)
return True
# go home if verboten
self.readable, self.writable = self.conn.auth.vfs.can_access(
self.vpath, self.uname
)
if not self.readable and not self.writable:
self.log("inaccessible: [{}]".format(self.vpath))
if self.vpath:
self.log("inaccessible: [{}]".format(self.vpath))
raise Pebkac(404)
self.uparam = {"h": False}
if "h" in self.uparam:
@@ -276,7 +329,9 @@ class HttpCli(object):
return self.tx_browser()
def handle_options(self):
self.log("OPTIONS " + self.req)
if self.do_log:
self.log("OPTIONS " + self.req)
self.send_headers(
None,
204,
@@ -377,7 +432,7 @@ class HttpCli(object):
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
path = os.path.join(fdir, fn)
with open(path, "wb", 512 * 1024) as f:
with open(fsenc(path), "wb", 512 * 1024) as f:
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
self.conn.hsrv.broker.put(
@@ -497,9 +552,9 @@ class HttpCli(object):
if sub:
try:
dst = os.path.join(vfs.realpath, rem)
os.makedirs(dst)
os.makedirs(fsenc(dst))
except:
if not os.path.isdir(dst):
if not os.path.isdir(fsenc(dst)):
raise Pebkac(400, "some file got your folder name")
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
@@ -534,7 +589,7 @@ class HttpCli(object):
self.log("qj: " + repr(vbody))
hits = idx.fsearch(vols, body)
msg = repr(hits)
taglist = []
taglist = {}
else:
# search by query params
self.log("qj: " + repr(body))
@@ -587,7 +642,7 @@ class HttpCli(object):
reader = read_socket(self.sr, remains)
with open(path, "rb+", 512 * 1024) as f:
with open(fsenc(path), "rb+", 512 * 1024) as f:
f.seek(cstart[0])
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
@@ -626,11 +681,11 @@ class HttpCli(object):
self.loud_reply(x, status=500)
return False
if not WINDOWS and num_left == 0:
if not ANYWIN and num_left == 0:
times = (int(time.time()), int(lastmod))
self.log("no more chunks, setting times {}".format(times))
try:
os.utime(path, times)
os.utime(fsenc(path), times)
except:
self.log("failed to utime ({}, {})".format(path, times))
@@ -645,13 +700,16 @@ class HttpCli(object):
if pwd in self.auth.iuser:
msg = "login ok"
dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365)
exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
else:
msg = "naw dude"
pwd = "x" # nosec
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)}
ck = "cppwd={}; Path=/; Expires={}; SameSite=Lax".format(pwd, exp)
html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
self.reply(html.encode("utf-8"), headers=h)
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
return True
def handle_mkdir(self):
@@ -680,14 +738,7 @@ class HttpCli(object):
raise Pebkac(500, "mkdir failed, check the logs")
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
esc_paths = [quotep(vpath), html_escape(vpath)]
html = self.j2(
"msg",
h2='<a href="/{}">go to /{}</a>'.format(*esc_paths),
pre="aight",
click=True,
)
self.reply(html.encode("utf-8", "replace"))
self.redirect(vpath)
return True
def handle_new_md(self):
@@ -714,15 +765,7 @@ class HttpCli(object):
f.write(b"`GRUNNUR`\n")
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
html = self.j2(
"msg",
h2='<a href="/{}?edit">go to /{}?edit</a>'.format(
quotep(vpath), html_escape(vpath)
),
pre="aight",
click=True,
)
self.reply(html.encode("utf-8", "replace"))
self.redirect(vpath, "?edit")
return True
def handle_plain_upload(self):
@@ -741,7 +784,9 @@ class HttpCli(object):
if p_file and not nullwrite:
fdir = os.path.join(vfs.realpath, rem)
fname = sanitize_fn(p_file)
fname = sanitize_fn(
p_file, bad=[".prologue.html", ".epilogue.html"]
)
if not os.path.isdir(fsenc(fdir)):
raise Pebkac(404, "that folder does not exist")
@@ -761,7 +806,7 @@ class HttpCli(object):
if sz == 0:
raise Pebkac(400, "empty files in post")
files.append([sz, sha512_hex])
files.append([sz, sha512_hex, p_file, fname])
self.conn.hsrv.broker.put(
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fname
)
@@ -770,12 +815,16 @@ class HttpCli(object):
except Pebkac:
if fname != os.devnull:
fp = os.path.join(fdir, fname)
fp2 = fp
if self.args.dotpart:
fp2 = os.path.join(fdir, "." + fname)
suffix = ".PARTIAL"
try:
os.rename(fsenc(fp), fsenc(fp + suffix))
os.rename(fsenc(fp), fsenc(fp2 + suffix))
except:
fp = fp[: -len(suffix)]
os.rename(fsenc(fp), fsenc(fp + suffix))
fp2 = fp2[: -len(suffix) - 1]
os.rename(fsenc(fp), fsenc(fp2 + suffix))
raise
@@ -792,10 +841,13 @@ class HttpCli(object):
errmsg = "ERROR: " + errmsg
status = "ERROR"
msg = "{0} // {1} bytes // {2:.3f} MiB/s\n".format(status, sz_total, spd)
msg = "{} // {} bytes // {:.3f} MiB/s\n".format(status, sz_total, spd)
for sz, sha512 in files:
msg += "sha512: {0} // {1} bytes\n".format(sha512[:56], sz)
for sz, sha512, ofn, lfn in files:
vpath = self.vpath + "/" + lfn
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format(
sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True)
)
# truncated SHA-512 prevents length extension attacks;
# using SHA-512/224, optionally SHA-512/256 = :64
@@ -803,32 +855,13 @@ class HttpCli(object):
self.log("{} {}".format(vspd, msg))
if not nullwrite:
# TODO this is bad
log_fn = "up.{:.6f}.txt".format(t0)
with open(log_fn, "wb") as f:
f.write(
(
"\n".join(
unicode(x)
for x in [
":".join(unicode(x) for x in [self.ip, self.addr[1]]),
msg.rstrip(),
]
)
+ "\n"
+ errmsg
+ "\n"
).encode("utf-8")
)
ft = "{}:{}".format(self.ip, self.addr[1])
ft = "{}\n{}\n{}\n".format(ft, msg.rstrip(), errmsg)
f.write(ft.encode("utf-8"))
html = self.j2(
"msg",
h2='<a href="/{}">return to /{}</a>'.format(
quotep(self.vpath), html_escape(self.vpath)
),
pre=msg,
)
self.reply(html.encode("utf-8", "replace"))
self.redirect(self.vpath, msg=msg, flavor="return to", click=False)
self.parser.drop()
return True
@@ -903,16 +936,16 @@ class HttpCli(object):
mdir, mfile = os.path.split(fp)
mfile2 = "{}.{:.3f}.md".format(mfile[:-3], srv_lastmod)
try:
os.mkdir(os.path.join(mdir, ".hist"))
os.mkdir(fsenc(os.path.join(mdir, ".hist")))
except:
pass
os.rename(fp, os.path.join(mdir, ".hist", mfile2))
os.rename(fsenc(fp), fsenc(os.path.join(mdir, ".hist", mfile2)))
p_field, _, p_data = next(self.parser.gen)
if p_field != "body":
raise Pebkac(400, "expected body, got {}".format(p_field))
with open(fp, "wb", 512 * 1024) as f:
with open(fsenc(fp), "wb", 512 * 1024) as f:
sz, sha512, _ = hashcopy(self.conn, p_data, f)
new_lastmod = os.stat(fsenc(fp)).st_mtime
@@ -928,13 +961,11 @@ class HttpCli(object):
return True
def _chk_lastmod(self, file_ts):
file_dt = datetime.utcfromtimestamp(file_ts)
file_lastmod = file_dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
file_lastmod = http_ts(file_ts)
cli_lastmod = self.headers.get("if-modified-since")
if cli_lastmod:
try:
cli_dt = time.strptime(cli_lastmod, "%a, %d %b %Y %H:%M:%S GMT")
cli_dt = time.strptime(cli_lastmod, HTTP_TS_FMT)
cli_ts = calendar.timegm(cli_dt)
return file_lastmod, int(file_ts) > int(cli_ts)
except Exception as ex:
@@ -1081,19 +1112,21 @@ class HttpCli(object):
# send reply
if not is_compressed:
self.out_headers["Cache-Control"] = "no-cache"
self.out_headers.update(NO_CACHE)
self.out_headers["Accept-Ranges"] = "bytes"
self.send_headers(
length=upper - lower,
status=status,
mime=guess_mime(req_path)[0] or "application/octet-stream",
mime=guess_mime(req_path),
)
logmsg += unicode(status) + logtail
if self.mode == "HEAD" or not do_send:
self.log(logmsg)
if self.do_log:
self.log(logmsg)
return True
ret = True
@@ -1107,7 +1140,9 @@ class HttpCli(object):
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
spd = self._spd((upper - lower) - remains)
self.log("{}, {}".format(logmsg, spd))
if self.do_log:
self.log("{}, {}".format(logmsg, spd))
return ret
def tx_zip(self, fmt, uarg, vn, rem, items, dots):
@@ -1173,6 +1208,34 @@ class HttpCli(object):
self.log("{}, {}".format(logmsg, spd))
return True
def tx_ico(self, ext, exact=False):
if ext.endswith("/"):
ext = "folder"
exact = True
bad = re.compile(r"[](){}/[]|^[0-9_-]*$")
n = ext.split(".")[::-1]
if not exact:
n = n[:-1]
ext = ""
for v in n:
if len(v) > 7 or bad.search(v):
break
ext = "{}.{}".format(v, ext)
ext = ext.rstrip(".") or "unk"
if len(ext) > 11:
ext = "" + ext[-9:]
mime, ico = self.ico.get(ext, not exact)
dt = datetime.utcfromtimestamp(E.t0)
lm = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
self.reply(ico, mime=mime, headers={"Last-Modified": lm})
return True
def tx_md(self, fs_path):
logmsg = "{:4} {} ".format("", self.req)
@@ -1181,50 +1244,60 @@ class HttpCli(object):
template = self.j2(tpl)
st = os.stat(fsenc(fs_path))
# sz_md = st.st_size
ts_md = st.st_mtime
st = os.stat(fsenc(html_path))
ts_html = st.st_mtime
# TODO dont load into memory ;_;
# (trivial fix, count the &'s)
with open(fsenc(fs_path), "rb") as f:
md = f.read().replace(b"&", b"&amp;")
sz_md = len(md)
sz_md = 0
for buf in yieldfile(fs_path):
sz_md += len(buf)
for c, v in [[b"&", 4], [b"<", 3], [b">", 3]]:
sz_md += (len(buf) - len(buf.replace(c, b""))) * v
file_ts = max(ts_md, ts_html)
file_lastmod, do_send = self._chk_lastmod(file_ts)
self.out_headers["Last-Modified"] = file_lastmod
self.out_headers["Cache-Control"] = "no-cache"
self.out_headers.update(NO_CACHE)
status = 200 if do_send else 304
boundary = "\roll\tide"
targs = {
"edit": "edit" in self.uparam,
"title": html_escape(self.vpath),
"title": html_escape(self.vpath, crlf=True),
"lastmod": int(ts_md * 1000),
"md_plug": "true" if self.args.emp else "false",
"md_chk_rate": self.args.mcr,
"md": "",
"md": boundary,
}
sz_html = len(template.render(**targs).encode("utf-8"))
self.send_headers(sz_html + sz_md, status)
html = template.render(**targs).encode("utf-8", "replace")
html = html.split(boundary.encode("utf-8"))
if len(html) != 2:
raise Exception("boundary appears in " + html_path)
self.send_headers(sz_md + len(html[0]) + len(html[1]), status)
logmsg += unicode(status)
if self.mode == "HEAD" or not do_send:
self.log(logmsg)
if self.do_log:
self.log(logmsg)
return True
# TODO jinja2 can stream this right?
targs["md"] = md.decode("utf-8", "replace")
html = template.render(**targs).encode("utf-8")
try:
self.s.sendall(html)
self.s.sendall(html[0])
for buf in yieldfile(fs_path):
self.s.sendall(html_bescape(buf))
self.s.sendall(html[1])
except:
self.log(logmsg + " \033[31md/c\033[0m")
return False
self.log(logmsg + " " + unicode(len(html)))
if self.do_log:
self.log(logmsg + " " + unicode(len(html)))
return True
def tx_mounts(self):
@@ -1232,7 +1305,7 @@ class HttpCli(object):
rvol = [x + "/" if x else x for x in self.rvol]
wvol = [x + "/" if x else x for x in self.wvol]
html = self.j2("splash", this=self, rvol=rvol, wvol=wvol, url_suf=suf)
self.reply(html.encode("utf-8"))
self.reply(html.encode("utf-8"), headers=NO_STORE)
return True
def tx_tree(self):
@@ -1300,26 +1373,130 @@ class HttpCli(object):
else:
vpath += "/" + node
vpnodes.append([quotep(vpath) + "/", html_escape(node)])
vpnodes.append([quotep(vpath) + "/", html_escape(node, crlf=True)])
vn, rem = self.auth.vfs.get(
self.vpath, self.uname, self.readable, self.writable
)
abspath = vn.canonical(rem)
if not os.path.exists(fsenc(abspath)):
# print(abspath)
try:
st = os.stat(fsenc(abspath))
except:
raise Pebkac(404)
if not os.path.isdir(fsenc(abspath)):
if abspath.endswith(".md") and "raw" not in self.uparam:
return self.tx_md(abspath)
if self.readable and not stat.S_ISDIR(st.st_mode):
if rem.startswith(".hist/up2k."):
raise Pebkac(403)
th_fmt = self.uparam.get("th")
if th_fmt is not None:
thp = None
if self.thumbcli:
thp = self.thumbcli.get(vn.realpath, rem, int(st.st_mtime), th_fmt)
if thp:
return self.tx_file(thp)
return self.tx_ico(rem)
if abspath.endswith(".md") and "raw" not in self.uparam:
return self.tx_md(abspath)
return self.tx_file(abspath)
srv_info = []
try:
if not self.args.nih:
srv_info.append(unicode(socket.gethostname()).split(".")[0])
except:
self.log("#wow #whoa")
try:
# some fuses misbehave
if not self.args.nid:
if WINDOWS:
bfree = ctypes.c_ulonglong(0)
ctypes.windll.kernel32.GetDiskFreeSpaceExW(
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
)
srv_info.append(humansize(bfree.value) + " free")
else:
sv = os.statvfs(fsenc(abspath))
free = humansize(sv.f_frsize * sv.f_bfree, True)
total = humansize(sv.f_frsize * sv.f_blocks, True)
srv_info.append(free + " free")
srv_info.append(total)
except:
pass
srv_info = "</span> /// <span>".join(srv_info)
perms = []
if self.readable:
perms.append("read")
if self.writable:
perms.append("write")
url_suf = self.urlq()
is_ls = "ls" in self.uparam
ts = "" # "?{}".format(time.time())
tpl = "browser"
if "b" in self.uparam:
tpl = "browser2"
logues = ["", ""]
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
fn = os.path.join(abspath, fn)
if os.path.exists(fsenc(fn)):
with open(fsenc(fn), "rb") as f:
logues[n] = f.read().decode("utf-8")
ls_ret = {
"dirs": [],
"files": [],
"taglist": [],
"srvinf": srv_info,
"perms": perms,
"logues": logues,
}
j2a = {
"vdir": quotep(self.vpath),
"vpnodes": vpnodes,
"files": [],
"ts": ts,
"perms": json.dumps(perms),
"taglist": [],
"tag_order": [],
"have_up2k_idx": ("e2d" in vn.flags),
"have_tags_idx": ("e2t" in vn.flags),
"have_zip": (not self.args.no_zip),
"have_b_u": (self.writable and self.uparam.get("b") == "u"),
"url_suf": url_suf,
"logues": logues,
"title": html_escape(self.vpath, crlf=True),
"srv_info": srv_info,
}
if not self.readable:
if is_ls:
ret = json.dumps(ls_ret)
self.reply(
ret.encode("utf-8", "replace"),
mime="application/json",
headers=NO_STORE,
)
return True
if not stat.S_ISDIR(st.st_mode):
raise Pebkac(404)
html = self.j2(tpl, **j2a)
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
return True
for k in ["zip", "tar"]:
v = self.uparam.get(k)
if v is not None:
@@ -1354,15 +1531,11 @@ class HttpCli(object):
if rem == ".hist":
hidden = ["up2k."]
is_ls = "ls" in self.uparam
icur = None
if "e2t" in vn.flags:
idx = self.conn.get_u2idx()
icur = idx.get_cur(vn.realpath)
url_suf = self.urlq()
dirs = []
files = []
for fn in vfs_ls:
@@ -1394,7 +1567,7 @@ class HttpCli(object):
margin = '<a href="{}?zip">zip</a>'.format(quotep(href))
elif fn in hist:
margin = '<a href="{}.hist/{}">#{}</a>'.format(
base, html_escape(hist[fn][2], quote=True), hist[fn][0]
base, html_escape(hist[fn][2], quote=True, crlf=True), hist[fn][0]
)
else:
margin = "-"
@@ -1453,91 +1626,25 @@ class HttpCli(object):
for f in dirs:
f["tags"] = {}
srv_info = []
try:
if not self.args.nih:
srv_info.append(unicode(socket.gethostname()).split(".")[0])
except:
self.log("#wow #whoa")
pass
try:
# some fuses misbehave
if not self.args.nid:
if WINDOWS:
bfree = ctypes.c_ulonglong(0)
ctypes.windll.kernel32.GetDiskFreeSpaceExW(
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
)
srv_info.append(humansize(bfree.value) + " free")
else:
sv = os.statvfs(abspath)
free = humansize(sv.f_frsize * sv.f_bfree, True)
total = humansize(sv.f_frsize * sv.f_blocks, True)
srv_info.append(free + " free")
srv_info.append(total)
except:
pass
srv_info = "</span> /// <span>".join(srv_info)
perms = []
if self.readable:
perms.append("read")
if self.writable:
perms.append("write")
logues = ["", ""]
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
fn = os.path.join(abspath, fn)
if os.path.exists(fsenc(fn)):
with open(fsenc(fn), "rb") as f:
logues[n] = f.read().decode("utf-8")
if is_ls:
[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
ret = {
"dirs": dirs,
"files": files,
"srvinf": srv_info,
"perms": perms,
"logues": logues,
"taglist": taglist,
}
ret = json.dumps(ret)
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
ls_ret["dirs"] = dirs
ls_ret["files"] = files
ls_ret["taglist"] = taglist
ret = json.dumps(ls_ret)
self.reply(
ret.encode("utf-8", "replace"),
mime="application/json",
headers=NO_STORE,
)
return True
ts = ""
# ts = "?{}".format(time.time())
j2a["files"] = dirs + files
j2a["logues"] = logues
j2a["taglist"] = taglist
if "mte" in vn.flags:
j2a["tag_order"] = json.dumps(vn.flags["mte"].split(","))
dirs.extend(files)
tpl = "browser"
if "b" in self.uparam:
tpl = "browser2"
html = self.j2(
tpl,
vdir=quotep(self.vpath),
vpnodes=vpnodes,
files=dirs,
ts=ts,
perms=json.dumps(perms),
taglist=taglist,
tag_order=json.dumps(
vn.flags["mte"].split(",") if "mte" in vn.flags else []
),
have_up2k_idx=("e2d" in vn.flags),
have_tags_idx=("e2t" in vn.flags),
have_zip=(not self.args.no_zip),
have_b_u=(self.writable and self.uparam.get("b") == "u"),
url_suf=url_suf,
logues=logues,
title=html_escape(self.vpath),
srv_info=srv_info,
)
self.reply(html.encode("utf-8", "replace"))
html = self.j2(tpl, **j2a)
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
return True

View File

@@ -1,6 +1,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import re
import os
import sys
import time
@@ -16,6 +17,9 @@ from .__init__ import E
from .util import Unrecv
from .httpcli import HttpCli
from .u2idx import U2idx
from .th_cli import ThumbCli
from .th_srv import HAVE_PIL
from .ico import Ico
class HttpConn(object):
@@ -33,11 +37,16 @@ class HttpConn(object):
self.auth = hsrv.auth
self.cert_path = hsrv.cert_path
enth = HAVE_PIL and not self.args.no_thumb
self.thumbcli = ThumbCli(hsrv.broker) if enth else None
self.ico = Ico(self.args)
self.t0 = time.time()
self.nbyte = 0
self.workload = 0
self.u2idx = None
self.log_func = hsrv.log
self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None
self.set_rproxy()
def set_rproxy(self, ip=None):

39
copyparty/ico.py Normal file
View File

@@ -0,0 +1,39 @@
import hashlib
import colorsys
from .__init__ import PY2
class Ico(object):
def __init__(self, args):
self.args = args
def get(self, ext, as_thumb):
"""placeholder to make thumbnails not break"""
h = hashlib.md5(ext.encode("utf-8")).digest()[:2]
if PY2:
h = [ord(x) for x in h]
c1 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 0.3)
c2 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 1)
c = list(c1) + list(c2)
c = [int(x * 255) for x in c]
c = "".join(["{:02x}".format(x) for x in c])
h = 30
if not self.args.th_no_crop and as_thumb:
w, h = self.args.th_size.split("x")
h = int(100 / (float(w) / float(h)))
svg = """\
<?xml version="1.0" encoding="UTF-8"?>
<svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g>
<rect width="100%" height="100%" fill="#{}" />
<text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" xml:space="preserve"
fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
</g></svg>
"""
svg = svg.format(h, c[:6], c[6:], ext).encode("utf-8")
return ["image/svg+xml", svg]
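
A quick usage sketch of the placeholder-icon helper above; the FakeArgs stand-in and its values are invented (normally the argparse namespace is passed in):

from copyparty.ico import Ico

class FakeArgs:  # hypothetical stand-in; Ico only reads th_no_crop and th_size
    th_no_crop = False
    th_size = "320x256"

mime, svg = Ico(FakeArgs()).get("flac", as_thumb=True)
print(mime)     # image/svg+xml
print(svg[:5])  # b'<?xml' -- an SVG placeholder tinted by an md5 of the extension
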

View File

@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import re
import os
import sys
import json
import shutil
import subprocess as sp
@@ -14,6 +15,204 @@ if not PY2:
unicode = str
def have_ff(cmd):
if PY2:
cmd = (cmd + " -version").encode("ascii").split(b" ")
try:
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE).communicate()
return True
except:
return False
else:
return bool(shutil.which(cmd))
HAVE_FFMPEG = have_ff("ffmpeg")
HAVE_FFPROBE = have_ff("ffprobe")
class MParser(object):
def __init__(self, cmdline):
self.tag, args = cmdline.split("=", 1)
self.tags = self.tag.split(",")
self.timeout = 30
self.force = False
self.audio = "y"
self.ext = []
while True:
try:
bp = os.path.expanduser(args)
if os.path.exists(bp):
self.bin = bp
return
except:
pass
arg, args = args.split(",", 1)
arg = arg.lower()
if arg.startswith("a"):
self.audio = arg[1:] # [r]equire [n]ot [d]ontcare
continue
if arg == "f":
self.force = True
continue
if arg.startswith("t"):
self.timeout = int(arg[1:])
continue
if arg.startswith("e"):
self.ext.append(arg[1:])
continue
raise Exception()
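
A usage sketch of the cmdline format parsed above; the tag names and helper path are invented, and the path must actually exist since the constructor stats it before returning:

from copyparty.mtag import MParser

# "<tag1>,<tag2>=<opt>,<opt>,...,<path-to-helper>"
p = MParser("key,bpm=an,f,t60,eflac,emp3,~/bin/audio-tags.py")  # hypothetical helper script
p.tags     # ['key', 'bpm']
p.audio    # 'n'  -> this parser is skipped for audio files
p.force    # True
p.timeout  # 60
p.ext      # ['flac', 'mp3']
p.bin      # expanded path to the helper script
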
def ffprobe(abspath):
cmd = [
b"ffprobe",
b"-hide_banner",
b"-show_streams",
b"-show_format",
b"--",
fsenc(abspath),
]
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
r = p.communicate()
txt = r[0].decode("utf-8", "replace")
return parse_ffprobe(txt)
def parse_ffprobe(txt):
"""ffprobe -show_format -show_streams"""
streams = []
fmt = {}
g = None
for ln in [x.rstrip("\r") for x in txt.split("\n")]:
try:
k, v = ln.split("=", 1)
g[k] = v
continue
except:
pass
if ln == "[STREAM]":
g = {}
streams.append(g)
if ln == "[FORMAT]":
g = {"codec_type": "format"} # heh
fmt = g
streams = [fmt] + streams
ret = {} # processed
md = {} # raw tags
have = {}
for strm in streams:
typ = strm.get("codec_type")
if typ in have:
continue
have[typ] = True
kvm = []
if typ == "audio":
kvm = [
["codec_name", "ac"],
["channel_layout", "chs"],
["sample_rate", ".hz"],
["bit_rate", ".aq"],
["duration", ".dur"],
]
if typ == "video":
if strm.get("DISPOSITION:attached_pic") == "1" or fmt.get(
"format_name"
) in ["mp3", "ogg", "flac"]:
continue
kvm = [
["codec_name", "vc"],
["pix_fmt", "pixfmt"],
["r_frame_rate", ".fps"],
["bit_rate", ".vq"],
["width", ".resw"],
["height", ".resh"],
["duration", ".dur"],
]
if typ == "format":
kvm = [["duration", ".dur"], ["bit_rate", ".q"]]
for sk, rk in kvm:
v = strm.get(sk)
if v is None:
continue
if rk.startswith("."):
try:
v = float(v)
v2 = ret.get(rk)
if v2 is None or v > v2:
ret[rk] = v
except:
# sqlite doesnt care but the code below does
if v not in ["N/A"]:
ret[rk] = v
else:
ret[rk] = v
if ret.get("vc") == "ansi": # shellscript
return {}, {}
for strm in streams:
for k, v in strm.items():
if not k.startswith("TAG:"):
continue
k = k[4:].strip()
v = v.strip()
if k and v:
md[k] = [v]
for k in [".q", ".vq", ".aq"]:
if k in ret:
ret[k] /= 1000 # bit_rate=320000
for k in [".q", ".vq", ".aq", ".resw", ".resh"]:
if k in ret:
ret[k] = int(ret[k])
if ".fps" in ret:
fps = ret[".fps"]
if "/" in fps:
fa, fb = fps.split("/")
fps = int(fa) * 1.0 / int(fb)
if fps < 1000 and fmt.get("format_name") not in ["image2", "png_pipe"]:
ret[".fps"] = round(fps, 3)
else:
del ret[".fps"]
if ".dur" in ret:
if ret[".dur"] < 0.1:
del ret[".dur"]
if ".q" in ret:
del ret[".q"]
if ".resw" in ret and ".resh" in ret:
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])
ret = {k: [0, v] for k, v in ret.items()}
return ret, md
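
For reference, a rough sketch of the "-show_streams -show_format" text that parse_ffprobe() consumes, and the shape of what comes back; all values are invented:

from copyparty.mtag import parse_ffprobe

sample = """[STREAM]
codec_type=audio
codec_name=flac
channel_layout=stereo
sample_rate=44100
bit_rate=903000
duration=215.4
[/STREAM]
[FORMAT]
format_name=flac
duration=215.4
bit_rate=911000
[/FORMAT]
"""
ret, md = parse_ffprobe(sample)
# ret -> {'ac': [0, 'flac'], 'chs': [0, 'stereo'], '.hz': [0, 44100.0], '.dur': [0, 215.4], ...}
# md  -> raw TAG:* values keyed by tag name (none in this sample)
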
class MTag(object):
def __init__(self, log_func, args):
self.log_func = log_func
@@ -35,15 +234,7 @@ class MTag(object):
self.get = self.get_ffprobe
self.prefer_mt = True
# about 20x slower
if PY2:
cmd = [b"ffprobe", b"-version"]
try:
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
except:
self.usable = False
else:
if not shutil.which("ffprobe"):
self.usable = False
self.usable = HAVE_FFPROBE
if self.usable and WINDOWS and sys.version_info < (3, 8):
self.usable = False
@@ -52,8 +243,10 @@ class MTag(object):
self.log(msg, c=1)
if not self.usable:
msg = "need mutagen{} to read media tags so please run this:\n {} -m pip install --user mutagen"
self.log(msg.format(or_ffprobe, os.path.basename(sys.executable)), c=1)
msg = "need mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
self.log(
msg.format(or_ffprobe, " " * 37, os.path.basename(sys.executable)), c=1
)
return
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
@@ -201,7 +394,7 @@ class MTag(object):
import mutagen
try:
md = mutagen.File(abspath, easy=True)
md = mutagen.File(fsenc(abspath), easy=True)
x = md.info.length
except Exception as ex:
return {}
@@ -212,7 +405,7 @@ class MTag(object):
try:
q = int(md.info.bitrate / 1024)
except:
q = int((os.path.getsize(abspath) / dur) / 128)
q = int((os.path.getsize(fsenc(abspath)) / dur) / 128)
ret[".dur"] = [0, dur]
ret[".q"] = [0, q]
@@ -222,101 +415,7 @@ class MTag(object):
return self.normalize_tags(ret, md)
def get_ffprobe(self, abspath):
cmd = [b"ffprobe", b"-hide_banner", b"--", fsenc(abspath)]
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
r = p.communicate()
txt = r[1].decode("utf-8", "replace")
txt = [x.rstrip("\r") for x in txt.split("\n")]
"""
note:
tags which contain newline will be truncated on first \n,
ffprobe emits \n and spacepads the : to align visually
note:
the Stream ln always mentions Audio: if audio
the Stream ln usually has kb/s, is more accurate
the Duration ln always has kb/s
the Metadata: after Chapter may contain BPM info,
title : Tempo: 126.0
Input #0, wav,
Metadata:
date : <OK>
Duration:
Chapter #
Metadata:
title : <NG>
Input #0, mp3,
Metadata:
album : <OK>
Duration:
Stream #0:0: Audio:
Stream #0:1: Video:
Metadata:
comment : <NG>
"""
ptn_md_beg = re.compile("^( +)Metadata:$")
ptn_md_kv = re.compile("^( +)([^:]+) *: (.*)")
ptn_dur = re.compile("^ *Duration: ([^ ]+)(, |$)")
ptn_br1 = re.compile("^ *Duration: .*, bitrate: ([0-9]+) kb/s(, |$)")
ptn_br2 = re.compile("^ *Stream.*: Audio:.* ([0-9]+) kb/s(, |$)")
ptn_audio = re.compile("^ *Stream .*: Audio: ")
ptn_au_parent = re.compile("^ *(Input #|Stream .*: Audio: )")
ret = {}
md = {}
in_md = False
is_audio = False
au_parent = False
for ln in txt:
m = ptn_md_kv.match(ln)
if m and in_md and len(m.group(1)) == in_md:
_, k, v = [x.strip() for x in m.groups()]
if k != "" and v != "":
md[k] = [v]
continue
else:
in_md = False
m = ptn_md_beg.match(ln)
if m and au_parent:
in_md = len(m.group(1)) + 2
continue
au_parent = bool(ptn_au_parent.search(ln))
if ptn_audio.search(ln):
is_audio = True
m = ptn_dur.search(ln)
if m:
sec = 0
tstr = m.group(1)
if tstr.lower() != "n/a":
try:
tf = tstr.split(",")[0].split(".")[0].split(":")
for f in tf:
sec *= 60
sec += int(f)
except:
self.log("invalid timestr from ffprobe: [{}]".format(tstr), c=3)
ret[".dur"] = sec
m = ptn_br1.search(ln)
if m:
ret[".q"] = m.group(1)
m = ptn_br2.search(ln)
if m:
ret[".q"] = m.group(1)
if not is_audio:
return {}
ret = {k: [0, v] for k, v in ret.items()}
ret, md = ffprobe(abspath)
return self.normalize_tags(ret, md)
def get_bin(self, parsers, abspath):
@@ -327,10 +426,10 @@ class MTag(object):
env["PYTHONPATH"] = pypath
ret = {}
for tagname, (binpath, timeout) in parsers.items():
for tagname, mp in parsers.items():
try:
cmd = [sys.executable, binpath, abspath]
args = {"env": env, "timeout": timeout}
cmd = [sys.executable, mp.bin, abspath]
args = {"env": env, "timeout": mp.timeout}
if WINDOWS:
args["creationflags"] = 0x4000
@@ -339,8 +438,16 @@ class MTag(object):
cmd = [fsenc(x) for x in cmd]
v = sp.check_output(cmd, **args).strip()
if v:
if not v:
continue
if "," not in tagname:
ret[tagname] = v.decode("utf-8")
else:
v = json.loads(v)
for tag in tagname.split(","):
if tag and tag in v:
ret[tag] = v[tag]
except:
pass
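
A minimal sketch of a multi-tag mtp helper matching the json branch above: when a parser is registered under several tags (for example "key,bpm=..."), its stdout is decoded as a json object and only the registered tags are picked out; the filename, tag names and values here are invented:

#!/usr/bin/env python3
# hypothetical ~/bin/audio-tags.py, invoked by get_bin() with the media file as argv[1]
import json
import sys

fpath = sys.argv[1]  # absolute path to the media file being tagged
# ... analyze fpath here ...
print(json.dumps({"key": "8B", "bpm": 126}))
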

View File

@@ -2,6 +2,7 @@
from __future__ import print_function, unicode_literals
import re
import os
import sys
import time
import threading
@@ -9,9 +10,11 @@ from datetime import datetime, timedelta
import calendar
from .__init__ import PY2, WINDOWS, MACOS, VT100
from .util import mp
from .authsrv import AuthSrv
from .tcpsrv import TcpSrv
from .up2k import Up2k
from .util import mp
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP
class SvcHub(object):
@@ -34,9 +37,27 @@ class SvcHub(object):
self.log = self._log_disabled if args.q else self._log_enabled
# jank goes here
auth = AuthSrv(self.args, self.log, False)
# initiate all services to manage
self.tcpsrv = TcpSrv(self)
self.up2k = Up2k(self)
self.up2k = Up2k(self, auth.vfs.all_vols)
self.thumbsrv = None
if not args.no_thumb:
if HAVE_PIL:
if not HAVE_WEBP:
args.th_no_webp = True
msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old"
self.log("thumb", msg, c=3)
self.thumbsrv = ThumbSrv(self, auth.vfs.all_vols)
else:
msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n"
self.log(
"thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3
)
# decide which worker impl to use
if self.check_mp_enable():
@@ -63,6 +84,17 @@ class SvcHub(object):
self.tcpsrv.shutdown()
self.broker.shutdown()
if self.thumbsrv:
self.thumbsrv.shutdown()
for n in range(200): # 10s
time.sleep(0.05)
if self.thumbsrv.stopped():
break
if n == 3:
print("waiting for thumbsrv...")
print("nailed it")
def _log_disabled(self, src, msg, c=0):

View File

@@ -87,7 +87,7 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
ret += struct.pack("<LL", vsz, vsz)
# windows support (the "?" replace below too)
fn = sanitize_fn(fn, "/")
fn = sanitize_fn(fn, ok="/")
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
z64_len = len(z64v) * 8 + 4 if z64v else 0

49
copyparty/th_cli.py Normal file
View File

@@ -0,0 +1,49 @@
import os
import time
from .util import Cooldown
from .th_srv import thumb_path, THUMBABLE, FMT_FF
class ThumbCli(object):
def __init__(self, broker):
self.broker = broker
self.args = broker.args
# cache on both sides for less broker spam
self.cooldown = Cooldown(self.args.th_poke)
def get(self, ptop, rem, mtime, fmt):
ext = rem.rsplit(".")[-1].lower()
if ext not in THUMBABLE:
return None
if self.args.no_vthumb and ext in FMT_FF:
return None
if fmt == "j" and self.args.th_no_jpg:
fmt = "w"
if fmt == "w" and self.args.th_no_webp:
fmt = "j"
tpath = thumb_path(ptop, rem, mtime, fmt)
ret = None
try:
st = os.stat(tpath)
if st.st_size:
ret = tpath
else:
return None
except:
pass
if ret:
tdir = os.path.dirname(tpath)
if self.cooldown.poke(tdir):
self.broker.put(False, "thumbsrv.poke", tdir)
return ret
x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt)
return x.get()

375
copyparty/th_srv.py Normal file
View File

@@ -0,0 +1,375 @@
import os
import sys
import time
import shutil
import base64
import hashlib
import threading
import subprocess as sp
from .__init__ import PY2
from .util import fsenc, mchkcmd, Queue, Cooldown, BytesIO
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
if not PY2:
unicode = str
HAVE_PIL = False
HAVE_HEIF = False
HAVE_AVIF = False
HAVE_WEBP = False
try:
from PIL import Image, ImageOps
HAVE_PIL = True
try:
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
HAVE_WEBP = True
except:
pass
try:
from pyheif_pillow_opener import register_heif_opener
register_heif_opener()
HAVE_HEIF = True
except:
pass
try:
import pillow_avif
HAVE_AVIF = True
except:
pass
except:
pass
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# ffmpeg -formats
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
if HAVE_HEIF:
FMT_PIL += " heif heifs heic heics"
if HAVE_AVIF:
FMT_PIL += " avif avifs"
FMT_PIL, FMT_FF = [{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FF]]
THUMBABLE = {}
if HAVE_PIL:
THUMBABLE.update(FMT_PIL)
if HAVE_FFMPEG and HAVE_FFPROBE:
THUMBABLE.update(FMT_FF)
def thumb_path(ptop, rem, mtime, fmt):
# base16 = 16 = 256
# b64-lc = 38 = 1444
# base64 = 64 = 4096
try:
rd, fn = rem.rsplit("/", 1)
except:
rd = ""
fn = rem
if rd:
h = hashlib.sha512(fsenc(rd)).digest()[:24]
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
else:
rd = "top"
# could keep original filenames but this is safer re pathlen
h = hashlib.sha512(fsenc(fn)).digest()[:24]
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
return "{}/.hist/th/{}/{}.{:x}.{}".format(
ptop, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
)
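
A sketch of the resulting cache layout; the volume root, file and mtime are made up:

from copyparty.th_srv import thumb_path

thumb_path("/srv/music", "album/cover.jpg", 1622264400, "w")
# -> "/srv/music/.hist/th/<aa>/<bb>/<b64 of sha512('album')>/<b64 of sha512('cover.jpg')>.60b1ca50.webp"
#    (24-char url-safe base64 hashes; 60b1ca50 is the mtime in hex)
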
class ThumbSrv(object):
def __init__(self, hub, vols):
self.hub = hub
self.vols = [v.realpath for v in vols.values()]
self.args = hub.args
self.log_func = hub.log
res = hub.args.th_size.split("x")
self.res = tuple([int(x) for x in res])
self.poke_cd = Cooldown(self.args.th_poke)
self.mutex = threading.Lock()
self.busy = {}
self.stopping = False
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
self.q = Queue(self.nthr * 4)
for _ in range(self.nthr):
t = threading.Thread(target=self.worker)
t.daemon = True
t.start()
if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE):
missing = []
if not HAVE_FFMPEG:
missing.append("ffmpeg")
if not HAVE_FFPROBE:
missing.append("ffprobe")
msg = "cannot create video thumbnails because some of the required programs are not available: "
msg += ", ".join(missing)
self.log(msg, c=1)
t = threading.Thread(target=self.cleaner)
t.daemon = True
t.start()
def log(self, msg, c=0):
self.log_func("thumb", msg, c)
def shutdown(self):
self.stopping = True
for _ in range(self.nthr):
self.q.put(None)
def stopped(self):
with self.mutex:
return not self.nthr
def get(self, ptop, rem, mtime, fmt):
tpath = thumb_path(ptop, rem, mtime, fmt)
abspath = os.path.join(ptop, rem)
cond = threading.Condition()
with self.mutex:
try:
self.busy[tpath].append(cond)
self.log("wait {}".format(tpath))
except:
thdir = os.path.dirname(tpath)
try:
os.makedirs(thdir)
except:
pass
inf_path = os.path.join(thdir, "dir.txt")
if not os.path.exists(inf_path):
with open(inf_path, "wb") as f:
f.write(fsenc(os.path.dirname(abspath)))
self.busy[tpath] = [cond]
self.q.put([abspath, tpath])
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
while not self.stopping:
with self.mutex:
if tpath not in self.busy:
break
with cond:
cond.wait()
try:
st = os.stat(tpath)
if st.st_size:
return tpath
except:
pass
return None
def worker(self):
while not self.stopping:
task = self.q.get()
if not task:
break
abspath, tpath = task
ext = abspath.split(".")[-1].lower()
fun = None
if not os.path.exists(tpath):
if ext in FMT_PIL:
fun = self.conv_pil
elif ext in FMT_FF:
fun = self.conv_ffmpeg
if fun:
try:
fun(abspath, tpath)
except Exception as ex:
msg = "{} failed on {}\n {!r}"
self.log(msg.format(fun.__name__, abspath, ex), 3)
with open(tpath, "wb") as _:
pass
with self.mutex:
subs = self.busy[tpath]
del self.busy[tpath]
for x in subs:
with x:
x.notify_all()
with self.mutex:
self.nthr -= 1
def conv_pil(self, abspath, tpath):
with Image.open(fsenc(abspath)) as im:
crop = not self.args.th_no_crop
res2 = self.res
if crop:
res2 = (res2[0] * 2, res2[1] * 2)
try:
im.thumbnail(res2, resample=Image.LANCZOS)
if crop:
iw, ih = im.size
dw, dh = self.res
res = (min(iw, dw), min(ih, dh))
im = ImageOps.fit(im, res, method=Image.LANCZOS)
except:
im.thumbnail(self.res)
if im.mode not in ("RGB", "L"):
im = im.convert("RGB")
if tpath.endswith(".webp"):
# quality 80 = pillow-default
# quality 75 = ffmpeg-default
# method 0 = pillow-default, fast
# method 4 = ffmpeg-default
# method 6 = max, slow
im.save(tpath, quality=40, method=6)
else:
im.save(tpath, quality=40) # default=75
def conv_ffmpeg(self, abspath, tpath):
ret, _ = ffprobe(abspath)
dur = ret[".dur"][1] if ".dur" in ret else 4
seek = "{:.0f}".format(dur / 3)
scale = "scale={0}:{1}:force_original_aspect_ratio="
if self.args.th_no_crop:
scale += "decrease,setsar=1:1"
else:
scale += "increase,crop={0}:{1},setsar=1:1"
scale = scale.format(*list(self.res)).encode("utf-8")
cmd = [
b"ffmpeg",
b"-nostdin",
b"-hide_banner",
b"-ss",
seek,
b"-i",
fsenc(abspath),
b"-vf",
scale,
b"-vframes",
b"1",
]
if tpath.endswith(".jpg"):
cmd += [
b"-q:v",
b"6", # default=??
]
else:
cmd += [
b"-q:v",
b"50", # default=75
b"-compression_level:v",
b"6", # default=4, 0=fast, 6=max
]
cmd += [fsenc(tpath)]
mchkcmd(cmd)
def poke(self, tdir):
if not self.poke_cd.poke(tdir):
return
ts = int(time.time())
try:
p1 = os.path.dirname(tdir)
p2 = os.path.dirname(p1)
for dp in [tdir, p1, p2]:
os.utime(fsenc(dp), (ts, ts))
except:
pass
def cleaner(self):
interval = self.args.th_clean
while True:
time.sleep(interval)
for vol in self.vols:
vol += "/.hist/th"
self.log("cln {}/".format(vol))
self.clean(vol)
self.log("cln ok")
def clean(self, vol):
# self.log("cln {}".format(vol))
maxage = self.args.th_maxage
now = time.time()
prev_b64 = None
prev_fp = None
try:
ents = os.listdir(vol)
except:
return
for f in sorted(ents):
fp = os.path.join(vol, f)
cmp = fp.lower().replace("\\", "/")
# "top" or b64 prefix/full (a folder)
if len(f) <= 3 or len(f) == 24:
age = now - os.path.getmtime(fp)
if age > maxage:
with self.mutex:
safe = True
for k in self.busy.keys():
if k.lower().replace("\\", "/").startswith(cmp):
safe = False
break
if safe:
self.log("rm -rf [{}]".format(fp))
shutil.rmtree(fp, ignore_errors=True)
else:
self.clean(fp)
continue
# thumb file
try:
b64, ts, ext = f.split(".")
if len(b64) != 24 or len(ts) != 8 or ext not in ["jpg", "webp"]:
raise Exception()
ts = int(ts, 16)
except:
if f != "dir.txt":
self.log("foreign file in thumbs dir: [{}]".format(fp), 1)
continue
if b64 == prev_b64:
self.log("rm replaced [{}]".format(fp))
os.unlink(prev_fp)
prev_b64 = b64
prev_fp = fp

View File

@@ -163,7 +163,7 @@ class U2idx(object):
if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn)
rp = os.path.join(vtop, rd, fn).replace("\\", "/")
rp = "/".join([vtop, rd, fn])
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
for hit in sret:

View File

@@ -16,7 +16,7 @@ import traceback
import subprocess as sp
from copy import deepcopy
from .__init__ import WINDOWS
from .__init__ import WINDOWS, ANYWIN
from .util import (
Pebkac,
Queue,
@@ -31,8 +31,7 @@ from .util import (
statdir,
s2hms,
)
from .mtag import MTag
from .authsrv import AuthSrv
from .mtag import MTag, MParser
try:
HAVE_SQLITE3 = True
@@ -49,13 +48,14 @@ class Up2k(object):
* ~/.config flatfiles for active jobs
"""
def __init__(self, broker):
self.broker = broker
self.args = broker.args
self.log_func = broker.log
def __init__(self, hub, all_vols):
self.hub = hub
self.args = hub.args
self.log_func = hub.log
self.all_vols = all_vols
# config
self.salt = broker.args.salt
self.salt = self.args.salt
# state
self.mutex = threading.Lock()
@@ -79,7 +79,7 @@ class Up2k(object):
if self.sqlite_ver < (3, 9):
self.no_expr_idx = True
if WINDOWS:
if ANYWIN:
# usually fails to set lastmod too quickly
self.lastmod_q = Queue()
thr = threading.Thread(target=self._lastmodder)
@@ -92,9 +92,7 @@ class Up2k(object):
if not HAVE_SQLITE3:
self.log("could not initialize sqlite3, will use in-memory registry only")
# this is kinda jank
auth = AuthSrv(self.args, self.log_func, False)
have_e2d = self.init_indexes(auth)
have_e2d = self.init_indexes()
if have_e2d:
thr = threading.Thread(target=self._snapshot)
@@ -139,9 +137,9 @@ class Up2k(object):
return True, ret
def init_indexes(self, auth):
def init_indexes(self):
self.pp = ProgressPrinter()
vols = auth.vfs.all_vols.values()
vols = self.all_vols.values()
t0 = time.time()
have_e2d = False
@@ -242,9 +240,14 @@ class Up2k(object):
with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8")
reg = json.loads(j)
for _, job in reg.items():
job["poke"] = time.time()
reg2 = json.loads(j)
for k, job in reg2.items():
path = os.path.join(job["ptop"], job["prel"], job["name"])
if os.path.exists(fsenc(path)):
reg[k] = job
job["poke"] = time.time()
else:
self.log("ign deleted file in snap: [{}]".format(path))
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
m = [m] + self._vis_reg_progress(reg)
@@ -511,7 +514,6 @@ class Up2k(object):
def _run_all_mtp(self):
t0 = time.time()
self.mtp_force = {}
self.mtp_parsers = {}
for ptop, flags in self.flags.items():
if "mtp" in flags:
@@ -527,43 +529,18 @@ class Up2k(object):
entags = self.entags[ptop]
force = {}
timeout = {}
parsers = {}
for parser in self.flags[ptop]["mtp"]:
orig = parser
tag, parser = parser.split("=", 1)
if tag not in entags:
continue
try:
parser = MParser(parser)
except:
self.log("invalid argument: " + parser, 1)
return
while True:
try:
bp = os.path.expanduser(parser)
if os.path.exists(bp):
parsers[tag] = [bp, timeout.get(tag, 30)]
break
except:
pass
for tag in entags:
if tag in parser.tags:
parsers[parser.tag] = parser
try:
arg, parser = parser.split(",", 1)
arg = arg.lower()
if arg == "f":
force[tag] = True
continue
if arg.startswith("t"):
timeout[tag] = int(arg[1:])
continue
raise Exception()
except:
self.log("invalid argument: " + orig, 1)
return
self.mtp_force[ptop] = force
self.mtp_parsers[ptop] = parsers
q = "select count(w) from mt where k = 't:mtp'"
@@ -596,8 +573,8 @@ class Up2k(object):
have = cur.execute(q, (w,)).fetchall()
have = [x[0] for x in have]
if ".dur" not in have and ".dur" in entags:
# skip non-audio
parsers = self._get_parsers(ptop, have, abspath)
if not parsers:
to_delete[w] = True
n_left -= 1
continue
@@ -605,10 +582,7 @@ class Up2k(object):
if w in in_progress:
continue
task_parsers = {
k: v for k, v in parsers.items() if k in force or k not in have
}
jobs.append([task_parsers, None, w, abspath])
jobs.append([parsers, None, w, abspath])
in_progress[w] = True
done = self._flush_mpool(wcur)
@@ -667,16 +641,46 @@ class Up2k(object):
wcur.close()
cur.close()
def _start_mpool(self):
if WINDOWS and False:
nah = open(os.devnull, "wb")
wmic = "processid={}".format(os.getpid())
wmic = ["wmic", "process", "where", wmic, "call", "setpriority"]
sp.call(wmic + ["below normal"], stdout=nah, stderr=nah)
def _get_parsers(self, ptop, have, abspath):
try:
all_parsers = self.mtp_parsers[ptop]
except:
return {}
entags = self.entags[ptop]
parsers = {}
for k, v in all_parsers.items():
if "ac" in entags or ".aq" in entags:
if "ac" in have or ".aq" in have:
# is audio, require non-audio?
if v.audio == "n":
continue
# is not audio, require audio?
elif v.audio == "y":
continue
if v.ext:
match = False
for ext in v.ext:
if abspath.lower().endswith("." + ext):
match = True
break
if not match:
continue
parsers[k] = v
parsers = {k: v for k, v in parsers.items() if v.force or k not in have}
return parsers
def _start_mpool(self):
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
# both do crazy runahead so lets reinvent another wheel
nw = os.cpu_count() if hasattr(os, "cpu_count") else 4
if self.args.no_mtag_mt:
nw = 1
if self.pending_tags is None:
self.log("using {}x {}".format(nw, self.mtag.backend))
self.pending_tags = []
@@ -698,12 +702,6 @@ class Up2k(object):
mpool.join()
done = self._flush_mpool(wcur)
if WINDOWS and False:
nah = open(os.devnull, "wb")
wmic = "processid={}".format(os.getpid())
wmic = ["wmic", "process", "where", wmic, "call", "setpriority"]
sp.call(wmic + ["below normal"], stdout=nah, stderr=nah)
return done
def _tag_thr(self, q):
@@ -722,7 +720,8 @@ class Up2k(object):
vtags = [
"\033[36m{} \033[33m{}".format(k, v) for k, v in tags.items()
]
self.log("{}\033[0m [{}]".format(" ".join(vtags), abspath))
if vtags:
self.log("{}\033[0m [{}]".format(" ".join(vtags), abspath))
with self.mutex:
self.pending_tags.append([entags, wark, abspath, tags])
@@ -903,7 +902,7 @@ class Up2k(object):
if cj["ptop"] not in self.registry:
raise Pebkac(410, "location unavailable")
cj["name"] = sanitize_fn(cj["name"])
cj["name"] = sanitize_fn(cj["name"], bad=[".prologue.html", ".epilogue.html"])
cj["poke"] = time.time()
wark = self._get_wark(cj)
now = time.time()
@@ -924,7 +923,7 @@ class Up2k(object):
if dp_dir.startswith("//") or dp_fn.startswith("//"):
dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
dp_abs = os.path.join(cj["ptop"], dp_dir, dp_fn).replace("\\", "/")
dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
# relying on path.exists to return false on broken symlinks
if os.path.exists(fsenc(dp_abs)):
job = {
@@ -950,7 +949,7 @@ class Up2k(object):
for fn in names:
path = os.path.join(job["ptop"], job["prel"], fn)
try:
if os.path.getsize(path) > 0:
if os.path.getsize(fsenc(path)) > 0:
# upload completed or both present
break
except:
@@ -1074,6 +1073,9 @@ class Up2k(object):
raise Pebkac(400, "unknown wark")
if chash not in job["need"]:
msg = "chash = {} , need:\n".format(chash)
msg += "\n".join(job["need"])
self.log(msg)
raise Pebkac(400, "already got that but thanks??")
nchunk = [n for n, v in enumerate(job["hash"]) if v == chash]
@@ -1110,8 +1112,9 @@ class Up2k(object):
atomic_move(src, dst)
if WINDOWS:
self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))])
if ANYWIN:
a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
self.lastmod_q.put(a)
# legit api sware 2 me mum
if self.idx_wark(
@@ -1178,10 +1181,10 @@ class Up2k(object):
return wark
def _hashlist_from_file(self, path):
fsz = os.path.getsize(path)
fsz = os.path.getsize(fsenc(path))
csz = up2k_chunksize(fsz)
ret = []
with open(path, "rb", 512 * 1024) as f:
with open(fsenc(path), "rb", 512 * 1024) as f:
while fsz > 0:
self.pp.msg = "{} MB, {}".format(int(fsz / 1024 / 1024), path)
hashobj = hashlib.sha512()
@@ -1209,9 +1212,23 @@ class Up2k(object):
# raise Exception("aaa")
tnam = job["name"] + ".PARTIAL"
if self.args.dotpart:
tnam = "." + tnam
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
f, job["tnam"] = f["orz"]
if (
ANYWIN
and self.args.sparse
and self.args.sparse * 1024 * 1024 <= job["size"]
):
fp = os.path.join(pdir, job["tnam"])
try:
sp.check_call(["fsutil", "sparse", "setflag", fp])
except:
self.log("could not sparse [{}]".format(fp), 3)
f.seek(job["size"] - 1)
f.write(b"e")
@@ -1223,13 +1240,19 @@ class Up2k(object):
# self.log("lmod: got {}".format(len(ready)))
time.sleep(5)
for path, times in ready:
for path, sz, times in ready:
self.log("lmod: setting times {} on {}".format(times, path))
try:
os.utime(fsenc(path), times)
except:
self.log("lmod: failed to utime ({}, {})".format(path, times))
if self.args.sparse and self.args.sparse * 1024 * 1024 <= sz:
try:
sp.check_call(["fsutil", "sparse", "setflag", path, "0"])
except:
self.log("could not unsparse [{}]".format(path), 3)
def _snapshot(self):
persist_interval = 30 # persist unfinished uploads index every 30 sec
discard_interval = 21600 # drop unfinished uploads after 6 hours inactivity
@@ -1252,13 +1275,13 @@ class Up2k(object):
try:
# remove the filename reservation
path = os.path.join(job["ptop"], job["prel"], job["name"])
if os.path.getsize(path) == 0:
os.unlink(path)
if os.path.getsize(fsenc(path)) == 0:
os.unlink(fsenc(path))
if len(job["hash"]) == len(job["need"]):
# PARTIAL is empty, delete that too
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
os.unlink(path)
os.unlink(fsenc(path))
except:
pass
@@ -1266,8 +1289,8 @@ class Up2k(object):
if not reg:
if k not in prev or prev[k] is not None:
prev[k] = None
if os.path.exists(path):
os.unlink(path)
if os.path.exists(fsenc(path)):
os.unlink(fsenc(path))
return
newest = max(x["poke"] for _, x in reg.items()) if reg else 0
@@ -1299,13 +1322,9 @@ class Up2k(object):
abspath = os.path.join(ptop, rd, fn)
tags = self.mtag.get(abspath)
ntags1 = len(tags)
if self.mtp_parsers.get(ptop, {}):
parser = {
k: v
for k, v in self.mtp_parsers[ptop].items()
if k in self.mtp_force[ptop] or k not in tags
}
tags.update(self.mtag.get_bin(parser, abspath))
parsers = self._get_parsers(ptop, tags, abspath)
if parsers:
tags.update(self.mtag.get_bin(parsers, abspath))
with self.mutex:
cur = self.cur[ptop]

View File

@@ -15,8 +15,9 @@ import threading
import mimetypes
import contextlib
import subprocess as sp # nosec
from datetime import datetime
from .__init__ import PY2, WINDOWS
from .__init__ import PY2, WINDOWS, ANYWIN
from .stolen import surrogateescape
FAKE_MP = False
@@ -34,10 +35,12 @@ if not PY2:
from urllib.parse import unquote_to_bytes as unquote
from urllib.parse import quote_from_bytes as quote
from queue import Queue
from io import BytesIO
else:
from urllib import unquote # pylint: disable=no-name-in-module
from urllib import quote # pylint: disable=no-name-in-module
from Queue import Queue # pylint: disable=import-error,no-name-in-module
from StringIO import StringIO as BytesIO
surrogateescape.register_surrogateescape()
FS_ENCODING = sys.getfilesystemencoding()
@@ -45,10 +48,14 @@ if WINDOWS and PY2:
FS_ENCODING = "utf-8"
HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT"
HTTPCODE = {
200: "OK",
204: "No Content",
206: "Partial Content",
302: "Found",
304: "Not Modified",
400: "Bad Request",
403: "Forbidden",
@@ -72,6 +79,13 @@ IMPLICATIONS = [
]
MIMES = {
"md": "text/plain; charset=UTF-8",
"opus": "audio/ogg; codecs=opus",
"webp": "image/webp",
}
REKOBO_KEY = {
v: ln.split(" ", 1)[0]
for ln in """
@@ -123,6 +137,32 @@ class Counter(object):
self.v = absval
class Cooldown(object):
def __init__(self, maxage):
self.maxage = maxage
self.mutex = threading.Lock()
self.hist = {}
self.oldest = 0
def poke(self, key):
with self.mutex:
now = time.time()
ret = False
v = self.hist.get(key, 0)
if now - v > self.maxage:
self.hist[key] = now
ret = True
if self.oldest - now > self.maxage * 2:
self.hist = {
k: v for k, v in self.hist.items() if now - v < self.maxage
}
self.oldest = sorted(self.hist.values())[0]
return ret
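
A minimal usage sketch of the throttle above; the key name is arbitrary:

import time
from copyparty.util import Cooldown

cd = Cooldown(2)            # allow one poke per key every 2 seconds
print(cd.poke("th/aa/bb"))  # True  (first hit goes through)
print(cd.poke("th/aa/bb"))  # False (still within maxage)
time.sleep(2.1)
print(cd.poke("th/aa/bb"))  # True  (cooldown expired)
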
class Unrecv(object):
"""
undo any number of socket recv ops
@@ -242,7 +282,7 @@ def ren_open(fname, *args, **kwargs):
else:
fpath = fname
if suffix and os.path.exists(fpath):
if suffix and os.path.exists(fsenc(fpath)):
fpath += suffix
fname += suffix
ext += suffix
@@ -576,12 +616,12 @@ def undot(path):
return "/".join(ret)
def sanitize_fn(fn, ok=""):
def sanitize_fn(fn, ok="", bad=[]):
if "/" not in ok:
fn = fn.replace("\\", "/").split("/")[-1]
if WINDOWS:
for bad, good in [x for x in [
if ANYWIN:
remap = [
["<", ""],
[">", ""],
[":", ""],
@@ -591,15 +631,16 @@ def sanitize_fn(fn, ok=""):
["|", ""],
["?", ""],
["*", ""],
] if x[0] not in ok]:
fn = fn.replace(bad, good)
]
for a, b in [x for x in remap if x[0] not in ok]:
fn = fn.replace(a, b)
bad = ["con", "prn", "aux", "nul"]
bad.extend(["con", "prn", "aux", "nul"])
for n in range(1, 10):
bad += "com{0} lpt{0}".format(n).split(" ")
if fn.lower() in bad:
fn = "_" + fn
if fn.lower() in bad:
fn = "_" + fn
return fn.strip()
@@ -615,17 +656,29 @@ def exclude_dotfiles(filepaths):
return [x for x in filepaths if not x.split("/")[-1].startswith(".")]
def html_escape(s, quote=False):
def http_ts(ts):
file_dt = datetime.utcfromtimestamp(ts)
return file_dt.strftime(HTTP_TS_FMT)
def html_escape(s, quote=False, crlf=False):
"""html.escape but also newlines"""
s = (
s.replace("&", "&amp;")
.replace("<", "&lt;")
.replace(">", "&gt;")
.replace("\r", "&#13;")
.replace("\n", "&#10;")
)
s = s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
if quote:
s = s.replace('"', "&quot;").replace("'", "&#x27;")
if crlf:
s = s.replace("\r", "&#13;").replace("\n", "&#10;")
return s
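
For example, with the new quote= and crlf= switches enabled:

from copyparty.util import html_escape

html_escape('<b>"hi"\r\n', quote=True, crlf=True)
# -> '&lt;b&gt;&quot;hi&quot;&#13;&#10;'
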
def html_bescape(s, quote=False, crlf=False):
"""html.escape but bytestrings"""
s = s.replace(b"&", b"&amp;").replace(b"<", b"&lt;").replace(b">", b"&gt;")
if quote:
s = s.replace(b'"', b"&quot;").replace(b"'", b"&#x27;")
if crlf:
s = s.replace(b"\r", b"&#13;").replace(b"\n", b"&#10;")
return s
@@ -714,6 +767,8 @@ def s3dec(rd, fn):
def atomic_move(src, dst):
src = fsenc(src)
dst = fsenc(dst)
if not PY2:
os.replace(src, dst)
else:
@@ -905,11 +960,13 @@ def unescape_cookie(orig):
return ret
def guess_mime(url):
if url.endswith(".md"):
return ["text/plain; charset=UTF-8"]
def guess_mime(url, fallback="application/octet-stream"):
try:
_, ext = url.rsplit(".", 1)
except:
return fallback
return mimetypes.guess_type(url)
return MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback
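
A quick sketch of the new fallback behavior; the filenames are made up:

from copyparty.util import guess_mime

guess_mime("song.opus")  # 'audio/ogg; codecs=opus'  (from the MIMES table above)
guess_mime("pic.webp")   # 'image/webp'
guess_mime("noext")      # 'application/octet-stream' (no file extension -> fallback)
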
def runcmd(*argv):
@@ -928,6 +985,17 @@ def chkcmd(*argv):
return sout, serr
def mchkcmd(argv, timeout=10):
if PY2:
with open(os.devnull, "wb") as f:
rv = sp.call(argv, stdout=f, stderr=f)
else:
rv = sp.call(argv, stdout=sp.DEVNULL, stderr=sp.DEVNULL, timeout=timeout)
if rv:
raise sp.CalledProcessError(rv, (argv[0], b"...", argv[-1]))
def gzip_orig_sz(fn):
with open(fsenc(fn), "rb") as f:
f.seek(-4, 2)

View File

@@ -1,3 +1,6 @@
:root {
--grid-sz: 10em;
}
* {
line-height: 1.2em;
}
@@ -6,7 +9,7 @@ html,body,tr,th,td,#files,a {
background: none;
font-weight: inherit;
font-size: inherit;
padding: none;
padding: 0;
border: none;
}
html {
@@ -64,11 +67,16 @@ a, #files tbody div a:last-child {
background: #161616;
text-decoration: underline;
}
#files thead {
background: #333;
position: sticky;
top: 0;
}
#files thead a {
color: #999;
font-weight: normal;
}
#files tr+tr:hover {
#files tr:hover {
background: #1c1c1c;
}
#files thead th {
@@ -90,8 +98,6 @@ a, #files tbody div a:last-child {
#files td {
margin: 0;
padding: 0 .5em;
}
#files td {
border-bottom: 1px solid #111;
}
#files td+td+td {
@@ -182,11 +188,42 @@ a, #files tbody div a:last-child {
color: #840;
text-shadow: 0 0 .3em #b80;
}
#files tbody tr.sel td {
#files tbody tr.sel td,
#ggrid a.sel,
html.light #ggrid a.sel {
color: #fff;
background: #925;
border-color: #c37;
}
#files tbody tr.sel:hover td,
#ggrid a.sel:hover,
html.light #ggrid a.sel:hover {
color: #fff;
background: #d39;
border-color: #d48;
text-shadow: 1px 1px 0 #804;
}
#ggrid a.sel,
html.light #ggrid a.sel {
border-top: 1px solid #d48;
box-shadow: 0 .1em 1.2em #b36;
transition: all 0.2s cubic-bezier(.2, 2.2, .5, 1); /* https://cubic-bezier.com/#.4,2,.7,1 */
}
#ggrid a.sel img {
opacity: .7;
box-shadow: 0 0 1em #b36;
filter: contrast(130%) brightness(107%);
}
#files tr.sel a {
color: #fff;
}
#files tr.sel a.play {
color: #fc5;
}
#files tr.sel a.play.act {
color: #fff;
text-shadow: 0 0 1px #fff;
}
#blocked {
position: fixed;
top: 0;
@@ -244,7 +281,10 @@ a, #files tbody div a:last-child {
background: #3c3c3c;
}
#wtico {
cursor: url(/.cpr/dd/1.png), pointer;
cursor: url(/.cpr/dd/4.png), pointer;
animation: cursor 500ms;
}
#wtico:hover {
animation: cursor 500ms infinite;
}
@keyframes cursor {
@@ -252,7 +292,7 @@ a, #files tbody div a:last-child {
30% {cursor: url(/.cpr/dd/3.png), pointer}
50% {cursor: url(/.cpr/dd/4.png), pointer}
75% {cursor: url(/.cpr/dd/5.png), pointer}
85% {cursor: url(/.cpr/dd/1.png), pointer}
85% {cursor: url(/.cpr/dd/4.png), pointer}
}
@keyframes spin {
100% {transform: rotate(360deg)}
@@ -273,29 +313,48 @@ a, #files tbody div a:last-child {
padding: .2em 0 0 .07em;
color: #fff;
}
#wzip {
#wzip, #wnp {
display: none;
margin-right: .3em;
padding-right: .3em;
border-right: .1em solid #555;
}
#wnp a {
position: relative;
font-size: .47em;
margin: 0 .1em;
top: -.4em;
}
#wnp a+a {
margin-left: .33em;
}
#wtoggle,
#wtoggle * {
line-height: 1em;
}
#wtoggle.np {
width: 5.5em;
}
#wtoggle.sel {
width: 6.4em;
}
#wtoggle.sel #wzip {
#wtoggle.sel #wzip,
#wtoggle.np #wnp {
display: inline-block;
}
#wtoggle.sel #wzip a {
#wtoggle.sel.np #wnp {
display: none;
}
#wzip a {
font-size: .4em;
padding: 0 .3em;
margin: -.3em .2em;
position: relative;
display: inline-block;
}
#wzip a+a {
margin-left: .8em;
}
#wtoggle.sel #wzip #selzip {
top: -.6em;
padding: .4em .3em;
@@ -343,10 +402,10 @@ a, #files tbody div a:last-child {
width: calc(100% - 10.5em);
background: rgba(0,0,0,0.2);
}
@media (min-width: 90em) {
@media (min-width: 80em) {
#barpos,
#barbuf {
width: calc(100% - 24em);
width: calc(100% - 21em);
left: 9.8em;
top: .7em;
height: 1.6em;
@@ -356,6 +415,9 @@ a, #files tbody div a:last-child {
bottom: -3.2em;
height: 3.2em;
}
#pvol {
max-width: 9em;
}
}
@@ -407,6 +469,7 @@ a, #files tbody div a:last-child {
padding: .3em .6em;
border-radius: .3em;
border-width: .15em 0;
white-space: nowrap;
}
.opbox {
background: #2d2d2d;
@@ -487,9 +550,6 @@ input[type="checkbox"]:checked+label {
border-collapse: collapse;
width: 100%;
}
#files td div a:last-child {
width: 100%;
}
#wrap {
margin-top: 2em;
}
@@ -499,7 +559,6 @@ input[type="checkbox"]:checked+label {
left: 0;
bottom: 0;
top: 7em;
padding-top: .2em;
overflow-y: auto;
-ms-scroll-chaining: none;
overscroll-behavior-y: none;
@@ -508,9 +567,7 @@ input[type="checkbox"]:checked+label {
#thx_ff {
padding: 5em 0;
}
#tree::-webkit-scrollbar-track {
background: #333;
}
#tree::-webkit-scrollbar-track,
#tree::-webkit-scrollbar {
background: #333;
}
@@ -525,8 +582,7 @@ input[type="checkbox"]:checked+label {
left: -1.7em;
width: calc(100% + 1.3em);
}
.tglbtn,
#tree>a+a {
.btn {
padding: .2em .4em;
font-size: 1.2em;
background: #2a2a2a;
@@ -536,12 +592,10 @@ input[type="checkbox"]:checked+label {
position: relative;
top: -.2em;
}
.tglbtn:hover,
#tree>a+a:hover {
.btn:hover {
background: #805;
}
.tglbtn.on,
#tree>a+a.on {
.tgl.btn.on {
background: #fc4;
color: #400;
text-shadow: none;
@@ -549,6 +603,7 @@ input[type="checkbox"]:checked+label {
#detree {
padding: .3em .5em;
font-size: 1.5em;
line-height: 1.5em;
}
#tree ul,
#tree li {
@@ -685,6 +740,93 @@ input[type="checkbox"]:checked+label {
font-family: monospace, monospace;
line-height: 2em;
}
#griden.on+#thumbs {
opacity: .3;
}
#ghead {
background: #3c3c3c;
border: 1px solid #444;
border-radius: .3em;
padding: .5em;
margin: 0 1.5em 1em .4em;
position: sticky;
top: -.3em;
}
html.light #ghead {
background: #f7f7f7;
border-color: #ddd;
}
#ghead .btn {
position: relative;
top: 0;
}
#ggrid {
padding-top: .5em;
}
#ggrid a {
display: inline-block;
width: var(--grid-sz);
vertical-align: top;
overflow-wrap: break-word;
background: #383838;
border: 1px solid #444;
border-top: 1px solid #555;
box-shadow: 0 .1em .2em #222;
border-radius: .3em;
padding: .3em;
margin: .5em;
}
#ggrid a img {
border-radius: .2em;
max-width: var(--grid-sz);
max-height: calc(var(--grid-sz)/1.25);
margin: 0 auto;
display: block;
}
#ggrid a span {
padding: .2em .3em;
display: block;
}
#ggrid a:hover {
background: #444;
border-color: #555;
color: #fd9;
}
html.light #ggrid a {
background: #f7f7f7;
border-color: #ddd;
box-shadow: 0 .1em .2em #ddd;
}
html.light #ggrid a:hover {
background: #fff;
border-color: #ccc;
color: #015;
box-shadow: 0 .1em .5em #aaa;
}
#pvol,
#barbuf,
#barpos,
#u2conf label {
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
}
@@ -704,25 +846,22 @@ html.light #srch_form {
}
html.light #ops a.act {
box-shadow: 0 .2em .2em #ccc;
background: #f7f7f7;
background: #fff;
border-color: #07a;
padding-top: .4em;
}
html.light #op_cfg h3 {
border-color: #ccc;
}
html.light .tglbtn,
html.light #tree > a + a {
html.light .btn {
color: #666;
background: #ddd;
box-shadow: none;
}
html.light .tglbtn:hover,
html.light #tree > a + a:hover {
html.light .btn:hover {
background: #caf;
}
html.light .tglbtn.on,
html.light #tree > a + a.on {
html.light .tgl.btn.on {
background: #4a0;
color: #fff;
}
@@ -761,7 +900,7 @@ html.light #files {
html.light #files thead th {
background: #eee;
}
html.light #files tr+tr td {
html.light #files tr td {
border-top: 1px solid #ddd;
}
html.light #files td {
@@ -785,8 +924,12 @@ html.light tr.play td {
html.light tr.play a {
color: #406;
}
html.light #files th:hover .cfg,
html.light #files th.min .cfg {
background: #ccc;
}
html.light #files > thead > tr > th.min span {
background: linear-gradient(90deg, rgba(68,68,68,0), rgba(68,68,68,0.2) 70%, rgba(68,68,68,0.5));
background: linear-gradient(90deg, rgba(204,204,204,0), rgba(204,204,204,0.5) 70%, #ccc);
}
html.light #blocked {
background: #eee;
@@ -808,6 +951,9 @@ html.light #files tr.sel td {
html.light #files tr.sel a {
color: #fff;
}
html.light #files tr.sel a.play.act {
color: #fb0;
}
html.light input[type="checkbox"] + label {
color: #333;
}
@@ -854,4 +1000,14 @@ html.light #files a:hover,
html.light #files tr.sel a:hover {
color: #000;
background: #fff;
}
html.light #tree {
scrollbar-color: #a70 #ddd;
}
html.light #tree::-webkit-scrollbar-track,
html.light #tree::-webkit-scrollbar {
background: #ddd;
}
#tree::-webkit-scrollbar-thumb {
background: #da0;
}

View File

@@ -21,7 +21,7 @@
{%- endif %}
<a href="#" data-perm="write" data-dest="bup" data-desc="bup: basic uploader, even supports netscape 4.0">🎈</a>
<a href="#" data-perm="write" data-dest="mkdir" data-desc="mkdir: create a new directory">📂</a>
<a href="#" data-perm="write" data-dest="new_md" data-desc="new-md: create a new markdown document">📝</a>
<a href="#" data-perm="read write" data-dest="new_md" data-desc="new-md: create a new markdown document">📝</a>
<a href="#" data-perm="write" data-dest="msg" data-desc="msg: send a message to the server log">📟</a>
<a href="#" data-dest="cfg" data-desc="configuration options">⚙️</a>
<div id="opdesc"></div>
@@ -41,8 +41,10 @@
<div id="op_cfg" class="opview opbox">
<h3>switches</h3>
<div>
<a id="tooltips" class="tglbtn" href="#">tooltips</a>
<a id="lightmode" class="tglbtn" href="#">lightmode</a>
<a id="tooltips" class="tgl btn" href="#">tooltips</a>
<a id="lightmode" class="tgl btn" href="#">lightmode</a>
<a id="griden" class="tgl btn" href="#">the grid</a>
<a id="thumbs" class="tgl btn" href="#">thumbs</a>
</div>
{%- if have_zip %}
<h3>folder download</h3>
@@ -61,9 +63,9 @@
<div id="tree">
<a href="#" id="detree">🍞...</a>
<a href="#" step="2" id="twobytwo">+</a>
<a href="#" step="-2" id="twig">&ndash;</a>
<a href="#" class="tglbtn" id="dyntree">a</a>
<a href="#" class="btn" step="2" id="twobytwo">+</a>
<a href="#" class="btn" step="-2" id="twig">&ndash;</a>
<a href="#" class="tgl btn" id="dyntree">a</a>
<ul id="treeul"></ul>
<div id="thx_ff">&nbsp;</div>
</div>
@@ -114,22 +116,7 @@
<div id="srv_info"><span>{{ srv_info }}</span></div>
{%- endif %}
<div id="widget">
<div id="wtoggle">
<span id="wzip">
<a href="#" id="selall">sel.<br />all</a>
<a href="#" id="selinv">sel.<br />inv.</a>
<a href="#" id="selzip">zip</a>
</span><a
href="#" id="wtico"></a>
</div>
<div id="widgeti">
<div id="pctl"><a href="#" id="bprev"></a><a href="#" id="bplay"></a><a href="#" id="bnext"></a></div>
<canvas id="pvol" width="288" height="38"></canvas>
<canvas id="barpos"></canvas>
<canvas id="barbuf"></canvas>
</div>
</div>
<div id="widget"></div>
<script>
var tag_order_cfg = {{ tag_order }};

File diff suppressed because it is too large

View File

@@ -6,6 +6,11 @@
<title>{{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<style>
html{font-family:sans-serif}
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
a{display:block}
</style>
</head>
<body>
@@ -49,7 +54,7 @@
<div>{{ logues[1] }}</div><br />
{%- endif %}
<h2><a href="{{ url_suf }}&amp;h">control-panel</a></h2>
<h2><a href="{{ url_suf }}{{ url_suf and '&amp;' or '?' }}h">control-panel</a></h2>
</body>
</html>

Binary file not shown.


View File

@@ -50,6 +50,9 @@ pre code:last-child {
pre code::before {
content: counter(precode);
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
display: inline-block;
text-align: right;
font-size: .75em;
@@ -591,12 +594,3 @@ blink {
color: #940;
}
}
/*
*[data-ln]:before {
content: attr(data-ln);
font-size: .8em;
margin: 0 .4em;
color: #f0c;
}
*/

View File

@@ -46,7 +46,7 @@ function statify(obj) {
var ua = navigator.userAgent;
if (ua.indexOf(') Gecko/') !== -1 && /Linux| Mac /.exec(ua)) {
// necessary on ff-68.7 at least
var s = document.createElement('style');
var s = mknod('style');
s.innerHTML = '@page { margin: .5in .6in .8in .6in; }';
console.log(s.innerHTML);
document.head.appendChild(s);
@@ -175,12 +175,12 @@ function md_plug_err(ex, js) {
msg = "Line " + ln + ", " + msg;
var lns = js.split('\n');
if (ln < lns.length) {
o = document.createElement('span');
o = mknod('span');
o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block';
o.textContent = lns[ln - 1];
}
}
errbox = document.createElement('div');
errbox = mknod('div');
errbox.setAttribute('id', 'md_errbox');
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
errbox.textContent = msg;

View File

@@ -1,126 +1,125 @@
#toc {
display: none;
display: none;
}
#mtw {
display: block;
position: fixed;
left: .5em;
bottom: 0;
width: calc(100% - 56em);
display: block;
position: fixed;
left: .5em;
bottom: 0;
width: calc(100% - 56em);
}
#mw {
left: calc(100% - 55em);
overflow-y: auto;
position: fixed;
bottom: 0;
left: calc(100% - 55em);
overflow-y: auto;
position: fixed;
bottom: 0;
}
/* single-screen */
#mtw.preview,
#mw.editor {
opacity: 0;
z-index: 1;
opacity: 0;
z-index: 1;
}
#mw.preview,
#mtw.editor {
z-index: 5;
z-index: 5;
}
#mtw.single,
#mw.single {
margin: 0;
left: 1em;
left: max(1em, calc((100% - 56em) / 2));
margin: 0;
left: 1em;
left: max(1em, calc((100% - 56em) / 2));
}
#mtw.single {
width: 55em;
width: min(55em, calc(100% - 2em));
width: 55em;
width: min(55em, calc(100% - 2em));
}
#mp {
position: relative;
position: relative;
}
#mt, #mtr {
width: 100%;
height: calc(100% - 1px);
color: #444;
background: #f7f7f7;
border: 1px solid #999;
outline: none;
padding: 0;
margin: 0;
font-family: 'consolas', monospace, monospace;
white-space: pre-wrap;
word-break: break-word;
overflow-wrap: break-word;
word-wrap: break-word; /*ie*/
overflow-y: scroll;
line-height: 1.3em;
font-size: .9em;
position: relative;
scrollbar-color: #eb0 #f7f7f7;
width: 100%;
height: calc(100% - 1px);
color: #444;
background: #f7f7f7;
border: 1px solid #999;
outline: none;
padding: 0;
margin: 0;
font-family: 'consolas', monospace, monospace;
white-space: pre-wrap;
word-break: break-word;
overflow-wrap: break-word;
word-wrap: break-word; /*ie*/
overflow-y: scroll;
line-height: 1.3em;
font-size: .9em;
position: relative;
scrollbar-color: #eb0 #f7f7f7;
}
html.dark #mt {
color: #eee;
background: #222;
border: 1px solid #777;
scrollbar-color: #b80 #282828;
color: #eee;
background: #222;
border: 1px solid #777;
scrollbar-color: #b80 #282828;
}
#mtr {
position: absolute;
top: 0;
left: 0;
position: absolute;
top: 0;
left: 0;
}
#save.force-save {
color: #400;
background: #f97;
border-radius: .15em;
color: #400;
background: #f97;
border-radius: .15em;
}
html.dark #save.force-save {
color: #fca;
background: #720;
color: #fca;
background: #720;
}
#save.disabled {
opacity: .4;
opacity: .4;
}
#helpbox,
#toast {
background: #f7f7f7;
border-radius: .4em;
z-index: 9001;
background: #f7f7f7;
border-radius: .4em;
z-index: 9001;
}
#helpbox {
display: none;
position: fixed;
padding: 2em;
top: 4em;
overflow-y: auto;
box-shadow: 0 .5em 2em #777;
height: calc(100% - 12em);
left: calc(50% - 15em);
right: 0;
width: 30em;
display: none;
position: fixed;
padding: 2em;
top: 4em;
overflow-y: auto;
box-shadow: 0 .5em 2em #777;
height: calc(100% - 12em);
left: calc(50% - 15em);
right: 0;
width: 30em;
}
#helpclose {
display: block;
display: block;
}
html.dark #helpbox {
box-shadow: 0 .5em 2em #444;
box-shadow: 0 .5em 2em #444;
}
html.dark #helpbox,
html.dark #toast {
background: #222;
border: 1px solid #079;
border-width: 1px 0;
background: #222;
border: 1px solid #079;
border-width: 1px 0;
}
#toast {
font-weight: bold;
text-align: center;
padding: .6em 0;
position: fixed;
z-index: 9001;
top: 30%;
transition: opacity 0.2s ease-in-out;
opacity: 1;
font-weight: bold;
text-align: center;
padding: .6em 0;
position: fixed;
top: 30%;
transition: opacity 0.2s ease-in-out;
opacity: 1;
}

View File

@@ -16,7 +16,7 @@ var dom_sbs = ebi('sbs');
var dom_nsbs = ebi('nsbs');
var dom_tbox = ebi('toolsbox');
var dom_ref = (function () {
var d = document.createElement('div');
var d = mknod('div');
d.setAttribute('id', 'mtr');
dom_swrap.appendChild(d);
d = ebi('mtr');
@@ -71,7 +71,7 @@ var map_src = [];
var map_pre = [];
function genmap(dom, oldmap) {
var find = nlines;
while (oldmap && find --> 0) {
while (oldmap && find-- > 0) {
var tmap = genmapq(dom, '*[data-ln="' + find + '"]');
if (!tmap || !tmap.length)
continue;
@@ -94,7 +94,7 @@ var nlines = 0;
var draw_md = (function () {
var delay = 1;
function draw_md() {
var t0 = new Date().getTime();
var t0 = Date.now();
var src = dom_src.value;
convert_markdown(src, dom_pre);
@@ -110,7 +110,7 @@ var draw_md = (function () {
cls(ebi('save'), 'disabled', src == server_md);
var t1 = new Date().getTime();
var t1 = Date.now();
delay = t1 - t0 > 100 ? 25 : 1;
}
@@ -252,7 +252,7 @@ function Modpoll() {
}
console.log('modpoll...');
var url = (document.location + '').split('?')[0] + '?raw&_=' + new Date().getTime();
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
var xhr = new XMLHttpRequest();
xhr.modpoll = this;
xhr.open('GET', url, true);
@@ -399,7 +399,7 @@ function save_cb() {
function run_savechk(lastmod, txt, btn, ntry) {
// download the saved doc from the server and compare
var url = (document.location + '').split('?')[0] + '?raw&_=' + new Date().getTime();
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
var xhr = new XMLHttpRequest();
xhr.open('GET', url, true);
xhr.responseType = 'text';
@@ -455,7 +455,7 @@ function toast(autoclose, style, width, msg) {
ok.parentNode.removeChild(ok);
style = "width:" + width + "em;left:calc(50% - " + (width / 2) + "em);" + style;
ok = document.createElement('div');
ok = mknod('div');
ok.setAttribute('id', 'toast');
ok.setAttribute('style', style);
ok.innerHTML = msg;
@@ -1049,7 +1049,7 @@ action_stack = (function () {
var p1 = from.length,
p2 = to.length;
while (p1 --> 0 && p2 --> 0)
while (p1-- > 0 && p2-- > 0)
if (from[p1] != to[p2])
break;
@@ -1142,14 +1142,3 @@ action_stack = (function () {
_ref: ref
}
})();
/*
ebi('help').onclick = function () {
var c1 = getComputedStyle(dom_src).cssText.split(';');
var c2 = getComputedStyle(dom_ref).cssText.split(';');
var max = Math.min(c1.length, c2.length);
for (var a = 0; a < max; a++)
if (c1[a] !== c2[a])
console.log(c1[a] + '\n' + c2[a]);
}
*/

View File

@@ -8,68 +8,58 @@ html .editor-toolbar>i.separator { border-left: 1px solid #ccc; }
html .editor-toolbar.disabled-for-preview>button:not(.no-disable) { opacity: .35 }
html {
line-height: 1.5em;
line-height: 1.5em;
}
html, body {
margin: 0;
padding: 0;
min-height: 100%;
font-family: sans-serif;
background: #f7f7f7;
color: #333;
margin: 0;
padding: 0;
min-height: 100%;
font-family: sans-serif;
background: #f7f7f7;
color: #333;
}
#mn {
font-weight: normal;
margin: 1.3em 0 .7em 1em;
font-weight: normal;
margin: 1.3em 0 .7em 1em;
}
#mn a {
color: #444;
margin: 0 0 0 -.2em;
padding: 0 0 0 .4em;
text-decoration: none;
/* ie: */
border-bottom: .1em solid #777\9;
margin-right: 1em\9;
color: #444;
margin: 0 0 0 -.2em;
padding: 0 0 0 .4em;
text-decoration: none;
/* ie: */
border-bottom: .1em solid #777\9;
margin-right: 1em\9;
}
#mn a:first-child {
padding-left: .5em;
padding-left: .5em;
}
#mn a:last-child {
padding-right: .5em;
padding-right: .5em;
}
#mn a:not(:last-child):after {
content: '';
width: 1.05em;
height: 1.05em;
margin: -.2em .3em -.2em -.4em;
display: inline-block;
border: 1px solid rgba(0,0,0,0.2);
border-width: .2em .2em 0 0;
transform: rotate(45deg);
content: '';
width: 1.05em;
height: 1.05em;
margin: -.2em .3em -.2em -.4em;
display: inline-block;
border: 1px solid rgba(0,0,0,0.2);
border-width: .2em .2em 0 0;
transform: rotate(45deg);
}
#mn a:hover {
color: #000;
text-decoration: underline;
color: #000;
text-decoration: underline;
}
html .editor-toolbar>button.disabled {
opacity: .35;
pointer-events: none;
opacity: .35;
pointer-events: none;
}
html .editor-toolbar>button.save.force-save {
background: #f97;
background: #f97;
}
/*
*[data-ln]:before {
content: attr(data-ln);
font-size: .8em;
margin: 0 .4em;
color: #f0c;
}
.cm-header { font-size: .4em !important }
*/
@@ -101,29 +91,29 @@ html .editor-toolbar>button.save.force-save {
line-height: 1.1em;
}
.mdo a {
color: #fff;
background: #39b;
text-decoration: none;
padding: 0 .3em;
border: none;
border-bottom: .07em solid #079;
color: #fff;
background: #39b;
text-decoration: none;
padding: 0 .3em;
border: none;
border-bottom: .07em solid #079;
}
.mdo h2 {
color: #fff;
background: #555;
margin-top: 2em;
border-bottom: .22em solid #999;
border-top: none;
color: #fff;
background: #555;
margin-top: 2em;
border-bottom: .22em solid #999;
border-top: none;
}
.mdo h1 {
color: #fff;
background: #444;
font-weight: normal;
border-top: .4em solid #fb0;
border-bottom: .4em solid #777;
border-radius: 0 1em 0 1em;
margin: 3em 0 1em 0;
padding: .5em 0;
color: #fff;
background: #444;
font-weight: normal;
border-top: .4em solid #fb0;
border-bottom: .4em solid #777;
border-radius: 0 1em 0 1em;
margin: 3em 0 1em 0;
padding: .5em 0;
}
h1, h2 {
line-height: 1.5em;
@@ -197,14 +187,14 @@ th {
/* mde support */
.mdo {
padding: 1em;
background: #f7f7f7;
padding: 1em;
background: #f7f7f7;
}
html.dark .mdo {
background: #1c1c1c;
background: #1c1c1c;
}
.CodeMirror {
background: #f7f7f7;
background: #f7f7f7;
}
@@ -214,108 +204,108 @@ html.dark .mdo {
/* darkmode */
html.dark .mdo,
html.dark .CodeMirror {
border-color: #222;
border-color: #222;
}
html.dark,
html.dark body,
html.dark .CodeMirror {
background: #222;
color: #ccc;
background: #222;
color: #ccc;
}
html.dark .CodeMirror-cursor {
border-color: #fff;
border-color: #fff;
}
html.dark .CodeMirror-selected {
box-shadow: 0 0 1px #0cf inset;
box-shadow: 0 0 1px #0cf inset;
}
html.dark .CodeMirror-selected,
html.dark .CodeMirror-selectedtext {
border-radius: .1em;
background: #246;
color: #fff;
border-radius: .1em;
background: #246;
color: #fff;
}
html.dark .mdo a {
background: #057;
background: #057;
}
html.dark .mdo h1 a, html.dark .mdo h4 a,
html.dark .mdo h2 a, html.dark .mdo h5 a,
html.dark .mdo h3 a, html.dark .mdo h6 a {
color: inherit;
background: none;
color: inherit;
background: none;
}
html.dark pre,
html.dark code {
color: #8c0;
background: #1a1a1a;
border: .07em solid #333;
color: #8c0;
background: #1a1a1a;
border: .07em solid #333;
}
html.dark .mdo ul,
html.dark .mdo ol {
border-color: #444;
border-color: #444;
}
html.dark .mdo>ul,
html.dark .mdo>ol {
border-color: #555;
border-color: #555;
}
html.dark strong {
color: #fff;
color: #fff;
}
html.dark p>em,
html.dark li>em,
html.dark td>em {
color: #f94;
border-color: #666;
color: #f94;
border-color: #666;
}
html.dark h1 {
background: #383838;
border-top: .4em solid #b80;
border-bottom: .4em solid #4c4c4c;
background: #383838;
border-top: .4em solid #b80;
border-bottom: .4em solid #4c4c4c;
}
html.dark h2 {
background: #444;
border-bottom: .22em solid #555;
background: #444;
border-bottom: .22em solid #555;
}
html.dark td,
html.dark th {
border-color: #444;
border-color: #444;
}
html.dark blockquote {
background: #282828;
border: .07em dashed #444;
background: #282828;
border: .07em dashed #444;
}
html.dark #mn a {
color: #ccc;
color: #ccc;
}
html.dark #mn a:not(:last-child):after {
border-color: rgba(255,255,255,0.3);
border-color: rgba(255,255,255,0.3);
}
html.dark .editor-toolbar {
border-color: #2c2c2c;
background: #1c1c1c;
}
html.dark .editor-toolbar>i.separator {
border-left: 1px solid #444;
border-right: 1px solid #111;
}
html.dark .editor-toolbar>button {
margin-left: -1px; border: 1px solid rgba(255,255,255,0.1);
color: #aaa;
}
html.dark .editor-toolbar>button:hover {
color: #333;
}
html.dark .editor-toolbar>button.active {
color: #333;
border-color: #ec1;
background: #c90;
}
html.dark .editor-toolbar::after,
html.dark .editor-toolbar::before {
background: none;
}

View File

@@ -71,7 +71,7 @@ var mde = (function () {
})();
function set_jumpto() {
document.querySelector('.editor-preview-side').onclick = jumpto;
QS('.editor-preview-side').onclick = jumpto;
}
function jumpto(ev) {
@@ -94,7 +94,7 @@ function md_changed(mde, on_srv) {
window.md_saved = mde.value();
var md_now = mde.value();
var save_btn = document.querySelector('.editor-toolbar button.save');
var save_btn = QS('.editor-toolbar button.save');
if (md_now == window.md_saved)
save_btn.classList.add('disabled');
@@ -105,7 +105,7 @@ function md_changed(mde, on_srv) {
}
function save(mde) {
var save_btn = document.querySelector('.editor-toolbar button.save');
var save_btn = QS('.editor-toolbar button.save');
if (save_btn.classList.contains('disabled')) {
alert('there is nothing to save');
return;
@@ -212,7 +212,7 @@ function save_chk() {
last_modified = this.lastmod;
md_changed(this.mde, true);
var ok = document.createElement('div');
var ok = mknod('div');
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
ok.innerHTML = 'OK✔';
var parent = ebi('m');

View File

@@ -3,7 +3,7 @@ html,body,tr,th,td,#files,a {
background: none;
font-weight: inherit;
font-size: inherit;
padding: none;
padding: 0;
border: none;
}
html {
@@ -20,8 +20,8 @@ body {
padding-bottom: 5em;
}
#box {
padding: .5em 1em;
background: #2c2c2c;
}
pre {
font-family: monospace, monospace;

View File

@@ -13,19 +13,23 @@
<div id="wrap">
<p>hello {{ this.uname }}</p>
{%- if rvol %}
<h1>you can browse these:</h1>
<ul>
{% for mp in rvol %}
<li><a href="/{{ mp }}{{ url_suf }}">/{{ mp }}</a></li>
{% endfor %}
</ul>
{%- endif %}
{%- if wvol %}
<h1>you can upload to:</h1>
<ul>
{% for mp in wvol %}
<li><a href="/{{ mp }}{{ url_suf }}">/{{ mp }}</a></li>
{% endfor %}
</ul>
{%- endif %}
<h1>login for more:</h1>
<ul>

View File

@@ -18,14 +18,14 @@ function goto_up2k() {
// usually it's undefined but some chromes throw on invoke
var up2k = null;
try {
crypto.subtle.digest(
'SHA-512', new Uint8Array(1)
).then(
function (x) { up2k = up2k_init(true) },
function (x) { up2k = up2k_init(false) }
var cf = crypto.subtle || crypto.webkitSubtle;
cf.digest('SHA-512', new Uint8Array(1)).then(
function (x) { console.log('sha-ok'); up2k = up2k_init(cf); },
function (x) { console.log('sha-ng:', x); up2k = up2k_init(false); }
);
}
catch (ex) {
console.log('sha-na:', ex);
try {
up2k = up2k_init(false);
}
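// illustrative sketch (assumptions: a browser-global `crypto`, webkitSubtle for
// old safari) of the feature-detection pattern above: a rejected digest()
// promise, e.g. chrome on plain http, means the js fallback hasher is needed.
function detect_sha512(cb) {
	var cf = crypto.subtle || crypto.webkitSubtle;
	if (!cf)
		return cb(false);
	try {
		cf.digest('SHA-512', new Uint8Array(1)).then(
			function () { cb(cf); },     // native hashing is available
			function () { cb(false); }   // blocked; load sha512.js instead
		);
	}
	catch (ex) {
		cb(false);  // some browsers throw on invoke rather than rejecting
	}
}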
@@ -55,7 +55,7 @@ function up2k_flagbus() {
dbg(who, 'hi me (??)');
return;
}
flag.act = new Date().getTime();
flag.act = Date.now();
if (what == "want") {
// lowest id wins, don't care if that's us
if (who < flag.id) {
@@ -209,7 +209,7 @@ function U2pvis(act, btns) {
};
this.perc = function (bd, bd0, sz, t0) {
var td = new Date().getTime() - t0,
var td = Date.now() - t0,
p = bd * 100.0 / sz,
nb = bd - bd0,
spd = nb / (td / 1000),
@@ -219,25 +219,28 @@ function U2pvis(act, btns) {
};
this.hashed = function (fobj) {
var fo = this.tab[fobj.n];
var nb = fo.bt * (++fo.nh / fo.cb.length);
var p = this.perc(nb, 0, fobj.size, fobj.t1);
var fo = this.tab[fobj.n],
nb = fo.bt * (++fo.nh / fo.cb.length),
p = this.perc(nb, 0, fobj.size, fobj.t1);
fo.hp = '{0}%, {1}, {2} MB/s'.format(
p[0].toFixed(2), p[1], p[2].toFixed(2)
);
if (!this.is_act(fo.in))
return;
var obj = ebi('f{0}p'.format(fobj.n));
var obj = ebi('f{0}p'.format(fobj.n)),
o1 = p[0] - 2, o2 = p[0] - 0.1, o3 = p[0];
obj.innerHTML = fo.hp;
obj.style.color = '#fff';
var o1 = p[0] - 2, o2 = p[0] - 0.1, o3 = p[0];
obj.style.background = 'linear-gradient(90deg, #025, #06a ' + o1 + '%, #09d ' + o2 + '%, #333 ' + o3 + '%, #333 99%, #777)';
};
this.prog = function (fobj, nchunk, cbd) {
var fo = this.tab[fobj.n];
var delta = cbd - fo.cb[nchunk];
var fo = this.tab[fobj.n],
delta = cbd - fo.cb[nchunk];
fo.cb[nchunk] = cbd;
fo.bd += delta;
@@ -249,10 +252,11 @@ function U2pvis(act, btns) {
if (!this.is_act(fo.in))
return;
var obj = ebi('f{0}p'.format(fobj.n));
var obj = ebi('f{0}p'.format(fobj.n)),
o1 = p[0] - 2, o2 = p[0] - 0.1, o3 = p[0];
obj.innerHTML = fo.hp;
obj.style.color = '#fff';
var o1 = p[0] - 2, o2 = p[0] - 0.1, o3 = p[0];
obj.style.background = 'linear-gradient(90deg, #050, #270 ' + o1 + '%, #4b0 ' + o2 + '%, #333 ' + o3 + '%, #333 99%, #777)';
};
@@ -287,24 +291,14 @@ function U2pvis(act, btns) {
}
};
this.bzw_log = function (first, last) {
console.log("first %d head %d tail %d last %d", first, this.head, this.tail, last);
var trs = document.querySelectorAll('#u2tab>tbody>tr'), msg = [];
for (var a = 0; a < trs.length; a++)
msg.push(trs[a].getAttribute('id'));
console.log(msg.join(' '));
}
this.bzw = function () {
var first = document.querySelector('#u2tab>tbody>tr:first-child');
var first = QS('#u2tab>tbody>tr:first-child');
if (!first)
return;
var last = document.querySelector('#u2tab>tbody>tr:last-child');
var last = QS('#u2tab>tbody>tr:last-child');
first = parseInt(first.getAttribute('id').slice(1));
last = parseInt(last.getAttribute('id').slice(1));
//this.bzw_log(first, last);
while (this.head - first > this.wsz) {
var obj = ebi('f' + (first++));
@@ -315,12 +309,10 @@ function U2pvis(act, btns) {
if (!obj)
this.addrow(last);
}
//this.bzw_log(first, last);
//console.log('--');
};
this.drawcard = function (cat) {
var cards = document.querySelectorAll('#u2cards>a>span');
var cards = QSA('#u2cards>a>span');
if (cat == "q") {
cards[4].innerHTML = this.ctr[cat];
@@ -343,9 +335,9 @@ function U2pvis(act, btns) {
this.changecard = function (card) {
this.act = card;
var html = [];
this.head = -1;
this.tail = -1;
var html = [];
for (var a = 0; a < this.tab.length; a++) {
var rt = this.tab[a].in;
if (this.is_act(rt)) {
@@ -382,7 +374,7 @@ function U2pvis(act, btns) {
if (as_html)
return '<tr id="f' + nfile + '">' + ret + '</tr>';
var obj = document.createElement('tr');
var obj = mknod('tr');
obj.setAttribute('id', 'f' + nfile);
obj.innerHTML = ret;
return obj;
@@ -394,7 +386,7 @@ function U2pvis(act, btns) {
};
var that = this;
btns = document.querySelectorAll(btns + '>a[act]');
btns = QSA(btns + '>a[act]');
for (var a = 0; a < btns.length; a++) {
btns[a].onclick = function (e) {
ev(e);
@@ -409,9 +401,7 @@ function U2pvis(act, btns) {
}
function up2k_init(have_crypto) {
//have_crypto = false;
function up2k_init(subtle) {
// show modal message
function showmodal(msg) {
ebi('u2notbtn').innerHTML = msg;
@@ -428,15 +418,18 @@ function up2k_init(have_crypto) {
ebi('u2notbtn').innerHTML = '';
}
var shame = 'your browser <a href="https://www.chromium.org/blink/webcrypto">disables sha512</a> unless you <a href="' + (window.location + '').replace(':', 's:') + '">use https</a>'
var is_https = (window.location + '').indexOf('https:') === 0;
var suggest_up2k = 'this is the basic uploader; <a href="#" id="u2yea">up2k</a> is better';
var shame = 'your browser <a href="https://www.chromium.org/blink/webcrypto">disables sha512</a> unless you <a href="' + (window.location + '').replace(':', 's:') + '">use https</a>',
is_https = (window.location + '').indexOf('https:') === 0;
if (is_https)
// chrome<37 firefox<34 edge<12 ie<11 opera<24 safari<10.1
// chrome<37 firefox<34 edge<12 opera<24 safari<7
shame = 'your browser is impressively ancient';
// upload ui hidden by default, clicking the header shows it
function init_deps() {
if (!have_crypto && !window.asmCrypto) {
if (!subtle && !window.asmCrypto) {
showmodal('<h1>loading sha512.js</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
import_js('/.cpr/deps/sha512.js', unmodal);
@@ -448,34 +441,43 @@ function up2k_init(have_crypto) {
}
// show uploader if the user only has write-access
if (!ebi('files'))
var perms = document.body.getAttribute('perms');
if (perms && !has(perms.split(' '), 'read'))
goto('up2k');
// shows or clears an error message in the basic uploader ui
function setmsg(msg) {
// shows or clears a message in the basic uploader ui
function setmsg(msg, type) {
if (msg !== undefined) {
ebi('u2err').setAttribute('class', 'err');
ebi('u2err').setAttribute('class', type);
ebi('u2err').innerHTML = msg;
}
else {
ebi('u2err').setAttribute('class', '');
ebi('u2err').innerHTML = '';
}
if (msg == suggest_up2k) {
ebi('u2yea').onclick = function (e) {
ev(e);
goto('up2k');
};
}
}
// switches to the basic uploader with msg as error message
function un2k(msg) {
setmsg(msg);
setmsg(msg, 'err');
return false;
}
// handle user intent to use the basic uploader instead
ebi('u2nope').onclick = function (e) {
ev(e);
setmsg();
setmsg(suggest_up2k, 'msg');
goto('bup');
};
setmsg(suggest_up2k, 'msg');
if (!String.prototype.format) {
String.prototype.format = function () {
var args = arguments;
@@ -486,13 +488,14 @@ function up2k_init(have_crypto) {
};
}
var parallel_uploads = icfg_get('nthread');
var multitask = bcfg_get('multitask', true);
var ask_up = bcfg_get('ask_up', true);
var flag_en = bcfg_get('flag_en', false);
var fsearch = bcfg_get('fsearch', false);
var parallel_uploads = icfg_get('nthread'),
multitask = bcfg_get('multitask', true),
ask_up = bcfg_get('ask_up', true),
flag_en = bcfg_get('flag_en', false),
fsearch = bcfg_get('fsearch', false),
fdom_ctr = 0,
min_filebuf = 0;
var fdom_ctr = 0;
var st = {
"files": [],
"todo": {
@@ -542,8 +545,9 @@ function up2k_init(have_crypto) {
e.stopPropagation();
e.preventDefault();
var files;
var is_itemlist = false;
var files,
is_itemlist = false;
if (e.dataTransfer) {
if (e.dataTransfer.items) {
files = e.dataTransfer.items; // DataTransferItemList
@@ -557,9 +561,10 @@ function up2k_init(have_crypto) {
return alert('no files selected??');
more_one_file();
var bad_files = [];
var good_files = [];
var dirs = [];
var bad_files = [],
good_files = [],
dirs = [];
for (var a = 0; a < files.length; a++) {
var fobj = files[a];
if (is_itemlist) {
@@ -644,12 +649,13 @@ function up2k_init(have_crypto) {
function gotallfiles(good_files, bad_files) {
if (bad_files.length > 0) {
var ntot = bad_files.length + good_files.length;
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, ntot);
var ntot = bad_files.length + good_files.length,
msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, ntot);
for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++)
msg += '-- ' + bad_files[a] + '\n';
if (good_files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))
if (good_files.length - bad_files.length <= 1 && ANDROID)
msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';
alert(msg);
@@ -663,9 +669,10 @@ function up2k_init(have_crypto) {
return;
for (var a = 0; a < good_files.length; a++) {
var fobj = good_files[a][0];
var now = new Date().getTime();
var lmod = fobj.lastModified || now;
var fobj = good_files[a][0],
now = Date.now(),
lmod = fobj.lastModified || now;
var entry = {
"n": parseInt(st.files.length.toString()),
"t0": now,
@@ -689,7 +696,7 @@ function up2k_init(have_crypto) {
pvis.addfile([
fsearch ? esc(entry.name) : linksplit(
esc(uricom_dec(entry.purl)[0] + entry.name)).join(' '),
uricom_dec(entry.purl)[0] + entry.name).join(' '),
'📐 hash',
''
], fobj.size);
@@ -701,7 +708,7 @@ function up2k_init(have_crypto) {
function more_one_file() {
fdom_ctr++;
var elm = document.createElement('div')
var elm = mknod('div');
elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" />'.format(fdom_ctr);
ebi('u2form').appendChild(elm);
ebi('file' + fdom_ctr).addEventListener('change', gotfile, false);
@@ -748,26 +755,23 @@ function up2k_init(have_crypto) {
}
var tasker = (function () {
var mutex = false;
var was_busy = false;
var tto = null,
running = false,
was_busy = false;
function defer() {
running = false;
clearTimeout(tto);
tto = setTimeout(taskerd, 100);
}
function taskerd() {
if (mutex)
if (running)
return;
mutex = true;
clearTimeout(tto);
running = true;
while (true) {
if (false) {
ebi('srv_info').innerHTML =
new Date().getTime() + ", " +
st.todo.hash.length + ", " +
st.todo.handshake.length + ", " +
st.todo.upload.length + ", " +
st.busy.hash.length + ", " +
st.busy.handshake.length + ", " +
st.busy.upload.length;
}
var is_busy = 0 !=
st.todo.hash.length +
st.todo.handshake.length +
@@ -779,21 +783,16 @@ function up2k_init(have_crypto) {
if (was_busy != is_busy) {
was_busy = is_busy;
if (is_busy)
window.addEventListener("beforeunload", warn_uploader_busy);
else
window.removeEventListener("beforeunload", warn_uploader_busy);
window[(is_busy ? "add" : "remove") +
"EventListener"]("beforeunload", warn_uploader_busy);
}
if (flag) {
if (is_busy) {
var now = new Date().getTime();
var now = Date.now();
flag.take(now);
if (!flag.ours) {
setTimeout(taskerd, 100);
mutex = false;
return;
}
if (!flag.ours)
return defer();
}
else if (flag.ours) {
flag.give();
@@ -835,11 +834,8 @@ function up2k_init(have_crypto) {
mou_ikkai = true;
}
if (!mou_ikkai) {
setTimeout(taskerd, 100);
mutex = false;
return;
}
if (!mou_ikkai)
return defer();
}
}
taskerd();
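// illustrative sketch: defer() above is essentially a debounced self-reschedule;
// the clearTimeout keeps at most one wakeup queued even if defer is reached
// again before the timer fires. standalone version, assuming a `tick` callback:
var tto = null;
function defer(tick) {
	clearTimeout(tto);            // drop any wakeup that is already pending
	tto = setTimeout(tick, 100);  // and arm exactly one new one
}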
@@ -853,47 +849,47 @@ function up2k_init(have_crypto) {
// https://gist.github.com/jonleighton/958841
function buf2b64(arrayBuffer) {
var base64 = '';
var encodings = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_';
var bytes = new Uint8Array(arrayBuffer);
var byteLength = bytes.byteLength;
var byteRemainder = byteLength % 3;
var mainLength = byteLength - byteRemainder;
var a, b, c, d;
var chunk;
var base64 = '',
cset = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_',
src = new Uint8Array(arrayBuffer),
nbytes = src.byteLength,
byteRem = nbytes % 3,
mainLen = nbytes - byteRem,
a, b, c, d, chunk;
for (var i = 0; i < mainLength; i = i + 3) {
chunk = (bytes[i] << 16) | (bytes[i + 1] << 8) | bytes[i + 2];
for (var i = 0; i < mainLen; i = i + 3) {
chunk = (src[i] << 16) | (src[i + 1] << 8) | src[i + 2];
// create 8*3=24bit segment then split into 6bit segments
a = (chunk & 16515072) >> 18; // 16515072 = (2^6 - 1) << 18
b = (chunk & 258048) >> 12; // 258048 = (2^6 - 1) << 12
c = (chunk & 4032) >> 6; // 4032 = (2^6 - 1) << 6
d = chunk & 63; // 63 = 2^6 - 1
a = (chunk & 16515072) >> 18; // (2^6 - 1) << 18
b = (chunk & 258048) >> 12; // (2^6 - 1) << 12
c = (chunk & 4032) >> 6; // (2^6 - 1) << 6
d = chunk & 63; // 2^6 - 1
// Convert the raw binary segments to the appropriate ASCII encoding
base64 += encodings[a] + encodings[b] + encodings[c] + encodings[d];
base64 += cset[a] + cset[b] + cset[c] + cset[d];
}
if (byteRemainder == 1) {
chunk = bytes[mainLength];
a = (chunk & 252) >> 2; // 252 = (2^6 - 1) << 2
b = (chunk & 3) << 4; // 3 = 2^2 - 1 (zero 4 LSB)
base64 += encodings[a] + encodings[b];//+ '==';
if (byteRem == 1) {
chunk = src[mainLen];
a = (chunk & 252) >> 2; // (2^6 - 1) << 2
b = (chunk & 3) << 4; // 2^2 - 1 (zero 4 LSB)
base64 += cset[a] + cset[b];//+ '==';
}
else if (byteRemainder == 2) {
chunk = (bytes[mainLength] << 8) | bytes[mainLength + 1];
a = (chunk & 64512) >> 10; // 64512 = (2^6 - 1) << 10
b = (chunk & 1008) >> 4; // 1008 = (2^6 - 1) << 4
c = (chunk & 15) << 2; // 15 = 2^4 - 1 (zero 2 LSB)
base64 += encodings[a] + encodings[b] + encodings[c];//+ '=';
else if (byteRem == 2) {
chunk = (src[mainLen] << 8) | src[mainLen + 1];
a = (chunk & 64512) >> 10; // (2^6 - 1) << 10
b = (chunk & 1008) >> 4; // (2^6 - 1) << 4
c = (chunk & 15) << 2; // 2^4 - 1 (zero 2 LSB)
base64 += cset[a] + cset[b] + cset[c];//+ '=';
}
return base64;
}
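// illustrative check of the url-safe alphabet above (index 62 is '-', 63 is '_',
// and no '=' padding gets appended), assuming buf2b64 is in scope:
console.log(buf2b64(new Uint8Array([0xff]).buffer));        // "_w"   (standard base64: "/w==")
console.log(buf2b64(new Uint8Array([0xfb, 0xef]).buffer));  // "--8"  (standard base64: "++8=")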
function get_chunksize(filesize) {
var chunksize = 1024 * 1024;
var stepsize = 512 * 1024;
var chunksize = 1024 * 1024,
stepsize = 512 * 1024;
while (true) {
for (var mul = 1; mul <= 2; mul++) {
var nchunks = Math.ceil(filesize / chunksize);
@@ -906,25 +902,11 @@ function up2k_init(have_crypto) {
}
}
function ensure_rendered(func) {
var hidden = false;
var keys = ['hidden', 'msHidden', 'webkitHidden'];
for (var a = 0; a < keys.length; a++)
if (typeof document[keys[a]] !== "undefined")
hidden = document[keys[a]];
if (hidden)
return func();
window.requestAnimationFrame(func);
}
function exec_hash() {
var t = st.todo.hash.shift();
st.busy.hash.push(t);
st.bytes.hashed += t.size;
t.bytes_uploaded = 0;
t.t1 = new Date().getTime();
var bpend = 0,
nchunk = 0,
@@ -936,13 +918,14 @@ function up2k_init(have_crypto) {
pvis.move(t.n, 'bz');
var segm_next = function () {
if (nchunk >= nchunks || (bpend > chunksize && bpend >= 32 * 1024 * 1024))
if (nchunk >= nchunks || (bpend > chunksize && bpend >= min_filebuf))
return false;
var reader = new FileReader(),
nch = nchunk++,
car = nch * chunksize,
cdr = car + chunksize;
cdr = car + chunksize,
t0 = Date.now();
if (cdr >= t.size)
cdr = t.size;
@@ -950,9 +933,19 @@ function up2k_init(have_crypto) {
bpend += cdr - car;
reader.onload = function (e) {
if (!min_filebuf && nch == 1) {
min_filebuf = 1;
var td = Date.now() - t0;
if (td > 50) {
ebi('u2foot').innerHTML += "<p>excessive filereader latency (" + td + " ms), increasing readahead</p>";
min_filebuf = 32 * 1024 * 1024;
}
}
hash_calc(nch, e.target.result);
};
reader.onerror = segm_err;
reader.onerror = function () {
alert('y o u b r o k e i t\nerror: ' + reader.error);
};
reader.readAsArrayBuffer(
bobslice.call(t.fobj, car, cdr));
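// illustrative sketch of the readahead probe above: time one FileReader
// round-trip and, if it is slow (>50ms), allow up to 32 MiB of chunks to be
// buffered ahead instead of roughly one. standalone version; `file` is any Blob:
function probe_readahead(file, cb) {
	var t0 = Date.now(),
		fr = new FileReader();
	fr.onload = function () {
		var td = Date.now() - t0;
		cb(td > 50 ? 32 * 1024 * 1024 : 1);  // bytes of permitted readahead
	};
	fr.readAsArrayBuffer(file.slice(0, 1024 * 1024));
}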
@@ -963,8 +956,9 @@ function up2k_init(have_crypto) {
while (segm_next());
var hash_done = function (hashbuf) {
var hslice = new Uint8Array(hashbuf).subarray(0, 32);
var b64str = buf2b64(hslice).replace(/=$/, '');
var hslice = new Uint8Array(hashbuf).subarray(0, 32),
b64str = buf2b64(hslice).replace(/=$/, '');
hashtab[nch] = b64str;
t.hash.push(nch);
pvis.hashed(t);
@@ -978,7 +972,7 @@ function up2k_init(have_crypto) {
t.hash.push(hashtab[a]);
}
t.t2 = new Date().getTime();
t.t2 = Date.now();
if (t.n == 0 && window.location.hash == '#dbg') {
var spd = (t.size / ((t.t2 - t.t1) / 1000.)) / (1024 * 1024.);
alert('{0} ms, {1} MB/s\n'.format(t.t2 - t.t1, spd.toFixed(3)) + t.hash.join('\n'));
@@ -990,20 +984,17 @@ function up2k_init(have_crypto) {
st.todo.handshake.push(t);
};
if (have_crypto)
crypto.subtle.digest('SHA-512', buf).then(hash_done);
else {
if (subtle)
subtle.digest('SHA-512', buf).then(hash_done);
else setTimeout(function () {
var hasher = new asmCrypto.Sha512();
hasher.process(new Uint8Array(buf));
hasher.finish();
hash_done(hasher.result);
}
};
var segm_err = function () {
alert('y o u b r o k e i t\nerror: ' + reader.error);
}, 1);
};
t.t1 = Date.now();
segm_next();
}
@@ -1022,8 +1013,9 @@ function up2k_init(have_crypto) {
var response = JSON.parse(xhr.responseText);
if (!response.name) {
var msg = '';
var smsg = '';
var msg = '',
smsg = '';
if (!response || !response.hits || !response.hits.length) {
msg = 'not found on server';
smsg = '404';
@@ -1031,7 +1023,7 @@ function up2k_init(have_crypto) {
else {
smsg = 'found';
var hit = response.hits[0],
msg = linksplit(esc(hit.rp)).join(''),
msg = linksplit(hit.rp).join(''),
tr = unix2iso(hit.ts),
tu = unix2iso(t.lmod),
diff = parseInt(t.lmod) - parseInt(hit.ts),
@@ -1053,13 +1045,14 @@ function up2k_init(have_crypto) {
if (response.name !== t.name) {
// file exists; server renamed us
t.name = response.name;
pvis.seth(t.n, 0, linksplit(esc(t.purl + t.name)).join(' '));
pvis.seth(t.n, 0, linksplit(t.purl + t.name).join(' '));
}
var chunksize = get_chunksize(t.size);
var cdr_idx = Math.ceil(t.size / chunksize) - 1;
var cdr_sz = (t.size % chunksize) || chunksize;
var cbd = [];
var chunksize = get_chunksize(t.size),
cdr_idx = Math.ceil(t.size / chunksize) - 1,
cdr_sz = (t.size % chunksize) || chunksize,
cbd = [];
for (var a = 0; a <= cdr_idx; a++) {
cbd.push(a == cdr_idx ? cdr_sz : chunksize);
}
@@ -1080,8 +1073,9 @@ function up2k_init(have_crypto) {
pvis.setat(t.n, cbd);
pvis.prog(t, 0, cbd[0]);
var done = true;
var msg = '&#x1f3b7;&#x1f41b;';
var done = true,
msg = '&#x1f3b7;&#x1f41b;';
if (t.postlist.length > 0) {
for (var a = 0; a < t.postlist.length; a++)
st.todo.upload.push({
@@ -1098,10 +1092,12 @@ function up2k_init(have_crypto) {
if (done) {
t.done = true;
st.bytes.uploaded += t.size - t.bytes_uploaded;
var spd1 = (t.size / ((t.t2 - t.t1) / 1000.)) / (1024 * 1024.);
var spd2 = (t.size / ((t.t4 - t.t3) / 1000.)) / (1024 * 1024.);
var spd1 = (t.size / ((t.t2 - t.t1) / 1000.)) / (1024 * 1024.),
spd2 = (t.size / ((t.t4 - t.t3) / 1000.)) / (1024 * 1024.);
pvis.seth(t.n, 2, 'hash {0}, up {1} MB/s'.format(
spd1.toFixed(2), spd2.toFixed(2)));
pvis.move(t.n, 'ok');
}
else t.t4 = undefined;
@@ -1168,16 +1164,18 @@ function up2k_init(have_crypto) {
var upt = st.todo.upload.shift();
st.busy.upload.push(upt);
var npart = upt.npart;
var t = st.files[upt.nfile];
var npart = upt.npart,
t = st.files[upt.nfile];
if (!t.t3)
t.t3 = new Date().getTime();
t.t3 = Date.now();
pvis.seth(t.n, 1, "🚀 send");
var chunksize = get_chunksize(t.size);
var car = npart * chunksize;
var cdr = car + chunksize;
var chunksize = get_chunksize(t.size),
car = npart * chunksize,
cdr = car + chunksize;
if (cdr >= t.size)
cdr = t.size;
@@ -1193,7 +1191,7 @@ function up2k_init(have_crypto) {
st.busy.upload.splice(st.busy.upload.indexOf(upt), 1);
t.postlist.splice(t.postlist.indexOf(npart), 1);
if (t.postlist.length == 0) {
t.t4 = new Date().getTime();
t.t4 = Date.now();
pvis.seth(t.n, 1, 'verifying');
st.todo.handshake.unshift(t);
}
@@ -1242,20 +1240,24 @@ function up2k_init(have_crypto) {
onresize();
function desc_show(e) {
var msg = this.getAttribute('alt');
msg = msg.replace(/\$N/g, "<br />");
var cdesc = ebi('u2cdesc');
cdesc.innerHTML = msg;
var cfg = sread('tooltips');
if (cfg !== null && cfg != '1')
return;
var msg = this.getAttribute('alt'),
cdesc = ebi('u2cdesc');
cdesc.innerHTML = msg.replace(/\$N/g, "<br />");
cdesc.setAttribute('class', 'show');
}
function desc_hide(e) {
ebi('u2cdesc').setAttribute('class', '');
}
var o = document.querySelectorAll('#u2conf *[alt]');
var o = QSA('#u2conf *[alt]');
for (var a = o.length - 1; a >= 0; a--) {
o[a].parentNode.getElementsByTagName('input')[0].setAttribute('alt', o[a].getAttribute('alt'));
}
var o = document.querySelectorAll('#u2conf *[alt]');
var o = QSA('#u2conf *[alt]');
for (var a = 0; a < o.length; a++) {
o[a].onfocus = desc_show;
o[a].onblur = desc_hide;
@@ -1309,15 +1311,22 @@ function up2k_init(have_crypto) {
}
function set_fsearch(new_state) {
var perms = document.body.getAttribute('perms');
var read_only = false;
var perms = document.body.getAttribute('perms'),
fixed = false;
if (!ebi('fsearch')) {
new_state = false;
}
else if (perms && perms.indexOf('write') === -1) {
new_state = true;
read_only = true;
else if (perms) {
perms = perms.split(' ');
if (!has(perms, 'write')) {
new_state = true;
fixed = true;
}
if (!has(perms, 'read')) {
new_state = false;
fixed = true;
}
}
if (new_state !== undefined) {
@@ -1326,16 +1335,16 @@ function up2k_init(have_crypto) {
}
try {
document.querySelector('label[for="fsearch"]').style.opacity = read_only ? '0' : '1';
QS('label[for="fsearch"]').style.display = QS('#fsearch').style.display = fixed ? 'none' : '';
}
catch (ex) { }
try {
var fun = fsearch ? 'add' : 'remove';
ebi('op_up2k').classList[fun]('srch');
var fun = fsearch ? 'add' : 'remove',
ico = fsearch ? '🔎' : '🚀',
desc = fsearch ? 'Search' : 'Upload';
var ico = fsearch ? '🔎' : '🚀';
var desc = fsearch ? 'Search' : 'Upload';
ebi('op_up2k').classList[fun]('srch');
ebi('u2bm').innerHTML = ico + ' <sup>' + desc + '</sup>';
}
catch (ex) { }
@@ -1402,5 +1411,5 @@ function warn_uploader_busy(e) {
}
if (document.querySelector('#op_up2k.act'))
if (QS('#op_up2k.act'))
goto_up2k();

View File

@@ -19,6 +19,11 @@
color: #f87;
padding: .5em;
}
#u2err.msg {
color: #999;
padding: .5em;
font-size: .9em;
}
#u2btn {
color: #eee;
background: #555;
@@ -86,12 +91,15 @@
font-family: sans-serif;
width: auto;
}
#u2tab tr+tr:hover td {
#u2tab tbody tr:hover td {
background: #222;
}
#u2cards {
margin: 2.5em auto -2.5em auto;
padding: 1em 0 .3em 1em;
margin: 1.5em auto -2.5em auto;
white-space: nowrap;
text-align: center;
overflow: hidden;
}
#u2cards.w {
width: 45em;
@@ -110,10 +118,15 @@
border-radius: 0 .4em 0 0;
}
#u2cards a.act {
border-width: 1px 1px 0 1px;
padding-bottom: .5em;
border-width: 1px 1px .1em 1px;
border-radius: .3em .3em 0 0;
margin-left: -1px;
background: transparent;
background: linear-gradient(to bottom, #464, #333 80%);
box-shadow: 0 -.17em .67em #280;
border-color: #7c5 #583 #333 #583;
position: relative;
color: #fd7;
}
#u2cards span {
color: #fff;
@@ -134,12 +147,13 @@
outline: none;
}
#u2conf .txtbox {
width: 4em;
width: 3em;
color: #fff;
background: #444;
border: 1px solid #777;
font-size: 1.2em;
padding: .15em 0;
height: 1.05em;
}
#u2conf .txtbox.err {
background: #922;
@@ -151,13 +165,12 @@
border-radius: .1em;
font-size: 1.5em;
padding: .1em 0;
margin: 0 -.25em;
margin: 0 -1px;
width: 1.5em;
height: 1em;
display: inline-block;
position: relative;
line-height: 1em;
bottom: -.08em;
bottom: -0.08em;
}
#u2conf input+a {
background: #d80;
@@ -168,7 +181,6 @@
height: 1em;
padding: .4em 0;
display: block;
user-select: none;
border-radius: .25em;
}
#u2conf input[type="checkbox"] {
@@ -208,12 +220,13 @@
text-align: center;
overflow: hidden;
margin: 0 -2em;
height: 0;
padding: 0 1em;
height: 0;
opacity: .1;
transition: all 0.14s ease-in-out;
border-radius: .4em;
transition: all 0.14s ease-in-out;
box-shadow: 0 .2em .5em #222;
border-radius: .4em;
z-index: 1;
}
#u2cdesc.show {
padding: 1em;
@@ -256,7 +269,10 @@ html.light #u2cards a {
background: linear-gradient(to bottom, #eee, #fff);
}
html.light #u2cards a.act {
color: #037;
background: inherit;
box-shadow: 0 -.17em .67em #0ad;
border-color: #09c #05a #eee #05a;
}
html.light #u2conf .txtbox {
background: #fff;
@@ -272,4 +288,10 @@ html.light #u2cdesc {
}
html.light #op_up2k.srch #u2btn {
border-color: #a80;
}
}
html.light #u2foot {
color: #000;
}
html.light #u2tab tbody tr:hover td {
background: #fff;
}

View File

@@ -36,7 +36,7 @@
<table id="u2conf">
<tr>
<td>parallel uploads</td>
<td><br />parallel uploads:</td>
<td rowspan="2">
<input type="checkbox" id="multitask" />
<label for="multitask" alt="continue hashing other files while uploading">🏃</label>
@@ -59,9 +59,9 @@
</tr>
<tr>
<td>
<a href="#" id="nthread_sub">&ndash;</a>
<input class="txtbox" id="nthread" value="2" />
<a href="#" id="nthread_add">+</a>
<a href="#" id="nthread_sub">&ndash;</a><input
class="txtbox" id="nthread" value="2"/><a
href="#" id="nthread_add">+</a><br />&nbsp;
</td>
</tr>
</table>
@@ -99,5 +99,5 @@
</table>
<p id="u2foot"></p>
<p id="u2footfoot">( if you don't need lastmod timestamps, resumable uploads or progress bars just use the <a href="#" id="u2nope">basic uploader</a>)</p>
<p id="u2footfoot" data-perm="write">( you can use the <a href="#" id="u2nope">basic uploader</a> if you don't need lastmod timestamps, resumable uploads, or progress bars )</p>
</div>

View File

@@ -6,7 +6,8 @@ if (!window['console'])
};
var clickev = window.Touch ? 'touchstart' : 'click';
var clickev = window.Touch ? 'touchstart' : 'click',
ANDROID = /(android)/i.test(navigator.userAgent);
// error handler for mobile devices
@@ -49,9 +50,11 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
}
function ebi(id) {
return document.getElementById(id);
}
var ebi = document.getElementById.bind(document),
QS = document.querySelector.bind(document),
QSA = document.querySelectorAll.bind(document),
mknod = document.createElement.bind(document);
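// note on the .bind(document) above: the bare method loses its `this` when
// assigned to a plain variable, and browsers then throw "Illegal invocation":
var qs_bad = document.querySelector;                // qs_bad('#ops') -> TypeError
var qs_ok = document.querySelector.bind(document);  // qs_ok('#ops')  -> works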
function ev(e) {
e = e || window.event;
@@ -89,7 +92,7 @@ if (!String.startsWith) {
// https://stackoverflow.com/a/950146
function import_js(url, cb) {
var head = document.head || document.getElementsByTagName('head')[0];
var script = document.createElement('script');
var script = mknod('script');
script.type = 'text/javascript';
script.src = url;
@@ -132,7 +135,7 @@ function clmod(obj, cls, add) {
function sortfiles(nodes) {
var sopts = jread('fsort', [["lead", -1, ""], ["href", 1, ""]]);
var sopts = jread('fsort', [["href", 1, ""]]);
try {
var is_srch = false;
@@ -149,6 +152,9 @@ function sortfiles(nodes) {
if (!name)
continue;
if (name == 'ts')
typ = 'int';
if (name.indexOf('tags/') === 0) {
name = name.slice(5);
for (var b = 0, bb = nodes.length; b < bb; b++)
@@ -160,8 +166,12 @@ function sortfiles(nodes) {
if ((v + '').indexOf('<a ') === 0)
v = v.split('>')[1];
else if (name == "href" && v)
else if (name == "href" && v) {
if (v.slice(-1) == '/')
v = '\t' + v;
v = uricom_dec(v)[0]
}
nodes[b]._sv = v;
}
@@ -195,6 +205,8 @@ function sortfiles(nodes) {
}
catch (ex) {
console.log("failed to apply sort config: " + ex);
console.log("resetting fsort " + sread('fsort'))
localStorage.removeItem('fsort');
}
return nodes;
}
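// illustrative sketch of why the '\t' prefix above sorts folders first: tab
// (0x09) compares lower than any printable character, so tab-prefixed keys
// win every lexicographic comparison against plain filenames:
var keys = ['banana.txt', '\tapple/', 'cherry.txt', '\tzebra/'];
keys.sort();
console.log(keys);  // ["\tapple/", "\tzebra/", "banana.txt", "cherry.txt"]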
@@ -274,7 +286,7 @@ function makeSortable(table, cb) {
(function () {
var ops = document.querySelectorAll('#ops>a');
var ops = QSA('#ops>a');
for (var a = 0; a < ops.length; a++) {
ops[a].onclick = opclick;
}
@@ -289,25 +301,25 @@ function opclick(e) {
swrite('opmode', dest || null);
var input = document.querySelector('.opview.act input:not([type="hidden"])')
var input = QS('.opview.act input:not([type="hidden"])')
if (input)
input.focus();
}
function goto(dest) {
var obj = document.querySelectorAll('.opview.act');
var obj = QSA('.opview.act');
for (var a = obj.length - 1; a >= 0; a--)
clmod(obj[a], 'act');
obj = document.querySelectorAll('#ops>a');
obj = QSA('#ops>a');
for (var a = obj.length - 1; a >= 0; a--)
clmod(obj[a], 'act');
if (dest) {
var ui = ebi('op_' + dest);
clmod(ui, 'act', true);
document.querySelector('#ops>a[data-dest=' + dest + ']').className += " act";
QS('#ops>a[data-dest=' + dest + ']').className += " act";
var fn = window['goto_' + dest];
if (fn)
@@ -346,12 +358,16 @@ function linksplit(rp) {
link = rp.slice(0, ofs + 1);
rp = rp.slice(ofs + 1);
}
var vlink = link;
if (link.indexOf('/') !== -1)
vlink = link.slice(0, -1) + '<span>/</span>';
var vlink = esc(link),
elink = uricom_enc(link);
ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
apath += link;
if (link.indexOf('/') !== -1) {
vlink = vlink.slice(0, -1) + '<span>/</span>';
elink = elink.slice(0, -3) + '/';
}
ret.push('<a href="' + apath + elink + '">' + vlink + '</a>');
apath += elink;
}
return ret;
}
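// illustrative sketch of the split above: the visible text is html-escaped
// while the href is percent-encoded, so they can differ for the same segment.
// linkseg is a hypothetical stand-in, not the actual esc()/uricom_enc():
function linkseg(seg) {
	var vis = seg.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
	return '<a href="' + encodeURIComponent(seg) + '/">' + vis + '</a>';
}
console.log(linkseg('a&b #1'));  // <a href="a%26b%20%231/">a&amp;b #1</a>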
@@ -453,11 +469,15 @@ function jwrite(key, val) {
}
function icfg_get(name, defval) {
return parseInt(fcfg_get(name, defval));
}
function fcfg_get(name, defval) {
var o = ebi(name);
var val = parseInt(sread(name));
var val = parseFloat(sread(name));
if (isNaN(val))
return parseInt(o ? o.value : defval);
return parseFloat(o ? o.value : defval);
if (o)
o.value = val;
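// illustrative difference after the parseFloat change above: a stored "2.5"
// now survives fcfg_get, while icfg_get still truncates it to an int:
console.log(parseFloat("2.5"), parseInt("2.5", 10));  // 2.5 2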

View File

@@ -32,9 +32,13 @@ r
# and a folder where anyone can upload
# but nobody can see the contents
# and set the e2d flag to enable the uploads database
# and set the nodupe flag to reject duplicate uploads
/home/ed/inc
/dump
w
c e2d
c nodupe
# this entire config file can be replaced with these arguments:
# -u ed:123 -u k:k -v .::r:aed -v priv:priv:rk:aed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w

32
docs/minimal-up2k.html Normal file
View File

@@ -0,0 +1,32 @@
<!--
save this as .epilogue.html inside a write-only folder to declutter the UI; it makes it look like
https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png
-->
<style>
/* make the up2k ui REALLY minimal by hiding a bunch of stuff: */
#ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
#u2cards /* and the upload progress tabs */
{display: none !important} /* do it! */
/* add some margins because now it's weird */
.opview {margin-top: 2.5em}
#op_up2k {margin-top: 3em}
/* and embiggen the upload button */
#u2conf #u2btn, #u2btn {padding:1.5em 0}
/* adjust the button area a bit */
#u2conf.has_btn {width: 35em !important; margin: 5em auto}
</style>
<a href="#" onclick="this.parentNode.innerHTML='';">show advanced options</a>

View File

@@ -171,7 +171,7 @@ Range: bytes=26- Content-Range: bytes */26
var tsh = [];
function convert_markdown(md_text, dest_dom) {
tsh.push(new Date().getTime());
tsh.push(Date.now());
while (tsh.length > 10)
tsh.shift();
if (tsh.length > 1) {

82
docs/nuitka.txt Normal file
View File

@@ -0,0 +1,82 @@
# recipe for building an exe with nuitka (extreme jank edition)
#
# NOTE: win7 and win10 builds both work on win10 but
# on win7 they immediately c0000005 in kernelbase.dll
#
# first install python-3.6.8-amd64.exe
# [x] add to path
#
# copypaste the rest of this file into cmd
rem from pypi
cd \users\ed\downloads
python -m pip install --user Nuitka-0.6.14.7.tar.gz
rem https://github.com/brechtsanders/winlibs_mingw/releases/download/10.2.0-11.0.0-8.0.0-r5/winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip
mkdir C:\Users\ed\AppData\Local\Nuitka\
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\10.2.0-11.0.0-8.0.0-r5\
copy c:\users\ed\downloads\winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\10.2.0-11.0.0-8.0.0-r5\winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip
rem https://github.com/ccache/ccache/releases/download/v3.7.12/ccache-3.7.12-windows-32.zip
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\v3.7.12\
copy c:\users\ed\downloads\ccache-3.7.12-windows-32.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\v3.7.12\ccache-3.7.12-windows-32.zip
rem https://dependencywalker.com/depends22_x64.zip
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\x86_64\
copy c:\users\ed\downloads\depends22_x64.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\x86_64\depends22_x64.zip
cd \
rd /s /q %appdata%\..\local\temp\pe-copyparty
cd \users\ed\downloads
python copyparty-sfx.py -h
cd %appdata%\..\local\temp\pe-copyparty\copyparty
python
import os, re
os.rename('../dep-j2/jinja2', '../jinja2')
os.rename('../dep-j2/markupsafe', '../markupsafe')
print("# nuitka dies if .__init__.stuff is imported")
with open('__init__.py','r',encoding='utf-8') as f:
t1 = f.read()
with open('util.py','r',encoding='utf-8') as f:
t2 = f.read().split('\n')[3:]
t2 = [x for x in t2 if 'from .__init__' not in x]
t = t1 + '\n'.join(t2)
with open('__init__.py','w',encoding='utf-8') as f:
f.write('\n')
with open('util.py','w',encoding='utf-8') as f:
f.write(t)
print("# local-imports fail, prefix module names")
ptn = re.compile(r'^( *from )(\.[^ ]+ import .*)')
for d, _, fs in os.walk('.'):
for f in fs:
fp = os.path.join(d, f)
if not fp.endswith('.py'):
continue
t = ''
with open(fp,'r',encoding='utf-8') as f:
for ln in [x.rstrip('\r\n') for x in f]:
m = ptn.match(ln)
if not m:
t += ln + '\n'
continue
p1, p2 = m.groups()
t += "{}copyparty{}\n".format(p1, p2).replace("__init__", "util")
with open(fp,'w',encoding='utf-8') as f:
f.write(t)
exit()
cd ..
rd /s /q bout & python -m nuitka --standalone --onefile --windows-onefile-tempdir --python-flag=no_site --assume-yes-for-downloads --include-data-dir=copyparty\web=copyparty\web --include-data-dir=copyparty\res=copyparty\res --run --output-dir=bout --mingw64 --include-package=markupsafe --include-package=jinja2 copyparty

View File

@@ -117,7 +117,7 @@ cd sfx
ver=
git describe --tags >/dev/null 2>/dev/null && {
git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//; s/-g?/./g')";
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//')";
t_ver=
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+$' && {
@@ -163,7 +163,7 @@ find .. -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
find .. -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
echo use smol web deps
rm -f copyparty/web/deps/*.full.* copyparty/web/{Makefile,splash.js}
rm -f copyparty/web/deps/*.full.* copyparty/web/Makefile
# it's fine dw
grep -lE '\.full\.(js|css)' copyparty/web/* |
@@ -199,12 +199,19 @@ find | grep -E '\.(js|css|html)$' | while IFS= read -r f; do
tmv "$f"
done
echo gen tarlist
for d in copyparty dep-j2; do find $d -type f; done |
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
(grep -vE 'gz$' list1; grep -E 'gz$' list1) >list
echo creating tar
args=(--owner=1000 --group=1000)
[ "$OSTYPE" = msys ] &&
args=()
tar -cf tar "${args[@]}" --numeric-owner copyparty dep-j2
tar -cf tar "${args[@]}" --numeric-owner -T list
pc=bzip2
pe=bz2

View File

@@ -2,7 +2,8 @@
# coding: latin-1
from __future__ import print_function, unicode_literals
import os, sys, time, shutil, threading, tarfile, hashlib, platform, tempfile, traceback
import re, os, sys, time, shutil, signal, threading, tarfile, hashlib, platform, tempfile, traceback
import subprocess as sp
"""
run me with any version of python, i will unpack and run copyparty
@@ -26,22 +27,21 @@ CKSUM = None
STAMP = None
PY2 = sys.version_info[0] == 2
WINDOWS = sys.platform == "win32"
WINDOWS = sys.platform in ["win32", "msys"]
sys.dont_write_bytecode = True
me = os.path.abspath(os.path.realpath(__file__))
cpp = None
def eprint(*args, **kwargs):
kwargs["file"] = sys.stderr
print(*args, **kwargs)
def eprint(*a, **ka):
ka["file"] = sys.stderr
print(*a, **ka)
def msg(*args, **kwargs):
if args:
args = ["[SFX]", args[0]] + list(args[1:])
def msg(*a, **ka):
if a:
a = ["[SFX]", a[0]] + list(a[1:])
eprint(*args, **kwargs)
eprint(*a, **ka)
# skip 1
@@ -156,6 +156,9 @@ def encode(data, size, cksum, ver, ts):
skip = True
continue
if ln.strip().startswith("# fmt: "):
continue
unpk += ln + "\n"
for k, v in [
@@ -209,11 +212,11 @@ def yieldfile(fn):
def hashfile(fn):
hasher = hashlib.md5()
h = hashlib.md5()
for block in yieldfile(fn):
hasher.update(block)
h.update(block)
return hasher.hexdigest()
return h.hexdigest()
def unpack():
@@ -222,9 +225,10 @@ def unpack():
tag = "v" + str(STAMP)
withpid = "{}.{}".format(name, os.getpid())
top = tempfile.gettempdir()
final = os.path.join(top, name)
mine = os.path.join(top, withpid)
tar = os.path.join(mine, "tar")
opj = os.path.join
final = opj(top, name)
mine = opj(top, withpid)
tar = opj(mine, "tar")
try:
if tag in os.listdir(final):
@@ -233,28 +237,24 @@ def unpack():
except:
pass
nwrite = 0
sz = 0
os.mkdir(mine)
with open(tar, "wb") as f:
for buf in get_payload():
nwrite += len(buf)
sz += len(buf)
f.write(buf)
if nwrite != SIZE:
t = "\n\n bad file:\n expected {} bytes, got {}\n".format(SIZE, nwrite)
raise Exception(t)
cksum = hashfile(tar)
if cksum != CKSUM:
t = "\n\n bad file:\n {} expected,\n {} obtained\n".format(CKSUM, cksum)
raise Exception(t)
ck = hashfile(tar)
if ck != CKSUM:
t = "\n\nexpected {} ({} byte)\nobtained {} ({} byte)\nsfx corrupt"
raise Exception(t.format(CKSUM, SIZE, ck, sz))
with tarfile.open(tar, "r:bz2") as tf:
tf.extractall(mine)
os.remove(tar)
with open(os.path.join(mine, tag), "wb") as f:
with open(opj(mine, tag), "wb") as f:
f.write(b"h\n")
try:
@@ -272,25 +272,25 @@ def unpack():
except:
pass
for fn in u8(os.listdir(top)):
if fn.startswith(name) and fn != withpid:
try:
old = opj(top, fn)
if time.time() - os.path.getmtime(old) > 86400:
shutil.rmtree(old)
except:
pass
try:
os.symlink(mine, final)
except:
try:
os.rename(mine, final)
return final
except:
msg("reloc fail,", mine)
return mine
for fn in u8(os.listdir(top)):
if fn.startswith(name) and fn not in [name, withpid]:
try:
old = os.path.join(top, fn)
if time.time() - os.path.getmtime(old) > 10:
shutil.rmtree(old)
except:
pass
return final
return mine
def get_payload():
@@ -307,37 +307,33 @@ def get_payload():
if ofs < 0:
raise Exception("could not find archive marker")
# start reading from the final b"\n"
# start at final b"\n"
fpos = ofs + len(ptn) - 3
# msg("tar found at", fpos)
f.seek(fpos)
dpos = 0
leftovers = b""
rem = b""
while True:
rbuf = f.read(1024 * 32)
if rbuf:
buf = leftovers + rbuf
buf = rem + rbuf
ofs = buf.rfind(b"\n")
if len(buf) <= 4:
leftovers = buf
rem = buf
continue
if ofs >= len(buf) - 4:
leftovers = buf[ofs:]
rem = buf[ofs:]
buf = buf[:ofs]
else:
leftovers = b"\n# "
rem = b"\n# "
else:
buf = leftovers
buf = rem
fpos += len(buf) + 1
buf = (
buf.replace(b"\n# ", b"")
.replace(b"\n#r", b"\r")
.replace(b"\n#n", b"\n")
)
dpos += len(buf) - 1
for a, b in [[b"\n# ", b""], [b"\n#r", b"\r"], [b"\n#n", b"\n"]]:
buf = buf.replace(a, b)
dpos += len(buf) - 1
yield buf
if not rbuf:
@@ -361,7 +357,7 @@ def utime(top):
def confirm(rv):
msg()
msg(traceback.format_exc())
msg("retcode", rv if rv else traceback.format_exc())
msg("*** hit enter to exit ***")
try:
raw_input() if PY2 else input()
@@ -371,10 +367,8 @@ def confirm(rv):
sys.exit(rv)
def run(tmp, j2ver):
global cpp
msg("jinja2:", j2ver or "bundled")
def run(tmp, j2):
msg("jinja2:", j2 or "bundled")
msg("sfxdir:", tmp)
msg()
@@ -384,7 +378,6 @@ def run(tmp, j2ver):
fd = os.open(tmp, os.O_RDONLY)
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
tmp = os.readlink(tmp) # can't flock a symlink, even with O_NOFOLLOW
except Exception as ex:
if not WINDOWS:
msg("\033[31mflock:", repr(ex))
@@ -394,22 +387,39 @@ def run(tmp, j2ver):
t.start()
ld = [tmp, os.path.join(tmp, "dep-j2")]
if j2ver:
if j2:
del ld[-1]
if any([re.match(r"^-.*j[0-9]", x) for x in sys.argv]):
run_s(ld)
else:
run_i(ld)
def run_i(ld):
for x in ld:
sys.path.insert(0, x)
try:
from copyparty.__main__ import main as copyparty
from copyparty.__main__ import main as p
copyparty()
p()
except SystemExit as ex:
if ex.code:
confirm(ex.code)
except:
confirm(1)
def run_s(ld):
# fmt: off
c = "import sys,runpy;" + "".join(['sys.path.insert(0,r"' + x + '");' for x in ld]) + 'runpy.run_module("copyparty",run_name="__main__")'
c = [str(x) for x in [sys.executable, "-c", c] + list(sys.argv[1:])]
# fmt: on
msg("\n", c, "\n")
p = sp.Popen(c)
def bye(*a):
p.send_signal(signal.SIGINT)
signal.signal(signal.SIGTERM, bye)
p.wait()
raise SystemExit(p.returncode)
def main():
@@ -443,14 +453,23 @@ def main():
# skip 0
tmp = unpack()
tmp = os.path.realpath(unpack())
try:
from jinja2 import __version__ as j2ver
from jinja2 import __version__ as j2
except:
j2ver = None
j2 = None
run(tmp, j2ver)
try:
run(tmp, j2)
except SystemExit as ex:
c = ex.code
if c not in [0, -15]:
confirm(ex.code)
except KeyboardInterrupt:
pass
except:
confirm(0)
if __name__ == "__main__":

View File

@@ -17,14 +17,15 @@ __license__ = "MIT"
__url__ = "https://github.com/9001/copyparty/"
def get_spd(nbyte, nsec):
def get_spd(nbyte, nfiles, nsec):
if not nsec:
return "0.000 MB 0.000 sec 0.000 MB/s"
return "0.000 MB 0 files 0.000 sec 0.000 MB/s 0.000 f/s"
mb = nbyte / (1024 * 1024.0)
spd = mb / nsec
nspd = nfiles / nsec
return f"{mb:.3f} MB {nsec:.3f} sec {spd:.3f} MB/s"
return f"{mb:.3f} MB {nfiles} files {nsec:.3f} sec {spd:.3f} MB/s {nspd:.3f} f/s"
class Inf(object):
@@ -36,6 +37,7 @@ class Inf(object):
self.mtx_reports = threading.Lock()
self.n_byte = 0
self.n_file = 0
self.n_sec = 0
self.n_done = 0
self.t0 = t0
@@ -63,7 +65,8 @@ class Inf(object):
continue
msgs = msgs[-64:]
msgs = [f"{get_spd(self.n_byte, self.n_sec)} {x}" for x in msgs]
spd = get_spd(self.n_byte, len(self.reports), self.n_sec)
msgs = [f"{spd} {x}" for x in msgs]
print("\n".join(msgs))
def report(self, fn, n_byte, n_sec):
@@ -131,8 +134,9 @@ def main():
num_threads = 8
read_sz = 32 * 1024
targs = (q, inf, read_sz)
for _ in range(num_threads):
thr = threading.Thread(target=worker, args=(q, inf, read_sz,))
thr = threading.Thread(target=worker, args=targs)
thr.daemon = True
thr.start()
@@ -151,14 +155,14 @@ def main():
log = inf.reports
log.sort()
for nbyte, nsec, fn in log[-64:]:
print(f"{get_spd(nbyte, nsec)} {fn}")
spd = get_spd(nbyte, len(log), nsec)
print(f"{spd} {fn}")
print()
print("\n".join(inf.errors))
print(get_spd(inf.n_byte, t2 - t0))
print(get_spd(inf.n_byte, len(log), t2 - t0))
if __name__ == "__main__":
main()

View File

@@ -5,22 +5,7 @@ from __future__ import print_function
import os
import sys
from shutil import rmtree
setuptools_available = True
try:
# need setuptools to build wheel
from setuptools import setup, Command, find_packages
except ImportError:
# works in a pinch
setuptools_available = False
from distutils.core import setup, Command
from distutils.spawn import spawn
if "bdist_wheel" in sys.argv and not setuptools_available:
print("cannot build wheel without setuptools")
sys.exit(1)
from setuptools import setup, Command, find_packages
NAME = "copyparty"
@@ -100,9 +85,8 @@ args = {
"author_email": "copyparty@ocv.me",
"url": "https://github.com/9001/copyparty",
"license": "MIT",
"data_files": data_files,
"classifiers": [
"Development Status :: 3 - Alpha",
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
@@ -120,35 +104,16 @@ args = {
"Environment :: Console",
"Environment :: No Input/Output (Daemon)",
"Topic :: Communications :: File Sharing",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers",
],
"include_package_data": True,
"data_files": data_files,
"packages": find_packages(),
"install_requires": ["jinja2"],
"extras_require": {"thumbnails": ["Pillow"], "audiotags": ["mutagen"]},
"entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]},
"scripts": ["bin/copyparty-fuse.py"],
"cmdclass": {"clean2": clean2},
}
if setuptools_available:
args.update(
{
"packages": find_packages(),
"install_requires": ["jinja2"],
"extras_require": {"thumbnails": ["Pillow"]},
"include_package_data": True,
"entry_points": {
"console_scripts": ["copyparty = copyparty.__main__:main"]
},
"scripts": ["bin/copyparty-fuse.py"],
}
)
else:
args.update(
{
"packages": ["copyparty", "copyparty.stolen"],
"scripts": ["bin/copyparty-fuse.py"],
}
)
# import pprint
# pprint.PrettyPrinter().pprint(args)
# sys.exit(0)
setup(**args)

33
tests/run.py Executable file
View File

@@ -0,0 +1,33 @@
#!/usr/bin/env python3
import sys
import runpy
host = sys.argv[1]
sys.argv = sys.argv[:1] + sys.argv[2:]
sys.path.insert(0, ".")
def rp():
runpy.run_module("unittest", run_name="__main__")
if host == "vmprof":
rp()
elif host == "cprofile":
import cProfile
import pstats
log_fn = "cprofile.log"
cProfile.run("rp()", log_fn)
p = pstats.Stats(log_fn)
p.sort_stats(pstats.SortKey.CUMULATIVE).print_stats(64)
"""
python3.9 tests/run.py cprofile -v tests/test_httpcli.py
python3.9 -m pip install --user vmprof
python3.9 -m vmprof --lines -o vmprof.log tests/run.py vmprof -v tests/test_httpcli.py
"""

202
tests/test_httpcli.py Normal file
View File

@@ -0,0 +1,202 @@
#!/usr/bin/env python
# coding: utf-8
from __future__ import print_function, unicode_literals
import io
import os
import time
import shutil
import pprint
import tarfile
import unittest
from argparse import Namespace
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
from tests import util as tu
def hdr(query):
h = "GET /{} HTTP/1.1\r\nCookie: cppwd=o\r\nConnection: close\r\n\r\n"
return h.format(query).encode("utf-8")
class Cfg(Namespace):
def __init__(self, a=[], v=[], c=None):
super(Cfg, self).__init__(
a=a,
v=v,
c=c,
ed=False,
no_zip=False,
no_scandir=False,
no_sendfile=True,
nih=True,
mtp=[],
mte="a",
**{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
)
class TestHttpCli(unittest.TestCase):
def test(self):
td = os.path.join(tu.get_ramdisk(), "vfs")
try:
shutil.rmtree(td)
except OSError:
pass
os.mkdir(td)
os.chdir(td)
self.dtypes = ["ra", "ro", "rx", "wa", "wo", "wx", "aa", "ao", "ax"]
self.can_read = ["ra", "ro", "aa", "ao"]
self.can_write = ["wa", "wo", "aa", "ao"]
self.fn = "g{:x}g".format(int(time.time() * 3))
allfiles = []
allvols = []
for top in self.dtypes:
allvols.append(top)
allfiles.append("/".join([top, self.fn]))
for s1 in self.dtypes:
p = "/".join([top, s1])
allvols.append(p)
allfiles.append(p + "/" + self.fn)
allfiles.append(p + "/n/" + self.fn)
for s2 in self.dtypes:
p = "/".join([top, s1, "n", s2])
os.makedirs(p)
allvols.append(p)
allfiles.append(p + "/" + self.fn)
for fp in allfiles:
with open(fp, "w") as f:
f.write("ok {}\n".format(fp))
for top in self.dtypes:
vcfg = []
for vol in allvols:
if not vol.startswith(top):
continue
mode = vol[-2]
usr = vol[-1]
if usr == "a":
usr = ""
if "/" not in vol:
vol += "/"
top, sub = vol.split("/", 1)
vcfg.append("{0}/{1}:{1}:{2}{3}".format(top, sub, mode, usr))
pprint.pprint(vcfg)
self.args = Cfg(v=vcfg, a=["o:o", "x:x"])
self.auth = AuthSrv(self.args, self.log)
vfiles = [x for x in allfiles if x.startswith(top)]
for fp in vfiles:
rok, wok = self.can_rw(fp)
furl = fp.split("/", 1)[1]
durl = furl.rsplit("/", 1)[0] if "/" in furl else ""
# file download
h, ret = self.curl(furl)
res = "ok " + fp in ret
print("[{}] {} {} = {}".format(fp, rok, wok, res))
if rok != res:
print("\033[33m{}\n# {}\033[0m".format(ret, furl))
self.fail()
# file browser: html
h, ret = self.curl(durl)
res = "'{}'".format(self.fn) in ret
print(res)
if rok != res:
print("\033[33m{}\n# {}\033[0m".format(ret, durl))
self.fail()
# file browser: json
url = durl + "?ls"
h, ret = self.curl(url)
res = '"{}"'.format(self.fn) in ret
print(res)
if rok != res:
print("\033[33m{}\n# {}\033[0m".format(ret, url))
self.fail()
# tar
url = durl + "?tar"
h, b = self.curl(url, True)
# with open(os.path.join(td, "tar"), "wb") as f:
# f.write(b)
try:
tar = tarfile.open(fileobj=io.BytesIO(b)).getnames()
except:
tar = []
tar = ["/".join([y for y in [top, durl, x] if y]) for x in tar]
tar = [[x] + self.can_rw(x) for x in tar]
tar_ok = [x[0] for x in tar if x[1]]
tar_ng = [x[0] for x in tar if not x[1]]
self.assertEqual([], tar_ng)
if durl.split("/")[-1] in self.can_read:
ref = [x for x in vfiles if self.in_dive(top + "/" + durl, x)]
for f in ref:
print("{}: {}".format("ok" if f in tar_ok else "NG", f))
ref.sort()
tar_ok.sort()
self.assertEqual(ref, tar_ok)
# stash
h, ret = self.put(url)
res = h.startswith("HTTP/1.1 200 ")
self.assertEqual(res, wok)
def can_rw(self, fp):
# lowest non-neutral folder declares permissions
expect = fp.split("/")[:-1]
for x in reversed(expect):
if x != "n":
expect = x
break
return [expect in self.can_read, expect in self.can_write]
def in_dive(self, top, fp):
# archiver bails at first inaccessible subvolume
top = top.strip("/").split("/")
fp = fp.split("/")
for f1, f2 in zip(top, fp):
if f1 != f2:
return False
for f in fp[len(top) :]:
if f == self.fn:
return True
if f not in self.can_read and f != "n":
return False
return True
def put(self, url):
buf = "PUT /{0} HTTP/1.1\r\nCookie: cppwd=o\r\nConnection: close\r\nContent-Length: {1}\r\n\r\nok {0}\n"
buf = buf.format(url, len(url) + 4).encode("utf-8")
conn = tu.VHttpConn(self.args, self.auth, self.log, buf)
HttpCli(conn).run()
return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
def curl(self, url, binary=False):
conn = tu.VHttpConn(self.args, self.auth, self.log, hdr(url))
HttpCli(conn).run()
if binary:
h, b = conn.s._reply.split(b"\r\n\r\n", 1)
return [h.decode("utf-8"), b]
return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
def log(self, src, msg, c=0):
# print(repr(msg))
pass

View File

@@ -3,18 +3,18 @@
from __future__ import print_function, unicode_literals
import os
import time
import json
import shutil
import tempfile
import unittest
import subprocess as sp # nosec
from textwrap import dedent
from argparse import Namespace
from copyparty.authsrv import AuthSrv
from copyparty import util
from tests import util as tu
class Cfg(Namespace):
def __init__(self, a=[], v=[], c=None):
@@ -51,52 +51,11 @@ class TestVFS(unittest.TestCase):
real = [x[0] for x in real]
return fsdir, real, virt
def runcmd(self, *argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
stdout, stderr = p.communicate()
stdout = stdout.decode("utf-8")
stderr = stderr.decode("utf-8")
return [p.returncode, stdout, stderr]
def chkcmd(self, *argv):
ok, sout, serr = self.runcmd(*argv)
if ok != 0:
raise Exception(serr)
return sout, serr
def get_ramdisk(self):
for vol in ["/dev/shm", "/Volumes/cptd"]: # nosec (singleton test)
if os.path.exists(vol):
return vol
if os.path.exists("/Volumes"):
devname, _ = self.chkcmd("hdiutil", "attach", "-nomount", "ram://8192")
devname = devname.strip()
print("devname: [{}]".format(devname))
for _ in range(10):
try:
_, _ = self.chkcmd(
"diskutil", "eraseVolume", "HFS+", "cptd", devname
)
return "/Volumes/cptd"
except Exception as ex:
print(repr(ex))
time.sleep(0.25)
raise Exception("ramdisk creation failed")
ret = os.path.join(tempfile.gettempdir(), "copyparty-test")
try:
os.mkdir(ret)
finally:
return ret
def log(self, src, msg, c=0):
pass
def test(self):
td = os.path.join(self.get_ramdisk(), "vfs")
td = os.path.join(tu.get_ramdisk(), "vfs")
try:
shutil.rmtree(td)
except OSError:
@@ -268,7 +227,7 @@ class TestVFS(unittest.TestCase):
self.assertEqual(list(v1), list(v2))
# config file parser
cfg_path = os.path.join(self.get_ramdisk(), "test.cfg")
cfg_path = os.path.join(tu.get_ramdisk(), "test.cfg")
with open(cfg_path, "wb") as f:
f.write(
dedent(

97
tests/util.py Normal file
View File

@@ -0,0 +1,97 @@
import os
import time
import jinja2
import tempfile
import subprocess as sp
from copyparty.util import Unrecv
J2_ENV = jinja2.Environment(loader=jinja2.BaseLoader)
J2_FILES = J2_ENV.from_string("{{ files|join('\n') }}")
def runcmd(*argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
stdout, stderr = p.communicate()
stdout = stdout.decode("utf-8")
stderr = stderr.decode("utf-8")
return [p.returncode, stdout, stderr]
def chkcmd(*argv):
ok, sout, serr = runcmd(*argv)
if ok != 0:
raise Exception(serr)
return sout, serr
def get_ramdisk():
for vol in ["/dev/shm", "/Volumes/cptd"]: # nosec (singleton test)
if os.path.exists(vol):
return vol
if os.path.exists("/Volumes"):
devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://32768")
devname = devname.strip()
print("devname: [{}]".format(devname))
for _ in range(10):
try:
_, _ = chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
return "/Volumes/cptd"
except Exception as ex:
print(repr(ex))
time.sleep(0.25)
raise Exception("ramdisk creation failed")
ret = os.path.join(tempfile.gettempdir(), "copyparty-test")
try:
os.mkdir(ret)
finally:
return ret
class NullBroker(object):
def put(*args):
pass
class VSock(object):
def __init__(self, buf):
self._query = buf
self._reply = b""
self.sendall = self.send
def recv(self, sz):
ret = self._query[:sz]
self._query = self._query[sz:]
return ret
def send(self, buf):
self._reply += buf
return len(buf)
class VHttpSrv(object):
def __init__(self):
self.broker = NullBroker()
aliases = ["splash", "browser", "browser2", "msg", "md", "mde"]
self.j2 = {x: J2_FILES for x in aliases}
class VHttpConn(object):
def __init__(self, args, auth, log, buf):
self.s = VSock(buf)
self.sr = Unrecv(self.s)
self.addr = ("127.0.0.1", "42069")
self.args = args
self.auth = auth
self.log_func = log
self.log_src = "a"
self.hsrv = VHttpSrv()
self.nbyte = 0
self.workload = 0
self.t0 = time.time()