Mirror of https://github.com/9001/copyparty.git, synced 2025-10-24 00:24:04 +00:00
Compare commits
173 Commits
| SHA1 |
|---|
| 0061d29534 |
| a891f34a93 |
| d6a1e62a95 |
| cda36ea8b4 |
| 909a76434a |
| 39348ef659 |
| 99d30edef3 |
| b63ab15bf9 |
| 485cb4495c |
| df018eb1f2 |
| 49aa47a9b8 |
| 7d20eb202a |
| c533da9129 |
| 5cba31a814 |
| 1d824cb26c |
| 83b903d60e |
| 9c8ccabe8e |
| b1f2c4e70d |
| 273ca0c8da |
| d6f516b34f |
| 83127858ca |
| d89329757e |
| 49ffec5320 |
| 2eaae2b66a |
| ea4441e25c |
| e5f34042f9 |
| 271096874a |
| 8efd780a72 |
| 41bcf7308d |
| d102bb3199 |
| d0bed95415 |
| 2528729971 |
| 292c18b3d0 |
| 0be7c5e2d8 |
| eb5aaddba4 |
| d8fd82bcb5 |
| 97be495861 |
| 8b53c159fc |
| 81e281f703 |
| 3948214050 |
| c5e9a643e7 |
| d25881d5c3 |
| 38d8d9733f |
| 118ebf668d |
| a86f09fa46 |
| dd4fb35c8f |
| 621eb4cf95 |
| deea66ad0b |
| bf99445377 |
| 7b54a63396 |
| 0fcb015f9a |
| 0a22b1ffb6 |
| 68cecc52ab |
| 53657ccfff |
| 96223fda01 |
| 374ff3433e |
| 5d63949e98 |
| 6b065d507d |
| e79997498a |
| f7ee02ec35 |
| 69dc433e1c |
| c880cd848c |
| 5752b6db48 |
| b36f905eab |
| 483dd527c6 |
| e55678e28f |
| 3f4a8b9d6f |
| 02a856ecb4 |
| 4dff726310 |
| cbc449036f |
| 8f53152220 |
| bbb1e165d6 |
| fed8d94885 |
| 58040cc0ed |
| 03d692db66 |
| 903f8e8453 |
| 405ae1308e |
| 8a0f583d71 |
| b6d7017491 |
| 0f0217d203 |
| a203e33347 |
| 3b8f697dd4 |
| 78ba16f722 |
| 0fcfe79994 |
| c0e6df4b63 |
| 322abdcb43 |
| 31100787ce |
| c57d721be4 |
| 3b5a03e977 |
| ed807ee43e |
| 073c130ae6 |
| 8810e0be13 |
| f93016ab85 |
| b19cf260c2 |
| db03e1e7eb |
| e0d975e36a |
| cfeb15259f |
| 3b3f8fc8fb |
| 88bd2c084c |
| bd367389b0 |
| 58ba71a76f |
| d03e34d55d |
| 24f239a46c |
| 2c0826f85a |
| c061461d01 |
| e7982a04fe |
| 33b91a7513 |
| 9bb1323e44 |
| e62bb807a5 |
| 3fc0d2cc4a |
| 0c786b0766 |
| 68c7528911 |
| 26e18ae800 |
| c30dc0b546 |
| f94aa46a11 |
| 403261a293 |
| c7d9cbb11f |
| 57e1c53cbb |
| 0754b553dd |
| 50661d941b |
| c5db7c1a0c |
| 2cef5365f7 |
| fbc4e94007 |
| 037ed5a2ad |
| 69dfa55705 |
| a79a5c4e3e |
| 7e80eabfe6 |
| 375b72770d |
| e2dd683def |
| 9eba50c6e4 |
| 5a579dba52 |
| e86c719575 |
| 0e87f35547 |
| b6d3d791a5 |
| c9c3302664 |
| c3e4d65b80 |
| 27a03510c5 |
| ed7727f7cb |
| 127ec10c0d |
| 5a9c0ad225 |
| 7e8daf650e |
| 0cf737b4ce |
| 74635e0113 |
| e5c4f49901 |
| e4654ee7f1 |
| e5d05c05ed |
| 73c4f99687 |
| 28c12ef3bf |
| eed82dbb54 |
| 2c4b4ab928 |
| 505a8fc6f6 |
| e4801d9b06 |
| 04f1b2cf3a |
| c06d928bb5 |
| ab09927e7b |
| 779437db67 |
| 28cbdb652e |
| 2b2415a7d8 |
| 746a8208aa |
| a2a041a98a |
| 10b436e449 |
| 4d62b34786 |
| 0546210687 |
| f8c11faada |
| 16d6e9be1f |
| aff8185f2e |
| 217d15fe81 |
| 171e93c201 |
| acc1d2e9e3 |
| 49c2f37154 |
| 69e54497aa |
| 9aa1885669 |
| 4418508513 |
.vscode/launch.py (vendored): 12 changed lines
@@ -5,20 +5,28 @@
import os
import sys
import shlex

sys.path.insert(0, os.getcwd())

import jstyleson
from copyparty.__main__ import main as copyparty

with open(".vscode/launch.json", "r") as f:
with open(".vscode/launch.json", "r", encoding="utf-8") as f:
    tj = f.read()

oj = jstyleson.loads(tj)
argv = oj["configurations"][0]["args"]

try:
    sargv = " ".join([shlex.quote(x) for x in argv])
    print(sys.executable + " -m copyparty " + sargv + "\n")
except:
    pass

argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv]
try:
    copyparty(argv)
    copyparty(["a"] + argv)
except SystemExit as ex:
    if ex.code:
        raise
README.md: 263 changed lines
@@ -9,9 +9,12 @@
|
||||
turn your phone or raspi into a portable file server with resumable uploads/downloads using IE6 or any other browser
|
||||
|
||||
* server runs on anything with `py2.7` or `py3.3+`
|
||||
* *resumable* uploads need `firefox 12+` / `chrome 6+` / `safari 6+` / `IE 10+`
|
||||
* browse/upload with IE4 / netscape4.0 on win3.11 (heh)
|
||||
* *resumable* uploads need `firefox 34+` / `chrome 41+` / `safari 7+` for full speed
|
||||
* code standard: `black`
|
||||
|
||||
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [thumbnails](#thumbnails) // [md-viewer](#markdown-viewer) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [ie4](#browser-support)
|
||||
|
||||
|
||||
## readme toc
|
||||
|
||||
@@ -20,8 +23,18 @@ turn your phone or raspi into a portable file server with resumable uploads/down
|
||||
* [notes](#notes)
|
||||
* [status](#status)
|
||||
* [bugs](#bugs)
|
||||
* [usage](#usage)
|
||||
* [general bugs](#general-bugs)
|
||||
* [not my bugs](#not-my-bugs)
|
||||
* [the browser](#the-browser)
|
||||
* [tabs](#tabs)
|
||||
* [hotkeys](#hotkeys)
|
||||
* [tree-mode](#tree-mode)
|
||||
* [thumbnails](#thumbnails)
|
||||
* [zip downloads](#zip-downloads)
|
||||
* [uploading](#uploading)
|
||||
* [file-search](#file-search)
|
||||
* [markdown viewer](#markdown-viewer)
|
||||
* [other tricks](#other-tricks)
|
||||
* [searching](#searching)
|
||||
* [search configuration](#search-configuration)
|
||||
* [metadata from audio files](#metadata-from-audio-files)
|
||||
@@ -29,7 +42,10 @@ turn your phone or raspi into a portable file server with resumable uploads/down
|
||||
* [complete examples](#complete-examples)
|
||||
* [browser support](#browser-support)
|
||||
* [client examples](#client-examples)
|
||||
* [up2k](#up2k)
|
||||
* [dependencies](#dependencies)
|
||||
* [optional dependencies](#optional-dependencies)
|
||||
* [install recommended deps](#install-recommended-deps)
|
||||
* [optional gpl stuff](#optional-gpl-stuff)
|
||||
* [sfx](#sfx)
|
||||
* [sfx repack](#sfx-repack)
|
||||
@@ -43,25 +59,27 @@ turn your phone or raspi into a portable file server with resumable uploads/down
|
||||
|
||||
download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!
|
||||
|
||||
running the sfx without arguments (for example doubleclicking it on Windows) will let anyone access the current folder; see `-h` for help if you want accounts and volumes etc
|
||||
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone full access to the current folder; see `-h` for help if you want accounts and volumes etc
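for reference, a hypothetical invocation with one account and two volumes; the `-a ACCT` / `-v VOL` flags and the `src:dst:permissions` volume form appear elsewhere on this page, but the exact `user:password` account format and the permission letters are assumptions, so verify against `-h` first:

```
# account "ed"; a public read-only music volume; an inbox that only ed can write to
python copyparty-sfx.py -a ed:hunter2 -v ~/music:music:r -v ~/inbox:inbox:wed
```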
|
||||
|
||||
you may also want these, especially on servers:
|
||||
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
|
||||
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for legit https)
|
||||
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)
|
||||
|
||||
|
||||
## notes
|
||||
|
||||
* iPhone/iPad: use Firefox to download files
|
||||
* Android-Chrome: set max "parallel uploads" for 200% upload speed (android bug)
|
||||
* Android-Firefox: takes a while to select files (in order to avoid the above android-chrome issue)
|
||||
* Desktop-Firefox: may use gigabytes of RAM if your connection is great and your files are massive
|
||||
* Android-Chrome: increase "parallel uploads" for higher speed (android bug)
|
||||
* Android-Firefox: takes a while to select files (their fix for ☝️)
|
||||
* Desktop-Firefox: ~~may use gigabytes of RAM if your files are massive~~ *seems to be OK now*
|
||||
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
|
||||
* because no browsers currently implement the media-query to do this properly orz
|
||||
|
||||
|
||||
## status
|
||||
|
||||
summary: all planned features work! now please enjoy the bloatening
|
||||
|
||||
* backend stuff
|
||||
* ☑ sanic multipart parser
|
||||
* ☑ load balancer (multiprocessing)
|
||||
@@ -79,9 +97,12 @@ you may also want these, especially on servers:
|
||||
* browser
|
||||
* ☑ tree-view
|
||||
* ☑ media player
|
||||
* ✖ thumbnails
|
||||
* ✖ SPA (browse while uploading)
|
||||
* currently safe using the file-tree on the left only, not folders in the file list
|
||||
* ☑ thumbnails
|
||||
* ☑ images using Pillow
|
||||
* ☑ videos using FFmpeg
|
||||
* ☑ cache eviction (max-age; maybe max-size eventually)
|
||||
* ☑ SPA (browse while uploading)
|
||||
* if you use the file-tree on the left only, not folders in the file list
|
||||
* server indexing
|
||||
* ☑ locate files by contents
|
||||
* ☑ search by name/path/date/size
|
||||
@@ -90,26 +111,70 @@ you may also want these, especially on servers:
|
||||
* ☑ viewer
|
||||
* ☑ editor (sure why not)
|
||||
|
||||
summary: it works! you can use it! (but technically not even close to beta)
|
||||
|
||||
|
||||
# bugs
|
||||
|
||||
* Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
|
||||
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
|
||||
* Windows: python 2.7 cannot handle filenames with mojibake
|
||||
|
||||
## general bugs
|
||||
|
||||
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
|
||||
* cannot mount something at `/d1/d2/d3` unless `d2` exists inside `d1`
|
||||
* hiding the contents at url `/d1/d2/d3` using `-v :d1/d2/d3:cd2d` has the side-effect of creating databases (for files/tags) inside folders d1 and d2, and those databases take precedence over the main db at the top of the vfs - this means all files in d2 and below will be reindexed unless you already had a vfs entry at or below d2
|
||||
* probably more, pls let me know
|
||||
|
||||
## not my bugs
|
||||
|
||||
# usage
|
||||
* Windows: msys2-python 3.8.6 occasionally throws "RuntimeError: release unlocked lock" when leaving a scoped mutex in up2k
|
||||
* this is an msys2 bug, the regular windows edition of python is fine
|
||||
|
||||
|
||||
# the browser
|
||||
|
||||

|
||||
|
||||
|
||||
## tabs
|
||||
|
||||
* `[🔎]` search by size, date, path/name, mp3-tags ... see [searching](#searching)
|
||||
* `[🚀]` and `[🎈]` are the uploaders, see [uploading](#uploading)
|
||||
* `[📂]` mkdir, create directories
|
||||
* `[📝]` new-md, create a new markdown document
|
||||
* `[📟]` send-msg, either to server-log or into textfiles if `--urlform save`
|
||||
* `[⚙️]` client configuration options
|
||||
|
||||
|
||||
## hotkeys
|
||||
|
||||
the browser has the following hotkeys
|
||||
* `0..9` jump to 10%..90%
|
||||
* `U/O` skip 10sec back/forward
|
||||
* `J/L` prev/next song
|
||||
* `I/K` prev/next folder
|
||||
* `P` parent folder
|
||||
* `G` toggle list / grid view
|
||||
* `T` toggle thumbnails / icons
|
||||
* when playing audio:
|
||||
* `0..9` jump to 10%..90%
|
||||
* `U/O` skip 10sec back/forward
|
||||
* `J/L` prev/next song
|
||||
* `J` also starts playing the folder
|
||||
* in the grid view:
|
||||
* `S` toggle multiselect
|
||||
* `A/D` zoom
|
||||
|
||||
|
||||
## tree-mode
|
||||
|
||||
by default there's a breadcrumbs path; you can replace this with a tree-browser sidebar thing by clicking the 🌲
|
||||
|
||||
click `[-]` and `[+]` to adjust the size, and the `[a]` toggles if the tree should widen dynamically as you go deeper or stay fixed-size
|
||||
|
||||
|
||||
## thumbnails
|
||||
|
||||

|
||||
|
||||
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how destructive your users are
|
||||
|
||||
|
||||
## zip downloads
|
||||
@@ -128,12 +193,80 @@ the `zip` link next to folders can produce various types of zip/tar files using
|
||||
* `zip_crc` will take longer to download since the server has to read each file twice
|
||||
* please let me know if you find a program old enough to actually need this
|
||||
|
||||
you can also zip a selection of files or folders by clicking them in the browser, that brings up a selection editor and zip button in the bottom right
|
||||
|
||||

|
||||
|
||||
## uploading
|
||||
|
||||
two upload methods are available in the html client:
|
||||
* `🎈 bup`, the basic uploader, supports almost every browser since netscape 4.0
|
||||
* `🚀 up2k`, the fancy one
|
||||
|
||||
up2k has several advantages:
|
||||
* you can drop folders into the browser (files are added recursively)
|
||||
* files are processed in chunks, and each chunk is checksummed
|
||||
* uploads resume if they are interrupted (for example by a reboot)
|
||||
* server detects any corruption; the client reuploads affected chunks
|
||||
* the client doesn't upload anything that already exists on the server
|
||||
* the last-modified timestamp of the file is preserved
|
||||
|
||||
see [up2k](#up2k) for details on how it works
|
||||
|
||||

|
||||
|
||||
**protip:** you can avoid scaring away users with [docs/minimal-up2k.html](docs/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
|
||||
|
||||
the up2k UI is the epitome of polished intuitive experiences:
|
||||
* "parallel uploads" specifies how many chunks to upload at the same time
|
||||
* `[🏃]` analysis of other files should continue while one is uploading
|
||||
* `[💭]` ask for confirmation before files are added to the list
|
||||
* `[💤]` sync uploading between other copyparty tabs so only one is active
|
||||
* `[🔎]` switch between upload and file-search mode
|
||||
|
||||
and then there's the tabs below it,
|
||||
* `[ok]` is uploads which completed successfully
|
||||
* `[ng]` is the uploads which failed / got rejected (already exists, ...)
|
||||
* `[done]` shows a combined list of `[ok]` and `[ng]`, chronological order
|
||||
* `[busy]` files which are currently hashing, pending-upload, or uploading
|
||||
* plus up to 3 entries each from `[done]` and `[que]` for context
|
||||
* `[que]` is all the files that are still queued
|
||||
|
||||
### file-search
|
||||
|
||||

|
||||
|
||||
in the `[🚀 up2k]` tab, after toggling the `[🔎]` switch green, any files/folders you drop onto the dropzone will be hashed on the client-side. Each hash is sent to the server which checks if that file exists somewhere already
|
||||
|
||||
files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
|
||||
* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much
|
||||
|
||||
adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files
|
||||
|
||||
note that since up2k has to read the file twice, `[🎈 bup]` can be up to 2x faster in extreme cases (if your internet connection is faster than the read-speed of your HDD)
|
||||
|
||||
up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well thanks to tls also functioning as an integrity check
|
||||
|
||||
|
||||
## markdown viewer
|
||||
|
||||

|
||||
|
||||
* the document preview has a max-width which is the same as an A4 paper when printed
|
||||
|
||||
|
||||
## other tricks
|
||||
|
||||
* you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`
|
||||
|
||||
|
||||
# searching
|
||||
|
||||

|
||||
|
||||
when started with `-e2dsa` copyparty will scan/index all your files. This avoids duplicates on upload, and also makes the volumes searchable through the web-ui:
|
||||
* make search queries by `size`/`date`/`directory-path`/`filename`, or...
|
||||
* drag/drop a local file to see if the same contents exist somewhere on the server (you get the URL if it does)
|
||||
* drag/drop a local file to see if the same contents exist somewhere on the server, see [file-search](#file-search)
|
||||
|
||||
path/name queries are space-separated, AND'ed together, and words are negated with a `-` prefix, so for example:
|
||||
* path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
|
||||
@@ -161,6 +294,8 @@ the same arguments can be set as volume flags, in addition to `d2d` and `d2t` fo
|
||||
|
||||
`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those
|
||||
|
||||
the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
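as a concrete (hypothetical) example combining these: `-e2dsa` is the global scan+index switch described above, the `c`-prefixed volflag form is the one from the `-v :d1/d2/d3:cd2d` example in the bugs section, and `-e2ts` is assumed to follow the same naming as `-e2ds`; paths and mountpoints are placeholders:

```
# index and tag everything at startup, but keep the upload-only
# inbox volume out of the file/tag database with the d2d volflag
python -m copyparty -e2dsa -e2ts -v ~/music:music:r -v ~/inbox:inbox:w:cd2d
```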
|
||||
|
||||
|
||||
## metadata from audio files
|
||||
|
||||
@@ -178,6 +313,7 @@ see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copy
|
||||
`--no-mutagen` disables mutagen and uses ffprobe instead, which...
|
||||
* is about 20x slower than mutagen
|
||||
* catches a few tags that mutagen doesn't
|
||||
* melodic key, video resolution, framerate, pixfmt
|
||||
* avoids pulling any GPL code into copyparty
|
||||
* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve
|
||||
|
||||
@@ -190,6 +326,11 @@ copyparty can invoke external programs to collect additional metadata for files
|
||||
* `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
|
||||
* `-v ~/music::r:cmtp=.bpm=~/bin/audio-bpm.py:cmtp=key=f,t5,~/bin/audio-key.py` both as a per-volume config wow this is getting ugly
|
||||
|
||||
*but wait, there's more!* `-mtp` can be used for non-audio files as well using the `a` flag: `ay` only do audio files, `an` only do non-audio files, or `ad` do all files (d as in dontcare)
|
||||
|
||||
* `-mtp ext=an,~/bin/file-ext.py` runs `~/bin/file-ext.py` to get the `ext` tag only if file is not audio (`an`)
|
||||
* `-mtp arch,built,ver,orig=an,eexe,edll,~/bin/exe.py` runs `~/bin/exe.py` to get properties about windows-binaries only if file is not audio (`an`) and file extension is exe or dll
|
||||
|
||||
|
||||
## complete examples
|
||||
|
||||
@@ -199,6 +340,8 @@ copyparty can invoke external programs to collect additional metadata for files
|
||||
|
||||
# browser support
|
||||
|
||||

|
||||
|
||||
`ie` = internet-explorer, `ff` = firefox, `c` = chrome, `iOS` = iPhone/iPad, `Andr` = Android
|
||||
|
||||
| feature | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
|
||||
@@ -223,14 +366,18 @@ copyparty can invoke external programs to collect additional metadata for files
|
||||
* `*2` using a wasm decoder which can sometimes get stuck and consumes a bit more power
|
||||
|
||||
quick summary of more eccentric web-browsers trying to view a directory index:
|
||||
* safari (14.0.3/macos) is chrome with janky wasm, so playing opus can deadlock the javascript engine
|
||||
* safari (14.0.1/iOS) same as macos, except it recovers from the deadlocks if you poke it a bit
|
||||
* links (2.21/macports) can browse, login, upload/mkdir/msg
|
||||
* lynx (2.8.9/macports) can browse, login, upload/mkdir/msg
|
||||
* w3m (0.5.3/macports) can browse, login, upload at 100kB/s, mkdir/msg
|
||||
* netsurf (3.10/arch) is basically ie6 with much better css (javascript has almost no effect)
|
||||
* netscape 4.0 and 4.5 can browse (text is yellow on white), upload with `?b=u`
|
||||
* SerenityOS (22d13d8) hits a page fault, works with `?b=u`, file input not-impl, url params are multiplying
|
||||
|
||||
| browser | will it blend |
|
||||
| ------- | ------------- |
|
||||
| **safari** (14.0.3/macos) | is chrome with janky wasm, so playing opus can deadlock the javascript engine |
|
||||
| **safari** (14.0.1/iOS) | same as macos, except it recovers from the deadlocks if you poke it a bit |
|
||||
| **links** (2.21/macports) | can browse, login, upload/mkdir/msg |
|
||||
| **lynx** (2.8.9/macports) | can browse, login, upload/mkdir/msg |
|
||||
| **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
|
||||
| **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
|
||||
| **ie4** and **netscape** 4.0 | can browse (text is yellow on white), upload with `?b=u` |
|
||||
| **SerenityOS** (22d13d8) | hits a page fault, works with `?b=u`, file input not-impl, url params are multiplying |
|
||||
|
||||
|
||||
# client examples
|
||||
|
||||
@@ -250,36 +397,72 @@ quick summary of more eccentric web-browsers trying to view a directory index:
|
||||
* cross-platform python client available in [./bin/](bin/)
|
||||
* [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
|
||||
|
||||
* sharex (screenshot utility): see [./contrib/sharex.sxcu](contrib/#sharexsxcu)
|
||||
|
||||
copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads:

    b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
    b512 <movie.mkv
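the same check can be done without a shell; a rough python sketch of the `b512` helper above (it assumes the plain `base64` alphabet and the same 43-character truncation), handy for comparing against the response of e.g. a `curl -T movie.mkv http://127.0.0.1:3923/` upload:

```python
import base64, hashlib, sys

def b512(path):
    h = hashlib.sha512()
    with open(path, "rb") as f:
        for blk in iter(lambda: f.read(1024 * 1024), b""):
            h.update(blk)
    # same as `base64 | head -c43` above: full digest, then truncate
    return base64.b64encode(h.digest()).decode("ascii")[:43]

print(b512(sys.argv[1]))
```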
|
||||
|
||||
|
||||
# up2k
|
||||
|
||||
quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
|
||||
* the up2k client splits a file into an "optimal" number of chunks
|
||||
* 1 MiB each, unless that becomes more than 256 chunks
|
||||
* tries 1.5M, 2M, 3, 4, 6, ... until <= 256 chunks or size >= 32M
|
||||
* client posts the list of hashes, filename, size, last-modified
|
||||
* server creates the `wark`, an identifier for this upload
|
||||
* `sha512( salt + filesize + chunk_hashes )`
|
||||
* and a sparse file is created for the chunks to drop into
|
||||
* client uploads each chunk
|
||||
* header entries for the chunk-hash and wark
|
||||
* server writes chunks into place based on the hash
|
||||
* client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload
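a minimal sketch of the chunk-size rule and the wark from the outline above; the growth sequence (1 MiB, then 1.5M, 2M, 3, 4, 6, ... until <= 256 chunks or 32M) matches the numbers listed, but the exact bytes fed into the wark's sha512 (separators, encoding, hex vs base64 chunk-hashes) are assumptions here, not copyparty's actual wire format:

```python
import hashlib

def up2k_chunksize(filesize):
    # grow the chunksize until the file fits in <= 256 chunks
    # or the chunksize itself reaches 32 MiB
    chunksize = 1024 * 1024
    stepsize = 512 * 1024
    while True:
        for mul in [1, 2]:
            nchunks = -(-filesize // chunksize)  # ceiling division
            if nchunks <= 256 or chunksize >= 32 * 1024 * 1024:
                return chunksize
            chunksize += stepsize
            stepsize *= mul

def wark(salt, filesize, chunk_hashes):
    # "sha512( salt + filesize + chunk_hashes )" as described above;
    # newline-joining and hex output are placeholder choices
    blob = "\n".join([salt, str(filesize)] + list(chunk_hashes))
    return hashlib.sha512(blob.encode("utf-8")).hexdigest()
```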
|
||||
|
||||
|
||||
# dependencies
|
||||
|
||||
* `jinja2` (is built into the SFX)
|
||||
|
||||
**optional,** enables music tags:
|
||||
|
||||
## optional dependencies
|
||||
|
||||
enable music tags:
|
||||
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
|
||||
* or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
|
||||
|
||||
**optional,** will eventually enable thumbnails:
|
||||
enable image thumbnails:
|
||||
* `Pillow` (requires py2.7 or py3.5+)
|
||||
|
||||
enable video thumbnails:
|
||||
* `ffmpeg` and `ffprobe` somewhere in `$PATH`
|
||||
|
||||
enable reading HEIF pictures:
|
||||
* `pyheif-pillow-opener` (requires Linux or a C compiler)
|
||||
|
||||
enable reading AVIF pictures:
|
||||
* `pillow-avif-plugin`
|
||||
|
||||
|
||||
## install recommended deps
|
||||
```
python -m pip install --user -U jinja2 mutagen Pillow
```
|
||||
|
||||
|
||||
## optional gpl stuff
|
||||
|
||||
some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag)
|
||||
|
||||
these are standalone and will never be imported / evaluated by copyparty
|
||||
these are standalone programs and will never be imported / evaluated by copyparty
|
||||
|
||||
|
||||
# sfx
|
||||
|
||||
currently there are two self-contained binaries:
|
||||
* [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere
|
||||
* [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos
|
||||
currently there are two self-contained "binaries":
|
||||
* [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere, **recommended**
|
||||
* [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos, kinda deprecated
|
||||
|
||||
launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course
|
||||
|
||||
@@ -339,21 +522,25 @@ in the `scripts` folder:
|
||||
|
||||
roughly sorted by priority
|
||||
|
||||
* audio link with timestamp
|
||||
* separate sqlite table per tag
|
||||
* audio fingerprinting
|
||||
* readme.md as epilogue
|
||||
* single sha512 across all up2k chunks? maybe
|
||||
* reduce up2k roundtrips
|
||||
* start from a chunk index and just go
|
||||
* terminate client on bad data
|
||||
* `os.copy_file_range` for up2k cloning
|
||||
* support pillow-simd
|
||||
* figure out the deal with pixel3a not being connectable as hotspot
|
||||
* pixel3a having unpredictable 3sec latency in general :||||
|
||||
|
||||
discarded ideas
|
||||
|
||||
* separate sqlite table per tag
|
||||
* performance fixed by skipping some indexes (`+mt.k`)
|
||||
* audio fingerprinting
|
||||
* only makes sense if there can be a wasm client and that doesn't exist yet (except for olaf which is agpl hence counts as not existing)
|
||||
* `os.copy_file_range` for up2k cloning
|
||||
* almost never hit this path anyways
|
||||
* up2k partials ui
|
||||
* feels like there isn't much point
|
||||
* cache sha512 chunks on client
|
||||
* too dangerous
|
||||
* comment field
|
||||
* nah
|
||||
* look into android thumbnail cache file format
|
||||
* absolutely not
|
||||
|
||||
@@ -45,3 +45,18 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
|
||||
# [`mtag/`](mtag/)
|
||||
* standalone programs which perform misc. file analysis
|
||||
* copyparty can Popen programs like these during file indexing to collect additional metadata
|
||||
|
||||
|
||||
# [`dbtool.py`](dbtool.py)
|
||||
upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty recommends to wipe the DB and reindex because it now collects additional metadata during analysis, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
|
||||
|
||||
for that example (upgrading to v0.11.0), first move the old db aside, launch copyparty, let it rebuild the db until the point where it starts running mtp (colored messages as it adds the mtp tags), then CTRL-C and patch in the old mtp tags from the old db instead
|
||||
|
||||
so assuming you have `-mtp` parsers to provide the tags `key` and `.bpm`:
|
||||
|
||||
```
~/bin/dbtool.py -ls up2k.db
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -cmp
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy key
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy .bpm -vac
```
|
||||
|
||||
bin/dbtool.py (new executable file): 198 lines
@@ -0,0 +1,198 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import sqlite3
|
||||
import argparse
|
||||
|
||||
DB_VER = 3
|
||||
|
||||
|
||||
def die(msg):
|
||||
print("\033[31m\n" + msg + "\n\033[0m")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def read_ver(db):
|
||||
for tab in ["ki", "kv"]:
|
||||
try:
|
||||
c = db.execute(r"select v from {} where k = 'sver'".format(tab))
|
||||
except:
|
||||
continue
|
||||
|
||||
rows = c.fetchall()
|
||||
if rows:
|
||||
return int(rows[0][0])
|
||||
|
||||
return "corrupt"
|
||||
|
||||
|
||||
def ls(db):
|
||||
nfiles = next(db.execute("select count(w) from up"))[0]
|
||||
ntags = next(db.execute("select count(w) from mt"))[0]
|
||||
print(f"{nfiles} files")
|
||||
print(f"{ntags} tags\n")
|
||||
|
||||
print("number of occurences for each tag,")
|
||||
print(" 'x' = file has no tags")
|
||||
print(" 't:mtp' = the mtp flag (file not mtp processed yet)")
|
||||
print()
|
||||
for k, nk in db.execute("select k, count(k) from mt group by k order by k"):
|
||||
print(f"{nk:9} {k}")
|
||||
|
||||
|
||||
def compare(n1, d1, n2, d2, verbose):
|
||||
nt = next(d1.execute("select count(w) from up"))[0]
|
||||
n = 0
|
||||
miss = 0
|
||||
for w, rd, fn in d1.execute("select w, rd, fn from up"):
|
||||
n += 1
|
||||
if n % 25_000 == 0:
|
||||
m = f"\033[36mchecked {n:,} of {nt:,} files in {n1} against {n2}\033[0m"
|
||||
print(m)
|
||||
|
||||
q = "select w from up where substr(w,1,16) = ?"
|
||||
hit = d2.execute(q, (w[:16],)).fetchone()
|
||||
if not hit:
|
||||
miss += 1
|
||||
if verbose:
|
||||
print(f"file in {n1} missing in {n2}: [{w}] {rd}/{fn}")
|
||||
|
||||
print(f" {miss} files in {n1} missing in {n2}\n")
|
||||
|
||||
nt = next(d1.execute("select count(w) from mt"))[0]
|
||||
n = 0
|
||||
miss = {}
|
||||
nmiss = 0
|
||||
for w, k, v in d1.execute("select * from mt"):
|
||||
n += 1
|
||||
if n % 100_000 == 0:
|
||||
m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m"
|
||||
print(m)
|
||||
|
||||
v2 = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
|
||||
if v2:
|
||||
v2 = v2[0]
|
||||
|
||||
# if v != v2 and v2 and k in [".bpm", "key"] and n2 == "src":
|
||||
# print(f"{w} [{rd}/{fn}] {k} = [{v}] / [{v2}]")
|
||||
|
||||
if v2 is not None:
|
||||
if k.startswith("."):
|
||||
try:
|
||||
diff = abs(float(v) - float(v2))
|
||||
if diff > float(v) / 0.9:
|
||||
v2 = None
|
||||
else:
|
||||
v2 = v
|
||||
except:
|
||||
pass
|
||||
|
||||
if v != v2:
|
||||
v2 = None
|
||||
|
||||
if v2 is None:
|
||||
nmiss += 1
|
||||
try:
|
||||
miss[k] += 1
|
||||
except:
|
||||
miss[k] = 1
|
||||
|
||||
if verbose:
|
||||
q = "select rd, fn from up where substr(w,1,16) = ?"
|
||||
rd, fn = d1.execute(q, (w,)).fetchone()
|
||||
print(f"missing in {n2}: [{w}] [{rd}/{fn}] {k} = {v}")
|
||||
|
||||
for k, v in sorted(miss.items()):
|
||||
if v:
|
||||
print(f"{n1} has {v:6} more {k:<6} tags than {n2}")
|
||||
|
||||
print(f"in total, {nmiss} missing tags in {n2}\n")
|
||||
|
||||
|
||||
def copy_mtp(d1, d2, tag, rm):
|
||||
nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0]
|
||||
n = 0
|
||||
ndone = 0
|
||||
for w, k, v in d1.execute("select * from mt where k = ?", (tag,)):
|
||||
n += 1
|
||||
if n % 25_000 == 0:
|
||||
m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ndone} copied\033[0m"
|
||||
print(m)
|
||||
|
||||
hit = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
|
||||
if hit:
|
||||
hit = hit[0]
|
||||
|
||||
if hit != v:
|
||||
ndone += 1
|
||||
if hit is not None:
|
||||
d2.execute("delete from mt where w = ? and +k = ?", (w, k))
|
||||
|
||||
d2.execute("insert into mt values (?,?,?)", (w, k, v))
|
||||
if rm:
|
||||
d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w,))
|
||||
|
||||
d2.commit()
|
||||
print(f"copied {ndone} {tag} tags over")
|
||||
|
||||
|
||||
def main():
|
||||
os.system("")
|
||||
print()
|
||||
|
||||
ap = argparse.ArgumentParser()
|
||||
ap.add_argument("db", help="database to work on")
|
||||
ap.add_argument("-src", metavar="DB", type=str, help="database to copy from")
|
||||
|
||||
ap2 = ap.add_argument_group("informational / read-only stuff")
|
||||
ap2.add_argument("-v", action="store_true", help="verbose")
|
||||
ap2.add_argument("-ls", action="store_true", help="list summary for db")
|
||||
ap2.add_argument("-cmp", action="store_true", help="compare databases")
|
||||
|
||||
ap2 = ap.add_argument_group("options which modify target db")
|
||||
ap2.add_argument("-copy", metavar="TAG", type=str, help="mtp tag to copy over")
|
||||
ap2.add_argument(
|
||||
"-rm-mtp-flag",
|
||||
action="store_true",
|
||||
help="when an mtp tag is copied over, also mark that as done, so copyparty won't run mtp on it",
|
||||
)
|
||||
ap2.add_argument("-vac", action="store_true", help="optimize DB")
|
||||
|
||||
ar = ap.parse_args()
|
||||
|
||||
for v in [ar.db, ar.src]:
|
||||
if v and not os.path.exists(v):
|
||||
die("database must exist")
|
||||
|
||||
db = sqlite3.connect(ar.db)
|
||||
ds = sqlite3.connect(ar.src) if ar.src else None
|
||||
|
||||
for d, n in [[ds, "src"], [db, "dst"]]:
|
||||
if not d:
|
||||
continue
|
||||
|
||||
ver = read_ver(d)
|
||||
if ver == "corrupt":
|
||||
die("{} database appears to be corrupt, sorry")
|
||||
|
||||
if ver != DB_VER:
|
||||
m = f"{n} db is version {ver}, this tool only supports version {DB_VER}, please upgrade it with copyparty first"
|
||||
die(m)
|
||||
|
||||
if ar.ls:
|
||||
ls(db)
|
||||
|
||||
if ar.cmp:
|
||||
if not ds:
|
||||
die("need src db to compare against")
|
||||
|
||||
compare("src", ds, "dst", db, ar.v)
|
||||
compare("dst", db, "src", ds, ar.v)
|
||||
|
||||
if ar.copy:
|
||||
copy_mtp(ds, db, ar.copy, ar.rm_mtp_flag)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
bin/mtag/exe.py (new file): 96 lines
@@ -0,0 +1,96 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
import time
|
||||
import json
|
||||
import pefile
|
||||
|
||||
"""
|
||||
retrieve exe info,
|
||||
example for multivalue providers
|
||||
"""
|
||||
|
||||
|
||||
def unk(v):
|
||||
return "unk({:04x})".format(v)
|
||||
|
||||
|
||||
class PE2(pefile.PE):
|
||||
def __init__(self, *a, **ka):
|
||||
for k in [
|
||||
# -- parse_data_directories:
|
||||
"parse_import_directory",
|
||||
"parse_export_directory",
|
||||
# "parse_resources_directory",
|
||||
"parse_debug_directory",
|
||||
"parse_relocations_directory",
|
||||
"parse_directory_tls",
|
||||
"parse_directory_load_config",
|
||||
"parse_delay_import_directory",
|
||||
"parse_directory_bound_imports",
|
||||
# -- full_load:
|
||||
"parse_rich_header",
|
||||
]:
|
||||
setattr(self, k, self.noop)
|
||||
|
||||
super(PE2, self).__init__(*a, **ka)
|
||||
|
||||
def noop(*a, **ka):
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
pe = PE2(sys.argv[1], fast_load=False)
|
||||
except:
|
||||
sys.exit(0)
|
||||
|
||||
arch = pe.FILE_HEADER.Machine
|
||||
if arch == 0x14C:
|
||||
arch = "x86"
|
||||
elif arch == 0x8664:
|
||||
arch = "x64"
|
||||
else:
|
||||
arch = unk(arch)
|
||||
|
||||
try:
|
||||
buildtime = time.gmtime(pe.FILE_HEADER.TimeDateStamp)
|
||||
buildtime = time.strftime("%Y-%m-%d_%H:%M:%S", buildtime)
|
||||
except:
|
||||
buildtime = "invalid"
|
||||
|
||||
ui = pe.OPTIONAL_HEADER.Subsystem
|
||||
if ui == 2:
|
||||
ui = "GUI"
|
||||
elif ui == 3:
|
||||
ui = "cmdline"
|
||||
else:
|
||||
ui = unk(ui)
|
||||
|
||||
extra = {}
|
||||
if hasattr(pe, "FileInfo"):
|
||||
for v1 in pe.FileInfo:
|
||||
for v2 in v1:
|
||||
if v2.name != "StringFileInfo":
|
||||
continue
|
||||
|
||||
for v3 in v2.StringTable:
|
||||
for k, v in v3.entries.items():
|
||||
v = v.decode("utf-8", "replace").strip()
|
||||
if not v:
|
||||
continue
|
||||
|
||||
if k in [b"FileVersion", b"ProductVersion"]:
|
||||
extra["ver"] = v
|
||||
|
||||
if k in [b"OriginalFilename", b"InternalName"]:
|
||||
extra["orig"] = v
|
||||
|
||||
r = {
|
||||
"arch": arch,
|
||||
"built": buildtime,
|
||||
"ui": ui,
|
||||
"cksum": "{:08x}".format(pe.OPTIONAL_HEADER.CheckSum),
|
||||
}
|
||||
r.update(extra)
|
||||
|
||||
print(json.dumps(r, indent=4))
|
||||
bin/mtag/file-ext.py (new file): 9 lines
@@ -0,0 +1,9 @@
#!/usr/bin/env python

import sys

"""
example that just prints the file extension
"""

print(sys.argv[1].split(".")[-1])
@@ -9,6 +9,16 @@
|
||||
* assumes the webserver and copyparty are running on the same server/IP
|
||||
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript
|
||||
|
||||
### [`sharex.sxcu`](sharex.sxcu)
|
||||
* sharex config file to upload screenshots and grab the URL
|
||||
* `RequestURL`: full URL to the target folder
|
||||
* `pw`: password (remove the `pw` line if anon-write)
|
||||
|
||||
however if your copyparty is behind a reverse-proxy, you may want to use [`sharex-html.sxcu`](sharex-html.sxcu) instead:
|
||||
* `RequestURL`: full URL to the target folder
|
||||
* `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$`
|
||||
* `pw`: password (remove `Parameters` if anon-write)
|
||||
|
||||
### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
|
||||
* disables thumbnails and folder-type detection in windows explorer
|
||||
* makes it way faster (especially for slow/networked locations (such as copyparty-fuse))
|
||||
|
||||
contrib/sharex-html.sxcu (new file): 19 lines
@@ -0,0 +1,19 @@
{
  "Version": "13.5.0",
  "Name": "copyparty-html",
  "DestinationType": "ImageUploader",
  "RequestMethod": "POST",
  "RequestURL": "http://127.0.0.1:3923/sharex",
  "Parameters": {
    "pw": "wark"
  },
  "Body": "MultipartFormData",
  "Arguments": {
    "act": "bput"
  },
  "FileFormName": "f",
  "RegexList": [
    "bytes // <a href=\"/([^\"]+)\""
  ],
  "URL": "http://127.0.0.1:3923/$regex:1|1$"
}
contrib/sharex.sxcu (new file): 17 lines
@@ -0,0 +1,17 @@
{
  "Version": "13.5.0",
  "Name": "copyparty",
  "DestinationType": "ImageUploader",
  "RequestMethod": "POST",
  "RequestURL": "http://127.0.0.1:3923/sharex",
  "Parameters": {
    "pw": "wark",
    "j": null
  },
  "Body": "MultipartFormData",
  "Arguments": {
    "act": "bput"
  },
  "FileFormName": "f",
  "URL": "$json:files[0].url$"
}
@@ -2,6 +2,7 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import platform
|
||||
import time
|
||||
import sys
|
||||
import os
|
||||
|
||||
@@ -16,12 +17,18 @@ if platform.system() == "Windows":
|
||||
VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393]
|
||||
# introduced in anniversary update
|
||||
|
||||
ANYWIN = WINDOWS or sys.platform in ["msys"]
|
||||
|
||||
MACOS = platform.system() == "Darwin"
|
||||
|
||||
|
||||
class EnvParams(object):
|
||||
def __init__(self):
|
||||
self.t0 = time.time()
|
||||
self.mod = os.path.dirname(os.path.realpath(__file__))
|
||||
if self.mod.endswith("__init__"):
|
||||
self.mod = os.path.dirname(self.mod)
|
||||
|
||||
if sys.platform == "win32":
|
||||
self.cfg = os.path.normpath(os.environ["APPDATA"] + "/copyparty")
|
||||
elif sys.platform == "darwin":
|
||||
|
||||
@@ -225,6 +225,19 @@ def run_argparse(argv, formatter):
|
||||
--ciphers help = available ssl/tls ciphers,
|
||||
--ssl-ver help = available ssl/tls versions,
|
||||
default is what python considers safe, usually >= TLS1
|
||||
|
||||
values for --ls:
|
||||
"USR" is a user to browse as; * is anonymous, ** is all users
|
||||
"VOL" is a single volume to scan, default is * (all vols)
|
||||
"FLAG" is flags;
|
||||
"v" in addition to realpaths, print usernames and vpaths
|
||||
"ln" only prints symlinks leaving the volume mountpoint
|
||||
"p" exits 1 if any such symlinks are found
|
||||
"r" resumes startup after the listing
|
||||
examples:
|
||||
--ls '**' # list all files which are possible to read
|
||||
--ls '**,*,ln' # check for dangerous symlinks
|
||||
--ls '**,*,ln,p,r' # check, then start normally if safe
|
||||
"""
|
||||
),
|
||||
)
|
||||
@@ -237,19 +250,33 @@ def run_argparse(argv, formatter):
|
||||
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
|
||||
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
|
||||
ap.add_argument("-q", action="store_true", help="quiet")
|
||||
ap.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
|
||||
ap.add_argument("-ed", action="store_true", help="enable ?dots")
|
||||
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
|
||||
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
|
||||
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
|
||||
ap.add_argument("-nih", action="store_true", help="no info hostname")
|
||||
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
|
||||
ap.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
|
||||
ap.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
|
||||
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile (for debugging)")
|
||||
ap.add_argument("--no-scandir", action="store_true", help="disable scandir (for debugging)")
|
||||
ap.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
|
||||
ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms")
|
||||
ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
|
||||
|
||||
ap2 = ap.add_argument_group('admin panel options')
|
||||
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
|
||||
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
|
||||
|
||||
ap2 = ap.add_argument_group('thumbnail options')
|
||||
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
|
||||
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
|
||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
|
||||
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
|
||||
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
||||
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
|
||||
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval")
|
||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
|
||||
|
||||
ap2 = ap.add_argument_group('database options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
|
||||
@@ -261,7 +288,7 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
|
||||
ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q")
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,ac,vc,res,.fps")
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]bin", action="append", type=str, help="read tag M using bin")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
|
||||
|
||||
@@ -272,6 +299,15 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
|
||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
|
||||
|
||||
ap2 = ap.add_argument_group('debug options')
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", help="scan all volumes")
|
||||
ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
|
||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing")
|
||||
ap2.add_argument("--ihead", metavar="HEADER", action='append', help="dump incoming header")
|
||||
ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
|
||||
|
||||
return ap.parse_args(args=argv[1:])
|
||||
# fmt: on
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# coding: utf-8
|
||||
|
||||
VERSION = (0, 10, 8)
|
||||
CODENAME = "zip it"
|
||||
BUILD_DT = (2021, 4, 11)
|
||||
VERSION = (0, 11, 8)
|
||||
CODENAME = "the grid"
|
||||
BUILD_DT = (2021, 6, 6)
|
||||
|
||||
S_VERSION = ".".join(map(str, VERSION))
|
||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
|
||||
@@ -14,11 +14,12 @@ from .util import IMPLICATIONS, undot, Pebkac, fsdec, fsenc, statdir, nuprint
|
||||
class VFS(object):
|
||||
"""single level in the virtual fs"""
|
||||
|
||||
def __init__(self, realpath, vpath, uread=[], uwrite=[], flags={}):
|
||||
def __init__(self, realpath, vpath, uread=[], uwrite=[], uadm=[], flags={}):
|
||||
self.realpath = realpath # absolute path on host filesystem
|
||||
self.vpath = vpath # absolute path in the virtual filesystem
|
||||
self.uread = uread # users who can read this
|
||||
self.uwrite = uwrite # users who can write this
|
||||
self.uadm = uadm # users who are regular admins
|
||||
self.flags = flags # config switches
|
||||
self.nodes = {} # child nodes
|
||||
self.all_vols = {vpath: self} # flattened recursive
|
||||
@@ -27,7 +28,7 @@ class VFS(object):
|
||||
return "VFS({})".format(
|
||||
", ".join(
|
||||
"{}={!r}".format(k, self.__dict__[k])
|
||||
for k in "realpath vpath uread uwrite flags".split()
|
||||
for k in "realpath vpath uread uwrite uadm flags".split()
|
||||
)
|
||||
)
|
||||
|
||||
@@ -52,6 +53,7 @@ class VFS(object):
|
||||
"{}/{}".format(self.vpath, name).lstrip("/"),
|
||||
self.uread,
|
||||
self.uwrite,
|
||||
self.uadm,
|
||||
self.flags,
|
||||
)
|
||||
self._trk(vn)
|
||||
@@ -111,9 +113,29 @@ class VFS(object):
|
||||
if rem:
|
||||
rp += "/" + rem
|
||||
|
||||
return fsdec(os.path.realpath(fsenc(rp)))
|
||||
try:
|
||||
return fsdec(os.path.realpath(fsenc(rp)))
|
||||
except:
|
||||
if not WINDOWS:
|
||||
raise
|
||||
|
||||
def ls(self, rem, uname, scandir, lstat=False):
|
||||
# cpython bug introduced in 3.8, still exists in 3.9.1;
|
||||
# some win7sp1 and win10:20H2 boxes cannot realpath a
|
||||
# networked drive letter such as b"n:" or b"n:\\"
|
||||
#
|
||||
# requirements to trigger:
|
||||
# * bytestring (not unicode str)
|
||||
# * just the drive letter (subfolders are ok)
|
||||
# * networked drive (regular disks and vmhgfs are ok)
|
||||
# * on an enterprise network (idk, cannot repro with samba)
|
||||
#
|
||||
# hits the following exceptions in succession:
|
||||
# * access denied at L601: "path = _getfinalpathname(path)"
|
||||
# * "cant concat str to bytes" at L621: "return path + tail"
|
||||
#
|
||||
return os.path.realpath(rp)
|
||||
|
||||
def ls(self, rem, uname, scandir, incl_wo=False, lstat=False):
|
||||
"""return user-readable [fsdir,real,virt] items at vpath"""
|
||||
virt_vis = {} # nodes readable by user
|
||||
abspath = self.canonical(rem)
|
||||
@@ -121,7 +143,12 @@ class VFS(object):
|
||||
real.sort()
|
||||
if not rem:
|
||||
for name, vn2 in sorted(self.nodes.items()):
|
||||
if uname in vn2.uread or "*" in vn2.uread:
|
||||
ok = uname in vn2.uread or "*" in vn2.uread
|
||||
|
||||
if not ok and incl_wo:
|
||||
ok = uname in vn2.uwrite or "*" in vn2.uwrite
|
||||
|
||||
if ok:
|
||||
virt_vis[name] = vn2
|
||||
|
||||
# no vfs nodes in the list of real inodes
|
||||
@@ -135,7 +162,7 @@ class VFS(object):
|
||||
rel is a unix-style user-defined vpath (not vfs-related)
|
||||
"""
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, lstat)
|
||||
fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, False, lstat)
|
||||
rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
|
||||
rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
||||
|
||||
@@ -201,17 +228,19 @@ class VFS(object):
|
||||
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
|
||||
yield f
|
||||
|
||||
def user_tree(self, uname, readable=False, writable=False):
|
||||
ret = []
|
||||
opt1 = readable and (uname in self.uread or "*" in self.uread)
|
||||
opt2 = writable and (uname in self.uwrite or "*" in self.uwrite)
|
||||
if opt1 or opt2:
|
||||
ret.append(self.vpath)
|
||||
def user_tree(self, uname, readable, writable, admin):
|
||||
is_readable = False
|
||||
if uname in self.uread or "*" in self.uread:
|
||||
readable.append(self.vpath)
|
||||
is_readable = True
|
||||
|
||||
if uname in self.uwrite or "*" in self.uwrite:
|
||||
writable.append(self.vpath)
|
||||
if is_readable:
|
||||
admin.append(self.vpath)
|
||||
|
||||
for _, vn in sorted(self.nodes.items()):
|
||||
ret.extend(vn.user_tree(uname, readable, writable))
|
||||
|
||||
return ret
|
||||
vn.user_tree(uname, readable, writable, admin)
|
||||
|
||||
|
||||
class AuthSrv(object):
|
||||
@@ -221,6 +250,7 @@ class AuthSrv(object):
|
||||
self.args = args
|
||||
self.log_func = log_func
|
||||
self.warn_anonwrite = warn_anonwrite
|
||||
self.line_ctr = 0
|
||||
|
||||
if WINDOWS:
|
||||
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
|
||||
@@ -243,10 +273,12 @@ class AuthSrv(object):
|
||||
|
||||
yield prev, True
|
||||
|
||||
def _parse_config_file(self, fd, user, mread, mwrite, mflags, mount):
|
||||
def _parse_config_file(self, fd, user, mread, mwrite, madm, mflags, mount):
|
||||
vol_src = None
|
||||
vol_dst = None
|
||||
self.line_ctr = 0
|
||||
for ln in [x.decode("utf-8").strip() for x in fd]:
|
||||
self.line_ctr += 1
|
||||
if not ln and vol_src is not None:
|
||||
vol_src = None
|
||||
vol_dst = None
|
||||
@@ -273,15 +305,26 @@ class AuthSrv(object):
|
||||
mount[vol_dst] = vol_src
|
||||
mread[vol_dst] = []
|
||||
mwrite[vol_dst] = []
|
||||
madm[vol_dst] = []
|
||||
mflags[vol_dst] = {}
|
||||
continue
|
||||
|
||||
lvl, uname = ln.split(" ")
|
||||
if len(ln) > 1:
|
||||
lvl, uname = ln.split(" ")
|
||||
else:
|
||||
lvl = ln
|
||||
uname = "*"
|
||||
|
||||
self._read_vol_str(
|
||||
lvl, uname, mread[vol_dst], mwrite[vol_dst], mflags[vol_dst]
|
||||
lvl,
|
||||
uname,
|
||||
mread[vol_dst],
|
||||
mwrite[vol_dst],
|
||||
madm[vol_dst],
|
||||
mflags[vol_dst],
|
||||
)
|
||||
|
||||
def _read_vol_str(self, lvl, uname, mr, mw, mf):
|
||||
def _read_vol_str(self, lvl, uname, mr, mw, ma, mf):
|
||||
if lvl == "c":
|
||||
cval = True
|
||||
if "=" in uname:
|
||||
@@ -299,6 +342,9 @@ class AuthSrv(object):
|
||||
if lvl in "wa":
|
||||
mw.append(uname)
|
||||
|
||||
if lvl == "a":
|
||||
ma.append(uname)
|
||||
|
||||
def _read_volflag(self, flags, name, value, is_list):
|
||||
if name not in ["mtp"]:
|
||||
flags[name] = value
|
||||
@@ -322,6 +368,7 @@ class AuthSrv(object):
|
||||
user = {} # username:password
|
||||
mread = {} # mountpoint:[username]
|
||||
mwrite = {} # mountpoint:[username]
|
||||
madm = {} # mountpoint:[username]
|
||||
mflags = {} # mountpoint:[flag]
|
||||
mount = {} # dst:src (mountpoint:realpath)
|
||||
|
||||
@@ -345,16 +392,26 @@ class AuthSrv(object):
|
||||
mount[dst] = src
|
||||
mread[dst] = []
|
||||
mwrite[dst] = []
|
||||
madm[dst] = []
|
||||
mflags[dst] = {}
|
||||
|
||||
perms = perms.split(":")
|
||||
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
|
||||
self._read_vol_str(lvl, uname, mread[dst], mwrite[dst], mflags[dst])
|
||||
self._read_vol_str(
|
||||
lvl, uname, mread[dst], mwrite[dst], madm[dst], mflags[dst]
|
||||
)
|
||||
|
||||
if self.args.c:
|
||||
for cfg_fn in self.args.c:
|
||||
with open(cfg_fn, "rb") as f:
|
||||
self._parse_config_file(f, user, mread, mwrite, mflags, mount)
|
||||
try:
|
||||
self._parse_config_file(
|
||||
f, user, mread, mwrite, madm, mflags, mount
|
||||
)
|
||||
except:
|
||||
m = "\n\033[1;31m\nerror in config file {} on line {}:\n\033[0m"
|
||||
print(m.format(cfg_fn, self.line_ctr))
|
||||
raise
|
||||
|
||||
if not mount:
|
||||
# -h says our defaults are CWD at root and read/write for everyone
|
||||
@@ -372,12 +429,15 @@ class AuthSrv(object):
|
||||
|
||||
if dst == "":
|
||||
# rootfs was mapped; fully replaces the default CWD vfs
|
||||
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst], mflags[dst])
|
||||
vfs = VFS(
|
||||
mount[dst], dst, mread[dst], mwrite[dst], madm[dst], mflags[dst]
|
||||
)
|
||||
continue
|
||||
|
||||
v = vfs.add(mount[dst], dst)
|
||||
v.uread = mread[dst]
|
||||
v.uwrite = mwrite[dst]
|
||||
v.uadm = madm[dst]
|
||||
v.flags = mflags[dst]
|
||||
|
||||
missing_users = {}
|
||||
@@ -437,8 +497,10 @@ class AuthSrv(object):
|
||||
# verify tags mentioned by -mt[mp] are used by -mte
|
||||
local_mtp = {}
|
||||
local_only_mtp = {}
|
||||
for a in vol.flags.get("mtp", []) + vol.flags.get("mtm", []):
|
||||
a = a.split("=")[0]
|
||||
tags = vol.flags.get("mtp", []) + vol.flags.get("mtm", [])
|
||||
tags = [x.split("=")[0] for x in tags]
|
||||
tags = [y for x in tags for y in x.split(",")]
|
||||
for a in tags:
|
||||
local_mtp[a] = True
|
||||
local = True
|
||||
for b in self.args.mtp or []:
|
||||
@@ -467,8 +529,10 @@ class AuthSrv(object):
|
||||
self.log(m.format(vol.vpath, mtp), 1)
|
||||
errors = True
|
||||
|
||||
for mtp in self.args.mtp or []:
|
||||
mtp = mtp.split("=")[0]
|
||||
tags = self.args.mtp or []
|
||||
tags = [x.split("=")[0] for x in tags]
|
||||
tags = [y for x in tags for y in x.split(",")]
|
||||
for mtp in tags:
|
||||
if mtp not in all_mte:
|
||||
m = 'metadata tag "{}" is defined by "-mtm" or "-mtp", but is not used by "-mte" (or by any "cmte" volume-flag)'
|
||||
self.log(m.format(mtp), 1)
|
||||
@@ -493,3 +557,90 @@ class AuthSrv(object):
|
||||
|
||||
# import pprint
|
||||
# pprint.pprint({"usr": user, "rd": mread, "wr": mwrite, "mnt": mount})
|
||||
|
||||
def dbg_ls(self):
|
||||
users = self.args.ls
|
||||
vols = "*"
|
||||
flags = []
|
||||
|
||||
try:
|
||||
users, vols = users.split(",", 1)
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
vols, flags = vols.split(",", 1)
|
||||
flags = flags.split(",")
|
||||
except:
|
||||
pass
|
||||
|
||||
if users == "**":
|
||||
users = list(self.user.keys()) + ["*"]
|
||||
else:
|
||||
users = [users]
|
||||
|
||||
for u in users:
|
||||
if u not in self.user and u != "*":
|
||||
raise Exception("user not found: " + u)
|
||||
|
||||
if vols == "*":
|
||||
vols = ["/" + x for x in self.vfs.all_vols.keys()]
|
||||
else:
|
||||
vols = [vols]
|
||||
|
||||
for v in vols:
|
||||
if not v.startswith("/"):
|
||||
raise Exception("volumes must start with /")
|
||||
|
||||
if v[1:] not in self.vfs.all_vols:
|
||||
raise Exception("volume not found: " + v)
|
||||
|
||||
self.log({"users": users, "vols": vols, "flags": flags})
|
||||
for k, v in self.vfs.all_vols.items():
|
||||
self.log("/{}: read({}) write({})".format(k, v.uread, v.uwrite))
|
||||
|
||||
flag_v = "v" in flags
|
||||
flag_ln = "ln" in flags
|
||||
flag_p = "p" in flags
|
||||
flag_r = "r" in flags
|
||||
|
||||
n_bads = 0
|
||||
for v in vols:
|
||||
v = v[1:]
|
||||
vtop = "/{}/".format(v) if v else "/"
|
||||
for u in users:
|
||||
self.log("checking /{} as {}".format(v, u))
|
||||
try:
|
||||
vn, _ = self.vfs.get(v, u, True, False)
|
||||
except:
|
||||
continue
|
||||
|
||||
atop = vn.realpath
|
||||
g = vn.walk("", "", u, True, not self.args.no_scandir, lstat=False)
|
||||
for vpath, apath, files, _, _ in g:
|
||||
fnames = [n[0] for n in files]
|
||||
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
|
||||
vpaths = [vtop + x for x in vpaths]
|
||||
apaths = [os.path.join(apath, n) for n in fnames]
|
||||
files = list(zip(vpaths, apaths))
|
||||
|
||||
if flag_ln:
|
||||
files = [x for x in files if not x[1].startswith(atop + os.sep)]
|
||||
n_bads += len(files)
|
||||
|
||||
if flag_v:
|
||||
msg = [
|
||||
'# user "{}", vpath "{}"\n{}'.format(u, vp, ap)
|
||||
for vp, ap in files
|
||||
]
|
||||
else:
|
||||
msg = [x[1] for x in files]
|
||||
|
||||
if msg:
|
||||
nuprint("\n".join(msg))
|
||||
|
||||
if n_bads and flag_p:
|
||||
raise Exception("found symlink leaving volume, and strict is set")
|
||||
|
||||
if not flag_r:
|
||||
sys.exit(0)
|
||||
|
||||
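For orientation, the new --ls value parsed in dbg_ls above appears to be a comma-separated triple of users, volumes and flags (reading the flag checks further down: "ln" lists symlinks leaving a volume, "p" makes that fatal, "r" keeps the server running afterwards). A minimal sketch with a hypothetical value; partition() behaves like the split() calls above when both commas are present:

arg = "**,*,ln,p,r"                      # hypothetical --ls value
users, _, rest = arg.partition(",")      # "**"  -> every known user plus "*"
vols, _, flagstr = rest.partition(",")   # "*"   -> every volume
flags = flagstr.split(",") if flagstr else []
print(users, vols, flags)                # ** * ['ln', 'p', 'r']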
@@ -10,10 +10,11 @@ import json
|
||||
import string
|
||||
import socket
|
||||
import ctypes
|
||||
import traceback
|
||||
from datetime import datetime
|
||||
import calendar
|
||||
|
||||
from .__init__ import E, PY2, WINDOWS
|
||||
from .__init__ import E, PY2, WINDOWS, ANYWIN
|
||||
from .util import * # noqa # pylint: disable=unused-wildcard-import
|
||||
from .szip import StreamZip
|
||||
from .star import StreamTar
|
||||
@@ -22,6 +23,10 @@ if not PY2:
|
||||
unicode = str
|
||||
|
||||
|
||||
NO_CACHE = {"Cache-Control": "no-cache"}
|
||||
NO_STORE = {"Cache-Control": "no-store; max-age=0"}
|
||||
|
||||
|
||||
class HttpCli(object):
|
||||
"""
|
||||
Spawned by HttpConn to process one http transaction
|
||||
@@ -36,6 +41,8 @@ class HttpCli(object):
|
||||
self.addr = conn.addr
|
||||
self.args = conn.args
|
||||
self.auth = conn.auth
|
||||
self.ico = conn.ico
|
||||
self.thumbcli = conn.thumbcli
|
||||
self.log_func = conn.log_func
|
||||
self.log_src = conn.log_src
|
||||
self.tls = hasattr(self.s, "cipher")
|
||||
@@ -100,6 +107,16 @@ class HttpCli(object):
|
||||
self.ip = v.split(",")[0]
|
||||
self.log_src = self.conn.set_rproxy(self.ip)
|
||||
|
||||
if self.args.ihead:
|
||||
keys = self.args.ihead
|
||||
if "*" in keys:
|
||||
keys = list(sorted(self.headers.keys()))
|
||||
|
||||
for k in keys:
|
||||
v = self.headers.get(k)
|
||||
if v is not None:
|
||||
self.log("[H] {}: \033[33m[{}]".format(k, v), 6)
|
||||
|
||||
# split req into vpath + uparam
|
||||
uparam = {}
|
||||
if "?" not in self.req:
|
||||
@@ -120,29 +137,34 @@ class HttpCli(object):
|
||||
else:
|
||||
uparam[k.lower()] = False
|
||||
|
||||
self.ouparam = {k: v for k, v in uparam.items()}
|
||||
|
||||
cookies = self.headers.get("cookie") or {}
|
||||
if cookies:
|
||||
cookies = [x.split("=", 1) for x in cookies.split(";") if "=" in x]
|
||||
cookies = {k.strip(): unescape_cookie(v) for k, v in cookies}
|
||||
for kc, ku in [["cppwd", "pw"], ["b", "b"]]:
|
||||
if kc in cookies and ku not in uparam:
|
||||
uparam[ku] = cookies[kc]
|
||||
|
||||
self.uparam = uparam
|
||||
self.cookies = cookies
|
||||
self.vpath = unquotep(vpath)
|
||||
|
||||
pwd = None
|
||||
if "cookie" in self.headers:
|
||||
cookies = self.headers["cookie"].split(";")
|
||||
for k, v in [x.split("=", 1) for x in cookies]:
|
||||
if k.strip() != "cppwd":
|
||||
continue
|
||||
|
||||
pwd = unescape_cookie(v)
|
||||
break
|
||||
|
||||
pwd = uparam.get("pw", pwd)
|
||||
pwd = uparam.get("pw")
|
||||
self.uname = self.auth.iuser.get(pwd, "*")
|
||||
if self.uname:
|
||||
self.rvol = self.auth.vfs.user_tree(self.uname, readable=True)
|
||||
self.wvol = self.auth.vfs.user_tree(self.uname, writable=True)
|
||||
self.rvol, self.wvol, self.avol = [[], [], []]
|
||||
self.auth.vfs.user_tree(self.uname, self.rvol, self.wvol, self.avol)
|
||||
|
||||
ua = self.headers.get("user-agent", "")
|
||||
if ua.startswith("rclone/"):
|
||||
self.is_rclone = ua.startswith("rclone/")
|
||||
if self.is_rclone:
|
||||
uparam["raw"] = False
|
||||
uparam["dots"] = False
|
||||
uparam["b"] = False
|
||||
cookies["b"] = False
|
||||
|
||||
self.do_log = not self.conn.lf_url or not self.conn.lf_url.search(self.req)
|
||||
|
||||
try:
|
||||
if self.mode in ["GET", "HEAD"]:
|
||||
@@ -182,10 +204,8 @@ class HttpCli(object):
|
||||
self.out_headers.update(headers)
|
||||
|
||||
# default to utf8 html if no content-type is set
|
||||
try:
|
||||
mime = mime or self.out_headers["Content-Type"]
|
||||
except KeyError:
|
||||
mime = "text/html; charset=UTF-8"
|
||||
if not mime:
|
||||
mime = self.out_headers.get("Content-Type", "text/html; charset=UTF-8")
|
||||
|
||||
self.out_headers["Content-Type"] = mime
|
||||
|
||||
@@ -220,7 +240,14 @@ class HttpCli(object):
|
||||
removing anything in rm, adding pairs in add
|
||||
"""
|
||||
|
||||
kv = {k: v for k, v in self.uparam.items() if k not in rm}
|
||||
if self.is_rclone:
|
||||
return ""
|
||||
|
||||
kv = {
|
||||
k: v
|
||||
for k, v in self.uparam.items()
|
||||
if k not in rm and self.cookies.get(k) != v
|
||||
}
|
||||
kv.update(add)
|
||||
if not kv:
|
||||
return ""
|
||||
@@ -228,21 +255,43 @@ class HttpCli(object):
|
||||
r = ["{}={}".format(k, quotep(v)) if v else k for k, v in kv.items()]
|
||||
return "?" + "&".join(r)
|
||||
|
||||
def redirect(
|
||||
self, vpath, suf="", msg="aight", flavor="go to", click=True, use302=False
|
||||
):
|
||||
html = self.j2(
|
||||
"msg",
|
||||
h2='<a href="/{}">{} /{}</a>'.format(
|
||||
quotep(vpath) + suf, flavor, html_escape(vpath, crlf=True) + suf
|
||||
),
|
||||
pre=msg,
|
||||
click=click,
|
||||
).encode("utf-8", "replace")
|
||||
|
||||
if use302:
|
||||
h = {"Location": "/" + vpath, "Cache-Control": "no-cache"}
|
||||
self.reply(html, status=302, headers=h)
|
||||
else:
|
||||
self.reply(html)
|
||||
|
||||
def handle_get(self):
|
||||
logmsg = "{:4} {}".format(self.mode, self.req)
|
||||
if self.do_log:
|
||||
logmsg = "{:4} {}".format(self.mode, self.req)
|
||||
|
||||
if "range" in self.headers:
|
||||
try:
|
||||
rval = self.headers["range"].split("=", 1)[1]
|
||||
except:
|
||||
rval = self.headers["range"]
|
||||
if "range" in self.headers:
|
||||
try:
|
||||
rval = self.headers["range"].split("=", 1)[1]
|
||||
except:
|
||||
rval = self.headers["range"]
|
||||
|
||||
logmsg += " [\033[36m" + rval + "\033[0m]"
|
||||
logmsg += " [\033[36m" + rval + "\033[0m]"
|
||||
|
||||
self.log(logmsg)
|
||||
self.log(logmsg)
|
||||
|
||||
# "embedded" resources
|
||||
if self.vpath.startswith(".cpr"):
|
||||
if self.vpath.startswith(".cpr/ico/"):
|
||||
return self.tx_ico(self.vpath.split("/")[-1], exact=True)
|
||||
|
||||
static_path = os.path.join(E.mod, "web/", self.vpath[5:])
|
||||
return self.tx_file(static_path)
|
||||
|
||||
@@ -250,33 +299,45 @@ class HttpCli(object):
|
||||
return self.tx_tree()
|
||||
|
||||
# conditional redirect to single volumes
|
||||
if self.vpath == "" and not self.uparam:
|
||||
if self.vpath == "" and not self.ouparam:
|
||||
nread = len(self.rvol)
|
||||
nwrite = len(self.wvol)
|
||||
if nread + nwrite == 1 or (self.rvol == self.wvol and nread == 1):
|
||||
if nread == 1:
|
||||
self.vpath = self.rvol[0]
|
||||
vpath = self.rvol[0]
|
||||
else:
|
||||
self.vpath = self.wvol[0]
|
||||
vpath = self.wvol[0]
|
||||
|
||||
self.absolute_urls = True
|
||||
if self.vpath != vpath:
|
||||
self.redirect(vpath, flavor="redirecting to", use302=True)
|
||||
return True
|
||||
|
||||
# go home if verboten
|
||||
self.readable, self.writable = self.conn.auth.vfs.can_access(
|
||||
self.vpath, self.uname
|
||||
)
|
||||
if not self.readable and not self.writable:
|
||||
self.log("inaccessible: [{}]".format(self.vpath))
|
||||
if self.vpath:
|
||||
self.log("inaccessible: [{}]".format(self.vpath))
|
||||
raise Pebkac(404)
|
||||
|
||||
self.uparam = {"h": False}
|
||||
|
||||
if "h" in self.uparam:
|
||||
self.vpath = None
|
||||
return self.tx_mounts()
|
||||
|
||||
if "scan" in self.uparam:
|
||||
return self.scanvol()
|
||||
|
||||
if "stack" in self.uparam:
|
||||
return self.tx_stack()
|
||||
|
||||
return self.tx_browser()
|
||||
|
||||
def handle_options(self):
|
||||
self.log("OPTIONS " + self.req)
|
||||
if self.do_log:
|
||||
self.log("OPTIONS " + self.req)
|
||||
|
||||
self.send_headers(
|
||||
None,
|
||||
204,
|
||||
@@ -377,7 +438,7 @@ class HttpCli(object):
|
||||
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
|
||||
path = os.path.join(fdir, fn)
|
||||
|
||||
with open(path, "wb", 512 * 1024) as f:
|
||||
with open(fsenc(path), "wb", 512 * 1024) as f:
|
||||
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
|
||||
|
||||
self.conn.hsrv.broker.put(
|
||||
@@ -497,9 +558,9 @@ class HttpCli(object):
|
||||
if sub:
|
||||
try:
|
||||
dst = os.path.join(vfs.realpath, rem)
|
||||
os.makedirs(dst)
|
||||
os.makedirs(fsenc(dst))
|
||||
except:
|
||||
if not os.path.isdir(dst):
|
||||
if not os.path.isdir(fsenc(dst)):
|
||||
raise Pebkac(400, "some file got your folder name")
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
|
||||
@@ -534,11 +595,12 @@ class HttpCli(object):
|
||||
self.log("qj: " + repr(vbody))
|
||||
hits = idx.fsearch(vols, body)
|
||||
msg = repr(hits)
|
||||
taglist = []
|
||||
taglist = {}
|
||||
else:
|
||||
# search by query params
|
||||
self.log("qj: " + repr(body))
|
||||
hits, taglist = idx.search(vols, body)
|
||||
q = body["q"]
|
||||
self.log("qj: " + q)
|
||||
hits, taglist = idx.search(vols, q)
|
||||
msg = len(hits)
|
||||
|
||||
idx.p_end = time.time()
|
||||
@@ -587,7 +649,7 @@ class HttpCli(object):
|
||||
|
||||
reader = read_socket(self.sr, remains)
|
||||
|
||||
with open(path, "rb+", 512 * 1024) as f:
|
||||
with open(fsenc(path), "rb+", 512 * 1024) as f:
|
||||
f.seek(cstart[0])
|
||||
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
|
||||
|
||||
@@ -626,11 +688,11 @@ class HttpCli(object):
|
||||
self.loud_reply(x, status=500)
|
||||
return False
|
||||
|
||||
if not WINDOWS and num_left == 0:
|
||||
if not ANYWIN and num_left == 0:
|
||||
times = (int(time.time()), int(lastmod))
|
||||
self.log("no more chunks, setting times {}".format(times))
|
||||
try:
|
||||
os.utime(path, times)
|
||||
os.utime(fsenc(path), times)
|
||||
except:
|
||||
self.log("failed to utime ({}, {})".format(path, times))
|
||||
|
||||
@@ -645,13 +707,16 @@ class HttpCli(object):
|
||||
|
||||
if pwd in self.auth.iuser:
|
||||
msg = "login ok"
|
||||
dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365)
|
||||
exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
|
||||
else:
|
||||
msg = "naw dude"
|
||||
pwd = "x" # nosec
|
||||
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
|
||||
|
||||
h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)}
|
||||
ck = "cppwd={}; Path=/; Expires={}; SameSite=Lax".format(pwd, exp)
|
||||
html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
|
||||
self.reply(html.encode("utf-8"), headers=h)
|
||||
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
|
||||
return True
|
||||
|
||||
def handle_mkdir(self):
|
||||
@@ -680,14 +745,7 @@ class HttpCli(object):
|
||||
raise Pebkac(500, "mkdir failed, check the logs")
|
||||
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
esc_paths = [quotep(vpath), html_escape(vpath)]
|
||||
html = self.j2(
|
||||
"msg",
|
||||
h2='<a href="/{}">go to /{}</a>'.format(*esc_paths),
|
||||
pre="aight",
|
||||
click=True,
|
||||
)
|
||||
self.reply(html.encode("utf-8", "replace"))
|
||||
self.redirect(vpath)
|
||||
return True
|
||||
|
||||
def handle_new_md(self):
|
||||
@@ -714,15 +772,7 @@ class HttpCli(object):
|
||||
f.write(b"`GRUNNUR`\n")
|
||||
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
html = self.j2(
|
||||
"msg",
|
||||
h2='<a href="/{}?edit">go to /{}?edit</a>'.format(
|
||||
quotep(vpath), html_escape(vpath)
|
||||
),
|
||||
pre="aight",
|
||||
click=True,
|
||||
)
|
||||
self.reply(html.encode("utf-8", "replace"))
|
||||
self.redirect(vpath, "?edit")
|
||||
return True
|
||||
|
||||
def handle_plain_upload(self):
|
||||
@@ -741,7 +791,9 @@ class HttpCli(object):
|
||||
|
||||
if p_file and not nullwrite:
|
||||
fdir = os.path.join(vfs.realpath, rem)
|
||||
fname = sanitize_fn(p_file)
|
||||
fname = sanitize_fn(
|
||||
p_file, bad=[".prologue.html", ".epilogue.html"]
|
||||
)
|
||||
|
||||
if not os.path.isdir(fsenc(fdir)):
|
||||
raise Pebkac(404, "that folder does not exist")
|
||||
@@ -761,7 +813,7 @@ class HttpCli(object):
|
||||
if sz == 0:
|
||||
raise Pebkac(400, "empty files in post")
|
||||
|
||||
files.append([sz, sha512_hex])
|
||||
files.append([sz, sha512_hex, p_file, fname])
|
||||
self.conn.hsrv.broker.put(
|
||||
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fname
|
||||
)
|
||||
@@ -770,12 +822,16 @@ class HttpCli(object):
|
||||
except Pebkac:
|
||||
if fname != os.devnull:
|
||||
fp = os.path.join(fdir, fname)
|
||||
fp2 = fp
|
||||
if self.args.dotpart:
|
||||
fp2 = os.path.join(fdir, "." + fname)
|
||||
|
||||
suffix = ".PARTIAL"
|
||||
try:
|
||||
os.rename(fsenc(fp), fsenc(fp + suffix))
|
||||
os.rename(fsenc(fp), fsenc(fp2 + suffix))
|
||||
except:
|
||||
fp = fp[: -len(suffix)]
|
||||
os.rename(fsenc(fp), fsenc(fp + suffix))
|
||||
fp2 = fp2[: -len(suffix) - 1]
|
||||
os.rename(fsenc(fp), fsenc(fp2 + suffix))
|
||||
|
||||
raise
|
||||
|
||||
@@ -792,43 +848,46 @@ class HttpCli(object):
|
||||
errmsg = "ERROR: " + errmsg
|
||||
status = "ERROR"
|
||||
|
||||
msg = "{0} // {1} bytes // {2:.3f} MiB/s\n".format(status, sz_total, spd)
|
||||
msg = "{} // {} bytes // {:.3f} MiB/s\n".format(status, sz_total, spd)
|
||||
jmsg = {"status": status, "sz": sz_total, "mbps": round(spd, 3), "files": []}
|
||||
|
||||
for sz, sha512 in files:
|
||||
msg += "sha512: {0} // {1} bytes\n".format(sha512[:56], sz)
|
||||
for sz, sha512, ofn, lfn in files:
|
||||
vpath = (self.vpath + "/" if self.vpath else "") + lfn
|
||||
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format(
|
||||
sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True)
|
||||
)
|
||||
# truncated SHA-512 prevents length extension attacks;
|
||||
# using SHA-512/224, optionally SHA-512/256 = :64
|
||||
jpart = {
|
||||
"url": "{}://{}/{}".format(
|
||||
"https" if self.tls else "http",
|
||||
self.headers.get("host", "copyparty"),
|
||||
vpath,
|
||||
),
|
||||
"sha512": sha512[:56],
|
||||
"sz": sz,
|
||||
"fn": lfn,
|
||||
"fn_orig": ofn,
|
||||
"path": vpath,
|
||||
}
|
||||
jmsg["files"].append(jpart)
|
||||
|
||||
vspd = self._spd(sz_total, False)
|
||||
self.log("{} {}".format(vspd, msg))
|
||||
|
||||
if not nullwrite:
|
||||
# TODO this is bad
|
||||
log_fn = "up.{:.6f}.txt".format(t0)
|
||||
with open(log_fn, "wb") as f:
|
||||
f.write(
|
||||
(
|
||||
"\n".join(
|
||||
unicode(x)
|
||||
for x in [
|
||||
":".join(unicode(x) for x in [self.ip, self.addr[1]]),
|
||||
msg.rstrip(),
|
||||
]
|
||||
)
|
||||
+ "\n"
|
||||
+ errmsg
|
||||
+ "\n"
|
||||
).encode("utf-8")
|
||||
)
|
||||
ft = "{}:{}".format(self.ip, self.addr[1])
|
||||
ft = "{}\n{}\n{}\n".format(ft, msg.rstrip(), errmsg)
|
||||
f.write(ft.encode("utf-8"))
|
||||
|
||||
if "j" in self.uparam:
|
||||
jtxt = json.dumps(jmsg, indent=2, sort_keys=True)
|
||||
self.reply(jtxt.encode("utf-8", "replace"), mime="application/json")
|
||||
else:
|
||||
self.redirect(self.vpath, msg=msg, flavor="return to", click=False)
|
||||
|
||||
html = self.j2(
|
||||
"msg",
|
||||
h2='<a href="/{}">return to /{}</a>'.format(
|
||||
quotep(self.vpath), html_escape(self.vpath)
|
||||
),
|
||||
pre=msg,
|
||||
)
|
||||
self.reply(html.encode("utf-8", "replace"))
|
||||
self.parser.drop()
|
||||
return True
|
||||
|
||||
@@ -903,16 +962,16 @@ class HttpCli(object):
|
||||
mdir, mfile = os.path.split(fp)
|
||||
mfile2 = "{}.{:.3f}.md".format(mfile[:-3], srv_lastmod)
|
||||
try:
|
||||
os.mkdir(os.path.join(mdir, ".hist"))
|
||||
os.mkdir(fsenc(os.path.join(mdir, ".hist")))
|
||||
except:
|
||||
pass
|
||||
os.rename(fp, os.path.join(mdir, ".hist", mfile2))
|
||||
os.rename(fsenc(fp), fsenc(os.path.join(mdir, ".hist", mfile2)))
|
||||
|
||||
p_field, _, p_data = next(self.parser.gen)
|
||||
if p_field != "body":
|
||||
raise Pebkac(400, "expected body, got {}".format(p_field))
|
||||
|
||||
with open(fp, "wb", 512 * 1024) as f:
|
||||
with open(fsenc(fp), "wb", 512 * 1024) as f:
|
||||
sz, sha512, _ = hashcopy(self.conn, p_data, f)
|
||||
|
||||
new_lastmod = os.stat(fsenc(fp)).st_mtime
|
||||
@@ -928,13 +987,11 @@ class HttpCli(object):
|
||||
return True
|
||||
|
||||
def _chk_lastmod(self, file_ts):
|
||||
file_dt = datetime.utcfromtimestamp(file_ts)
|
||||
file_lastmod = file_dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
|
||||
|
||||
file_lastmod = http_ts(file_ts)
|
||||
cli_lastmod = self.headers.get("if-modified-since")
|
||||
if cli_lastmod:
|
||||
try:
|
||||
cli_dt = time.strptime(cli_lastmod, "%a, %d %b %Y %H:%M:%S GMT")
|
||||
cli_dt = time.strptime(cli_lastmod, HTTP_TS_FMT)
|
||||
cli_ts = calendar.timegm(cli_dt)
|
||||
return file_lastmod, int(file_ts) > int(cli_ts)
|
||||
except Exception as ex:
|
||||
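A minimal sketch of the If-Modified-Since comparison this hunk switches to, using only the stdlib; HTTP_TS_FMT is assumed to be the RFC 1123 format string that the removed line spelled out literally and that http_ts() presumably reuses:

import calendar
import time

HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT"   # assumed definition

def modified_since(file_ts, cli_lastmod):
    # True if the file changed after the timestamp the client sent
    cli_ts = calendar.timegm(time.strptime(cli_lastmod, HTTP_TS_FMT))
    return int(file_ts) > int(cli_ts)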
@@ -1081,19 +1138,21 @@ class HttpCli(object):
|
||||
# send reply
|
||||
|
||||
if not is_compressed:
|
||||
self.out_headers["Cache-Control"] = "no-cache"
|
||||
self.out_headers.update(NO_CACHE)
|
||||
|
||||
self.out_headers["Accept-Ranges"] = "bytes"
|
||||
self.send_headers(
|
||||
length=upper - lower,
|
||||
status=status,
|
||||
mime=guess_mime(req_path)[0] or "application/octet-stream",
|
||||
mime=guess_mime(req_path),
|
||||
)
|
||||
|
||||
logmsg += unicode(status) + logtail
|
||||
|
||||
if self.mode == "HEAD" or not do_send:
|
||||
self.log(logmsg)
|
||||
if self.do_log:
|
||||
self.log(logmsg)
|
||||
|
||||
return True
|
||||
|
||||
ret = True
|
||||
@@ -1107,7 +1166,9 @@ class HttpCli(object):
|
||||
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
|
||||
|
||||
spd = self._spd((upper - lower) - remains)
|
||||
self.log("{}, {}".format(logmsg, spd))
|
||||
if self.do_log:
|
||||
self.log("{}, {}".format(logmsg, spd))
|
||||
|
||||
return ret
|
||||
|
||||
def tx_zip(self, fmt, uarg, vn, rem, items, dots):
|
||||
@@ -1173,6 +1234,34 @@ class HttpCli(object):
|
||||
self.log("{}, {}".format(logmsg, spd))
|
||||
return True
|
||||
|
||||
def tx_ico(self, ext, exact=False):
|
||||
if ext.endswith("/"):
|
||||
ext = "folder"
|
||||
exact = True
|
||||
|
||||
bad = re.compile(r"[](){}/[]|^[0-9_-]*$")
|
||||
n = ext.split(".")[::-1]
|
||||
if not exact:
|
||||
n = n[:-1]
|
||||
|
||||
ext = ""
|
||||
for v in n:
|
||||
if len(v) > 7 or bad.search(v):
|
||||
break
|
||||
|
||||
ext = "{}.{}".format(v, ext)
|
||||
|
||||
ext = ext.rstrip(".") or "unk"
|
||||
if len(ext) > 11:
|
||||
ext = "⋯" + ext[-9:]
|
||||
|
||||
mime, ico = self.ico.get(ext, not exact)
|
||||
|
||||
dt = datetime.utcfromtimestamp(E.t0)
|
||||
lm = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
|
||||
self.reply(ico, mime=mime, headers={"Last-Modified": lm})
|
||||
return True
|
||||
|
||||
def tx_md(self, fs_path):
|
||||
logmsg = "{:4} {} ".format("", self.req)
|
||||
|
||||
@@ -1181,60 +1270,124 @@ class HttpCli(object):
|
||||
template = self.j2(tpl)
|
||||
|
||||
st = os.stat(fsenc(fs_path))
|
||||
# sz_md = st.st_size
|
||||
ts_md = st.st_mtime
|
||||
|
||||
st = os.stat(fsenc(html_path))
|
||||
ts_html = st.st_mtime
|
||||
|
||||
# TODO dont load into memory ;_;
|
||||
# (trivial fix, count the &'s)
|
||||
with open(fsenc(fs_path), "rb") as f:
|
||||
md = f.read().replace(b"&", b"&")
|
||||
sz_md = len(md)
|
||||
sz_md = 0
|
||||
for buf in yieldfile(fs_path):
|
||||
sz_md += len(buf)
|
||||
for c, v in [[b"&", 4], [b"<", 3], [b">", 3]]:
|
||||
sz_md += (len(buf) - len(buf.replace(c, b""))) * v
|
||||
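# editorial worked example of the size estimate above, for a hypothetical buffer:
#   buf = b"a&b<c"            -> len(buf) = 5
#   one "&" becomes "&amp;"   -> +4 bytes
#   one "<" becomes "&lt;"    -> +3 bytes
#   predicted escaped length  -> 5 + 4 + 3 = 12 = len(b"a&amp;b&lt;c")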
|
||||
file_ts = max(ts_md, ts_html)
|
||||
file_lastmod, do_send = self._chk_lastmod(file_ts)
|
||||
self.out_headers["Last-Modified"] = file_lastmod
|
||||
self.out_headers["Cache-Control"] = "no-cache"
|
||||
self.out_headers.update(NO_CACHE)
|
||||
status = 200 if do_send else 304
|
||||
|
||||
boundary = "\roll\tide"
|
||||
targs = {
|
||||
"edit": "edit" in self.uparam,
|
||||
"title": html_escape(self.vpath),
|
||||
"title": html_escape(self.vpath, crlf=True),
|
||||
"lastmod": int(ts_md * 1000),
|
||||
"md_plug": "true" if self.args.emp else "false",
|
||||
"md_chk_rate": self.args.mcr,
|
||||
"md": "",
|
||||
"md": boundary,
|
||||
}
|
||||
sz_html = len(template.render(**targs).encode("utf-8"))
|
||||
self.send_headers(sz_html + sz_md, status)
|
||||
html = template.render(**targs).encode("utf-8", "replace")
|
||||
html = html.split(boundary.encode("utf-8"))
|
||||
if len(html) != 2:
|
||||
raise Exception("boundary appears in " + html_path)
|
||||
|
||||
self.send_headers(sz_md + len(html[0]) + len(html[1]), status)
|
||||
|
||||
logmsg += unicode(status)
|
||||
if self.mode == "HEAD" or not do_send:
|
||||
self.log(logmsg)
|
||||
if self.do_log:
|
||||
self.log(logmsg)
|
||||
|
||||
return True
|
||||
|
||||
# TODO jinja2 can stream this right?
|
||||
targs["md"] = md.decode("utf-8", "replace")
|
||||
html = template.render(**targs).encode("utf-8")
|
||||
try:
|
||||
self.s.sendall(html)
|
||||
self.s.sendall(html[0])
|
||||
for buf in yieldfile(fs_path):
|
||||
self.s.sendall(html_bescape(buf))
|
||||
|
||||
self.s.sendall(html[1])
|
||||
|
||||
except:
|
||||
self.log(logmsg + " \033[31md/c\033[0m")
|
||||
return False
|
||||
|
||||
self.log(logmsg + " " + unicode(len(html)))
|
||||
if self.do_log:
|
||||
self.log(logmsg + " " + unicode(len(html)))
|
||||
|
||||
return True
|
||||
|
||||
def tx_mounts(self):
|
||||
suf = self.urlq(rm=["h"])
|
||||
rvol = [x + "/" if x else x for x in self.rvol]
|
||||
wvol = [x + "/" if x else x for x in self.wvol]
|
||||
html = self.j2("splash", this=self, rvol=rvol, wvol=wvol, url_suf=suf)
|
||||
self.reply(html.encode("utf-8"))
|
||||
rvol, wvol, avol = [
|
||||
[("/" + x).rstrip("/") + "/" for x in y]
|
||||
for y in [self.rvol, self.wvol, self.avol]
|
||||
]
|
||||
|
||||
vstate = {}
|
||||
if self.avol and not self.args.no_rescan:
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.get_volstate")
|
||||
vstate = json.loads(x.get())
|
||||
vstate = {("/" + k).rstrip("/") + "/": v for k, v in vstate.items()}
|
||||
|
||||
html = self.j2(
|
||||
"splash",
|
||||
this=self,
|
||||
rvol=rvol,
|
||||
wvol=wvol,
|
||||
avol=avol,
|
||||
vstate=vstate,
|
||||
url_suf=suf,
|
||||
)
|
||||
self.reply(html.encode("utf-8"), headers=NO_STORE)
|
||||
return True
|
||||
|
||||
def scanvol(self):
|
||||
if not self.readable or not self.writable:
|
||||
raise Pebkac(403, "not admin")
|
||||
|
||||
if self.args.no_rescan:
|
||||
raise Pebkac(403, "disabled by argv")
|
||||
|
||||
vn, _ = self.auth.vfs.get(self.vpath, self.uname, True, True)
|
||||
|
||||
args = [self.auth.vfs.all_vols, [vn.vpath]]
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args)
|
||||
x = x.get()
|
||||
if not x:
|
||||
self.redirect("", "?h")
|
||||
return ""
|
||||
|
||||
raise Pebkac(500, x)
|
||||
|
||||
def tx_stack(self):
|
||||
if not self.readable or not self.writable:
|
||||
raise Pebkac(403, "not admin")
|
||||
|
||||
if self.args.no_stack:
|
||||
raise Pebkac(403, "disabled by argv")
|
||||
|
||||
ret = []
|
||||
names = dict([(t.ident, t.name) for t in threading.enumerate()])
|
||||
for tid, stack in sys._current_frames().items():
|
||||
ret.append("\n\n# {} ({:x})".format(names.get(tid), tid))
|
||||
for fn, lno, name, line in traceback.extract_stack(stack):
|
||||
ret.append('File: "{}", line {}, in {}'.format(fn, lno, name))
|
||||
if line:
|
||||
ret.append(" " + str(line.strip()))
|
||||
|
||||
ret = ("<pre>" + "\n".join(ret)).encode("utf-8")
|
||||
self.reply(ret)
|
||||
|
||||
def tx_tree(self):
|
||||
top = self.uparam["tree"] or ""
|
||||
dst = self.vpath
|
||||
@@ -1264,7 +1417,9 @@ class HttpCli(object):
|
||||
|
||||
try:
|
||||
vn, rem = self.auth.vfs.get(top, self.uname, True, False)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(
|
||||
rem, self.uname, not self.args.no_scandir, True
|
||||
)
|
||||
except:
|
||||
vfs_ls = []
|
||||
vfs_virt = {}
|
||||
@@ -1300,32 +1455,138 @@ class HttpCli(object):
|
||||
else:
|
||||
vpath += "/" + node
|
||||
|
||||
vpnodes.append([quotep(vpath) + "/", html_escape(node)])
|
||||
vpnodes.append([quotep(vpath) + "/", html_escape(node, crlf=True)])
|
||||
|
||||
vn, rem = self.auth.vfs.get(
|
||||
self.vpath, self.uname, self.readable, self.writable
|
||||
)
|
||||
abspath = vn.canonical(rem)
|
||||
|
||||
if not os.path.exists(fsenc(abspath)):
|
||||
# print(abspath)
|
||||
try:
|
||||
st = os.stat(fsenc(abspath))
|
||||
except:
|
||||
raise Pebkac(404)
|
||||
|
||||
if not os.path.isdir(fsenc(abspath)):
|
||||
if abspath.endswith(".md") and "raw" not in self.uparam:
|
||||
return self.tx_md(abspath)
|
||||
|
||||
if self.readable and not stat.S_ISDIR(st.st_mode):
|
||||
if rem.startswith(".hist/up2k."):
|
||||
raise Pebkac(403)
|
||||
|
||||
th_fmt = self.uparam.get("th")
|
||||
if th_fmt is not None:
|
||||
thp = None
|
||||
if self.thumbcli:
|
||||
thp = self.thumbcli.get(vn.realpath, rem, int(st.st_mtime), th_fmt)
|
||||
|
||||
if thp:
|
||||
return self.tx_file(thp)
|
||||
|
||||
return self.tx_ico(rem)
|
||||
|
||||
if abspath.endswith(".md") and "raw" not in self.uparam:
|
||||
return self.tx_md(abspath)
|
||||
|
||||
return self.tx_file(abspath)
|
||||
|
||||
srv_info = []
|
||||
|
||||
try:
|
||||
if not self.args.nih:
|
||||
srv_info.append(unicode(socket.gethostname()).split(".")[0])
|
||||
except:
|
||||
self.log("#wow #whoa")
|
||||
|
||||
try:
|
||||
# some fuses misbehave
|
||||
if not self.args.nid:
|
||||
if WINDOWS:
|
||||
bfree = ctypes.c_ulonglong(0)
|
||||
ctypes.windll.kernel32.GetDiskFreeSpaceExW(
|
||||
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
|
||||
)
|
||||
srv_info.append(humansize(bfree.value) + " free")
|
||||
else:
|
||||
sv = os.statvfs(fsenc(abspath))
|
||||
free = humansize(sv.f_frsize * sv.f_bfree, True)
|
||||
total = humansize(sv.f_frsize * sv.f_blocks, True)
|
||||
|
||||
srv_info.append(free + " free")
|
||||
srv_info.append(total)
|
||||
except:
|
||||
pass
|
||||
|
||||
srv_info = "</span> /// <span>".join(srv_info)
|
||||
|
||||
perms = []
|
||||
if self.readable:
|
||||
perms.append("read")
|
||||
if self.writable:
|
||||
perms.append("write")
|
||||
|
||||
url_suf = self.urlq()
|
||||
is_ls = "ls" in self.uparam
|
||||
ts = "" # "?{}".format(time.time())
|
||||
|
||||
tpl = "browser"
|
||||
if "b" in self.uparam:
|
||||
tpl = "browser2"
|
||||
|
||||
logues = ["", ""]
|
||||
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
|
||||
fn = os.path.join(abspath, fn)
|
||||
if os.path.exists(fsenc(fn)):
|
||||
with open(fsenc(fn), "rb") as f:
|
||||
logues[n] = f.read().decode("utf-8")
|
||||
|
||||
ls_ret = {
|
||||
"dirs": [],
|
||||
"files": [],
|
||||
"taglist": [],
|
||||
"srvinf": srv_info,
|
||||
"perms": perms,
|
||||
"logues": logues,
|
||||
}
|
||||
j2a = {
|
||||
"vdir": quotep(self.vpath),
|
||||
"vpnodes": vpnodes,
|
||||
"files": [],
|
||||
"ts": ts,
|
||||
"perms": json.dumps(perms),
|
||||
"taglist": [],
|
||||
"tag_order": [],
|
||||
"have_up2k_idx": ("e2d" in vn.flags),
|
||||
"have_tags_idx": ("e2t" in vn.flags),
|
||||
"have_zip": (not self.args.no_zip),
|
||||
"have_b_u": (self.writable and self.uparam.get("b") == "u"),
|
||||
"url_suf": url_suf,
|
||||
"logues": logues,
|
||||
"title": html_escape(self.vpath, crlf=True),
|
||||
"srv_info": srv_info,
|
||||
}
|
||||
if not self.readable:
|
||||
if is_ls:
|
||||
ret = json.dumps(ls_ret)
|
||||
self.reply(
|
||||
ret.encode("utf-8", "replace"),
|
||||
mime="application/json",
|
||||
headers=NO_STORE,
|
||||
)
|
||||
return True
|
||||
|
||||
if not stat.S_ISDIR(st.st_mode):
|
||||
raise Pebkac(404)
|
||||
|
||||
html = self.j2(tpl, **j2a)
|
||||
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
|
||||
return True
|
||||
|
||||
for k in ["zip", "tar"]:
|
||||
v = self.uparam.get(k)
|
||||
if v is not None:
|
||||
return self.tx_zip(k, v, vn, rem, [], self.args.ed)
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(
|
||||
rem, self.uname, not self.args.no_scandir, True
|
||||
)
|
||||
stats = {k: v for k, v in vfs_ls}
|
||||
vfs_ls = [x[0] for x in vfs_ls]
|
||||
vfs_ls.extend(vfs_virt.keys())
|
||||
@@ -1354,15 +1615,11 @@ class HttpCli(object):
|
||||
if rem == ".hist":
|
||||
hidden = ["up2k."]
|
||||
|
||||
is_ls = "ls" in self.uparam
|
||||
|
||||
icur = None
|
||||
if "e2t" in vn.flags:
|
||||
idx = self.conn.get_u2idx()
|
||||
icur = idx.get_cur(vn.realpath)
|
||||
|
||||
url_suf = self.urlq()
|
||||
|
||||
dirs = []
|
||||
files = []
|
||||
for fn in vfs_ls:
|
||||
@@ -1394,7 +1651,7 @@ class HttpCli(object):
|
||||
margin = '<a href="{}?zip">zip</a>'.format(quotep(href))
|
||||
elif fn in hist:
|
||||
margin = '<a href="{}.hist/{}">#{}</a>'.format(
|
||||
base, html_escape(hist[fn][2], quote=True), hist[fn][0]
|
||||
base, html_escape(hist[fn][2], quote=True, crlf=True), hist[fn][0]
|
||||
)
|
||||
else:
|
||||
margin = "-"
|
||||
@@ -1453,91 +1710,25 @@ class HttpCli(object):
|
||||
for f in dirs:
|
||||
f["tags"] = {}
|
||||
|
||||
srv_info = []
|
||||
|
||||
try:
|
||||
if not self.args.nih:
|
||||
srv_info.append(unicode(socket.gethostname()).split(".")[0])
|
||||
except:
|
||||
self.log("#wow #whoa")
|
||||
pass
|
||||
|
||||
try:
|
||||
# some fuses misbehave
|
||||
if not self.args.nid:
|
||||
if WINDOWS:
|
||||
bfree = ctypes.c_ulonglong(0)
|
||||
ctypes.windll.kernel32.GetDiskFreeSpaceExW(
|
||||
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
|
||||
)
|
||||
srv_info.append(humansize(bfree.value) + " free")
|
||||
else:
|
||||
sv = os.statvfs(abspath)
|
||||
free = humansize(sv.f_frsize * sv.f_bfree, True)
|
||||
total = humansize(sv.f_frsize * sv.f_blocks, True)
|
||||
|
||||
srv_info.append(free + " free")
|
||||
srv_info.append(total)
|
||||
except:
|
||||
pass
|
||||
|
||||
srv_info = "</span> /// <span>".join(srv_info)
|
||||
|
||||
perms = []
|
||||
if self.readable:
|
||||
perms.append("read")
|
||||
if self.writable:
|
||||
perms.append("write")
|
||||
|
||||
logues = ["", ""]
|
||||
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
|
||||
fn = os.path.join(abspath, fn)
|
||||
if os.path.exists(fsenc(fn)):
|
||||
with open(fsenc(fn), "rb") as f:
|
||||
logues[n] = f.read().decode("utf-8")
|
||||
|
||||
if is_ls:
|
||||
[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
|
||||
ret = {
|
||||
"dirs": dirs,
|
||||
"files": files,
|
||||
"srvinf": srv_info,
|
||||
"perms": perms,
|
||||
"logues": logues,
|
||||
"taglist": taglist,
|
||||
}
|
||||
ret = json.dumps(ret)
|
||||
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
|
||||
ls_ret["dirs"] = dirs
|
||||
ls_ret["files"] = files
|
||||
ls_ret["taglist"] = taglist
|
||||
ret = json.dumps(ls_ret)
|
||||
self.reply(
|
||||
ret.encode("utf-8", "replace"),
|
||||
mime="application/json",
|
||||
headers=NO_STORE,
|
||||
)
|
||||
return True
|
||||
|
||||
ts = ""
|
||||
# ts = "?{}".format(time.time())
|
||||
j2a["files"] = dirs + files
|
||||
j2a["logues"] = logues
|
||||
j2a["taglist"] = taglist
|
||||
if "mte" in vn.flags:
|
||||
j2a["tag_order"] = json.dumps(vn.flags["mte"].split(","))
|
||||
|
||||
dirs.extend(files)
|
||||
|
||||
tpl = "browser"
|
||||
if "b" in self.uparam:
|
||||
tpl = "browser2"
|
||||
|
||||
html = self.j2(
|
||||
tpl,
|
||||
vdir=quotep(self.vpath),
|
||||
vpnodes=vpnodes,
|
||||
files=dirs,
|
||||
ts=ts,
|
||||
perms=json.dumps(perms),
|
||||
taglist=taglist,
|
||||
tag_order=json.dumps(
|
||||
vn.flags["mte"].split(",") if "mte" in vn.flags else []
|
||||
),
|
||||
have_up2k_idx=("e2d" in vn.flags),
|
||||
have_tags_idx=("e2t" in vn.flags),
|
||||
have_zip=(not self.args.no_zip),
|
||||
have_b_u=(self.writable and self.uparam.get("b") == "u"),
|
||||
url_suf=url_suf,
|
||||
logues=logues,
|
||||
title=html_escape(self.vpath),
|
||||
srv_info=srv_info,
|
||||
)
|
||||
self.reply(html.encode("utf-8", "replace"))
|
||||
html = self.j2(tpl, **j2a)
|
||||
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
|
||||
return True
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
@@ -16,6 +17,9 @@ from .__init__ import E
|
||||
from .util import Unrecv
|
||||
from .httpcli import HttpCli
|
||||
from .u2idx import U2idx
|
||||
from .th_cli import ThumbCli
|
||||
from .th_srv import HAVE_PIL
|
||||
from .ico import Ico
|
||||
|
||||
|
||||
class HttpConn(object):
|
||||
@@ -33,11 +37,16 @@ class HttpConn(object):
|
||||
self.auth = hsrv.auth
|
||||
self.cert_path = hsrv.cert_path
|
||||
|
||||
enth = HAVE_PIL and not self.args.no_thumb
|
||||
self.thumbcli = ThumbCli(hsrv.broker) if enth else None
|
||||
self.ico = Ico(self.args)
|
||||
|
||||
self.t0 = time.time()
|
||||
self.nbyte = 0
|
||||
self.workload = 0
|
||||
self.u2idx = None
|
||||
self.log_func = hsrv.log
|
||||
self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None
|
||||
self.set_rproxy()
|
||||
|
||||
def set_rproxy(self, ip=None):
|
||||
|
||||
copyparty/ico.py (new file, 39 lines)
@@ -0,0 +1,39 @@
import hashlib
import colorsys

from .__init__ import PY2


class Ico(object):
def __init__(self, args):
self.args = args

def get(self, ext, as_thumb):
"""placeholder to make thumbnails not break"""

h = hashlib.md5(ext.encode("utf-8")).digest()[:2]
if PY2:
h = [ord(x) for x in h]

c1 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 0.3)
c2 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 1)
c = list(c1) + list(c2)
c = [int(x * 255) for x in c]
c = "".join(["{:02x}".format(x) for x in c])

h = 30
if not self.args.th_no_crop and as_thumb:
w, h = self.args.th_size.split("x")
h = int(100 / (float(w) / float(h)))

svg = """\
<?xml version="1.0" encoding="UTF-8"?>
<svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g>
<rect width="100%" height="100%" fill="#{}" />
<text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" xml:space="preserve"
fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
</g></svg>
"""
svg = svg.format(h, c[:6], c[6:], ext).encode("utf-8")

return ["image/svg+xml", svg]
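To illustrate the color derivation in Ico.get above — the first byte of an md5 of the file extension picks a hue, reused at two brightness levels — a minimal standalone sketch; the extension string is just an example:

import hashlib, colorsys

h = hashlib.md5(b"flac").digest()[:2]
hue = h[0] / 256.0                         # first hash byte -> hue
bg = colorsys.hsv_to_rgb(hue, 1, 0.3)      # dark fill for the rectangle
fg = colorsys.hsv_to_rgb(hue, 1, 1)        # bright text in the same hue
hexpair = "".join("{:02x}".format(int(x * 255)) for x in list(bg) + list(fg))
# hexpair[:6] = background color, hexpair[6:] = text color, as used in the SVG template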
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
|
||||
@@ -14,6 +15,204 @@ if not PY2:
|
||||
unicode = str
|
||||
|
||||
|
||||
def have_ff(cmd):
|
||||
if PY2:
|
||||
cmd = (cmd + " -version").encode("ascii").split(b" ")
|
||||
try:
|
||||
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE).communicate()
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
else:
|
||||
return bool(shutil.which(cmd))
|
||||
|
||||
|
||||
HAVE_FFMPEG = have_ff("ffmpeg")
|
||||
HAVE_FFPROBE = have_ff("ffprobe")
|
||||
|
||||
|
||||
class MParser(object):
|
||||
def __init__(self, cmdline):
|
||||
self.tag, args = cmdline.split("=", 1)
|
||||
self.tags = self.tag.split(",")
|
||||
|
||||
self.timeout = 30
|
||||
self.force = False
|
||||
self.audio = "y"
|
||||
self.ext = []
|
||||
|
||||
while True:
|
||||
try:
|
||||
bp = os.path.expanduser(args)
|
||||
if os.path.exists(bp):
|
||||
self.bin = bp
|
||||
return
|
||||
except:
|
||||
pass
|
||||
|
||||
arg, args = args.split(",", 1)
|
||||
arg = arg.lower()
|
||||
|
||||
if arg.startswith("a"):
|
||||
self.audio = arg[1:] # [r]equire [n]ot [d]ontcare
|
||||
continue
|
||||
|
||||
if arg == "f":
|
||||
self.force = True
|
||||
continue
|
||||
|
||||
if arg.startswith("t"):
|
||||
self.timeout = int(arg[1:])
|
||||
continue
|
||||
|
||||
if arg.startswith("e"):
|
||||
self.ext.append(arg[1:])
|
||||
continue
|
||||
|
||||
raise Exception()
|
||||
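Reading the MParser loop above, the value after "=" is consumed as single-letter options followed by a path to the parser binary (the first remaining segment that exists on disk wins). A hypothetical example, names invented for illustration:

#   -mtp key,bpm=t30,an,f,~/bin/audio-tags.py
#     key,bpm             -> self.tags = ["key", "bpm"]
#     t30                 -> self.timeout = 30
#     an                  -> self.audio = "n"  ([r]equire / [n]ot / [d]ontcare)
#     f                   -> self.force = True
#     ~/bin/audio-tags.py -> self.bin, once os.path.exists() matches it after expanduser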
|
||||
|
||||
def ffprobe(abspath):
|
||||
cmd = [
|
||||
b"ffprobe",
|
||||
b"-hide_banner",
|
||||
b"-show_streams",
|
||||
b"-show_format",
|
||||
b"--",
|
||||
fsenc(abspath),
|
||||
]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
r = p.communicate()
|
||||
txt = r[0].decode("utf-8", "replace")
|
||||
return parse_ffprobe(txt)
|
||||
|
||||
|
||||
def parse_ffprobe(txt):
|
||||
"""ffprobe -show_format -show_streams"""
|
||||
streams = []
|
||||
fmt = {}
|
||||
g = None
|
||||
for ln in [x.rstrip("\r") for x in txt.split("\n")]:
|
||||
try:
|
||||
k, v = ln.split("=", 1)
|
||||
g[k] = v
|
||||
continue
|
||||
except:
|
||||
pass
|
||||
|
||||
if ln == "[STREAM]":
|
||||
g = {}
|
||||
streams.append(g)
|
||||
|
||||
if ln == "[FORMAT]":
|
||||
g = {"codec_type": "format"} # heh
|
||||
fmt = g
|
||||
|
||||
streams = [fmt] + streams
|
||||
ret = {} # processed
|
||||
md = {} # raw tags
|
||||
|
||||
have = {}
|
||||
for strm in streams:
|
||||
typ = strm.get("codec_type")
|
||||
if typ in have:
|
||||
continue
|
||||
|
||||
have[typ] = True
|
||||
kvm = []
|
||||
|
||||
if typ == "audio":
|
||||
kvm = [
|
||||
["codec_name", "ac"],
|
||||
["channel_layout", "chs"],
|
||||
["sample_rate", ".hz"],
|
||||
["bit_rate", ".aq"],
|
||||
["duration", ".dur"],
|
||||
]
|
||||
|
||||
if typ == "video":
|
||||
if strm.get("DISPOSITION:attached_pic") == "1" or fmt.get(
|
||||
"format_name"
|
||||
) in ["mp3", "ogg", "flac"]:
|
||||
continue
|
||||
|
||||
kvm = [
|
||||
["codec_name", "vc"],
|
||||
["pix_fmt", "pixfmt"],
|
||||
["r_frame_rate", ".fps"],
|
||||
["bit_rate", ".vq"],
|
||||
["width", ".resw"],
|
||||
["height", ".resh"],
|
||||
["duration", ".dur"],
|
||||
]
|
||||
|
||||
if typ == "format":
|
||||
kvm = [["duration", ".dur"], ["bit_rate", ".q"]]
|
||||
|
||||
for sk, rk in kvm:
|
||||
v = strm.get(sk)
|
||||
if v is None:
|
||||
continue
|
||||
|
||||
if rk.startswith("."):
|
||||
try:
|
||||
v = float(v)
|
||||
v2 = ret.get(rk)
|
||||
if v2 is None or v > v2:
|
||||
ret[rk] = v
|
||||
except:
|
||||
# sqlite doesnt care but the code below does
|
||||
if v not in ["N/A"]:
|
||||
ret[rk] = v
|
||||
else:
|
||||
ret[rk] = v
|
||||
|
||||
if ret.get("vc") == "ansi": # shellscript
|
||||
return {}, {}
|
||||
|
||||
for strm in streams:
|
||||
for k, v in strm.items():
|
||||
if not k.startswith("TAG:"):
|
||||
continue
|
||||
|
||||
k = k[4:].strip()
|
||||
v = v.strip()
|
||||
if k and v:
|
||||
md[k] = [v]
|
||||
|
||||
for k in [".q", ".vq", ".aq"]:
|
||||
if k in ret:
|
||||
ret[k] /= 1000 # bit_rate=320000
|
||||
|
||||
for k in [".q", ".vq", ".aq", ".resw", ".resh"]:
|
||||
if k in ret:
|
||||
ret[k] = int(ret[k])
|
||||
|
||||
if ".fps" in ret:
|
||||
fps = ret[".fps"]
|
||||
if "/" in fps:
|
||||
fa, fb = fps.split("/")
|
||||
fps = int(fa) * 1.0 / int(fb)
|
||||
|
||||
if fps < 1000 and fmt.get("format_name") not in ["image2", "png_pipe"]:
|
||||
ret[".fps"] = round(fps, 3)
|
||||
else:
|
||||
del ret[".fps"]
|
||||
|
||||
if ".dur" in ret:
|
||||
if ret[".dur"] < 0.1:
|
||||
del ret[".dur"]
|
||||
if ".q" in ret:
|
||||
del ret[".q"]
|
||||
|
||||
if ".resw" in ret and ".resh" in ret:
|
||||
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])
|
||||
|
||||
ret = {k: [0, v] for k, v in ret.items()}
|
||||
|
||||
return ret, md
|
||||
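For orientation, a rough sketch of how a caller might consume the pair returned by ffprobe()/parse_ffprobe() above; the path is hypothetical and the available keys depend on the media file:

ret, md = ffprobe("/tmp/example.flac")     # hypothetical path
dur = ret.get(".dur", [0, 0])[1]           # processed values are wrapped as [0, value] pairs
codec = ret.get("ac", [0, ""])[1]          # audio codec name, if an audio stream was found
artist = md.get("artist", ["?"])[0]        # raw TAG: values are single-item lists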
|
||||
|
||||
class MTag(object):
|
||||
def __init__(self, log_func, args):
|
||||
self.log_func = log_func
|
||||
@@ -35,15 +234,7 @@ class MTag(object):
|
||||
self.get = self.get_ffprobe
|
||||
self.prefer_mt = True
|
||||
# about 20x slower
|
||||
if PY2:
|
||||
cmd = [b"ffprobe", b"-version"]
|
||||
try:
|
||||
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
except:
|
||||
self.usable = False
|
||||
else:
|
||||
if not shutil.which("ffprobe"):
|
||||
self.usable = False
|
||||
self.usable = HAVE_FFPROBE
|
||||
|
||||
if self.usable and WINDOWS and sys.version_info < (3, 8):
|
||||
self.usable = False
|
||||
@@ -52,8 +243,10 @@ class MTag(object):
|
||||
self.log(msg, c=1)
|
||||
|
||||
if not self.usable:
|
||||
msg = "need mutagen{} to read media tags so please run this:\n {} -m pip install --user mutagen"
|
||||
self.log(msg.format(or_ffprobe, os.path.basename(sys.executable)), c=1)
|
||||
msg = "need mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
|
||||
self.log(
|
||||
msg.format(or_ffprobe, " " * 37, os.path.basename(sys.executable)), c=1
|
||||
)
|
||||
return
|
||||
|
||||
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||
@@ -201,7 +394,7 @@ class MTag(object):
|
||||
import mutagen
|
||||
|
||||
try:
|
||||
md = mutagen.File(abspath, easy=True)
|
||||
md = mutagen.File(fsenc(abspath), easy=True)
|
||||
x = md.info.length
|
||||
except Exception as ex:
|
||||
return {}
|
||||
@@ -212,7 +405,7 @@ class MTag(object):
|
||||
try:
|
||||
q = int(md.info.bitrate / 1024)
|
||||
except:
|
||||
q = int((os.path.getsize(abspath) / dur) / 128)
|
||||
q = int((os.path.getsize(fsenc(abspath)) / dur) / 128)
|
||||
|
||||
ret[".dur"] = [0, dur]
|
||||
ret[".q"] = [0, q]
|
||||
@@ -222,101 +415,7 @@ class MTag(object):
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_ffprobe(self, abspath):
|
||||
cmd = [b"ffprobe", b"-hide_banner", b"--", fsenc(abspath)]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
r = p.communicate()
|
||||
txt = r[1].decode("utf-8", "replace")
|
||||
txt = [x.rstrip("\r") for x in txt.split("\n")]
|
||||
|
||||
"""
|
||||
note:
|
||||
tags which contain newline will be truncated on first \n,
|
||||
ffprobe emits \n and spacepads the : to align visually
|
||||
note:
|
||||
the Stream ln always mentions Audio: if audio
|
||||
the Stream ln usually has kb/s, is more accurate
|
||||
the Duration ln always has kb/s
|
||||
the Metadata: after Chapter may contain BPM info,
|
||||
title : Tempo: 126.0
|
||||
|
||||
Input #0, wav,
|
||||
Metadata:
|
||||
date : <OK>
|
||||
Duration:
|
||||
Chapter #
|
||||
Metadata:
|
||||
title : <NG>
|
||||
|
||||
Input #0, mp3,
|
||||
Metadata:
|
||||
album : <OK>
|
||||
Duration:
|
||||
Stream #0:0: Audio:
|
||||
Stream #0:1: Video:
|
||||
Metadata:
|
||||
comment : <NG>
|
||||
"""
|
||||
|
||||
ptn_md_beg = re.compile("^( +)Metadata:$")
|
||||
ptn_md_kv = re.compile("^( +)([^:]+) *: (.*)")
|
||||
ptn_dur = re.compile("^ *Duration: ([^ ]+)(, |$)")
|
||||
ptn_br1 = re.compile("^ *Duration: .*, bitrate: ([0-9]+) kb/s(, |$)")
|
||||
ptn_br2 = re.compile("^ *Stream.*: Audio:.* ([0-9]+) kb/s(, |$)")
|
||||
ptn_audio = re.compile("^ *Stream .*: Audio: ")
|
||||
ptn_au_parent = re.compile("^ *(Input #|Stream .*: Audio: )")
|
||||
|
||||
ret = {}
|
||||
md = {}
|
||||
in_md = False
|
||||
is_audio = False
|
||||
au_parent = False
|
||||
for ln in txt:
|
||||
m = ptn_md_kv.match(ln)
|
||||
if m and in_md and len(m.group(1)) == in_md:
|
||||
_, k, v = [x.strip() for x in m.groups()]
|
||||
if k != "" and v != "":
|
||||
md[k] = [v]
|
||||
continue
|
||||
else:
|
||||
in_md = False
|
||||
|
||||
m = ptn_md_beg.match(ln)
|
||||
if m and au_parent:
|
||||
in_md = len(m.group(1)) + 2
|
||||
continue
|
||||
|
||||
au_parent = bool(ptn_au_parent.search(ln))
|
||||
|
||||
if ptn_audio.search(ln):
|
||||
is_audio = True
|
||||
|
||||
m = ptn_dur.search(ln)
|
||||
if m:
|
||||
sec = 0
|
||||
tstr = m.group(1)
|
||||
if tstr.lower() != "n/a":
|
||||
try:
|
||||
tf = tstr.split(",")[0].split(".")[0].split(":")
|
||||
for f in tf:
|
||||
sec *= 60
|
||||
sec += int(f)
|
||||
except:
|
||||
self.log("invalid timestr from ffprobe: [{}]".format(tstr), c=3)
|
||||
|
||||
ret[".dur"] = sec
|
||||
m = ptn_br1.search(ln)
|
||||
if m:
|
||||
ret[".q"] = m.group(1)
|
||||
|
||||
m = ptn_br2.search(ln)
|
||||
if m:
|
||||
ret[".q"] = m.group(1)
|
||||
|
||||
if not is_audio:
|
||||
return {}
|
||||
|
||||
ret = {k: [0, v] for k, v in ret.items()}
|
||||
|
||||
ret, md = ffprobe(abspath)
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_bin(self, parsers, abspath):
|
||||
@@ -327,10 +426,10 @@ class MTag(object):
|
||||
env["PYTHONPATH"] = pypath
|
||||
|
||||
ret = {}
|
||||
for tagname, (binpath, timeout) in parsers.items():
|
||||
for tagname, mp in parsers.items():
|
||||
try:
|
||||
cmd = [sys.executable, binpath, abspath]
|
||||
args = {"env": env, "timeout": timeout}
|
||||
cmd = [sys.executable, mp.bin, abspath]
|
||||
args = {"env": env, "timeout": mp.timeout}
|
||||
|
||||
if WINDOWS:
|
||||
args["creationflags"] = 0x4000
|
||||
@@ -339,8 +438,16 @@ class MTag(object):
|
||||
|
||||
cmd = [fsenc(x) for x in cmd]
|
||||
v = sp.check_output(cmd, **args).strip()
|
||||
if v:
|
||||
if not v:
|
||||
continue
|
||||
|
||||
if "," not in tagname:
|
||||
ret[tagname] = v.decode("utf-8")
|
||||
else:
|
||||
v = json.loads(v)
|
||||
for tag in tagname.split(","):
|
||||
if tag and tag in v:
|
||||
ret[tag] = v[tag]
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import threading
|
||||
@@ -9,9 +10,11 @@ from datetime import datetime, timedelta
|
||||
import calendar
|
||||
|
||||
from .__init__ import PY2, WINDOWS, MACOS, VT100
|
||||
from .util import mp
|
||||
from .authsrv import AuthSrv
|
||||
from .tcpsrv import TcpSrv
|
||||
from .up2k import Up2k
|
||||
from .util import mp
|
||||
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP
|
||||
|
||||
|
||||
class SvcHub(object):
|
||||
@@ -34,9 +37,29 @@ class SvcHub(object):
|
||||
|
||||
self.log = self._log_disabled if args.q else self._log_enabled
|
||||
|
||||
# jank goes here
|
||||
auth = AuthSrv(self.args, self.log, False)
|
||||
if args.ls:
|
||||
auth.dbg_ls()
|
||||
|
||||
# initiate all services to manage
|
||||
self.tcpsrv = TcpSrv(self)
|
||||
self.up2k = Up2k(self)
|
||||
self.up2k = Up2k(self, auth.vfs.all_vols)
|
||||
|
||||
self.thumbsrv = None
|
||||
if not args.no_thumb:
|
||||
if HAVE_PIL:
|
||||
if not HAVE_WEBP:
|
||||
args.th_no_webp = True
|
||||
msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old"
|
||||
self.log("thumb", msg, c=3)
|
||||
|
||||
self.thumbsrv = ThumbSrv(self, auth.vfs.all_vols)
|
||||
else:
|
||||
msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n"
|
||||
self.log(
|
||||
"thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3
|
||||
)
|
||||
|
||||
# decide which worker impl to use
|
||||
if self.check_mp_enable():
|
||||
@@ -63,6 +86,17 @@ class SvcHub(object):
|
||||
|
||||
self.tcpsrv.shutdown()
|
||||
self.broker.shutdown()
|
||||
if self.thumbsrv:
|
||||
self.thumbsrv.shutdown()
|
||||
|
||||
for n in range(200): # 10s
|
||||
time.sleep(0.05)
|
||||
if self.thumbsrv.stopped():
|
||||
break
|
||||
|
||||
if n == 3:
|
||||
print("waiting for thumbsrv...")
|
||||
|
||||
print("nailed it")
|
||||
|
||||
def _log_disabled(self, src, msg, c=0):
|
||||
|
||||
@@ -87,7 +87,7 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
|
||||
ret += struct.pack("<LL", vsz, vsz)
|
||||
|
||||
# windows support (the "?" replace below too)
|
||||
fn = sanitize_fn(fn, "/")
|
||||
fn = sanitize_fn(fn, ok="/")
|
||||
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
|
||||
|
||||
z64_len = len(z64v) * 8 + 4 if z64v else 0
|
||||
|
||||
copyparty/th_cli.py (new file, 49 lines)
@@ -0,0 +1,49 @@
import os
import time

from .util import Cooldown
from .th_srv import thumb_path, THUMBABLE, FMT_FF


class ThumbCli(object):
def __init__(self, broker):
self.broker = broker
self.args = broker.args

# cache on both sides for less broker spam
self.cooldown = Cooldown(self.args.th_poke)

def get(self, ptop, rem, mtime, fmt):
ext = rem.rsplit(".")[-1].lower()
if ext not in THUMBABLE:
return None

if self.args.no_vthumb and ext in FMT_FF:
return None

if fmt == "j" and self.args.th_no_jpg:
fmt = "w"

if fmt == "w" and self.args.th_no_webp:
fmt = "j"

tpath = thumb_path(ptop, rem, mtime, fmt)
ret = None
try:
st = os.stat(tpath)
if st.st_size:
ret = tpath
else:
return None
except:
pass

if ret:
tdir = os.path.dirname(tpath)
if self.cooldown.poke(tdir):
self.broker.put(False, "thumbsrv.poke", tdir)

return ret

x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt)
return x.get()
copyparty/th_srv.py (new file, 375 lines)
@@ -0,0 +1,375 @@
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import shutil
|
||||
import base64
|
||||
import hashlib
|
||||
import threading
|
||||
import subprocess as sp
|
||||
|
||||
from .__init__ import PY2
|
||||
from .util import fsenc, mchkcmd, Queue, Cooldown, BytesIO
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
||||
|
||||
|
||||
if not PY2:
|
||||
unicode = str
|
||||
|
||||
|
||||
HAVE_PIL = False
|
||||
HAVE_HEIF = False
|
||||
HAVE_AVIF = False
|
||||
HAVE_WEBP = False
|
||||
|
||||
try:
|
||||
from PIL import Image, ImageOps
|
||||
|
||||
HAVE_PIL = True
|
||||
try:
|
||||
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
|
||||
HAVE_WEBP = True
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
from pyheif_pillow_opener import register_heif_opener
|
||||
|
||||
register_heif_opener()
|
||||
HAVE_HEIF = True
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
import pillow_avif
|
||||
|
||||
HAVE_AVIF = True
|
||||
except:
|
||||
pass
|
||||
except:
|
||||
pass
|
||||
|
||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||
# ffmpeg -formats
|
||||
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
|
||||
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
|
||||
|
||||
if HAVE_HEIF:
|
||||
FMT_PIL += " heif heifs heic heics"
|
||||
|
||||
if HAVE_AVIF:
|
||||
FMT_PIL += " avif avifs"
|
||||
|
||||
FMT_PIL, FMT_FF = [{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FF]]
|
||||
|
||||
|
||||
THUMBABLE = {}
|
||||
|
||||
if HAVE_PIL:
|
||||
THUMBABLE.update(FMT_PIL)
|
||||
|
||||
if HAVE_FFMPEG and HAVE_FFPROBE:
|
||||
THUMBABLE.update(FMT_FF)
|
||||
|
||||
|
||||
def thumb_path(ptop, rem, mtime, fmt):
|
||||
# base16 = 16 = 256
|
||||
# b64-lc = 38 = 1444
|
||||
# base64 = 64 = 4096
|
||||
try:
|
||||
rd, fn = rem.rsplit("/", 1)
|
||||
except:
|
||||
rd = ""
|
||||
fn = rem
|
||||
|
||||
if rd:
|
||||
h = hashlib.sha512(fsenc(rd)).digest()[:24]
|
||||
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
|
||||
else:
|
||||
rd = "top"
|
||||
|
||||
# could keep original filenames but this is safer re pathlen
|
||||
h = hashlib.sha512(fsenc(fn)).digest()[:24]
|
||||
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||
|
||||
return "{}/.hist/th/{}/{}.{:x}.{}".format(
|
||||
ptop, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
|
||||
)
|
||||
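As a rough illustration of the cache layout produced by thumb_path above — placeholder hashes; RD and FN stand for the 24-character urlsafe-base64 digests of the directory and filename, and xx/yy are the first four characters of RD, lowercased:

#   thumb_path("/srv/media", "album/cover.png", 1600000000, "w")
#     -> "/srv/media/.hist/th/xx/yy/RD/FN.5f5e1000.webp"
#        (0x5f5e1000 is 1600000000, the source file's mtime, in hex)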
|
||||
|
||||
class ThumbSrv(object):
|
||||
def __init__(self, hub, vols):
|
||||
self.hub = hub
|
||||
self.vols = [v.realpath for v in vols.values()]
|
||||
|
||||
self.args = hub.args
|
||||
self.log_func = hub.log
|
||||
|
||||
res = hub.args.th_size.split("x")
|
||||
self.res = tuple([int(x) for x in res])
|
||||
self.poke_cd = Cooldown(self.args.th_poke)
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.busy = {}
|
||||
self.stopping = False
|
||||
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
self.q = Queue(self.nthr * 4)
|
||||
for _ in range(self.nthr):
|
||||
t = threading.Thread(target=self.worker)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE):
|
||||
missing = []
|
||||
if not HAVE_FFMPEG:
|
||||
missing.append("ffmpeg")
|
||||
|
||||
if not HAVE_FFPROBE:
|
||||
missing.append("ffprobe")
|
||||
|
||||
msg = "cannot create video thumbnails because some of the required programs are not available: "
|
||||
msg += ", ".join(missing)
|
||||
self.log(msg, c=3)
|
||||
|
||||
t = threading.Thread(target=self.cleaner)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
def log(self, msg, c=0):
|
||||
self.log_func("thumb", msg, c)
|
||||
|
||||
def shutdown(self):
|
||||
self.stopping = True
|
||||
for _ in range(self.nthr):
|
||||
self.q.put(None)
|
||||
|
||||
def stopped(self):
|
||||
with self.mutex:
|
||||
return not self.nthr
|
||||
|
||||
def get(self, ptop, rem, mtime, fmt):
|
||||
tpath = thumb_path(ptop, rem, mtime, fmt)
|
||||
abspath = os.path.join(ptop, rem)
|
||||
cond = threading.Condition()
|
||||
with self.mutex:
|
||||
try:
|
||||
self.busy[tpath].append(cond)
|
||||
self.log("wait {}".format(tpath))
|
||||
except:
|
||||
thdir = os.path.dirname(tpath)
|
||||
try:
|
||||
os.makedirs(thdir)
|
||||
except:
|
||||
pass
|
||||
|
||||
inf_path = os.path.join(thdir, "dir.txt")
|
||||
if not os.path.exists(inf_path):
|
||||
with open(inf_path, "wb") as f:
|
||||
f.write(fsenc(os.path.dirname(abspath)))
|
||||
|
||||
self.busy[tpath] = [cond]
|
||||
self.q.put([abspath, tpath])
|
||||
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
|
||||
|
||||
while not self.stopping:
|
||||
with self.mutex:
|
||||
if tpath not in self.busy:
|
||||
break
|
||||
|
||||
with cond:
|
||||
cond.wait()
|
||||
|
||||
try:
|
||||
st = os.stat(tpath)
|
||||
if st.st_size:
|
||||
return tpath
|
||||
except:
|
||||
                pass

        return None

    def worker(self):
        while not self.stopping:
            task = self.q.get()
            if not task:
                break

            abspath, tpath = task
            ext = abspath.split(".")[-1].lower()
            fun = None
            if not os.path.exists(tpath):
                if ext in FMT_PIL:
                    fun = self.conv_pil
                elif ext in FMT_FF:
                    fun = self.conv_ffmpeg

            if fun:
                try:
                    fun(abspath, tpath)
                except Exception as ex:
                    msg = "{} failed on {}\n {!r}"
                    self.log(msg.format(fun.__name__, abspath, ex), 3)
                    with open(tpath, "wb") as _:
                        pass

            with self.mutex:
                subs = self.busy[tpath]
                del self.busy[tpath]

            for x in subs:
                with x:
                    x.notify_all()

        with self.mutex:
            self.nthr -= 1

    def conv_pil(self, abspath, tpath):
        with Image.open(fsenc(abspath)) as im:
            crop = not self.args.th_no_crop
            res2 = self.res
            if crop:
                res2 = (res2[0] * 2, res2[1] * 2)

            try:
                im.thumbnail(res2, resample=Image.LANCZOS)
                if crop:
                    iw, ih = im.size
                    dw, dh = self.res
                    res = (min(iw, dw), min(ih, dh))
                    im = ImageOps.fit(im, res, method=Image.LANCZOS)
            except:
                im.thumbnail(self.res)

            if im.mode not in ("RGB", "L"):
                im = im.convert("RGB")

            if tpath.endswith(".webp"):
                # quality 80 = pillow-default
                # quality 75 = ffmpeg-default
                # method 0 = pillow-default, fast
                # method 4 = ffmpeg-default
                # method 6 = max, slow
                im.save(tpath, quality=40, method=6)
            else:
                im.save(tpath, quality=40)  # default=75

    def conv_ffmpeg(self, abspath, tpath):
        ret, _ = ffprobe(abspath)

        dur = ret[".dur"][1] if ".dur" in ret else 4
        seek = "{:.0f}".format(dur / 3)

        scale = "scale={0}:{1}:force_original_aspect_ratio="
        if self.args.th_no_crop:
            scale += "decrease,setsar=1:1"
        else:
            scale += "increase,crop={0}:{1},setsar=1:1"

        scale = scale.format(*list(self.res)).encode("utf-8")
        cmd = [
            b"ffmpeg",
            b"-nostdin",
            b"-hide_banner",
            b"-ss",
            seek,
            b"-i",
            fsenc(abspath),
            b"-vf",
            scale,
            b"-vframes",
            b"1",
        ]

        if tpath.endswith(".jpg"):
            cmd += [
                b"-q:v",
                b"6",  # default=??
            ]
        else:
            cmd += [
                b"-q:v",
                b"50",  # default=75
                b"-compression_level:v",
                b"6",  # default=4, 0=fast, 6=max
            ]

        cmd += [fsenc(tpath)]

        mchkcmd(cmd)

    def poke(self, tdir):
        if not self.poke_cd.poke(tdir):
            return

        ts = int(time.time())
        try:
            p1 = os.path.dirname(tdir)
            p2 = os.path.dirname(p1)
            for dp in [tdir, p1, p2]:
                os.utime(fsenc(dp), (ts, ts))
        except:
            pass

    def cleaner(self):
        interval = self.args.th_clean
        while True:
            time.sleep(interval)
            for vol in self.vols:
                vol += "/.hist/th"
                self.log("\033[Jcln {}/\033[A".format(vol))
                self.clean(vol)

            self.log("\033[Jcln ok")

    def clean(self, vol):
        # self.log("cln {}".format(vol))
        maxage = self.args.th_maxage
        now = time.time()
        prev_b64 = None
        prev_fp = None
        try:
            ents = os.listdir(vol)
        except:
            return

        for f in sorted(ents):
            fp = os.path.join(vol, f)
            cmp = fp.lower().replace("\\", "/")

            # "top" or b64 prefix/full (a folder)
            if len(f) <= 3 or len(f) == 24:
                age = now - os.path.getmtime(fp)
                if age > maxage:
                    with self.mutex:
                        safe = True
                        for k in self.busy.keys():
                            if k.lower().replace("\\", "/").startswith(cmp):
                                safe = False
                                break

                        if safe:
                            self.log("rm -rf [{}]".format(fp))
                            shutil.rmtree(fp, ignore_errors=True)
                else:
                    self.clean(fp)
                continue

            # thumb file
            try:
                b64, ts, ext = f.split(".")
                if len(b64) != 24 or len(ts) != 8 or ext not in ["jpg", "webp"]:
                    raise Exception()

                ts = int(ts, 16)
            except:
                if f != "dir.txt":
                    self.log("foreign file in thumbs dir: [{}]".format(fp), 1)

                continue

            if b64 == prev_b64:
                self.log("rm replaced [{}]".format(fp))
                os.unlink(prev_fp)

            prev_b64 = b64
            prev_fp = fp
@@ -47,11 +47,11 @@ class U2idx(object):
        fhash = body["hash"]
        wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)

        uq = "substr(w,1,16) = ? and w = ?"
        uq = "where substr(w,1,16) = ? and w = ?"
        uv = [wark[:16], wark]

        try:
            return self.run_query(vols, uq, uv, {})[0]
            return self.run_query(vols, uq, uv)[0]
        except Exception as ex:
            raise Pebkac(500, repr(ex))

@@ -67,37 +67,121 @@ class U2idx(object):
        self.cur[ptop] = cur
        return cur

    def search(self, vols, body):
    def search(self, vols, uq):
        """search by query params"""
        if not HAVE_SQLITE3:
            return []

        qobj = {}
        _conv_sz(qobj, body, "sz_min", "up.sz >= ?")
        _conv_sz(qobj, body, "sz_max", "up.sz <= ?")
        _conv_dt(qobj, body, "dt_min", "up.mt >= ?")
        _conv_dt(qobj, body, "dt_max", "up.mt <= ?")
        for seg, dk in [["path", "up.rd"], ["name", "up.fn"]]:
            if seg in body:
                _conv_txt(qobj, body, seg, dk)
        q = ""
        va = []
        joins = ""
        is_key = True
        is_size = False
        is_date = False
        kw_key = ["(", ")", "and ", "or ", "not "]
        kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "]
        ptn_mt = re.compile(r"^\.?[a-z]+$")
        mt_ctr = 0
        mt_keycmp = "substr(up.w,1,16)"
        mt_keycmp2 = None

        uq, uv = _sqlize(qobj)
        while True:
            uq = uq.strip()
            if not uq:
                break

        qobj = {}
        if "tags" in body:
            _conv_txt(qobj, body, "tags", "mt.v")
            ok = False
            for kw in kw_key + kw_val:
                if uq.startswith(kw):
                    is_key = kw in kw_key
                    uq = uq[len(kw) :]
                    ok = True
                    q += kw
                    break

        if "adv" in body:
            _conv_adv(qobj, body, "adv")
            if ok:
                continue

            v, uq = (uq + " ").split(" ", 1)
            if is_key:
                is_key = False

                if v == "size":
                    v = "up.sz"
                    is_size = True

                elif v == "date":
                    v = "up.mt"
                    is_date = True

                elif v == "path":
                    v = "up.rd"

                elif v == "name":
                    v = "up.fn"

                elif v == "tags" or ptn_mt.match(v):
                    mt_ctr += 1
                    mt_keycmp2 = "mt{}.w".format(mt_ctr)
                    joins += "inner join mt mt{} on {} = {} ".format(
                        mt_ctr, mt_keycmp, mt_keycmp2
                    )
                    mt_keycmp = mt_keycmp2
                    if v == "tags":
                        v = "mt{0}.v".format(mt_ctr)
                    else:
                        v = "+mt{0}.k = '{1}' and mt{0}.v".format(mt_ctr, v)

                else:
                    raise Pebkac(400, "invalid key [" + v + "]")

                q += v + " "
                continue

            head = ""
            tail = ""

            if is_date:
                is_date = False
                v = v.upper().rstrip("Z").replace(",", " ").replace("T", " ")
                while "  " in v:
                    v = v.replace("  ", " ")

                for fmt in [
                    "%Y-%m-%d %H:%M:%S",
                    "%Y-%m-%d %H:%M",
                    "%Y-%m-%d %H",
                    "%Y-%m-%d",
                ]:
                    try:
                        v = datetime.strptime(v, fmt).timestamp()
                        break
                    except:
                        pass

            elif is_size:
                is_size = False
                v = int(float(v) * 1024 * 1024)

            else:
                if v.startswith("*"):
                    head = "'%'||"
                    v = v[1:]

                if v.endswith("*"):
                    tail = "||'%'"
                    v = v[:-1]

            q += " {}?{} ".format(head, tail)
            va.append(v)
            is_key = True

        try:
            return self.run_query(vols, uq, uv, qobj)
            return self.run_query(vols, joins + "where " + q, va)
        except Exception as ex:
            raise Pebkac(500, repr(ex))

    def run_query(self, vols, uq, uv, targs):
        self.log("qs: {} {} , {}".format(uq, repr(uv), repr(targs)))

    def run_query(self, vols, uq, uv):
        done_flag = []
        self.active_id = "{:.6f}_{}".format(
            time.time(), threading.current_thread().ident
@@ -112,35 +196,14 @@ class U2idx(object):
        thr.daemon = True
        thr.start()

        if not targs:
            if not uq:
                q = "select * from up"
                v = ()
            else:
                q = "select * from up where " + uq
                v = tuple(uv)
        if not uq or not uv:
            q = "select * from up"
            v = ()
        else:
            q = "select up.* from up"
            keycmp = "substr(up.w,1,16)"
            where = []
            v = []
            ctr = 0
            for tq, tv in sorted(targs.items()):
                ctr += 1
                tq = tq.split("\n")[0]
                keycmp2 = "mt{}.w".format(ctr)
                q += " inner join mt mt{} on {} = {}".format(ctr, keycmp, keycmp2)
                keycmp = keycmp2
                where.append(tq.replace("mt.", keycmp[:-1]))
                v.append(tv)
            q = "select up.* from up " + uq
            v = tuple(uv)

            if uq:
                where.append(uq)
                v.extend(uv)

            q += " where " + (" and ".join(where))

        # self.log("q2: {} {}".format(q, repr(v)))
        self.log("qs: {!r} {!r}".format(q, v))

        ret = []
        lim = 1000
@@ -163,7 +226,7 @@ class U2idx(object):
            if rd.startswith("//") or fn.startswith("//"):
                rd, fn = s3dec(rd, fn)

            rp = os.path.join(vtop, rd, fn).replace("\\", "/")
            rp = "/".join([x for x in [vtop, rd, fn] if x])
            sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})

        for hit in sret:
@@ -204,78 +267,3 @@ def _open(ptop):
    db_path = os.path.join(ptop, ".hist", "up2k.db")
    if os.path.exists(db_path):
        return sqlite3.connect(db_path).cursor()


def _conv_sz(q, body, k, sql):
    if k in body:
        q[sql] = int(float(body[k]) * 1024 * 1024)


def _conv_dt(q, body, k, sql):
    if k not in body:
        return

    v = body[k].upper().rstrip("Z").replace(",", " ").replace("T", " ")
    while "  " in v:
        v = v.replace("  ", " ")

    for fmt in ["%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M", "%Y-%m-%d %H", "%Y-%m-%d"]:
        try:
            ts = datetime.strptime(v, fmt).timestamp()
            break
        except:
            ts = None

    if ts:
        q[sql] = ts


def _conv_txt(q, body, k, sql):
    for v in body[k].split(" "):
        inv = ""
        if v.startswith("-"):
            inv = "not"
            v = v[1:]

        if not v:
            continue

        head = "'%'||"
        if v.startswith("^"):
            head = ""
            v = v[1:]

        tail = "||'%'"
        if v.endswith("$"):
            tail = ""
            v = v[:-1]

        qk = "{} {} like {}?{}".format(sql, inv, head, tail)
        q[qk + "\n" + v] = u8safe(v)


def _conv_adv(q, body, k):
    ptn = re.compile(r"^(\.?[a-z]+) *(==?|!=|<=?|>=?) *(.*)$")

    parts = body[k].split(" ")
    parts = [x.strip() for x in parts if x.strip()]

    for part in parts:
        m = ptn.match(part)
        if not m:
            p = html_escape(part)
            raise Pebkac(400, "invalid argument [" + p + "]")

        k, op, v = m.groups()
        qk = "mt.k = '{}' and mt.v {} ?".format(k, op)
        q[qk + "\n" + v] = u8safe(v)


def _sqlize(qobj):
    keys = []
    values = []
    for k, v in sorted(qobj.items()):
        keys.append(k.split("\n")[0])
        values.append(v)

    return " and ".join(keys), values

@@ -16,7 +16,7 @@ import traceback
import subprocess as sp
from copy import deepcopy

from .__init__ import WINDOWS
from .__init__ import WINDOWS, ANYWIN
from .util import (
    Pebkac,
    Queue,
@@ -31,8 +31,7 @@ from .util import (
    statdir,
    s2hms,
)
from .mtag import MTag
from .authsrv import AuthSrv
from .mtag import MTag, MParser

try:
    HAVE_SQLITE3 = True
@@ -49,24 +48,26 @@ class Up2k(object):
|
||||
* ~/.config flatfiles for active jobs
|
||||
"""
|
||||
|
||||
def __init__(self, broker):
|
||||
self.broker = broker
|
||||
self.args = broker.args
|
||||
self.log_func = broker.log
|
||||
def __init__(self, hub, all_vols):
|
||||
self.hub = hub
|
||||
self.args = hub.args
|
||||
self.log_func = hub.log
|
||||
|
||||
# config
|
||||
self.salt = broker.args.salt
|
||||
self.salt = self.args.salt
|
||||
|
||||
# state
|
||||
self.mutex = threading.Lock()
|
||||
self.hashq = Queue()
|
||||
self.tagq = Queue()
|
||||
self.volstate = {}
|
||||
self.registry = {}
|
||||
self.entags = {}
|
||||
self.flags = {}
|
||||
self.cur = {}
|
||||
self.mtag = None
|
||||
self.pending_tags = None
|
||||
self.mtp_parsers = {}
|
||||
|
||||
self.mem_cur = None
|
||||
self.sqlite_ver = None
|
||||
@@ -79,7 +80,7 @@ class Up2k(object):
|
||||
if self.sqlite_ver < (3, 9):
|
||||
self.no_expr_idx = True
|
||||
|
||||
if WINDOWS:
|
||||
if ANYWIN:
|
||||
# usually fails to set lastmod too quickly
|
||||
self.lastmod_q = Queue()
|
||||
thr = threading.Thread(target=self._lastmodder)
|
||||
@@ -92,30 +93,50 @@ class Up2k(object):
|
||||
if not HAVE_SQLITE3:
|
||||
self.log("could not initialize sqlite3, will use in-memory registry only")
|
||||
|
||||
# this is kinda jank
|
||||
auth = AuthSrv(self.args, self.log_func, False)
|
||||
have_e2d = self.init_indexes(auth)
|
||||
if self.args.no_fastboot:
|
||||
self.deferred_init(all_vols)
|
||||
else:
|
||||
t = threading.Thread(target=self.deferred_init, args=(all_vols,))
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
def deferred_init(self, all_vols):
|
||||
have_e2d = self.init_indexes(all_vols)
|
||||
|
||||
if have_e2d:
|
||||
thr = threading.Thread(target=self._snapshot)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
thr = threading.Thread(target=self._tagger)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
thr = threading.Thread(target=self._hasher)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
thr = threading.Thread(target=self._run_all_mtp)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
if self.mtag:
|
||||
thr = threading.Thread(target=self._tagger)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
thr = threading.Thread(target=self._run_all_mtp)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def log(self, msg, c=0):
|
||||
self.log_func("up2k", msg + "\033[K", c)
|
||||
|
||||
def get_volstate(self):
|
||||
return json.dumps(self.volstate, indent=4)
|
||||
|
||||
def rescan(self, all_vols, scan_vols):
|
||||
if hasattr(self, "pp"):
|
||||
return "cannot initiate; scan is already in progress"
|
||||
|
||||
args = (all_vols, scan_vols)
|
||||
t = threading.Thread(target=self.init_indexes, args=args)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
return None
|
||||
|
||||
def _vis_job_progress(self, job):
|
||||
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
|
||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
@@ -138,9 +159,9 @@ class Up2k(object):
|
||||
|
||||
return True, ret
|
||||
|
||||
def init_indexes(self, auth):
|
||||
def init_indexes(self, all_vols, scan_vols=[]):
|
||||
self.pp = ProgressPrinter()
|
||||
vols = auth.vfs.all_vols.values()
|
||||
vols = all_vols.values()
|
||||
t0 = time.time()
|
||||
have_e2d = False
|
||||
|
||||
@@ -160,24 +181,35 @@ class Up2k(object):
|
||||
for vol in vols:
|
||||
try:
|
||||
os.listdir(vol.realpath)
|
||||
live_vols.append(vol)
|
||||
except:
|
||||
self.volstate[vol.vpath] = "OFFLINE (cannot access folder)"
|
||||
self.log("cannot access " + vol.realpath, c=1)
|
||||
continue
|
||||
|
||||
if not self.register_vpath(vol.realpath, vol.flags):
|
||||
# self.log("db not enabled for {}".format(m, vol.realpath))
|
||||
continue
|
||||
|
||||
if vol.vpath in scan_vols or not scan_vols:
|
||||
live_vols.append(vol)
|
||||
|
||||
if vol.vpath not in self.volstate:
|
||||
self.volstate[vol.vpath] = "OFFLINE (pending initialization)"
|
||||
|
||||
vols = live_vols
|
||||
need_vac = {}
|
||||
|
||||
need_mtag = False
|
||||
for vol in vols:
|
||||
if "e2t" in vol.flags:
|
||||
need_mtag = True
|
||||
|
||||
if need_mtag:
|
||||
if need_mtag and not self.mtag:
|
||||
self.mtag = MTag(self.log_func, self.args)
|
||||
if not self.mtag.usable:
|
||||
self.mtag = None
|
||||
|
||||
# e2ds(a) volumes first,
|
||||
# also covers tags where e2ts is set
|
||||
# e2ds(a) volumes first
|
||||
for vol in vols:
|
||||
en = {}
|
||||
if "mte" in vol.flags:
|
||||
@@ -189,26 +221,45 @@ class Up2k(object):
|
||||
have_e2d = True
|
||||
|
||||
if "e2ds" in vol.flags:
|
||||
r = self._build_file_index(vol, vols)
|
||||
if not r:
|
||||
needed_mutagen = True
|
||||
self.volstate[vol.vpath] = "busy (hashing files)"
|
||||
_, vac = self._build_file_index(vol, list(all_vols.values()))
|
||||
if vac:
|
||||
need_vac[vol] = True
|
||||
|
||||
if "e2ts" not in vol.flags:
|
||||
m = "online, idle"
|
||||
else:
|
||||
m = "online (tags pending)"
|
||||
|
||||
self.volstate[vol.vpath] = m
|
||||
|
||||
# open the rest + do any e2ts(a)
|
||||
needed_mutagen = False
|
||||
for vol in vols:
|
||||
r = self.register_vpath(vol.realpath, vol.flags)
|
||||
if not r or "e2ts" not in vol.flags:
|
||||
if "e2ts" not in vol.flags:
|
||||
continue
|
||||
|
||||
cur, db_path, sz0 = r
|
||||
n_add, n_rm, success = self._build_tags_index(vol.realpath)
|
||||
m = "online (reading tags)"
|
||||
self.volstate[vol.vpath] = m
|
||||
self.log("{} [{}]".format(m, vol.realpath))
|
||||
|
||||
nadd, nrm, success = self._build_tags_index(vol)
|
||||
if not success:
|
||||
needed_mutagen = True
|
||||
|
||||
if n_add or n_rm:
|
||||
self.vac(cur, db_path, n_add, n_rm, sz0)
|
||||
if nadd or nrm:
|
||||
need_vac[vol] = True
|
||||
|
||||
self.volstate[vol.vpath] = "online (mtp soon)"
|
||||
|
||||
for vol in need_vac:
|
||||
cur, _ = self.register_vpath(vol.realpath, vol.flags)
|
||||
with self.mutex:
|
||||
cur.connection.commit()
|
||||
cur.execute("vacuum")
|
||||
|
||||
self.pp.end = True
|
||||
|
||||
msg = "{} volumes in {:.2f} sec"
|
||||
self.log(msg.format(len(vols), time.time() - t0))
|
||||
|
||||
@@ -216,105 +267,104 @@ class Up2k(object):
|
||||
msg = "could not read tags because no backends are available (mutagen or ffprobe)"
|
||||
self.log(msg, c=1)
|
||||
|
||||
thr = None
|
||||
if self.mtag:
|
||||
m = "online (running mtp)"
|
||||
if scan_vols:
|
||||
thr = threading.Thread(target=self._run_all_mtp)
|
||||
thr.daemon = True
|
||||
else:
|
||||
del self.pp
|
||||
m = "online, idle"
|
||||
|
||||
for vol in vols:
|
||||
self.volstate[vol.vpath] = m
|
||||
|
||||
if thr:
|
||||
thr.start()
|
||||
|
||||
return have_e2d
|
||||
|
||||
def register_vpath(self, ptop, flags):
|
||||
with self.mutex:
|
||||
if ptop in self.registry:
|
||||
return None
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
if ptop in self.registry:
|
||||
return [self.cur[ptop], db_path]
|
||||
|
||||
_, flags = self._expr_idx_filter(flags)
|
||||
_, flags = self._expr_idx_filter(flags)
|
||||
|
||||
ft = "\033[0;32m{}{:.0}"
|
||||
ff = "\033[0;35m{}{:.0}"
|
||||
fv = "\033[0;36m{}:\033[1;30m{}"
|
||||
a = [
|
||||
(ft if v is True else ff if v is False else fv).format(k, str(v))
|
||||
for k, v in flags.items()
|
||||
]
|
||||
if a:
|
||||
self.log(" ".join(sorted(a)) + "\033[0m")
|
||||
ft = "\033[0;32m{}{:.0}"
|
||||
ff = "\033[0;35m{}{:.0}"
|
||||
fv = "\033[0;36m{}:\033[1;30m{}"
|
||||
a = [
|
||||
(ft if v is True else ff if v is False else fv).format(k, str(v))
|
||||
for k, v in flags.items()
|
||||
]
|
||||
if a:
|
||||
self.log(" ".join(sorted(a)) + "\033[0m")
|
||||
|
||||
reg = {}
|
||||
path = os.path.join(ptop, ".hist", "up2k.snap")
|
||||
if "e2d" in flags and os.path.exists(path):
|
||||
with gzip.GzipFile(path, "rb") as f:
|
||||
j = f.read().decode("utf-8")
|
||||
reg = {}
|
||||
path = os.path.join(ptop, ".hist", "up2k.snap")
|
||||
if "e2d" in flags and os.path.exists(path):
|
||||
with gzip.GzipFile(path, "rb") as f:
|
||||
j = f.read().decode("utf-8")
|
||||
|
||||
reg = json.loads(j)
|
||||
for _, job in reg.items():
|
||||
reg2 = json.loads(j)
|
||||
for k, job in reg2.items():
|
||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
if os.path.exists(fsenc(path)):
|
||||
reg[k] = job
|
||||
job["poke"] = time.time()
|
||||
else:
|
||||
self.log("ign deleted file in snap: [{}]".format(path))
|
||||
|
||||
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
|
||||
m = [m] + self._vis_reg_progress(reg)
|
||||
self.log("\n".join(m))
|
||||
|
||||
self.flags[ptop] = flags
|
||||
self.registry[ptop] = reg
|
||||
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
|
||||
return None
|
||||
|
||||
try:
|
||||
os.mkdir(os.path.join(ptop, ".hist"))
|
||||
except:
|
||||
pass
|
||||
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
if ptop in self.cur:
|
||||
return None
|
||||
|
||||
try:
|
||||
sz0 = 0
|
||||
if os.path.exists(db_path):
|
||||
sz0 = os.path.getsize(db_path) // 1024
|
||||
|
||||
cur = self._open_db(db_path)
|
||||
self.cur[ptop] = cur
|
||||
return [cur, db_path, sz0]
|
||||
except:
|
||||
msg = "cannot use database at [{}]:\n{}"
|
||||
self.log(msg.format(ptop, traceback.format_exc()))
|
||||
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
|
||||
m = [m] + self._vis_reg_progress(reg)
|
||||
self.log("\n".join(m))
|
||||
|
||||
self.flags[ptop] = flags
|
||||
self.registry[ptop] = reg
|
||||
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
|
||||
return None
|
||||
|
||||
try:
|
||||
os.mkdir(os.path.join(ptop, ".hist"))
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
cur = self._open_db(db_path)
|
||||
self.cur[ptop] = cur
|
||||
return [cur, db_path]
|
||||
except:
|
||||
msg = "cannot use database at [{}]:\n{}"
|
||||
self.log(msg.format(ptop, traceback.format_exc()))
|
||||
|
||||
return None
|
||||
|
||||
def _build_file_index(self, vol, all_vols):
|
||||
do_vac = False
|
||||
top = vol.realpath
|
||||
reg = self.register_vpath(top, vol.flags)
|
||||
if not reg:
|
||||
return
|
||||
with self.mutex:
|
||||
cur, _ = self.register_vpath(top, vol.flags)
|
||||
|
||||
_, db_path, sz0 = reg
|
||||
dbw = [reg[0], 0, time.time()]
|
||||
self.pp.n = next(dbw[0].execute("select count(w) from up"))[0]
|
||||
dbw = [cur, 0, time.time()]
|
||||
self.pp.n = next(dbw[0].execute("select count(w) from up"))[0]
|
||||
|
||||
excl = [
|
||||
vol.realpath + "/" + d.vpath[len(vol.vpath) :].lstrip("/")
|
||||
for d in all_vols
|
||||
if d != vol and (d.vpath.startswith(vol.vpath + "/") or not vol.vpath)
|
||||
]
|
||||
n_add = self._build_dir(dbw, top, set(excl), top)
|
||||
n_rm = self._drop_lost(dbw[0], top)
|
||||
if dbw[1]:
|
||||
self.log("commit {} new files".format(dbw[1]))
|
||||
dbw[0].connection.commit()
|
||||
excl = [
|
||||
vol.realpath + "/" + d.vpath[len(vol.vpath) :].lstrip("/")
|
||||
for d in all_vols
|
||||
if d != vol and (d.vpath.startswith(vol.vpath + "/") or not vol.vpath)
|
||||
]
|
||||
if WINDOWS:
|
||||
excl = [x.replace("/", "\\") for x in excl]
|
||||
|
||||
n_add, n_rm, success = self._build_tags_index(vol.realpath)
|
||||
n_add = self._build_dir(dbw, top, set(excl), top)
|
||||
n_rm = self._drop_lost(dbw[0], top)
|
||||
if dbw[1]:
|
||||
self.log("commit {} new files".format(dbw[1]))
|
||||
dbw[0].connection.commit()
|
||||
|
||||
dbw[0].connection.commit()
|
||||
if n_add or n_rm or do_vac:
|
||||
self.vac(dbw[0], db_path, n_add, n_rm, sz0)
|
||||
|
||||
return success
|
||||
|
||||
def vac(self, cur, db_path, n_add, n_rm, sz0):
|
||||
sz1 = os.path.getsize(db_path) // 1024
|
||||
cur.execute("vacuum")
|
||||
sz2 = os.path.getsize(db_path) // 1024
|
||||
msg = "{} new, {} del, {} kB vacced, {} kB gain, {} kB now".format(
|
||||
n_add, n_rm, sz1 - sz2, sz2 - sz0, sz2
|
||||
)
|
||||
self.log(msg)
|
||||
return True, n_add or n_rm or do_vac
|
||||
|
||||
def _build_dir(self, dbw, top, excl, cdir):
|
||||
self.pp.msg = "a{} {}".format(self.pp.n, cdir)
|
||||
@@ -409,45 +459,53 @@ class Up2k(object):
|
||||
|
||||
return len(rm)
|
||||
|
||||
def _build_tags_index(self, ptop):
|
||||
entags = self.entags[ptop]
|
||||
flags = self.flags[ptop]
|
||||
cur = self.cur[ptop]
|
||||
def _build_tags_index(self, vol):
|
||||
ptop = vol.realpath
|
||||
with self.mutex:
|
||||
_, db_path = self.register_vpath(ptop, vol.flags)
|
||||
entags = self.entags[ptop]
|
||||
flags = self.flags[ptop]
|
||||
cur = self.cur[ptop]
|
||||
|
||||
n_add = 0
|
||||
n_rm = 0
|
||||
n_buf = 0
|
||||
last_write = time.time()
|
||||
|
||||
if "e2tsr" in flags:
|
||||
n_rm = cur.execute("select count(w) from mt").fetchone()[0]
|
||||
if n_rm:
|
||||
self.log("discarding {} media tags for a full rescan".format(n_rm))
|
||||
cur.execute("delete from mt")
|
||||
else:
|
||||
self.log("volume has e2tsr but there are no media tags to discard")
|
||||
with self.mutex:
|
||||
n_rm = cur.execute("select count(w) from mt").fetchone()[0]
|
||||
if n_rm:
|
||||
self.log("discarding {} media tags for a full rescan".format(n_rm))
|
||||
cur.execute("delete from mt")
|
||||
|
||||
# integrity: drop tags for tracks that were deleted
|
||||
if "e2t" in flags:
|
||||
drops = []
|
||||
c2 = cur.connection.cursor()
|
||||
up_q = "select w from up where substr(w,1,16) = ?"
|
||||
for (w,) in cur.execute("select w from mt"):
|
||||
if not c2.execute(up_q, (w,)).fetchone():
|
||||
drops.append(w[:16])
|
||||
c2.close()
|
||||
with self.mutex:
|
||||
drops = []
|
||||
c2 = cur.connection.cursor()
|
||||
up_q = "select w from up where substr(w,1,16) = ?"
|
||||
for (w,) in cur.execute("select w from mt"):
|
||||
if not c2.execute(up_q, (w,)).fetchone():
|
||||
drops.append(w[:16])
|
||||
c2.close()
|
||||
|
||||
if drops:
|
||||
msg = "discarding media tags for {} deleted files"
|
||||
self.log(msg.format(len(drops)))
|
||||
n_rm += len(drops)
|
||||
for w in drops:
|
||||
cur.execute("delete from mt where w = ?", (w,))
|
||||
if drops:
|
||||
msg = "discarding media tags for {} deleted files"
|
||||
self.log(msg.format(len(drops)))
|
||||
n_rm += len(drops)
|
||||
for w in drops:
|
||||
cur.execute("delete from mt where w = ?", (w,))
|
||||
|
||||
# bail if a volume flag disables indexing
|
||||
if "d2t" in flags or "d2d" in flags:
|
||||
return n_add, n_rm, True
|
||||
|
||||
# add tags for new files
|
||||
gcur = cur
|
||||
with self.mutex:
|
||||
gcur.connection.commit()
|
||||
|
||||
if "e2ts" in flags:
|
||||
if not self.mtag:
|
||||
return n_add, n_rm, False
|
||||
@@ -456,8 +514,10 @@ class Up2k(object):
|
||||
if self.mtag.prefer_mt and not self.args.no_mtag_mt:
|
||||
mpool = self._start_mpool()
|
||||
|
||||
c2 = cur.connection.cursor()
|
||||
c3 = cur.connection.cursor()
|
||||
conn = sqlite3.connect(db_path, timeout=15)
|
||||
cur = conn.cursor()
|
||||
c2 = conn.cursor()
|
||||
c3 = conn.cursor()
|
||||
n_left = cur.execute("select count(w) from up").fetchone()[0]
|
||||
for w, rd, fn in cur.execute("select w, rd, fn from up"):
|
||||
n_left -= 1
|
||||
@@ -479,7 +539,8 @@ class Up2k(object):
|
||||
n_tags = self._tag_file(c3, *args)
|
||||
else:
|
||||
mpool.put(["mtag"] + args)
|
||||
n_tags = len(self._flush_mpool(c3))
|
||||
with self.mutex:
|
||||
n_tags = len(self._flush_mpool(c3))
|
||||
|
||||
n_add += n_tags
|
||||
n_buf += n_tags
|
||||
@@ -491,27 +552,33 @@ class Up2k(object):
|
||||
last_write = time.time()
|
||||
n_buf = 0
|
||||
|
||||
self._stop_mpool(mpool, c3)
|
||||
if mpool:
|
||||
self._stop_mpool(mpool)
|
||||
with self.mutex:
|
||||
n_add += len(self._flush_mpool(c3))
|
||||
|
||||
conn.commit()
|
||||
c3.close()
|
||||
c2.close()
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
with self.mutex:
|
||||
gcur.connection.commit()
|
||||
|
||||
return n_add, n_rm, True
|
||||
|
||||
def _flush_mpool(self, wcur):
|
||||
with self.mutex:
|
||||
ret = []
|
||||
for x in self.pending_tags:
|
||||
self._tag_file(wcur, *x)
|
||||
ret.append(x[1])
|
||||
ret = []
|
||||
for x in self.pending_tags:
|
||||
self._tag_file(wcur, *x)
|
||||
ret.append(x[1])
|
||||
|
||||
self.pending_tags = []
|
||||
return ret
|
||||
self.pending_tags = []
|
||||
return ret
|
||||
|
||||
def _run_all_mtp(self):
|
||||
t0 = time.time()
|
||||
self.mtp_force = {}
|
||||
self.mtp_parsers = {}
|
||||
for ptop, flags in self.flags.items():
|
||||
if "mtp" in flags:
|
||||
self._run_one_mtp(ptop)
|
||||
@@ -520,49 +587,26 @@ class Up2k(object):
|
||||
msg = "mtp finished in {:.2f} sec ({})"
|
||||
self.log(msg.format(td, s2hms(td, True)))
|
||||
|
||||
def _run_one_mtp(self, ptop):
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
sz0 = os.path.getsize(db_path) // 1024
|
||||
del self.pp
|
||||
for k in list(self.volstate.keys()):
|
||||
if "OFFLINE" not in self.volstate[k]:
|
||||
self.volstate[k] = "online, idle"
|
||||
|
||||
def _run_one_mtp(self, ptop):
|
||||
entags = self.entags[ptop]
|
||||
|
||||
force = {}
|
||||
timeout = {}
|
||||
parsers = {}
|
||||
for parser in self.flags[ptop]["mtp"]:
|
||||
orig = parser
|
||||
tag, parser = parser.split("=", 1)
|
||||
if tag not in entags:
|
||||
continue
|
||||
try:
|
||||
parser = MParser(parser)
|
||||
except:
|
||||
self.log("invalid argument: " + parser, 1)
|
||||
return
|
||||
|
||||
while True:
|
||||
try:
|
||||
bp = os.path.expanduser(parser)
|
||||
if os.path.exists(bp):
|
||||
parsers[tag] = [bp, timeout.get(tag, 30)]
|
||||
break
|
||||
except:
|
||||
pass
|
||||
for tag in entags:
|
||||
if tag in parser.tags:
|
||||
parsers[parser.tag] = parser
|
||||
|
||||
try:
|
||||
arg, parser = parser.split(",", 1)
|
||||
arg = arg.lower()
|
||||
|
||||
if arg == "f":
|
||||
force[tag] = True
|
||||
continue
|
||||
|
||||
if arg.startswith("t"):
|
||||
timeout[tag] = int(arg[1:])
|
||||
continue
|
||||
|
||||
raise Exception()
|
||||
|
||||
except:
|
||||
self.log("invalid argument: " + orig, 1)
|
||||
return
|
||||
|
||||
self.mtp_force[ptop] = force
|
||||
self.mtp_parsers[ptop] = parsers
|
||||
|
||||
q = "select count(w) from mt where k = 't:mtp'"
|
||||
@@ -595,8 +639,8 @@ class Up2k(object):
|
||||
have = cur.execute(q, (w,)).fetchall()
|
||||
have = [x[0] for x in have]
|
||||
|
||||
if ".dur" not in have and ".dur" in entags:
|
||||
# skip non-audio
|
||||
parsers = self._get_parsers(ptop, have, abspath)
|
||||
if not parsers:
|
||||
to_delete[w] = True
|
||||
n_left -= 1
|
||||
continue
|
||||
@@ -604,15 +648,11 @@ class Up2k(object):
|
||||
if w in in_progress:
|
||||
continue
|
||||
|
||||
task_parsers = {
|
||||
k: v for k, v in parsers.items() if k in force or k not in have
|
||||
}
|
||||
jobs.append([task_parsers, None, w, abspath])
|
||||
jobs.append([parsers, None, w, abspath])
|
||||
in_progress[w] = True
|
||||
|
||||
done = self._flush_mpool(wcur)
|
||||
|
||||
with self.mutex:
|
||||
done = self._flush_mpool(wcur)
|
||||
for w in done:
|
||||
to_delete[w] = True
|
||||
in_progress.pop(w)
|
||||
@@ -653,29 +693,60 @@ class Up2k(object):
|
||||
with self.mutex:
|
||||
cur.connection.commit()
|
||||
|
||||
done = self._stop_mpool(mpool, wcur)
|
||||
self._stop_mpool(mpool)
|
||||
with self.mutex:
|
||||
done = self._flush_mpool(wcur)
|
||||
for w in done:
|
||||
q = "delete from mt where w = ? and k = 't:mtp'"
|
||||
cur.execute(q, (w,))
|
||||
|
||||
cur.connection.commit()
|
||||
if n_done:
|
||||
self.vac(cur, db_path, n_done, 0, sz0)
|
||||
cur.execute("vacuum")
|
||||
|
||||
wcur.close()
|
||||
cur.close()
|
||||
|
||||
def _start_mpool(self):
|
||||
if WINDOWS and False:
|
||||
nah = open(os.devnull, "wb")
|
||||
wmic = "processid={}".format(os.getpid())
|
||||
wmic = ["wmic", "process", "where", wmic, "call", "setpriority"]
|
||||
sp.call(wmic + ["below normal"], stdout=nah, stderr=nah)
|
||||
def _get_parsers(self, ptop, have, abspath):
|
||||
try:
|
||||
all_parsers = self.mtp_parsers[ptop]
|
||||
except:
|
||||
return {}
|
||||
|
||||
entags = self.entags[ptop]
|
||||
parsers = {}
|
||||
for k, v in all_parsers.items():
|
||||
if "ac" in entags or ".aq" in entags:
|
||||
if "ac" in have or ".aq" in have:
|
||||
# is audio, require non-audio?
|
||||
if v.audio == "n":
|
||||
continue
|
||||
# is not audio, require audio?
|
||||
elif v.audio == "y":
|
||||
continue
|
||||
|
||||
if v.ext:
|
||||
match = False
|
||||
for ext in v.ext:
|
||||
if abspath.lower().endswith("." + ext):
|
||||
match = True
|
||||
break
|
||||
|
||||
if not match:
|
||||
continue
|
||||
|
||||
parsers[k] = v
|
||||
|
||||
parsers = {k: v for k, v in parsers.items() if v.force or k not in have}
|
||||
return parsers
|
||||
|
||||
def _start_mpool(self):
|
||||
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
|
||||
# both do crazy runahead so lets reinvent another wheel
|
||||
nw = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
if self.args.no_mtag_mt:
|
||||
nw = 1
|
||||
|
||||
if self.pending_tags is None:
|
||||
self.log("using {}x {}".format(nw, self.mtag.backend))
|
||||
self.pending_tags = []
|
||||
@@ -688,7 +759,7 @@ class Up2k(object):
|
||||
|
||||
return mpool
|
||||
|
||||
def _stop_mpool(self, mpool, wcur):
|
||||
def _stop_mpool(self, mpool):
|
||||
if not mpool:
|
||||
return
|
||||
|
||||
@@ -696,14 +767,6 @@ class Up2k(object):
|
||||
mpool.put(None)
|
||||
|
||||
mpool.join()
|
||||
done = self._flush_mpool(wcur)
|
||||
if WINDOWS and False:
|
||||
nah = open(os.devnull, "wb")
|
||||
wmic = "processid={}".format(os.getpid())
|
||||
wmic = ["wmic", "process", "where", wmic, "call", "setpriority"]
|
||||
sp.call(wmic + ["below normal"], stdout=nah, stderr=nah)
|
||||
|
||||
return done
|
||||
|
||||
def _tag_thr(self, q):
|
||||
while True:
|
||||
@@ -721,7 +784,8 @@ class Up2k(object):
|
||||
vtags = [
|
||||
"\033[36m{} \033[33m{}".format(k, v) for k, v in tags.items()
|
||||
]
|
||||
self.log("{}\033[0m [{}]".format(" ".join(vtags), abspath))
|
||||
if vtags:
|
||||
self.log("{}\033[0m [{}]".format(" ".join(vtags), abspath))
|
||||
|
||||
with self.mutex:
|
||||
self.pending_tags.append([entags, wark, abspath, tags])
|
||||
@@ -902,7 +966,7 @@ class Up2k(object):
|
||||
if cj["ptop"] not in self.registry:
|
||||
raise Pebkac(410, "location unavailable")
|
||||
|
||||
cj["name"] = sanitize_fn(cj["name"])
|
||||
cj["name"] = sanitize_fn(cj["name"], bad=[".prologue.html", ".epilogue.html"])
|
||||
cj["poke"] = time.time()
|
||||
wark = self._get_wark(cj)
|
||||
now = time.time()
|
||||
@@ -923,7 +987,7 @@ class Up2k(object):
|
||||
if dp_dir.startswith("//") or dp_fn.startswith("//"):
|
||||
dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
|
||||
|
||||
dp_abs = os.path.join(cj["ptop"], dp_dir, dp_fn).replace("\\", "/")
|
||||
dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
|
||||
# relying on path.exists to return false on broken symlinks
|
||||
if os.path.exists(fsenc(dp_abs)):
|
||||
job = {
|
||||
@@ -949,7 +1013,7 @@ class Up2k(object):
|
||||
for fn in names:
|
||||
path = os.path.join(job["ptop"], job["prel"], fn)
|
||||
try:
|
||||
if os.path.getsize(path) > 0:
|
||||
if os.path.getsize(fsenc(path)) > 0:
|
||||
# upload completed or both present
|
||||
break
|
||||
except:
|
||||
@@ -1068,9 +1132,14 @@ class Up2k(object):
|
||||
with self.mutex:
|
||||
job = self.registry[ptop].get(wark, None)
|
||||
if not job:
|
||||
known = " ".join([x for x in self.registry[ptop].keys()])
|
||||
self.log("unknown wark [{}], known: {}".format(wark, known))
|
||||
raise Pebkac(400, "unknown wark")
|
||||
|
||||
if chash not in job["need"]:
|
||||
msg = "chash = {} , need:\n".format(chash)
|
||||
msg += "\n".join(job["need"])
|
||||
self.log(msg)
|
||||
raise Pebkac(400, "already got that but thanks??")
|
||||
|
||||
nchunk = [n for n, v in enumerate(job["hash"]) if v == chash]
|
||||
@@ -1107,8 +1176,9 @@ class Up2k(object):
|
||||
|
||||
atomic_move(src, dst)
|
||||
|
||||
if WINDOWS:
|
||||
self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))])
|
||||
if ANYWIN:
|
||||
a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
|
||||
self.lastmod_q.put(a)
|
||||
|
||||
# legit api sware 2 me mum
|
||||
if self.idx_wark(
|
||||
@@ -1175,12 +1245,15 @@ class Up2k(object):
|
||||
return wark
|
||||
|
||||
def _hashlist_from_file(self, path):
|
||||
fsz = os.path.getsize(path)
|
||||
pp = self.pp if hasattr(self, "pp") else None
|
||||
fsz = os.path.getsize(fsenc(path))
|
||||
csz = up2k_chunksize(fsz)
|
||||
ret = []
|
||||
with open(path, "rb", 512 * 1024) as f:
|
||||
with open(fsenc(path), "rb", 512 * 1024) as f:
|
||||
while fsz > 0:
|
||||
self.pp.msg = "{} MB, {}".format(int(fsz / 1024 / 1024), path)
|
||||
if pp:
|
||||
pp.msg = "{} MB, {}".format(int(fsz / 1024 / 1024), path)
|
||||
|
||||
hashobj = hashlib.sha512()
|
||||
rem = min(csz, fsz)
|
||||
fsz -= rem
|
||||
@@ -1206,9 +1279,23 @@ class Up2k(object):
|
||||
# raise Exception("aaa")
|
||||
|
||||
tnam = job["name"] + ".PARTIAL"
|
||||
if self.args.dotpart:
|
||||
tnam = "." + tnam
|
||||
|
||||
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
|
||||
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
|
||||
f, job["tnam"] = f["orz"]
|
||||
if (
|
||||
ANYWIN
|
||||
and self.args.sparse
|
||||
and self.args.sparse * 1024 * 1024 <= job["size"]
|
||||
):
|
||||
fp = os.path.join(pdir, job["tnam"])
|
||||
try:
|
||||
sp.check_call(["fsutil", "sparse", "setflag", fp])
|
||||
except:
|
||||
self.log("could not sparse [{}]".format(fp), 3)
|
||||
|
||||
f.seek(job["size"] - 1)
|
||||
f.write(b"e")
|
||||
|
||||
@@ -1220,13 +1307,19 @@ class Up2k(object):
|
||||
|
||||
# self.log("lmod: got {}".format(len(ready)))
|
||||
time.sleep(5)
|
||||
for path, times in ready:
|
||||
for path, sz, times in ready:
|
||||
self.log("lmod: setting times {} on {}".format(times, path))
|
||||
try:
|
||||
os.utime(fsenc(path), times)
|
||||
except:
|
||||
self.log("lmod: failed to utime ({}, {})".format(path, times))
|
||||
|
||||
if self.args.sparse and self.args.sparse * 1024 * 1024 <= sz:
|
||||
try:
|
||||
sp.check_call(["fsutil", "sparse", "setflag", path, "0"])
|
||||
except:
|
||||
self.log("could not unsparse [{}]".format(path), 3)
|
||||
|
||||
def _snapshot(self):
|
||||
persist_interval = 30 # persist unfinished uploads index every 30 sec
|
||||
discard_interval = 21600 # drop unfinished uploads after 6 hours inactivity
|
||||
@@ -1249,13 +1342,13 @@ class Up2k(object):
|
||||
try:
|
||||
# remove the filename reservation
|
||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
if os.path.getsize(path) == 0:
|
||||
os.unlink(path)
|
||||
if os.path.getsize(fsenc(path)) == 0:
|
||||
os.unlink(fsenc(path))
|
||||
|
||||
if len(job["hash"]) == len(job["need"]):
|
||||
# PARTIAL is empty, delete that too
|
||||
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
|
||||
os.unlink(path)
|
||||
os.unlink(fsenc(path))
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -1263,8 +1356,8 @@ class Up2k(object):
|
||||
if not reg:
|
||||
if k not in prev or prev[k] is not None:
|
||||
prev[k] = None
|
||||
if os.path.exists(path):
|
||||
os.unlink(path)
|
||||
if os.path.exists(fsenc(path)):
|
||||
os.unlink(fsenc(path))
|
||||
return
|
||||
|
||||
newest = max(x["poke"] for _, x in reg.items()) if reg else 0
|
||||
@@ -1296,13 +1389,9 @@ class Up2k(object):
|
||||
abspath = os.path.join(ptop, rd, fn)
|
||||
tags = self.mtag.get(abspath)
|
||||
ntags1 = len(tags)
|
||||
if self.mtp_parsers.get(ptop, {}):
|
||||
parser = {
|
||||
k: v
|
||||
for k, v in self.mtp_parsers[ptop].items()
|
||||
if k in self.mtp_force[ptop] or k not in tags
|
||||
}
|
||||
tags.update(self.mtag.get_bin(parser, abspath))
|
||||
parsers = self._get_parsers(ptop, tags, abspath)
|
||||
if parsers:
|
||||
tags.update(self.mtag.get_bin(parsers, abspath))
|
||||
|
||||
with self.mutex:
|
||||
cur = self.cur[ptop]
|
||||
|
||||
@@ -15,8 +15,9 @@ import threading
import mimetypes
import contextlib
import subprocess as sp  # nosec
from datetime import datetime

from .__init__ import PY2, WINDOWS
from .__init__ import PY2, WINDOWS, ANYWIN
from .stolen import surrogateescape

FAKE_MP = False
@@ -34,10 +35,12 @@ if not PY2:
    from urllib.parse import unquote_to_bytes as unquote
    from urllib.parse import quote_from_bytes as quote
    from queue import Queue
    from io import BytesIO
else:
    from urllib import unquote  # pylint: disable=no-name-in-module
    from urllib import quote  # pylint: disable=no-name-in-module
    from Queue import Queue  # pylint: disable=import-error,no-name-in-module
    from StringIO import StringIO as BytesIO

surrogateescape.register_surrogateescape()
FS_ENCODING = sys.getfilesystemencoding()
@@ -45,10 +48,14 @@ if WINDOWS and PY2:
    FS_ENCODING = "utf-8"


HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT"


HTTPCODE = {
    200: "OK",
    204: "No Content",
    206: "Partial Content",
    302: "Found",
    304: "Not Modified",
    400: "Bad Request",
    403: "Forbidden",
@@ -72,6 +79,13 @@ IMPLICATIONS = [
]


MIMES = {
    "md": "text/plain; charset=UTF-8",
    "opus": "audio/ogg; codecs=opus",
    "webp": "image/webp",
}


REKOBO_KEY = {
    v: ln.split(" ", 1)[0]
    for ln in """
@@ -123,6 +137,32 @@ class Counter(object):
        self.v = absval


class Cooldown(object):
    def __init__(self, maxage):
        self.maxage = maxage
        self.mutex = threading.Lock()
        self.hist = {}
        self.oldest = 0

    def poke(self, key):
        with self.mutex:
            now = time.time()

            ret = False
            v = self.hist.get(key, 0)
            if now - v > self.maxage:
                self.hist[key] = now
                ret = True

            if self.oldest - now > self.maxage * 2:
                self.hist = {
                    k: v for k, v in self.hist.items() if now - v < self.maxage
                }
                self.oldest = sorted(self.hist.values())[0]

            return ret


class Unrecv(object):
    """
    undo any number of socket recv ops
@@ -222,6 +262,11 @@ def ren_open(fname, *args, **kwargs):
        yield {"orz": [f, fname]}
        return

    if suffix:
        ext = fname.split(".")[-1]
        if len(ext) < 7:
            suffix += "." + ext

    orig_name = fname
    bname = fname
    ext = ""
@@ -242,7 +287,7 @@ def ren_open(fname, *args, **kwargs):
        else:
            fpath = fname

        if suffix and os.path.exists(fpath):
        if suffix and os.path.exists(fsenc(fpath)):
            fpath += suffix
            fname += suffix
            ext += suffix
@@ -576,12 +621,12 @@ def undot(path):
    return "/".join(ret)


def sanitize_fn(fn, ok=""):
def sanitize_fn(fn, ok="", bad=[]):
    if "/" not in ok:
        fn = fn.replace("\\", "/").split("/")[-1]

    if WINDOWS:
        for bad, good in [x for x in [
    if ANYWIN:
        remap = [
            ["<", "＜"],
            [">", "＞"],
            [":", "："],
@@ -591,15 +636,16 @@ def sanitize_fn(fn, ok=""):
            ["|", "｜"],
            ["?", "？"],
            ["*", "＊"],
        ] if x[0] not in ok]:
            fn = fn.replace(bad, good)
        ]
        for a, b in [x for x in remap if x[0] not in ok]:
            fn = fn.replace(a, b)

        bad = ["con", "prn", "aux", "nul"]
        bad.extend(["con", "prn", "aux", "nul"])
        for n in range(1, 10):
            bad += "com{0} lpt{0}".format(n).split(" ")

        if fn.lower() in bad:
            fn = "_" + fn
    if fn.lower() in bad:
        fn = "_" + fn

    return fn.strip()

@@ -615,17 +661,29 @@ def exclude_dotfiles(filepaths):
    return [x for x in filepaths if not x.split("/")[-1].startswith(".")]


def html_escape(s, quote=False):
def http_ts(ts):
    file_dt = datetime.utcfromtimestamp(ts)
    return file_dt.strftime(HTTP_TS_FMT)


def html_escape(s, quote=False, crlf=False):
    """html.escape but also newlines"""
    s = (
        s.replace("&", "&amp;")
        .replace("<", "&lt;")
        .replace(">", "&gt;")
        .replace("\r", "&#13;")
        .replace("\n", "&#10;")
    )
    s = s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
    if quote:
        s = s.replace('"', "&quot;").replace("'", "&#x27;")
    if crlf:
        s = s.replace("\r", "&#13;").replace("\n", "&#10;")

    return s


def html_bescape(s, quote=False, crlf=False):
    """html.escape but bytestrings"""
    s = s.replace(b"&", b"&amp;").replace(b"<", b"&lt;").replace(b">", b"&gt;")
    if quote:
        s = s.replace(b'"', b"&quot;").replace(b"'", b"&#x27;")
    if crlf:
        s = s.replace(b"\r", b"&#13;").replace(b"\n", b"&#10;")

    return s

@@ -714,6 +772,8 @@ def s3dec(rd, fn):


def atomic_move(src, dst):
    src = fsenc(src)
    dst = fsenc(dst)
    if not PY2:
        os.replace(src, dst)
    else:
@@ -905,11 +965,13 @@ def unescape_cookie(orig):
    return ret


def guess_mime(url):
    if url.endswith(".md"):
        return ["text/plain; charset=UTF-8"]
def guess_mime(url, fallback="application/octet-stream"):
    try:
        _, ext = url.rsplit(".", 1)
    except:
        return fallback

    return mimetypes.guess_type(url)
    return MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback


def runcmd(*argv):
@@ -928,6 +990,17 @@ def chkcmd(*argv):
    return sout, serr


def mchkcmd(argv, timeout=10):
    if PY2:
        with open(os.devnull, "wb") as f:
            rv = sp.call(argv, stdout=f, stderr=f)
    else:
        rv = sp.call(argv, stdout=sp.DEVNULL, stderr=sp.DEVNULL, timeout=timeout)

    if rv:
        raise sp.CalledProcessError(rv, (argv[0], b"...", argv[-1]))


def gzip_orig_sz(fn):
    with open(fsenc(fn), "rb") as f:
        f.seek(-4, 2)
@@ -1,3 +1,6 @@
|
||||
:root {
|
||||
--grid-sz: 10em;
|
||||
}
|
||||
* {
|
||||
line-height: 1.2em;
|
||||
}
|
||||
@@ -6,7 +9,7 @@ html,body,tr,th,td,#files,a {
|
||||
background: none;
|
||||
font-weight: inherit;
|
||||
font-size: inherit;
|
||||
padding: none;
|
||||
padding: 0;
|
||||
border: none;
|
||||
}
|
||||
html {
|
||||
@@ -64,11 +67,16 @@ a, #files tbody div a:last-child {
|
||||
background: #161616;
|
||||
text-decoration: underline;
|
||||
}
|
||||
#files thead {
|
||||
background: #333;
|
||||
position: sticky;
|
||||
top: 0;
|
||||
}
|
||||
#files thead a {
|
||||
color: #999;
|
||||
font-weight: normal;
|
||||
}
|
||||
#files tr+tr:hover {
|
||||
#files tr:hover {
|
||||
background: #1c1c1c;
|
||||
}
|
||||
#files thead th {
|
||||
@@ -90,8 +98,6 @@ a, #files tbody div a:last-child {
|
||||
#files td {
|
||||
margin: 0;
|
||||
padding: 0 .5em;
|
||||
}
|
||||
#files td {
|
||||
border-bottom: 1px solid #111;
|
||||
}
|
||||
#files td+td+td {
|
||||
@@ -182,11 +188,42 @@ a, #files tbody div a:last-child {
|
||||
color: #840;
|
||||
text-shadow: 0 0 .3em #b80;
|
||||
}
|
||||
#files tbody tr.sel td {
|
||||
#files tbody tr.sel td,
|
||||
#ggrid a.sel,
|
||||
html.light #ggrid a.sel {
|
||||
color: #fff;
|
||||
background: #925;
|
||||
border-color: #c37;
|
||||
}
|
||||
#files tbody tr.sel:hover td,
|
||||
#ggrid a.sel:hover,
|
||||
html.light #ggrid a.sel:hover {
|
||||
color: #fff;
|
||||
background: #d39;
|
||||
border-color: #d48;
|
||||
text-shadow: 1px 1px 0 #804;
|
||||
}
|
||||
#ggrid a.sel,
|
||||
html.light #ggrid a.sel {
|
||||
border-top: 1px solid #d48;
|
||||
box-shadow: 0 .1em 1.2em #b36;
|
||||
transition: all 0.2s cubic-bezier(.2, 2.2, .5, 1); /* https://cubic-bezier.com/#.4,2,.7,1 */
|
||||
}
|
||||
#ggrid a.sel img {
|
||||
opacity: .7;
|
||||
box-shadow: 0 0 1em #b36;
|
||||
filter: contrast(130%) brightness(107%);
|
||||
}
|
||||
#files tr.sel a {
|
||||
color: #fff;
|
||||
}
|
||||
#files tr.sel a.play {
|
||||
color: #fc5;
|
||||
}
|
||||
#files tr.sel a.play.act {
|
||||
color: #fff;
|
||||
text-shadow: 0 0 1px #fff;
|
||||
}
|
||||
#blocked {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
@@ -244,7 +281,10 @@ a, #files tbody div a:last-child {
|
||||
background: #3c3c3c;
|
||||
}
|
||||
#wtico {
|
||||
cursor: url(/.cpr/dd/1.png), pointer;
|
||||
cursor: url(/.cpr/dd/4.png), pointer;
|
||||
animation: cursor 500ms;
|
||||
}
|
||||
#wtico:hover {
|
||||
animation: cursor 500ms infinite;
|
||||
}
|
||||
@keyframes cursor {
|
||||
@@ -252,7 +292,7 @@ a, #files tbody div a:last-child {
|
||||
30% {cursor: url(/.cpr/dd/3.png), pointer}
|
||||
50% {cursor: url(/.cpr/dd/4.png), pointer}
|
||||
75% {cursor: url(/.cpr/dd/5.png), pointer}
|
||||
85% {cursor: url(/.cpr/dd/1.png), pointer}
|
||||
85% {cursor: url(/.cpr/dd/4.png), pointer}
|
||||
}
|
||||
@keyframes spin {
|
||||
100% {transform: rotate(360deg)}
|
||||
@@ -273,29 +313,48 @@ a, #files tbody div a:last-child {
|
||||
padding: .2em 0 0 .07em;
|
||||
color: #fff;
|
||||
}
|
||||
#wzip {
|
||||
#wzip, #wnp {
|
||||
display: none;
|
||||
margin-right: .3em;
|
||||
padding-right: .3em;
|
||||
border-right: .1em solid #555;
|
||||
}
|
||||
#wnp a {
|
||||
position: relative;
|
||||
font-size: .47em;
|
||||
margin: 0 .1em;
|
||||
top: -.4em;
|
||||
}
|
||||
#wnp a+a {
|
||||
margin-left: .33em;
|
||||
}
|
||||
#wtoggle,
|
||||
#wtoggle * {
|
||||
line-height: 1em;
|
||||
}
|
||||
#wtoggle.sel {
|
||||
width: 6em;
|
||||
#wtoggle.np {
|
||||
width: 5.5em;
|
||||
}
|
||||
#wtoggle.sel #wzip {
|
||||
#wtoggle.sel {
|
||||
width: 6.4em;
|
||||
}
|
||||
#wtoggle.sel #wzip,
|
||||
#wtoggle.np #wnp {
|
||||
display: inline-block;
|
||||
}
|
||||
#wtoggle.sel #wzip a {
|
||||
#wtoggle.sel.np #wnp {
|
||||
display: none;
|
||||
}
|
||||
#wzip a {
|
||||
font-size: .4em;
|
||||
padding: 0 .3em;
|
||||
margin: -.3em .2em;
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
}
|
||||
#wzip a+a {
|
||||
margin-left: .8em;
|
||||
}
|
||||
#wtoggle.sel #wzip #selzip {
|
||||
top: -.6em;
|
||||
padding: .4em .3em;
|
||||
@@ -343,10 +402,10 @@ a, #files tbody div a:last-child {
|
||||
width: calc(100% - 10.5em);
|
||||
background: rgba(0,0,0,0.2);
|
||||
}
|
||||
@media (min-width: 90em) {
|
||||
@media (min-width: 80em) {
|
||||
#barpos,
|
||||
#barbuf {
|
||||
width: calc(100% - 24em);
|
||||
width: calc(100% - 21em);
|
||||
left: 9.8em;
|
||||
top: .7em;
|
||||
height: 1.6em;
|
||||
@@ -356,6 +415,9 @@ a, #files tbody div a:last-child {
|
||||
bottom: -3.2em;
|
||||
height: 3.2em;
|
||||
}
|
||||
#pvol {
|
||||
max-width: 9em;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -407,6 +469,7 @@ a, #files tbody div a:last-child {
|
||||
padding: .3em .6em;
|
||||
border-radius: .3em;
|
||||
border-width: .15em 0;
|
||||
white-space: nowrap;
|
||||
}
|
||||
.opbox {
|
||||
background: #2d2d2d;
|
||||
@@ -466,6 +529,17 @@ input[type="checkbox"]:checked+label {
|
||||
height: 1em;
|
||||
margin: .2em 0 -1em 1.6em;
|
||||
}
|
||||
#tq_raw {
|
||||
width: calc(100% - 2em);
|
||||
margin: .3em 0 0 1.4em;
|
||||
}
|
||||
#tq_raw td+td {
|
||||
width: 100%;
|
||||
}
|
||||
#op_search #q_raw {
|
||||
width: 100%;
|
||||
display: block;
|
||||
}
|
||||
#files td div span {
|
||||
color: #fff;
|
||||
padding: 0 .4em;
|
||||
@@ -487,9 +561,6 @@ input[type="checkbox"]:checked+label {
|
||||
border-collapse: collapse;
|
||||
width: 100%;
|
||||
}
|
||||
#files td div a:last-child {
|
||||
width: 100%;
|
||||
}
|
||||
#wrap {
|
||||
margin-top: 2em;
|
||||
}
|
||||
@@ -499,7 +570,6 @@ input[type="checkbox"]:checked+label {
|
||||
left: 0;
|
||||
bottom: 0;
|
||||
top: 7em;
|
||||
padding-top: .2em;
|
||||
overflow-y: auto;
|
||||
-ms-scroll-chaining: none;
|
||||
overscroll-behavior-y: none;
|
||||
@@ -508,9 +578,7 @@ input[type="checkbox"]:checked+label {
|
||||
#thx_ff {
|
||||
padding: 5em 0;
|
||||
}
|
||||
#tree::-webkit-scrollbar-track {
|
||||
background: #333;
|
||||
}
|
||||
#tree::-webkit-scrollbar-track,
|
||||
#tree::-webkit-scrollbar {
|
||||
background: #333;
|
||||
}
|
||||
@@ -525,8 +593,7 @@ input[type="checkbox"]:checked+label {
|
||||
left: -1.7em;
|
||||
width: calc(100% + 1.3em);
|
||||
}
|
||||
.tglbtn,
|
||||
#tree>a+a {
|
||||
.btn {
|
||||
padding: .2em .4em;
|
||||
font-size: 1.2em;
|
||||
background: #2a2a2a;
|
||||
@@ -536,12 +603,10 @@ input[type="checkbox"]:checked+label {
|
||||
position: relative;
|
||||
top: -.2em;
|
||||
}
|
||||
.tglbtn:hover,
|
||||
#tree>a+a:hover {
|
||||
.btn:hover {
|
||||
background: #805;
|
||||
}
|
||||
.tglbtn.on,
|
||||
#tree>a+a.on {
|
||||
.tgl.btn.on {
|
||||
background: #fc4;
|
||||
color: #400;
|
||||
text-shadow: none;
|
||||
@@ -549,6 +614,7 @@ input[type="checkbox"]:checked+label {
|
||||
#detree {
|
||||
padding: .3em .5em;
|
||||
font-size: 1.5em;
|
||||
line-height: 1.5em;
|
||||
}
|
||||
#tree ul,
|
||||
#tree li {
|
||||
@@ -685,3 +751,274 @@ input[type="checkbox"]:checked+label {
|
||||
font-family: monospace, monospace;
|
||||
line-height: 2em;
|
||||
}
|
||||
#griden.on+#thumbs {
|
||||
opacity: .3;
|
||||
}
|
||||
#ghead {
|
||||
background: #3c3c3c;
|
||||
border: 1px solid #444;
|
||||
border-radius: .3em;
|
||||
padding: .5em;
|
||||
margin: 0 1.5em 1em .4em;
|
||||
position: sticky;
|
||||
top: -.3em;
|
||||
}
|
||||
html.light #ghead {
|
||||
background: #f7f7f7;
|
||||
border-color: #ddd;
|
||||
}
|
||||
#ghead .btn {
|
||||
position: relative;
|
||||
top: 0;
|
||||
}
|
||||
#ggrid {
|
||||
padding-top: .5em;
|
||||
}
|
||||
#ggrid a {
|
||||
display: inline-block;
|
||||
width: var(--grid-sz);
|
||||
vertical-align: top;
|
||||
overflow-wrap: break-word;
|
||||
background: #383838;
|
||||
border: 1px solid #444;
|
||||
border-top: 1px solid #555;
|
||||
box-shadow: 0 .1em .2em #222;
|
||||
border-radius: .3em;
|
||||
padding: .3em;
|
||||
margin: .5em;
|
||||
}
|
||||
#ggrid a img {
|
||||
border-radius: .2em;
|
||||
max-width: var(--grid-sz);
|
||||
max-height: calc(var(--grid-sz)/1.25);
|
||||
margin: 0 auto;
|
||||
display: block;
|
||||
}
|
||||
#ggrid a span {
|
||||
padding: .2em .3em;
|
||||
display: block;
|
||||
}
|
||||
#ggrid a:hover {
|
||||
background: #444;
|
||||
border-color: #555;
|
||||
color: #fd9;
|
||||
}
|
||||
html.light #ggrid a {
|
||||
background: #f7f7f7;
|
||||
border-color: #ddd;
|
||||
box-shadow: 0 .1em .2em #ddd;
|
||||
}
|
||||
html.light #ggrid a:hover {
|
||||
background: #fff;
|
||||
border-color: #ccc;
|
||||
color: #015;
|
||||
box-shadow: 0 .1em .5em #aaa;
|
||||
}
|
||||
#pvol,
|
||||
#barbuf,
|
||||
#barpos,
|
||||
#u2conf label {
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
}
|
html.light {
|
||||
color: #333;
|
||||
background: #eee;
|
||||
text-shadow: none;
|
||||
}
|
||||
html.light #ops,
|
||||
html.light .opbox,
|
||||
html.light #srch_form {
|
||||
background: #f7f7f7;
|
||||
box-shadow: 0 0 .3em #ddd;
|
||||
border-color: #f7f7f7;
|
||||
}
|
||||
html.light #ops a.act {
|
||||
box-shadow: 0 .2em .2em #ccc;
|
||||
background: #fff;
|
||||
border-color: #07a;
|
||||
padding-top: .4em;
|
||||
}
|
||||
html.light #op_cfg h3 {
|
||||
border-color: #ccc;
|
||||
}
|
||||
html.light .btn {
|
||||
color: #666;
|
||||
background: #ddd;
|
||||
box-shadow: none;
|
||||
}
|
||||
html.light .btn:hover {
|
||||
background: #caf;
|
||||
}
|
||||
html.light .tgl.btn.on {
|
||||
background: #4a0;
|
||||
color: #fff;
|
||||
}
|
||||
html.light #srv_info {
|
||||
color: #c83;
|
||||
text-shadow: 1px 1px 0 #fff;
|
||||
}
|
||||
html.light #srv_info span {
|
||||
color: #000;
|
||||
}
|
||||
html.light #treeul a+a {
|
||||
background: inherit;
|
||||
color: #06a;
|
||||
}
|
||||
html.light #treeul a.hl {
|
||||
background: #07a;
|
||||
color: #fff;
|
||||
}
|
||||
html.light #tree li {
|
||||
border-color: #ddd #fff #f7f7f7 #fff;
|
||||
}
|
||||
html.light #tree ul {
|
||||
border-color: #ccc;
|
||||
}
|
||||
html.light a,
|
||||
html.light #ops a,
|
||||
html.light #files tbody div a:last-child {
|
||||
color: #06a;
|
||||
}
|
||||
html.light #files tbody {
|
||||
background: #f7f7f7;
|
||||
}
|
||||
html.light #files {
|
||||
box-shadow: 0 0 .3em #ccc;
|
||||
}
|
||||
html.light #files thead th {
|
||||
background: #eee;
|
||||
}
|
||||
html.light #files tr td {
|
||||
border-top: 1px solid #ddd;
|
||||
}
|
||||
html.light #files td {
|
||||
border-bottom: 1px solid #f7f7f7;
|
||||
}
|
||||
html.light #files tbody tr:last-child td {
|
||||
border-bottom: .2em solid #ccc;
|
||||
}
|
||||
html.light #files td:nth-child(2n) {
|
||||
color: #d38;
|
||||
}
|
||||
html.light #files tr:hover td {
|
||||
background: #fff;
|
||||
}
|
||||
html.light #files tbody a.play {
|
||||
color: #c0f;
|
||||
}
|
||||
html.light tr.play td {
|
||||
background: #fc5;
|
||||
}
|
||||
html.light tr.play a {
|
||||
color: #406;
|
||||
}
|
||||
html.light #files th:hover .cfg,
|
||||
html.light #files th.min .cfg {
|
||||
background: #ccc;
|
||||
}
|
||||
html.light #files > thead > tr > th.min span {
|
||||
background: linear-gradient(90deg, rgba(204,204,204,0), rgba(204,204,204,0.5) 70%, #ccc);
|
||||
}
|
||||
html.light #blocked {
|
||||
background: #eee;
|
||||
}
|
||||
html.light #blk_play a,
|
||||
html.light #blk_abrt a {
|
||||
background: #fff;
|
||||
box-shadow: 0 .2em .4em #ddd;
|
||||
}
|
||||
html.light #widget a {
|
||||
color: #fc5;
|
||||
}
|
||||
html.light #files tr.sel:hover td {
|
||||
background: #c37;
|
||||
}
|
||||
html.light #files tr.sel td {
|
||||
color: #fff;
|
||||
}
|
||||
html.light #files tr.sel a {
|
||||
color: #fff;
|
||||
}
|
||||
html.light #files tr.sel a.play.act {
|
||||
color: #fb0;
|
||||
}
|
||||
html.light input[type="checkbox"] + label {
|
||||
color: #333;
|
||||
}
|
||||
html.light .opview input[type="text"] {
|
||||
background: #fff;
|
||||
color: #333;
|
||||
box-shadow: 0 0 2px #888;
|
||||
border-color: #38d;
|
||||
}
|
||||
html.light #ops:hover #opdesc {
|
||||
background: #fff;
|
||||
box-shadow: 0 .3em 1em #ccc;
|
||||
}
|
||||
html.light #opdesc code {
|
||||
background: #060;
|
||||
color: #fff;
|
||||
}
|
||||
html.light #u2tab a>span,
|
||||
html.light #files td div span {
|
||||
color: #000;
|
||||
}
|
||||
html.light #path {
|
||||
background: #f7f7f7;
|
||||
text-shadow: none;
|
||||
box-shadow: 0 0 .3em #bbb;
|
||||
}
|
||||
html.light #path a {
|
||||
color: #333;
|
||||
}
|
||||
html.light #path a:not(:last-child)::after {
|
||||
border-color: #ccc;
|
||||
background: none;
|
||||
border-width: .1em .1em 0 0;
|
||||
margin: -.2em .3em -.2em -.3em;
|
||||
}
|
||||
html.light #path a:hover {
|
||||
background: none;
|
||||
color: #60a;
|
||||
}
|
||||
html.light #files tbody div a {
|
||||
color: #d38;
|
||||
}
|
||||
html.light #files a:hover,
|
||||
html.light #files tr.sel a:hover {
|
||||
color: #000;
|
||||
background: #fff;
|
||||
}
|
||||
html.light #tree {
|
||||
scrollbar-color: #a70 #ddd;
|
||||
}
|
||||
html.light #tree::-webkit-scrollbar-track,
|
||||
html.light #tree::-webkit-scrollbar {
|
||||
background: #ddd;
|
||||
}
|
||||
#tree::-webkit-scrollbar-thumb {
|
||||
background: #da0;
|
||||
}
|
||||
@@ -13,15 +13,15 @@
<body>
<div id="ops">
<a href="#" data-dest="" data-desc="close submenu">---</a>
<a href="#" data-perm="read" data-dest="search" data-desc="search for files by attributes, path/name, music tags, or any combination of those.<br /><br /><code>foo bar</code> = must contain both foo and bar,<br /><code>foo -bar</code> = must contain foo but not bar,<br /><code>^yana .opus$</code> = must start with yana and have the opus extension">🔎</a>
{%- if have_up2k_idx %}
<a href="#" data-perm="read" data-dest="search" data-desc="search for files by attributes, path/name, music tags, or any combination of those.<br /><br /><code>foo bar</code> = must contain both foo and bar,<br /><code>foo -bar</code> = must contain foo but not bar,<br /><code>^yana .opus$</code> = must start with yana and have the opus extension">🔎</a>
<a href="#" data-dest="up2k" data-desc="up2k: upload files (if you have write-access) or toggle into the search-mode and drag files onto the search button to see if they exist somewhere on the server">🚀</a>
{%- else %}
<a href="#" data-perm="write" data-dest="up2k" data-desc="up2k: upload files with resume support (close your browser and drop the same files in later)">🚀</a>
{%- endif %}
<a href="#" data-perm="write" data-dest="bup" data-desc="bup: basic uploader, even supports netscape 4.0">🎈</a>
<a href="#" data-perm="write" data-dest="mkdir" data-desc="mkdir: create a new directory">📂</a>
<a href="#" data-perm="write" data-dest="new_md" data-desc="new-md: create a new markdown document">📝</a>
<a href="#" data-perm="read write" data-dest="new_md" data-desc="new-md: create a new markdown document">📝</a>
<a href="#" data-perm="write" data-dest="msg" data-desc="msg: send a message to the server log">📟</a>
<a href="#" data-dest="cfg" data-desc="configuration options">⚙️</a>
<div id="opdesc"></div>
@@ -39,14 +39,19 @@
{%- include 'upload.html' %}

<div id="op_cfg" class="opview opbox">
<h3>key notation</h3>
<div id="key_notation"></div>
<h3>switches</h3>
<div>
<a id="tooltips" class="tgl btn" href="#">tooltips</a>
<a id="lightmode" class="tgl btn" href="#">lightmode</a>
<a id="griden" class="tgl btn" href="#">the grid</a>
<a id="thumbs" class="tgl btn" href="#">thumbs</a>
</div>
{%- if have_zip %}
<h3>folder download</h3>
<div id="arc_fmt"></div>
{%- endif %}
<h3>tooltips</h3>
<div><a id="tooltips" class="tglbtn" href="#">enable</a></div>
<h3>key notation</h3>
<div id="key_notation"></div>
</div>

<h1 id="path">
@@ -58,9 +63,9 @@

<div id="tree">
<a href="#" id="detree">🍞...</a>
<a href="#" step="2" id="twobytwo">+</a>
<a href="#" step="-2" id="twig">–</a>
<a href="#" class="tglbtn" id="dyntree">a</a>
<a href="#" class="btn" step="2" id="twobytwo">+</a>
<a href="#" class="btn" step="-2" id="twig">–</a>
<a href="#" class="tgl btn" id="dyntree">a</a>
<ul id="treeul"></ul>
<div id="thx_ff"> </div>
</div>
@@ -111,22 +116,7 @@
<div id="srv_info"><span>{{ srv_info }}</span></div>
{%- endif %}

<div id="widget">
<div id="wtoggle">
<span id="wzip">
<a href="#" id="selall">sel.<br />all</a>
<a href="#" id="selinv">sel.<br />inv.</a>
<a href="#" id="selzip">zip</a>
</span>
<a href="#" id="wtico">♫</a>
</div>
<div id="widgeti">
<div id="pctl"><a href="#" id="bprev">⏮</a><a href="#" id="bplay">▶</a><a href="#" id="bnext">⏭</a></div>
<canvas id="pvol" width="288" height="38"></canvas>
<canvas id="barpos"></canvas>
<canvas id="barbuf"></canvas>
</div>
</div>
<div id="widget"></div>

<script>
var tag_order_cfg = {{ tag_order }};
File diff suppressed because it is too large
@@ -6,6 +6,11 @@
<title>{{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<style>
html{font-family:sans-serif}
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
a{display:block}
</style>
</head>

<body>
@@ -49,7 +54,7 @@
<div>{{ logues[1] }}</div><br />
{%- endif %}

<h2><a href="{{ url_suf }}&h">control-panel</a></h2>
<h2><a href="{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>

</body>
</html>
Binary file not shown (image, 248 B).
@@ -50,6 +50,9 @@ pre code:last-child {
|
||||
pre code::before {
|
||||
content: counter(precode);
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
display: inline-block;
|
||||
text-align: right;
|
||||
font-size: .75em;
|
||||
@@ -591,12 +594,3 @@ blink {
|
||||
color: #940;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
*[data-ln]:before {
|
||||
content: attr(data-ln);
|
||||
font-size: .8em;
|
||||
margin: 0 .4em;
|
||||
color: #f0c;
|
||||
}
|
||||
*/
|
||||
@@ -138,10 +138,10 @@ var md_opt = {
|
||||
document.documentElement.setAttribute("class", dark ? "dark" : "");
|
||||
btn.innerHTML = "go " + (dark ? "light" : "dark");
|
||||
if (window.localStorage)
|
||||
localStorage.setItem('darkmode', dark ? 1 : 0);
|
||||
localStorage.setItem('lightmode', dark ? 0 : 1);
|
||||
};
|
||||
btn.onclick = toggle;
|
||||
if (window.localStorage && localStorage.getItem('darkmode') == 1)
|
||||
if (window.localStorage && localStorage.getItem('lightmode') != 1)
|
||||
toggle();
|
||||
})();
|
||||
|
||||
|
||||
@@ -46,7 +46,7 @@ function statify(obj) {
|
||||
var ua = navigator.userAgent;
|
||||
if (ua.indexOf(') Gecko/') !== -1 && /Linux| Mac /.exec(ua)) {
|
||||
// necessary on ff-68.7 at least
|
||||
var s = document.createElement('style');
|
||||
var s = mknod('style');
|
||||
s.innerHTML = '@page { margin: .5in .6in .8in .6in; }';
|
||||
console.log(s.innerHTML);
|
||||
document.head.appendChild(s);
|
||||
@@ -175,12 +175,12 @@ function md_plug_err(ex, js) {
|
||||
msg = "Line " + ln + ", " + msg;
|
||||
var lns = js.split('\n');
|
||||
if (ln < lns.length) {
|
||||
o = document.createElement('span');
|
||||
o = mknod('span');
|
||||
o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block';
|
||||
o.textContent = lns[ln - 1];
|
||||
}
|
||||
}
|
||||
errbox = document.createElement('div');
|
||||
errbox = mknod('div');
|
||||
errbox.setAttribute('id', 'md_errbox');
|
||||
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
|
||||
errbox.textContent = msg;
|
||||
|
||||
@@ -1,126 +1,125 @@
|
||||
#toc {
|
||||
display: none;
|
||||
display: none;
|
||||
}
|
||||
#mtw {
|
||||
display: block;
|
||||
position: fixed;
|
||||
left: .5em;
|
||||
bottom: 0;
|
||||
width: calc(100% - 56em);
|
||||
display: block;
|
||||
position: fixed;
|
||||
left: .5em;
|
||||
bottom: 0;
|
||||
width: calc(100% - 56em);
|
||||
}
|
||||
#mw {
|
||||
left: calc(100% - 55em);
|
||||
overflow-y: auto;
|
||||
position: fixed;
|
||||
bottom: 0;
|
||||
left: calc(100% - 55em);
|
||||
overflow-y: auto;
|
||||
position: fixed;
|
||||
bottom: 0;
|
||||
}
|
||||
|
||||
|
||||
/* single-screen */
|
||||
#mtw.preview,
|
||||
#mw.editor {
|
||||
opacity: 0;
|
||||
z-index: 1;
|
||||
opacity: 0;
|
||||
z-index: 1;
|
||||
}
|
||||
#mw.preview,
|
||||
#mtw.editor {
|
||||
z-index: 5;
|
||||
z-index: 5;
|
||||
}
|
||||
#mtw.single,
|
||||
#mw.single {
|
||||
margin: 0;
|
||||
left: 1em;
|
||||
left: max(1em, calc((100% - 56em) / 2));
|
||||
margin: 0;
|
||||
left: 1em;
|
||||
left: max(1em, calc((100% - 56em) / 2));
|
||||
}
|
||||
#mtw.single {
|
||||
width: 55em;
|
||||
width: min(55em, calc(100% - 2em));
|
||||
width: 55em;
|
||||
width: min(55em, calc(100% - 2em));
|
||||
}
|
||||
|
||||
|
||||
#mp {
|
||||
position: relative;
|
||||
position: relative;
|
||||
}
|
||||
#mt, #mtr {
|
||||
width: 100%;
|
||||
height: calc(100% - 1px);
|
||||
color: #444;
|
||||
background: #f7f7f7;
|
||||
border: 1px solid #999;
|
||||
outline: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
font-family: 'consolas', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
word-wrap: break-word; /*ie*/
|
||||
overflow-y: scroll;
|
||||
line-height: 1.3em;
|
||||
font-size: .9em;
|
||||
position: relative;
|
||||
scrollbar-color: #eb0 #f7f7f7;
|
||||
width: 100%;
|
||||
height: calc(100% - 1px);
|
||||
color: #444;
|
||||
background: #f7f7f7;
|
||||
border: 1px solid #999;
|
||||
outline: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
font-family: 'consolas', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
word-wrap: break-word; /*ie*/
|
||||
overflow-y: scroll;
|
||||
line-height: 1.3em;
|
||||
font-size: .9em;
|
||||
position: relative;
|
||||
scrollbar-color: #eb0 #f7f7f7;
|
||||
}
|
||||
html.dark #mt {
|
||||
color: #eee;
|
||||
background: #222;
|
||||
border: 1px solid #777;
|
||||
scrollbar-color: #b80 #282828;
|
||||
color: #eee;
|
||||
background: #222;
|
||||
border: 1px solid #777;
|
||||
scrollbar-color: #b80 #282828;
|
||||
}
|
||||
#mtr {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
}
|
||||
#save.force-save {
|
||||
color: #400;
|
||||
background: #f97;
|
||||
border-radius: .15em;
|
||||
color: #400;
|
||||
background: #f97;
|
||||
border-radius: .15em;
|
||||
}
|
||||
html.dark #save.force-save {
|
||||
color: #fca;
|
||||
background: #720;
|
||||
color: #fca;
|
||||
background: #720;
|
||||
}
|
||||
#save.disabled {
|
||||
opacity: .4;
|
||||
opacity: .4;
|
||||
}
|
||||
#helpbox,
|
||||
#toast {
|
||||
background: #f7f7f7;
|
||||
border-radius: .4em;
|
||||
z-index: 9001;
|
||||
background: #f7f7f7;
|
||||
border-radius: .4em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#helpbox {
|
||||
display: none;
|
||||
position: fixed;
|
||||
padding: 2em;
|
||||
top: 4em;
|
||||
overflow-y: auto;
|
||||
box-shadow: 0 .5em 2em #777;
|
||||
height: calc(100% - 12em);
|
||||
left: calc(50% - 15em);
|
||||
right: 0;
|
||||
width: 30em;
|
||||
display: none;
|
||||
position: fixed;
|
||||
padding: 2em;
|
||||
top: 4em;
|
||||
overflow-y: auto;
|
||||
box-shadow: 0 .5em 2em #777;
|
||||
height: calc(100% - 12em);
|
||||
left: calc(50% - 15em);
|
||||
right: 0;
|
||||
width: 30em;
|
||||
}
|
||||
#helpclose {
|
||||
display: block;
|
||||
display: block;
|
||||
}
|
||||
html.dark #helpbox {
|
||||
box-shadow: 0 .5em 2em #444;
|
||||
box-shadow: 0 .5em 2em #444;
|
||||
}
|
||||
html.dark #helpbox,
|
||||
html.dark #toast {
|
||||
background: #222;
|
||||
border: 1px solid #079;
|
||||
border-width: 1px 0;
|
||||
background: #222;
|
||||
border: 1px solid #079;
|
||||
border-width: 1px 0;
|
||||
}
|
||||
#toast {
|
||||
font-weight: bold;
|
||||
text-align: center;
|
||||
padding: .6em 0;
|
||||
position: fixed;
|
||||
z-index: 9001;
|
||||
top: 30%;
|
||||
transition: opacity 0.2s ease-in-out;
|
||||
opacity: 1;
|
||||
font-weight: bold;
|
||||
text-align: center;
|
||||
padding: .6em 0;
|
||||
position: fixed;
|
||||
top: 30%;
|
||||
transition: opacity 0.2s ease-in-out;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
@@ -16,7 +16,7 @@ var dom_sbs = ebi('sbs');
|
||||
var dom_nsbs = ebi('nsbs');
|
||||
var dom_tbox = ebi('toolsbox');
|
||||
var dom_ref = (function () {
|
||||
var d = document.createElement('div');
|
||||
var d = mknod('div');
|
||||
d.setAttribute('id', 'mtr');
|
||||
dom_swrap.appendChild(d);
|
||||
d = ebi('mtr');
|
||||
@@ -71,7 +71,7 @@ var map_src = [];
|
||||
var map_pre = [];
|
||||
function genmap(dom, oldmap) {
|
||||
var find = nlines;
|
||||
while (oldmap && find --> 0) {
|
||||
while (oldmap && find-- > 0) {
|
||||
var tmap = genmapq(dom, '*[data-ln="' + find + '"]');
|
||||
if (!tmap || !tmap.length)
|
||||
continue;
|
||||
@@ -94,7 +94,7 @@ var nlines = 0;
|
||||
var draw_md = (function () {
|
||||
var delay = 1;
|
||||
function draw_md() {
|
||||
var t0 = new Date().getTime();
|
||||
var t0 = Date.now();
|
||||
var src = dom_src.value;
|
||||
convert_markdown(src, dom_pre);
|
||||
|
||||
@@ -110,7 +110,7 @@ var draw_md = (function () {
|
||||
|
||||
cls(ebi('save'), 'disabled', src == server_md);
|
||||
|
||||
var t1 = new Date().getTime();
|
||||
var t1 = Date.now();
|
||||
delay = t1 - t0 > 100 ? 25 : 1;
|
||||
}
|
||||
|
||||
@@ -252,7 +252,7 @@ function Modpoll() {
|
||||
}
|
||||
|
||||
console.log('modpoll...');
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + new Date().getTime();
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.modpoll = this;
|
||||
xhr.open('GET', url, true);
|
||||
@@ -399,7 +399,7 @@ function save_cb() {
|
||||
|
||||
function run_savechk(lastmod, txt, btn, ntry) {
|
||||
// download the saved doc from the server and compare
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + new Date().getTime();
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
@@ -455,7 +455,7 @@ function toast(autoclose, style, width, msg) {
|
||||
ok.parentNode.removeChild(ok);
|
||||
|
||||
style = "width:" + width + "em;left:calc(50% - " + (width / 2) + "em);" + style;
|
||||
ok = document.createElement('div');
|
||||
ok = mknod('div');
|
||||
ok.setAttribute('id', 'toast');
|
||||
ok.setAttribute('style', style);
|
||||
ok.innerHTML = msg;
|
||||
@@ -1049,7 +1049,7 @@ action_stack = (function () {
|
||||
var p1 = from.length,
|
||||
p2 = to.length;
|
||||
|
||||
while (p1 --> 0 && p2 --> 0)
|
||||
while (p1-- > 0 && p2-- > 0)
|
||||
if (from[p1] != to[p2])
|
||||
break;
|
||||
|
||||
@@ -1142,14 +1142,3 @@ action_stack = (function () {
|
||||
_ref: ref
|
||||
}
|
||||
})();
|
||||
|
||||
/*
|
||||
ebi('help').onclick = function () {
|
||||
var c1 = getComputedStyle(dom_src).cssText.split(';');
|
||||
var c2 = getComputedStyle(dom_ref).cssText.split(';');
|
||||
var max = Math.min(c1.length, c2.length);
|
||||
for (var a = 0; a < max; a++)
|
||||
if (c1[a] !== c2[a])
|
||||
console.log(c1[a] + '\n' + c2[a]);
|
||||
}
|
||||
*/
|
||||
|
||||
@@ -8,68 +8,58 @@ html .editor-toolbar>i.separator { border-left: 1px solid #ccc; }
|
||||
html .editor-toolbar.disabled-for-preview>button:not(.no-disable) { opacity: .35 }
|
||||
|
||||
html {
|
||||
line-height: 1.5em;
|
||||
line-height: 1.5em;
|
||||
}
|
||||
html, body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
min-height: 100%;
|
||||
font-family: sans-serif;
|
||||
background: #f7f7f7;
|
||||
color: #333;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
min-height: 100%;
|
||||
font-family: sans-serif;
|
||||
background: #f7f7f7;
|
||||
color: #333;
|
||||
}
|
||||
#mn {
|
||||
font-weight: normal;
|
||||
margin: 1.3em 0 .7em 1em;
|
||||
font-weight: normal;
|
||||
margin: 1.3em 0 .7em 1em;
|
||||
}
|
||||
#mn a {
|
||||
color: #444;
|
||||
margin: 0 0 0 -.2em;
|
||||
padding: 0 0 0 .4em;
|
||||
text-decoration: none;
|
||||
/* ie: */
|
||||
border-bottom: .1em solid #777\9;
|
||||
margin-right: 1em\9;
|
||||
color: #444;
|
||||
margin: 0 0 0 -.2em;
|
||||
padding: 0 0 0 .4em;
|
||||
text-decoration: none;
|
||||
/* ie: */
|
||||
border-bottom: .1em solid #777\9;
|
||||
margin-right: 1em\9;
|
||||
}
|
||||
#mn a:first-child {
|
||||
padding-left: .5em;
|
||||
padding-left: .5em;
|
||||
}
|
||||
#mn a:last-child {
|
||||
padding-right: .5em;
|
||||
padding-right: .5em;
|
||||
}
|
||||
#mn a:not(:last-child):after {
|
||||
content: '';
|
||||
width: 1.05em;
|
||||
height: 1.05em;
|
||||
margin: -.2em .3em -.2em -.4em;
|
||||
display: inline-block;
|
||||
border: 1px solid rgba(0,0,0,0.2);
|
||||
border-width: .2em .2em 0 0;
|
||||
transform: rotate(45deg);
|
||||
content: '';
|
||||
width: 1.05em;
|
||||
height: 1.05em;
|
||||
margin: -.2em .3em -.2em -.4em;
|
||||
display: inline-block;
|
||||
border: 1px solid rgba(0,0,0,0.2);
|
||||
border-width: .2em .2em 0 0;
|
||||
transform: rotate(45deg);
|
||||
}
|
||||
#mn a:hover {
|
||||
color: #000;
|
||||
text-decoration: underline;
|
||||
color: #000;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
html .editor-toolbar>button.disabled {
|
||||
opacity: .35;
|
||||
pointer-events: none;
|
||||
opacity: .35;
|
||||
pointer-events: none;
|
||||
}
|
||||
html .editor-toolbar>button.save.force-save {
|
||||
background: #f97;
|
||||
background: #f97;
|
||||
}
|
||||
|
||||
/*
|
||||
*[data-ln]:before {
|
||||
content: attr(data-ln);
|
||||
font-size: .8em;
|
||||
margin: 0 .4em;
|
||||
color: #f0c;
|
||||
}
|
||||
.cm-header { font-size: .4em !important }
|
||||
*/
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -101,29 +91,29 @@ html .editor-toolbar>button.save.force-save {
|
||||
line-height: 1.1em;
|
||||
}
|
||||
.mdo a {
|
||||
color: #fff;
|
||||
background: #39b;
|
||||
text-decoration: none;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
border-bottom: .07em solid #079;
|
||||
color: #fff;
|
||||
background: #39b;
|
||||
text-decoration: none;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
border-bottom: .07em solid #079;
|
||||
}
|
||||
.mdo h2 {
|
||||
color: #fff;
|
||||
background: #555;
|
||||
margin-top: 2em;
|
||||
border-bottom: .22em solid #999;
|
||||
border-top: none;
|
||||
color: #fff;
|
||||
background: #555;
|
||||
margin-top: 2em;
|
||||
border-bottom: .22em solid #999;
|
||||
border-top: none;
|
||||
}
|
||||
.mdo h1 {
|
||||
color: #fff;
|
||||
background: #444;
|
||||
font-weight: normal;
|
||||
border-top: .4em solid #fb0;
|
||||
border-bottom: .4em solid #777;
|
||||
border-radius: 0 1em 0 1em;
|
||||
margin: 3em 0 1em 0;
|
||||
padding: .5em 0;
|
||||
color: #fff;
|
||||
background: #444;
|
||||
font-weight: normal;
|
||||
border-top: .4em solid #fb0;
|
||||
border-bottom: .4em solid #777;
|
||||
border-radius: 0 1em 0 1em;
|
||||
margin: 3em 0 1em 0;
|
||||
padding: .5em 0;
|
||||
}
|
||||
h1, h2 {
|
||||
line-height: 1.5em;
|
||||
@@ -197,14 +187,14 @@ th {
|
||||
|
||||
/* mde support */
|
||||
.mdo {
|
||||
padding: 1em;
|
||||
background: #f7f7f7;
|
||||
padding: 1em;
|
||||
background: #f7f7f7;
|
||||
}
|
||||
html.dark .mdo {
|
||||
background: #1c1c1c;
|
||||
background: #1c1c1c;
|
||||
}
|
||||
.CodeMirror {
|
||||
background: #f7f7f7;
|
||||
background: #f7f7f7;
|
||||
}
|
||||
|
||||
|
||||
@@ -214,108 +204,108 @@ html.dark .mdo {
|
||||
/* darkmode */
|
||||
html.dark .mdo,
|
||||
html.dark .CodeMirror {
|
||||
border-color: #222;
|
||||
border-color: #222;
|
||||
}
|
||||
html.dark,
|
||||
html.dark body,
|
||||
html.dark .CodeMirror {
|
||||
background: #222;
|
||||
color: #ccc;
|
||||
background: #222;
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark .CodeMirror-cursor {
|
||||
border-color: #fff;
|
||||
border-color: #fff;
|
||||
}
|
||||
html.dark .CodeMirror-selected {
|
||||
box-shadow: 0 0 1px #0cf inset;
|
||||
box-shadow: 0 0 1px #0cf inset;
|
||||
}
|
||||
html.dark .CodeMirror-selected,
|
||||
html.dark .CodeMirror-selectedtext {
|
||||
border-radius: .1em;
|
||||
background: #246;
|
||||
color: #fff;
|
||||
border-radius: .1em;
|
||||
background: #246;
|
||||
color: #fff;
|
||||
}
|
||||
html.dark .mdo a {
|
||||
background: #057;
|
||||
background: #057;
|
||||
}
|
||||
html.dark .mdo h1 a, html.dark .mdo h4 a,
|
||||
html.dark .mdo h2 a, html.dark .mdo h5 a,
|
||||
html.dark .mdo h3 a, html.dark .mdo h6 a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
color: #8c0;
|
||||
background: #1a1a1a;
|
||||
border: .07em solid #333;
|
||||
color: #8c0;
|
||||
background: #1a1a1a;
|
||||
border: .07em solid #333;
|
||||
}
|
||||
html.dark .mdo ul,
|
||||
html.dark .mdo ol {
|
||||
border-color: #444;
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark .mdo>ul,
|
||||
html.dark .mdo>ol {
|
||||
border-color: #555;
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
html.dark h1 {
|
||||
background: #383838;
|
||||
border-top: .4em solid #b80;
|
||||
border-bottom: .4em solid #4c4c4c;
|
||||
background: #383838;
|
||||
border-top: .4em solid #b80;
|
||||
border-bottom: .4em solid #4c4c4c;
|
||||
}
|
||||
html.dark h2 {
|
||||
background: #444;
|
||||
border-bottom: .22em solid #555;
|
||||
background: #444;
|
||||
border-bottom: .22em solid #555;
|
||||
}
|
||||
html.dark td,
|
||||
html.dark th {
|
||||
border-color: #444;
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark blockquote {
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark #mn a {
|
||||
color: #ccc;
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark #mn a:not(:last-child):after {
|
||||
border-color: rgba(255,255,255,0.3);
|
||||
border-color: rgba(255,255,255,0.3);
|
||||
}
|
||||
html.dark .editor-toolbar {
|
||||
border-color: #2c2c2c;
|
||||
background: #1c1c1c;
|
||||
border-color: #2c2c2c;
|
||||
background: #1c1c1c;
|
||||
}
|
||||
html.dark .editor-toolbar>i.separator {
|
||||
border-left: 1px solid #444;
|
||||
border-right: 1px solid #111;
|
||||
border-left: 1px solid #444;
|
||||
border-right: 1px solid #111;
|
||||
}
|
||||
html.dark .editor-toolbar>button {
|
||||
margin-left: -1px; border: 1px solid rgba(255,255,255,0.1);
|
||||
color: #aaa;
|
||||
margin-left: -1px; border: 1px solid rgba(255,255,255,0.1);
|
||||
color: #aaa;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark .editor-toolbar>button:hover {
|
||||
color: #333;
|
||||
color: #333;
|
||||
}
|
||||
html.dark .editor-toolbar>button.active {
|
||||
color: #333;
|
||||
border-color: #ec1;
|
||||
background: #c90;
|
||||
color: #333;
|
||||
border-color: #ec1;
|
||||
background: #c90;
|
||||
}
|
||||
html.dark .editor-toolbar::after,
|
||||
html.dark .editor-toolbar::before {
|
||||
background: none;
|
||||
background: none;
|
||||
}
|
||||
@@ -31,12 +31,12 @@ var md_opt = {
|
||||
|
||||
var lightswitch = (function () {
|
||||
var fun = function () {
|
||||
var dark = !!!document.documentElement.getAttribute("class");
|
||||
var dark = !document.documentElement.getAttribute("class");
|
||||
document.documentElement.setAttribute("class", dark ? "dark" : "");
|
||||
if (window.localStorage)
|
||||
localStorage.setItem('darkmode', dark ? 1 : 0);
|
||||
localStorage.setItem('lightmode', dark ? 0 : 1);
|
||||
};
|
||||
if (window.localStorage && localStorage.getItem('darkmode') == 1)
|
||||
if (window.localStorage && localStorage.getItem('lightmode') != 1)
|
||||
fun();
|
||||
|
||||
return fun;
|
||||
|
||||
@@ -71,7 +71,7 @@ var mde = (function () {
|
||||
})();
|
||||
|
||||
function set_jumpto() {
|
||||
document.querySelector('.editor-preview-side').onclick = jumpto;
|
||||
QS('.editor-preview-side').onclick = jumpto;
|
||||
}
|
||||
|
||||
function jumpto(ev) {
|
||||
@@ -94,7 +94,7 @@ function md_changed(mde, on_srv) {
|
||||
window.md_saved = mde.value();
|
||||
|
||||
var md_now = mde.value();
|
||||
var save_btn = document.querySelector('.editor-toolbar button.save');
|
||||
var save_btn = QS('.editor-toolbar button.save');
|
||||
|
||||
if (md_now == window.md_saved)
|
||||
save_btn.classList.add('disabled');
|
||||
@@ -105,7 +105,7 @@ function md_changed(mde, on_srv) {
|
||||
}
|
||||
|
||||
function save(mde) {
|
||||
var save_btn = document.querySelector('.editor-toolbar button.save');
|
||||
var save_btn = QS('.editor-toolbar button.save');
|
||||
if (save_btn.classList.contains('disabled')) {
|
||||
alert('there is nothing to save');
|
||||
return;
|
||||
@@ -212,7 +212,7 @@ function save_chk() {
|
||||
last_modified = this.lastmod;
|
||||
md_changed(this.mde, true);
|
||||
|
||||
var ok = document.createElement('div');
|
||||
var ok = mknod('div');
|
||||
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
|
||||
ok.innerHTML = 'OK✔️';
|
||||
var parent = ebi('m');
|
||||
|
||||
@@ -3,7 +3,7 @@ html,body,tr,th,td,#files,a {
|
||||
background: none;
|
||||
font-weight: inherit;
|
||||
font-size: inherit;
|
||||
padding: none;
|
||||
padding: 0;
|
||||
border: none;
|
||||
}
|
||||
html {
|
||||
@@ -20,8 +20,8 @@ body {
|
||||
padding-bottom: 5em;
|
||||
}
|
||||
#box {
|
||||
padding: .5em 1em;
|
||||
background: #2c2c2c;
|
||||
padding: .5em 1em;
|
||||
background: #2c2c2c;
|
||||
}
|
||||
pre {
|
||||
font-family: monospace, monospace;
|
||||
|
||||
@@ -26,6 +26,13 @@ a {
|
||||
border-radius: .2em;
|
||||
padding: .2em .8em;
|
||||
}
|
||||
td, th {
|
||||
padding: .3em .6em;
|
||||
text-align: left;
|
||||
}
|
||||
.btns {
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
|
||||
html.dark,
|
||||
|
||||
@@ -13,19 +13,40 @@
|
||||
<div id="wrap">
|
||||
<p>hello {{ this.uname }}</p>
|
||||
|
||||
{%- if avol %}
|
||||
<h1>admin panel:</h1>
|
||||
<table>
|
||||
<thead><tr><th>vol</th><th>action</th><th>status</th></tr></thead>
|
||||
<tbody>
|
||||
{% for mp in avol %}
|
||||
{%- if mp in vstate and vstate[mp] %}
|
||||
<tr><td><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a href="{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
|
||||
{%- endif %}
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
<div class="btns">
|
||||
<a href="{{ avol[0] }}?stack">dump stack</a>
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
{%- if rvol %}
|
||||
<h1>you can browse these:</h1>
|
||||
<ul>
|
||||
{% for mp in rvol %}
|
||||
<li><a href="/{{ mp }}{{ url_suf }}">/{{ mp }}</a></li>
|
||||
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{%- endif %}
|
||||
|
||||
{%- if wvol %}
|
||||
<h1>you can upload to:</h1>
|
||||
<ul>
|
||||
{% for mp in wvol %}
|
||||
<li><a href="/{{ mp }}{{ url_suf }}">/{{ mp }}</a></li>
|
||||
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{%- endif %}
|
||||
|
||||
<h1>login for more:</h1>
|
||||
<ul>
|
||||
@@ -38,7 +59,7 @@
|
||||
</div>
|
||||
<script>
|
||||
|
||||
if (window.localStorage && localStorage.getItem('darkmode') == 1)
|
||||
if (window.localStorage && localStorage.getItem('lightmode') != 1)
|
||||
document.documentElement.setAttribute("class", "dark");
|
||||
|
||||
</script>
|
||||
|
||||
File diff suppressed because it is too large
@@ -19,6 +19,11 @@
|
||||
color: #f87;
|
||||
padding: .5em;
|
||||
}
|
||||
#u2err.msg {
|
||||
color: #999;
|
||||
padding: .5em;
|
||||
font-size: .9em;
|
||||
}
|
||||
#u2btn {
|
||||
color: #eee;
|
||||
background: #555;
|
||||
@@ -47,6 +52,11 @@
|
||||
margin: -1.5em 0;
|
||||
padding: .8em 0;
|
||||
width: 100%;
|
||||
max-width: 12em;
|
||||
display: inline-block;
|
||||
}
|
||||
#u2conf #u2btn_cw {
|
||||
text-align: right;
|
||||
}
|
||||
#u2notbtn {
|
||||
display: none;
|
||||
@@ -72,6 +82,7 @@
|
||||
}
|
||||
#u2tab td:nth-child(2) {
|
||||
width: 5em;
|
||||
white-space: nowrap;
|
||||
}
|
||||
#u2tab td:nth-child(3) {
|
||||
width: 40%;
|
||||
@@ -80,9 +91,46 @@
|
||||
font-family: sans-serif;
|
||||
width: auto;
|
||||
}
|
||||
#u2tab tr+tr:hover td {
|
||||
#u2tab tbody tr:hover td {
|
||||
background: #222;
|
||||
}
|
||||
#u2cards {
|
||||
padding: 1em 0 .3em 1em;
|
||||
margin: 1.5em auto -2.5em auto;
|
||||
white-space: nowrap;
|
||||
text-align: center;
|
||||
overflow: hidden;
|
||||
}
|
||||
#u2cards.w {
|
||||
width: 45em;
|
||||
text-align: left;
|
||||
}
|
||||
#u2cards a {
|
||||
padding: .2em 1em;
|
||||
border: 1px solid #777;
|
||||
border-width: 0 0 1px 0;
|
||||
background: linear-gradient(to bottom, #333, #222);
|
||||
}
|
||||
#u2cards a:first-child {
|
||||
border-radius: .4em 0 0 0;
|
||||
}
|
||||
#u2cards a:last-child {
|
||||
border-radius: 0 .4em 0 0;
|
||||
}
|
||||
#u2cards a.act {
|
||||
padding-bottom: .5em;
|
||||
border-width: 1px 1px .1em 1px;
|
||||
border-radius: .3em .3em 0 0;
|
||||
margin-left: -1px;
|
||||
background: linear-gradient(to bottom, #464, #333 80%);
|
||||
box-shadow: 0 -.17em .67em #280;
|
||||
border-color: #7c5 #583 #333 #583;
|
||||
position: relative;
|
||||
color: #fd7;
|
||||
}
|
||||
#u2cards span {
|
||||
color: #fff;
|
||||
}
|
||||
#u2conf {
|
||||
margin: 1em auto;
|
||||
width: 30em;
|
||||
@@ -99,12 +147,16 @@
|
||||
outline: none;
|
||||
}
|
||||
#u2conf .txtbox {
|
||||
width: 4em;
|
||||
width: 3em;
|
||||
color: #fff;
|
||||
background: #444;
|
||||
border: 1px solid #777;
|
||||
font-size: 1.2em;
|
||||
padding: .15em 0;
|
||||
height: 1.05em;
|
||||
}
|
||||
#u2conf .txtbox.err {
|
||||
background: #922;
|
||||
}
|
||||
#u2conf a {
|
||||
color: #fff;
|
||||
@@ -113,13 +165,12 @@
|
||||
border-radius: .1em;
|
||||
font-size: 1.5em;
|
||||
padding: .1em 0;
|
||||
margin: 0 -.25em;
|
||||
margin: 0 -1px;
|
||||
width: 1.5em;
|
||||
height: 1em;
|
||||
display: inline-block;
|
||||
position: relative;
|
||||
line-height: 1em;
|
||||
bottom: -.08em;
|
||||
bottom: -0.08em;
|
||||
}
|
||||
#u2conf input+a {
|
||||
background: #d80;
|
||||
@@ -130,7 +181,6 @@
|
||||
height: 1em;
|
||||
padding: .4em 0;
|
||||
display: block;
|
||||
user-select: none;
|
||||
border-radius: .25em;
|
||||
}
|
||||
#u2conf input[type="checkbox"] {
|
||||
@@ -170,12 +220,13 @@
|
||||
text-align: center;
|
||||
overflow: hidden;
|
||||
margin: 0 -2em;
|
||||
height: 0;
|
||||
padding: 0 1em;
|
||||
height: 0;
|
||||
opacity: .1;
|
||||
transition: all 0.14s ease-in-out;
|
||||
border-radius: .4em;
|
||||
transition: all 0.14s ease-in-out;
|
||||
box-shadow: 0 .2em .5em #222;
|
||||
border-radius: .4em;
|
||||
z-index: 1;
|
||||
}
|
||||
#u2cdesc.show {
|
||||
padding: 1em;
|
||||
@@ -187,30 +238,16 @@
|
||||
color: #fff;
|
||||
font-style: italic;
|
||||
}
|
||||
#u2foot span {
|
||||
color: #999;
|
||||
font-size: .9em;
|
||||
}
|
||||
#u2footfoot {
|
||||
margin-bottom: -1em;
|
||||
}
|
||||
.prog {
|
||||
font-family: monospace;
|
||||
}
|
||||
.prog>div {
|
||||
display: inline-block;
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
height: 1.1em;
|
||||
margin-bottom: -.15em;
|
||||
box-shadow: -1px -1px 0 inset rgba(255,255,255,0.1);
|
||||
}
|
||||
.prog>div>div {
|
||||
width: 0%;
|
||||
position: absolute;
|
||||
left: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
background: #0a0;
|
||||
}
|
||||
#u2tab a>span {
|
||||
font-weight: bold;
|
||||
font-style: italic;
|
||||
@@ -221,3 +258,44 @@
|
||||
float: right;
|
||||
margin-bottom: -.3em;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
html.light #u2btn {
|
||||
box-shadow: .4em .4em 0 #ccc;
|
||||
}
|
||||
html.light #u2cards span {
|
||||
color: #000;
|
||||
}
|
||||
html.light #u2cards a {
|
||||
background: linear-gradient(to bottom, #eee, #fff);
|
||||
}
|
||||
html.light #u2cards a.act {
|
||||
color: #037;
|
||||
background: inherit;
|
||||
box-shadow: 0 -.17em .67em #0ad;
|
||||
border-color: #09c #05a #eee #05a;
|
||||
}
|
||||
html.light #u2conf .txtbox {
|
||||
background: #fff;
|
||||
color: #444;
|
||||
}
|
||||
html.light #u2conf .txtbox.err {
|
||||
background: #f96;
|
||||
color: #300;
|
||||
}
|
||||
html.light #u2cdesc {
|
||||
background: #fff;
|
||||
border: none;
|
||||
}
|
||||
html.light #op_up2k.srch #u2btn {
|
||||
border-color: #a80;
|
||||
}
|
||||
html.light #u2foot {
|
||||
color: #000;
|
||||
}
|
||||
html.light #u2tab tbody tr:hover td {
|
||||
background: #fff;
|
||||
}
|
||||
|
||||
@@ -36,7 +36,7 @@
|
||||
|
||||
<table id="u2conf">
|
||||
<tr>
|
||||
<td>parallel uploads</td>
|
||||
<td><br />parallel uploads:</td>
|
||||
<td rowspan="2">
|
||||
<input type="checkbox" id="multitask" />
|
||||
<label for="multitask" alt="continue hashing other files while uploading">🏃</label>
|
||||
@@ -59,9 +59,9 @@
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
<a href="#" id="nthread_sub">–</a>
|
||||
<input class="txtbox" id="nthread" value="2" />
|
||||
<a href="#" id="nthread_add">+</a>
|
||||
<a href="#" id="nthread_sub">–</a><input
|
||||
class="txtbox" id="nthread" value="2"/><a
|
||||
href="#" id="nthread_add">+</a><br />
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
@@ -79,14 +79,25 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="u2cards">
|
||||
<a href="#" act="ok">ok <span>0</span></a><a
|
||||
href="#" act="ng">ng <span>0</span></a><a
|
||||
href="#" act="done">done <span>0</span></a><a
|
||||
href="#" act="bz" class="act">busy <span>0</span></a><a
|
||||
href="#" act="q">que <span>0</span></a>
|
||||
</div>
|
||||
|
||||
<table id="u2tab">
|
||||
<tr>
|
||||
<td>filename</td>
|
||||
<td>status</td>
|
||||
<td>progress<a href="#" id="u2cleanup">cleanup</a></td>
|
||||
</tr>
|
||||
<thead>
|
||||
<tr>
|
||||
<td>filename</td>
|
||||
<td>status</td>
|
||||
<td>progress<a href="#" id="u2cleanup">cleanup</a></td>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody></tbody>
|
||||
</table>
|
||||
|
||||
<p id="u2foot"></p>
|
||||
<p id="u2footfoot">( if you don't need lastmod timestamps, resumable uploads or progress bars just use the <a href="#" id="u2nope">basic uploader</a>)</p>
|
||||
<p id="u2footfoot" data-perm="write">( you can use the <a href="#" id="u2nope">basic uploader</a> if you don't need lastmod timestamps, resumable uploads, or progress bars )</p>
|
||||
</div>
|
||||
|
||||
@@ -6,7 +6,8 @@ if (!window['console'])
|
||||
};
|
||||
|
||||
|
||||
var clickev = window.Touch ? 'touchstart' : 'click';
|
||||
var clickev = window.Touch ? 'touchstart' : 'click',
|
||||
ANDROID = /(android)/i.test(navigator.userAgent);
|
||||
|
||||
|
||||
// error handler for mobile devices
|
||||
@@ -49,9 +50,11 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
}
|
||||
|
||||
|
||||
function ebi(id) {
|
||||
return document.getElementById(id);
|
||||
}
|
||||
var ebi = document.getElementById.bind(document),
|
||||
QS = document.querySelector.bind(document),
|
||||
QSA = document.querySelectorAll.bind(document),
|
||||
mknod = document.createElement.bind(document);
|
||||
|
||||
|
||||
function ev(e) {
|
||||
e = e || window.event;
|
||||
@@ -89,7 +92,7 @@ if (!String.startsWith) {
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var script = document.createElement('script');
|
||||
var script = mknod('script');
|
||||
script.type = 'text/javascript';
|
||||
script.src = url;
|
||||
|
||||
@@ -132,7 +135,7 @@ function clmod(obj, cls, add) {
|
||||
|
||||
|
||||
function sortfiles(nodes) {
|
||||
var sopts = jread('fsort', [["lead", -1, ""], ["href", 1, ""]]);
|
||||
var sopts = jread('fsort', [["href", 1, ""]]);
|
||||
|
||||
try {
|
||||
var is_srch = false;
|
||||
@@ -149,6 +152,9 @@ function sortfiles(nodes) {
|
||||
if (!name)
|
||||
continue;
|
||||
|
||||
if (name == 'ts')
|
||||
typ = 'int';
|
||||
|
||||
if (name.indexOf('tags/') === 0) {
|
||||
name = name.slice(5);
|
||||
for (var b = 0, bb = nodes.length; b < bb; b++)
|
||||
@@ -160,8 +166,12 @@ function sortfiles(nodes) {
|
||||
|
||||
if ((v + '').indexOf('<a ') === 0)
|
||||
v = v.split('>')[1];
|
||||
else if (name == "href" && v)
|
||||
else if (name == "href" && v) {
|
||||
if (v.slice(-1) == '/')
|
||||
v = '\t' + v;
|
||||
|
||||
v = uricom_dec(v)[0]
|
||||
}
|
||||
|
||||
nodes[b]._sv = v;
|
||||
}
|
||||
@@ -195,6 +205,8 @@ function sortfiles(nodes) {
|
||||
}
|
||||
catch (ex) {
|
||||
console.log("failed to apply sort config: " + ex);
|
||||
console.log("resetting fsort " + sread('fsort'))
|
||||
localStorage.removeItem('fsort');
|
||||
}
|
||||
return nodes;
|
||||
}
|
||||
@@ -274,7 +286,7 @@ function makeSortable(table, cb) {
|
||||
|
||||
|
||||
(function () {
|
||||
var ops = document.querySelectorAll('#ops>a');
|
||||
var ops = QSA('#ops>a');
|
||||
for (var a = 0; a < ops.length; a++) {
|
||||
ops[a].onclick = opclick;
|
||||
}
|
||||
@@ -289,25 +301,25 @@ function opclick(e) {
|
||||
|
||||
swrite('opmode', dest || null);
|
||||
|
||||
var input = document.querySelector('.opview.act input:not([type="hidden"])')
|
||||
var input = QS('.opview.act input:not([type="hidden"])')
|
||||
if (input)
|
||||
input.focus();
|
||||
}
|
||||
|
||||
|
||||
function goto(dest) {
|
||||
var obj = document.querySelectorAll('.opview.act');
|
||||
var obj = QSA('.opview.act');
|
||||
for (var a = obj.length - 1; a >= 0; a--)
|
||||
clmod(obj[a], 'act');
|
||||
|
||||
obj = document.querySelectorAll('#ops>a');
|
||||
obj = QSA('#ops>a');
|
||||
for (var a = obj.length - 1; a >= 0; a--)
|
||||
clmod(obj[a], 'act');
|
||||
|
||||
if (dest) {
|
||||
var ui = ebi('op_' + dest);
|
||||
clmod(ui, 'act', true);
|
||||
document.querySelector('#ops>a[data-dest=' + dest + ']').className += " act";
|
||||
QS('#ops>a[data-dest=' + dest + ']').className += " act";
|
||||
|
||||
var fn = window['goto_' + dest];
|
||||
if (fn)
|
||||
@@ -346,12 +358,16 @@ function linksplit(rp) {
|
||||
link = rp.slice(0, ofs + 1);
|
||||
rp = rp.slice(ofs + 1);
|
||||
}
|
||||
var vlink = link;
|
||||
if (link.indexOf('/') !== -1)
|
||||
vlink = link.slice(0, -1) + '<span>/</span>';
|
||||
var vlink = esc(link),
|
||||
elink = uricom_enc(link);
|
||||
|
||||
ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
|
||||
apath += link;
|
||||
if (link.indexOf('/') !== -1) {
|
||||
vlink = vlink.slice(0, -1) + '<span>/</span>';
|
||||
elink = elink.slice(0, -3) + '/';
|
||||
}
|
||||
|
||||
ret.push('<a href="' + apath + elink + '">' + vlink + '</a>');
|
||||
apath += elink;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
@@ -453,11 +469,15 @@ function jwrite(key, val) {
|
||||
}
|
||||
|
||||
function icfg_get(name, defval) {
|
||||
return parseInt(fcfg_get(name, defval));
|
||||
}
|
||||
|
||||
function fcfg_get(name, defval) {
|
||||
var o = ebi(name);
|
||||
|
||||
var val = parseInt(sread(name));
|
||||
var val = parseFloat(sread(name));
|
||||
if (isNaN(val))
|
||||
return parseInt(o ? o.value : defval);
|
||||
return parseFloat(o ? o.value : defval);
|
||||
|
||||
if (o)
|
||||
o.value = val;
|
||||
|
||||
@@ -32,9 +32,13 @@ r
|
||||
|
||||
# and a folder where anyone can upload
|
||||
# but nobody can see the contents
|
||||
# and set the e2d flag to enable the uploads database
|
||||
# and set the nodupe flag to reject duplicate uploads
|
||||
/home/ed/inc
|
||||
/dump
|
||||
w
|
||||
c e2d
|
||||
c nodupe
|
||||
|
||||
# this entire config file can be replaced with these arguments:
|
||||
# -u ed:123 -u k:k -v .::r:aed -v priv:priv:rk:aed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w
|
||||
|
||||
32
docs/minimal-up2k.html
Normal file
@@ -0,0 +1,32 @@
<!--
save this as .epilogue.html inside a write-only folder to declutter the UI, makes it look like
https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png
-->

<style>

/* make the up2k ui REALLY minimal by hiding a bunch of stuff: */

#ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */

#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */

#u2cards /* and the upload progress tabs */

{display: none !important} /* do it! */



/* add some margins because now it's weird */
.opview {margin-top: 2.5em}
#op_up2k {margin-top: 3em}

/* and embiggen the upload button */
#u2conf #u2btn, #u2btn {padding:1.5em 0}

/* adjust the button area a bit */
#u2conf.has_btn {width: 35em !important; margin: 5em auto}

</style>

<a href="#" onclick="this.parentNode.innerHTML='';">show advanced options</a>
@@ -80,6 +80,13 @@ command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (ti
|
||||
var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n"));
|
||||
|
||||
|
||||
##
|
||||
## bash oneliners
|
||||
|
||||
# get the size and video-id of all youtube vids in folder, assuming filename ends with -id.ext, and create a copyparty search query
|
||||
find -maxdepth 1 -printf '%s %p\n' | sort -n | awk '!/-([0-9a-zA-Z_-]{11})\.(mkv|mp4|webm)$/{next} {sub(/\.[^\.]+$/,"");n=length($0);v=substr($0,n-10);print $1, v}' | tee /dev/stderr | awk 'BEGIN {p="("} {printf("%s name like *-%s.* ",p,$2);p="or"} END {print ")\n"}' | cat >&2
|
||||
|
||||
|
||||
##
|
||||
## sqlite3 stuff
|
||||
|
||||
@@ -171,7 +178,7 @@ Range: bytes=26- Content-Range: bytes */26
|
||||
|
||||
var tsh = [];
|
||||
function convert_markdown(md_text, dest_dom) {
|
||||
tsh.push(new Date().getTime());
|
||||
tsh.push(Date.now());
|
||||
while (tsh.length > 10)
|
||||
tsh.shift();
|
||||
if (tsh.length > 1) {
|
||||
|
||||
82
docs/nuitka.txt
Normal file
@@ -0,0 +1,82 @@
|
||||
# recipe for building an exe with nuitka (extreme jank edition)
|
||||
#
|
||||
# NOTE: win7 and win10 builds both work on win10 but
|
||||
# on win7 they immediately c0000005 in kernelbase.dll
|
||||
#
|
||||
# first install python-3.6.8-amd64.exe
|
||||
# [x] add to path
|
||||
#
|
||||
# copypaste the rest of this file into cmd
|
||||
|
||||
rem from pypi
|
||||
cd \users\ed\downloads
|
||||
python -m pip install --user Nuitka-0.6.14.7.tar.gz
|
||||
|
||||
rem https://github.com/brechtsanders/winlibs_mingw/releases/download/10.2.0-11.0.0-8.0.0-r5/winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\10.2.0-11.0.0-8.0.0-r5\
|
||||
copy c:\users\ed\downloads\winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\10.2.0-11.0.0-8.0.0-r5\winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip
|
||||
|
||||
rem https://github.com/ccache/ccache/releases/download/v3.7.12/ccache-3.7.12-windows-32.zip
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\v3.7.12\
|
||||
copy c:\users\ed\downloads\ccache-3.7.12-windows-32.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\v3.7.12\ccache-3.7.12-windows-32.zip
|
||||
|
||||
rem https://dependencywalker.com/depends22_x64.zip
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\x86_64\
|
||||
copy c:\users\ed\downloads\depends22_x64.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\x86_64\depends22_x64.zip
|
||||
|
||||
cd \
|
||||
rd /s /q %appdata%\..\local\temp\pe-copyparty
|
||||
cd \users\ed\downloads
|
||||
python copyparty-sfx.py -h
|
||||
cd %appdata%\..\local\temp\pe-copyparty\copyparty
|
||||
|
||||
python
|
||||
import os, re
|
||||
os.rename('../dep-j2/jinja2', '../jinja2')
|
||||
os.rename('../dep-j2/markupsafe', '../markupsafe')
|
||||
|
||||
print("# nuitka dies if .__init__.stuff is imported")
|
||||
with open('__init__.py','r',encoding='utf-8') as f:
|
||||
t1 = f.read()
|
||||
|
||||
with open('util.py','r',encoding='utf-8') as f:
|
||||
t2 = f.read().split('\n')[3:]
|
||||
|
||||
t2 = [x for x in t2 if 'from .__init__' not in x]
|
||||
t = t1 + '\n'.join(t2)
|
||||
with open('__init__.py','w',encoding='utf-8') as f:
|
||||
f.write('\n')
|
||||
|
||||
with open('util.py','w',encoding='utf-8') as f:
|
||||
f.write(t)
|
||||
|
||||
print("# local-imports fail, prefix module names")
|
||||
ptn = re.compile(r'^( *from )(\.[^ ]+ import .*)')
|
||||
for d, _, fs in os.walk('.'):
|
||||
for f in fs:
|
||||
fp = os.path.join(d, f)
|
||||
if not fp.endswith('.py'):
|
||||
continue
|
||||
t = ''
|
||||
with open(fp,'r',encoding='utf-8') as f:
|
||||
for ln in [x.rstrip('\r\n') for x in f]:
|
||||
m = ptn.match(ln)
|
||||
if not m:
|
||||
t += ln + '\n'
|
||||
continue
|
||||
p1, p2 = m.groups()
|
||||
t += "{}copyparty{}\n".format(p1, p2).replace("__init__", "util")
|
||||
with open(fp,'w',encoding='utf-8') as f:
|
||||
f.write(t)
|
||||
|
||||
exit()
|
||||
|
||||
cd ..
|
||||
|
||||
rd /s /q bout & python -m nuitka --standalone --onefile --windows-onefile-tempdir --python-flag=no_site --assume-yes-for-downloads --include-data-dir=copyparty\web=copyparty\web --include-data-dir=copyparty\res=copyparty\res --run --output-dir=bout --mingw64 --include-package=markupsafe --include-package=jinja2 copyparty
|
||||
@@ -9,6 +9,12 @@ ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
|
||||
ver_zopfli=1.0.3
|
||||
|
||||
|
||||
# TODO
|
||||
# sha512.hw.js https://github.com/Daninet/hash-wasm
|
||||
# sha512.kc.js https://github.com/chm-diederichs/sha3-wasm
|
||||
# awk '/HMAC state/{o=1} /var HEAP/{o=0} /function hmac_reset/{o=1} /return \{/{o=0} /var __extends =/{o=1} /var Hash =/{o=0} /hmac_|pbkdf2_/{next} o{next} {gsub(/IllegalStateError/,"Exception")} {sub(/^ +/,"");sub(/^\/\/ .*/,"");sub(/;$/," ;")} 1' <sha512.ac.js.orig >sha512.ac.js; for fn in sha512.ac.js.orig sha512.ac.js; do wc -c <$fn; wc -c <$fn.gz ; for n in {1..9}; do printf '%8d %d bz\n' $(bzip2 -c$n <$fn | wc -c) $n; done; done
|
||||
|
||||
|
||||
# download;
|
||||
# the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
|
||||
RUN mkdir -p /z/dist/no-pk \
|
||||
|
||||
12
scripts/install-githooks.sh
Executable file
@@ -0,0 +1,12 @@
#!/bin/bash
set -ex

[ -e setup.py ] || ..
[ -e setup.py ] || {
echo u wot
exit 1
}

cd .git/hooks
rm -f pre-commit
ln -s ../../scripts/run-tests.sh pre-commit
@@ -117,7 +117,7 @@ cd sfx
|
||||
ver=
|
||||
git describe --tags >/dev/null 2>/dev/null && {
|
||||
git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
|
||||
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//; s/-g?/./g')";
|
||||
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//')";
|
||||
t_ver=
|
||||
|
||||
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+$' && {
|
||||
@@ -163,7 +163,7 @@ find .. -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
|
||||
find .. -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
|
||||
|
||||
echo use smol web deps
|
||||
rm -f copyparty/web/deps/*.full.* copyparty/web/{Makefile,splash.js}
|
||||
rm -f copyparty/web/deps/*.full.* copyparty/web/Makefile
|
||||
|
||||
# it's fine dw
|
||||
grep -lE '\.full\.(js|css)' copyparty/web/* |
|
||||
@@ -199,12 +199,19 @@ find | grep -E '\.(js|css|html)$' | while IFS= read -r f; do
|
||||
tmv "$f"
|
||||
done
|
||||
|
||||
echo gen tarlist
|
||||
for d in copyparty dep-j2; do find $d -type f; done |
|
||||
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
|
||||
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
|
||||
|
||||
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1) >list || true
|
||||
|
||||
echo creating tar
|
||||
args=(--owner=1000 --group=1000)
|
||||
[ "$OSTYPE" = msys ] &&
|
||||
args=()
|
||||
|
||||
tar -cf tar "${args[@]}" --numeric-owner copyparty dep-j2
|
||||
tar -cf tar "${args[@]}" --numeric-owner -T list
|
||||
|
||||
pc=bzip2
|
||||
pe=bz2
|
||||
|
||||
34
scripts/profile.py
Normal file
@@ -0,0 +1,34 @@
#!/usr/bin/env python3

import sys

sys.path.insert(0, ".")
cmd = sys.argv[1]

if cmd == "cpp":
    from copyparty.__main__ import main

    argv = ["__main__", "-v", "srv::r", "-v", "../../yt:yt:r"]
    main(argv=argv)

elif cmd == "test":
    from unittest import main

    argv = ["__main__", "discover", "-s", "tests"]
    main(module=None, argv=argv)

else:
    raise Exception()

# import dis; print(dis.dis(main))


# macos:
# option1) python3.9 -m pip install --user -U vmprof==0.4.9
# option2) python3.9 -m pip install --user -U https://github.com/vmprof/vmprof-python/archive/refs/heads/master.zip
#
# python -m vmprof -o prof --lines ./scripts/profile.py test

# linux: ~/.local/bin/vmprofshow prof tree | grep -vF '[1m 0.'
# macos: ~/Library/Python/3.9/bin/vmprofshow prof tree | grep -vF '[1m 0.'
# win: %appdata%\..\Roaming\Python\Python39\Scripts\vmprofshow.exe prof tree
12
scripts/run-tests.sh
Executable file
@@ -0,0 +1,12 @@
#!/bin/bash
set -ex

pids=()
for py in python{2,3}; do
$py -m unittest discover -s tests >/dev/null &
pids+=($!)
done

for pid in ${pids[@]}; do
wait $pid
done
161
scripts/sfx.py
@@ -2,7 +2,8 @@
|
||||
# coding: latin-1
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os, sys, time, shutil, threading, tarfile, hashlib, platform, tempfile, traceback
|
||||
import re, os, sys, time, shutil, signal, threading, tarfile, hashlib, platform, tempfile, traceback
|
||||
import subprocess as sp
|
||||
|
||||
"""
|
||||
run me with any version of python, i will unpack and run copyparty
|
||||
@@ -26,22 +27,21 @@ CKSUM = None
|
||||
STAMP = None
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
WINDOWS = sys.platform == "win32"
|
||||
WINDOWS = sys.platform in ["win32", "msys"]
|
||||
sys.dont_write_bytecode = True
|
||||
me = os.path.abspath(os.path.realpath(__file__))
|
||||
cpp = None
|
||||
|
||||
|
||||
def eprint(*args, **kwargs):
|
||||
kwargs["file"] = sys.stderr
|
||||
print(*args, **kwargs)
|
||||
def eprint(*a, **ka):
|
||||
ka["file"] = sys.stderr
|
||||
print(*a, **ka)
|
||||
|
||||
|
||||
def msg(*args, **kwargs):
|
||||
if args:
|
||||
args = ["[SFX]", args[0]] + list(args[1:])
|
||||
def msg(*a, **ka):
|
||||
if a:
|
||||
a = ["[SFX]", a[0]] + list(a[1:])
|
||||
|
||||
eprint(*args, **kwargs)
|
||||
eprint(*a, **ka)
|
||||
|
||||
|
||||
# skip 1
|
||||
@@ -156,6 +156,9 @@ def encode(data, size, cksum, ver, ts):
|
||||
skip = True
|
||||
continue
|
||||
|
||||
if ln.strip().startswith("# fmt: "):
|
||||
continue
|
||||
|
||||
unpk += ln + "\n"
|
||||
|
||||
for k, v in [
|
||||
@@ -209,11 +212,11 @@ def yieldfile(fn):
|
||||
|
||||
|
||||
def hashfile(fn):
|
||||
hasher = hashlib.md5()
|
||||
h = hashlib.md5()
|
||||
for block in yieldfile(fn):
|
||||
hasher.update(block)
|
||||
h.update(block)
|
||||
|
||||
return hasher.hexdigest()
|
||||
return h.hexdigest()
|
||||
|
||||
|
||||
def unpack():
|
||||
@@ -222,9 +225,10 @@ def unpack():
|
||||
tag = "v" + str(STAMP)
|
||||
withpid = "{}.{}".format(name, os.getpid())
|
||||
top = tempfile.gettempdir()
|
||||
final = os.path.join(top, name)
|
||||
mine = os.path.join(top, withpid)
|
||||
tar = os.path.join(mine, "tar")
|
||||
opj = os.path.join
|
||||
final = opj(top, name)
|
||||
mine = opj(top, withpid)
|
||||
tar = opj(mine, "tar")
|
||||
|
||||
try:
|
||||
if tag in os.listdir(final):
|
||||
@@ -233,28 +237,24 @@ def unpack():
|
||||
except:
|
||||
pass
|
||||
|
||||
nwrite = 0
|
||||
sz = 0
|
||||
os.mkdir(mine)
|
||||
with open(tar, "wb") as f:
|
||||
for buf in get_payload():
|
||||
nwrite += len(buf)
|
||||
sz += len(buf)
|
||||
f.write(buf)
|
||||
|
||||
if nwrite != SIZE:
|
||||
t = "\n\n bad file:\n expected {} bytes, got {}\n".format(SIZE, nwrite)
|
||||
raise Exception(t)
|
||||
|
||||
cksum = hashfile(tar)
|
||||
if cksum != CKSUM:
|
||||
t = "\n\n bad file:\n {} expected,\n {} obtained\n".format(CKSUM, cksum)
|
||||
raise Exception(t)
|
||||
ck = hashfile(tar)
|
||||
if ck != CKSUM:
|
||||
t = "\n\nexpected {} ({} byte)\nobtained {} ({} byte)\nsfx corrupt"
|
||||
raise Exception(t.format(CKSUM, SIZE, ck, sz))
|
||||
|
||||
with tarfile.open(tar, "r:bz2") as tf:
|
||||
tf.extractall(mine)
|
||||
|
||||
os.remove(tar)
|
||||
|
||||
with open(os.path.join(mine, tag), "wb") as f:
|
||||
with open(opj(mine, tag), "wb") as f:
|
||||
f.write(b"h\n")
|
||||
|
||||
try:
|
||||
@@ -272,25 +272,25 @@ def unpack():
|
||||
except:
|
||||
pass
|
||||
|
||||
for fn in u8(os.listdir(top)):
|
||||
if fn.startswith(name) and fn != withpid:
|
||||
try:
|
||||
old = opj(top, fn)
|
||||
if time.time() - os.path.getmtime(old) > 86400:
|
||||
shutil.rmtree(old)
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
os.symlink(mine, final)
|
||||
except:
|
||||
try:
|
||||
os.rename(mine, final)
|
||||
return final
|
||||
except:
|
||||
msg("reloc fail,", mine)
|
||||
return mine
|
||||
|
||||
for fn in u8(os.listdir(top)):
|
||||
if fn.startswith(name) and fn not in [name, withpid]:
|
||||
try:
|
||||
old = os.path.join(top, fn)
|
||||
if time.time() - os.path.getmtime(old) > 10:
|
||||
shutil.rmtree(old)
|
||||
except:
|
||||
pass
|
||||
|
||||
return final
|
||||
return mine
|
||||
|
||||
|
||||
def get_payload():
|
||||
@@ -307,37 +307,33 @@ def get_payload():
|
||||
if ofs < 0:
|
||||
raise Exception("could not find archive marker")
|
||||
|
||||
# start reading from the final b"\n"
|
||||
# start at final b"\n"
|
||||
fpos = ofs + len(ptn) - 3
|
||||
# msg("tar found at", fpos)
|
||||
f.seek(fpos)
|
||||
dpos = 0
|
||||
leftovers = b""
|
||||
rem = b""
|
||||
while True:
|
||||
rbuf = f.read(1024 * 32)
|
||||
if rbuf:
|
||||
buf = leftovers + rbuf
|
||||
buf = rem + rbuf
|
||||
ofs = buf.rfind(b"\n")
|
||||
if len(buf) <= 4:
|
||||
leftovers = buf
|
||||
rem = buf
|
||||
continue
|
||||
|
||||
if ofs >= len(buf) - 4:
|
||||
leftovers = buf[ofs:]
|
||||
rem = buf[ofs:]
|
||||
buf = buf[:ofs]
|
||||
else:
|
||||
leftovers = b"\n# "
|
||||
rem = b"\n# "
|
||||
else:
|
||||
buf = leftovers
|
||||
buf = rem
|
||||
|
||||
fpos += len(buf) + 1
|
||||
buf = (
|
||||
buf.replace(b"\n# ", b"")
|
||||
.replace(b"\n#r", b"\r")
|
||||
.replace(b"\n#n", b"\n")
|
||||
)
|
||||
dpos += len(buf) - 1
|
||||
for a, b in [[b"\n# ", b""], [b"\n#r", b"\r"], [b"\n#n", b"\n"]]:
|
||||
buf = buf.replace(a, b)
|
||||
|
||||
dpos += len(buf) - 1
|
||||
yield buf
|
||||
|
||||
if not rbuf:
|
||||
@@ -361,7 +357,7 @@ def utime(top):
|
||||
|
||||
def confirm(rv):
|
||||
msg()
|
||||
msg(traceback.format_exc())
|
||||
msg("retcode", rv if rv else traceback.format_exc())
|
||||
msg("*** hit enter to exit ***")
|
||||
try:
|
||||
raw_input() if PY2 else input()
|
||||
@@ -371,10 +367,8 @@ def confirm(rv):
|
||||
sys.exit(rv)
|
||||
|
||||
|
||||
def run(tmp, j2ver):
|
||||
global cpp
|
||||
|
||||
msg("jinja2:", j2ver or "bundled")
|
||||
def run(tmp, j2):
|
||||
msg("jinja2:", j2 or "bundled")
|
||||
msg("sfxdir:", tmp)
|
||||
msg()
|
||||
|
||||
@@ -384,7 +378,6 @@ def run(tmp, j2ver):
|
||||
|
||||
fd = os.open(tmp, os.O_RDONLY)
|
||||
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
|
||||
tmp = os.readlink(tmp) # can't flock a symlink, even with O_NOFOLLOW
|
||||
except Exception as ex:
|
||||
if not WINDOWS:
|
||||
msg("\033[31mflock:", repr(ex))
|
||||
@@ -394,22 +387,39 @@ def run(tmp, j2ver):
|
||||
t.start()
|
||||
|
||||
ld = [tmp, os.path.join(tmp, "dep-j2")]
|
||||
if j2ver:
|
||||
if j2:
|
||||
del ld[-1]
|
||||
|
||||
if any([re.match(r"^-.*j[0-9]", x) for x in sys.argv]):
|
||||
run_s(ld)
|
||||
else:
|
||||
run_i(ld)
|
||||
|
||||
|
||||
def run_i(ld):
|
||||
for x in ld:
|
||||
sys.path.insert(0, x)
|
||||
|
||||
try:
|
||||
from copyparty.__main__ import main as copyparty
|
||||
from copyparty.__main__ import main as p
|
||||
|
||||
copyparty()
|
||||
p()
|
||||
|
||||
except SystemExit as ex:
|
||||
if ex.code:
|
||||
confirm(ex.code)
|
||||
except:
|
||||
confirm(1)
|
||||
|
||||
def run_s(ld):
|
||||
# fmt: off
|
||||
c = "import sys,runpy;" + "".join(['sys.path.insert(0,r"' + x + '");' for x in ld]) + 'runpy.run_module("copyparty",run_name="__main__")'
|
||||
c = [str(x) for x in [sys.executable, "-c", c] + list(sys.argv[1:])]
|
||||
# fmt: on
|
||||
msg("\n", c, "\n")
|
||||
p = sp.Popen(c)
|
||||
|
||||
def bye(*a):
|
||||
p.send_signal(signal.SIGINT)
|
||||
|
||||
signal.signal(signal.SIGTERM, bye)
|
||||
p.wait()
|
||||
|
||||
raise SystemExit(p.returncode)
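# (illustration, not part of sfx.py or this diff) a standalone sketch of the
# signal-forwarding pattern the new run_s uses above: spawn a child python
# process and translate SIGTERM into SIGINT so the child can shut down cleanly.
import signal
import subprocess as sp
import sys

def run_child(argv):
    p = sp.Popen([sys.executable] + list(argv))

    def bye(*a):
        p.send_signal(signal.SIGINT)  # forward the shutdown request to the child

    signal.signal(signal.SIGTERM, bye)
    p.wait()
    return p.returncode

if __name__ == "__main__":
    raise SystemExit(run_child(["-c", "import time; time.sleep(3)"]))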


def main():
@@ -443,14 +453,23 @@ def main():

    # skip 0

    tmp = unpack()
    tmp = os.path.realpath(unpack())

    try:
        from jinja2 import __version__ as j2ver
        from jinja2 import __version__ as j2
    except:
        j2ver = None
        j2 = None

    run(tmp, j2ver)
    try:
        run(tmp, j2)
    except SystemExit as ex:
        c = ex.code
        if c not in [0, -15]:
            confirm(ex.code)
    except KeyboardInterrupt:
        pass
    except:
        confirm(0)


if __name__ == "__main__":

@@ -17,14 +17,15 @@ __license__ = "MIT"
__url__ = "https://github.com/9001/copyparty/"


def get_spd(nbyte, nsec):
def get_spd(nbyte, nfiles, nsec):
    if not nsec:
        return "0.000 MB 0.000 sec 0.000 MB/s"
        return "0.000 MB 0 files 0.000 sec 0.000 MB/s 0.000 f/s"

    mb = nbyte / (1024 * 1024.0)
    spd = mb / nsec
    nspd = nfiles / nsec

    return f"{mb:.3f} MB {nsec:.3f} sec {spd:.3f} MB/s"
    return f"{mb:.3f} MB {nfiles} files {nsec:.3f} sec {spd:.3f} MB/s {nspd:.3f} f/s"


class Inf(object):
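(side note, not part of the diff: with the added nfiles argument, a call with illustrative values behaves like this:)

get_spd(3 * 1024 * 1024, 6, 2.0)
# -> "3.000 MB 6 files 2.000 sec 1.500 MB/s 3.000 f/s"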
@@ -36,6 +37,7 @@ class Inf(object):
        self.mtx_reports = threading.Lock()

        self.n_byte = 0
        self.n_file = 0
        self.n_sec = 0
        self.n_done = 0
        self.t0 = t0
@@ -63,7 +65,8 @@ class Inf(object):
                continue

            msgs = msgs[-64:]
            msgs = [f"{get_spd(self.n_byte, self.n_sec)} {x}" for x in msgs]
            spd = get_spd(self.n_byte, len(self.reports), self.n_sec)
            msgs = [f"{spd} {x}" for x in msgs]
            print("\n".join(msgs))

    def report(self, fn, n_byte, n_sec):
@@ -131,8 +134,9 @@ def main():

    num_threads = 8
    read_sz = 32 * 1024
    targs = (q, inf, read_sz)
    for _ in range(num_threads):
        thr = threading.Thread(target=worker, args=(q, inf, read_sz,))
        thr = threading.Thread(target=worker, args=targs)
        thr.daemon = True
        thr.start()

@@ -151,14 +155,14 @@ def main():
    log = inf.reports
    log.sort()
    for nbyte, nsec, fn in log[-64:]:
        print(f"{get_spd(nbyte, nsec)} {fn}")
        spd = get_spd(nbyte, len(log), nsec)
        print(f"{spd} {fn}")

    print()
    print("\n".join(inf.errors))

    print(get_spd(inf.n_byte, t2 - t0))
    print(get_spd(inf.n_byte, len(log), t2 - t0))


if __name__ == "__main__":
    main()


55  setup.py
@@ -5,22 +5,7 @@ from __future__ import print_function
import os
import sys
from shutil import rmtree

setuptools_available = True
try:
    # need setuptools to build wheel
    from setuptools import setup, Command, find_packages

except ImportError:
    # works in a pinch
    setuptools_available = False
    from distutils.core import setup, Command

from distutils.spawn import spawn

if "bdist_wheel" in sys.argv and not setuptools_available:
    print("cannot build wheel without setuptools")
    sys.exit(1)
from setuptools import setup, Command, find_packages


NAME = "copyparty"
@@ -100,9 +85,8 @@ args = {
    "author_email": "copyparty@ocv.me",
    "url": "https://github.com/9001/copyparty",
    "license": "MIT",
    "data_files": data_files,
    "classifiers": [
        "Development Status :: 3 - Alpha",
        "Development Status :: 4 - Beta",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
@@ -120,35 +104,16 @@ args = {
        "Environment :: Console",
        "Environment :: No Input/Output (Daemon)",
        "Topic :: Communications :: File Sharing",
        "Topic :: Internet :: WWW/HTTP :: HTTP Servers",
    ],
    "include_package_data": True,
    "data_files": data_files,
    "packages": find_packages(),
    "install_requires": ["jinja2"],
    "extras_require": {"thumbnails": ["Pillow"], "audiotags": ["mutagen"]},
    "entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]},
    "scripts": ["bin/copyparty-fuse.py"],
    "cmdclass": {"clean2": clean2},
}


if setuptools_available:
    args.update(
        {
            "packages": find_packages(),
            "install_requires": ["jinja2"],
            "extras_require": {"thumbnails": ["Pillow"]},
            "include_package_data": True,
            "entry_points": {
                "console_scripts": ["copyparty = copyparty.__main__:main"]
            },
            "scripts": ["bin/copyparty-fuse.py"],
        }
    )
else:
    args.update(
        {
            "packages": ["copyparty", "copyparty.stolen"],
            "scripts": ["bin/copyparty-fuse.py"],
        }
    )


# import pprint
# pprint.PrettyPrinter().pprint(args)
# sys.exit(0)

setup(**args)
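(side note, not part of the diff: with the distutils fallback removed, setuptools becomes a hard requirement, so a release build would presumably be the usual)

python3 -m pip install --user setuptools wheel
python3 setup.py sdist bdist_wheel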
33  tests/run.py  Executable file
@@ -0,0 +1,33 @@
#!/usr/bin/env python3

import sys
import runpy

host = sys.argv[1]
sys.argv = sys.argv[:1] + sys.argv[2:]
sys.path.insert(0, ".")


def rp():
    runpy.run_module("unittest", run_name="__main__")


if host == "vmprof":
    rp()

elif host == "cprofile":
    import cProfile
    import pstats

    log_fn = "cprofile.log"
    cProfile.run("rp()", log_fn)
    p = pstats.Stats(log_fn)
    p.sort_stats(pstats.SortKey.CUMULATIVE).print_stats(64)


"""
python3.9 tests/run.py cprofile -v tests/test_httpcli.py

python3.9 -m pip install --user vmprof
python3.9 -m vmprof --lines -o vmprof.log tests/run.py vmprof -v tests/test_httpcli.py
"""
206  tests/test_httpcli.py  Normal file
@@ -0,0 +1,206 @@
#!/usr/bin/env python
# coding: utf-8
from __future__ import print_function, unicode_literals

import io
import os
import time
import shutil
import pprint
import tarfile
import tempfile
import unittest
from argparse import Namespace

from tests import util as tu
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli


def hdr(query):
    h = "GET /{} HTTP/1.1\r\nCookie: cppwd=o\r\nConnection: close\r\n\r\n"
    return h.format(query).encode("utf-8")
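# (illustration, not part of the new file: hdr("foo/bar.txt") yields the raw request
#  b"GET /foo/bar.txt HTTP/1.1\r\nCookie: cppwd=o\r\nConnection: close\r\n\r\n"
#  which the VHttpConn fake socket in tests/util.py feeds straight into HttpCli)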


class Cfg(Namespace):
    def __init__(self, a=[], v=[], c=None):
        super(Cfg, self).__init__(
            a=a,
            v=v,
            c=c,
            ed=False,
            no_zip=False,
            no_scandir=False,
            no_sendfile=True,
            no_rescan=True,
            ihead=False,
            nih=True,
            mtp=[],
            mte="a",
            **{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
        )


class TestHttpCli(unittest.TestCase):
    def setUp(self):
        self.td = tu.get_ramdisk()

    def tearDown(self):
        os.chdir(tempfile.gettempdir())
        shutil.rmtree(self.td)

    def test(self):
        td = os.path.join(self.td, "vfs")
        os.mkdir(td)
        os.chdir(td)

        self.dtypes = ["ra", "ro", "rx", "wa", "wo", "wx", "aa", "ao", "ax"]
        self.can_read = ["ra", "ro", "aa", "ao"]
        self.can_write = ["wa", "wo", "aa", "ao"]
        self.fn = "g{:x}g".format(int(time.time() * 3))

        allfiles = []
        allvols = []
        for top in self.dtypes:
            allvols.append(top)
            allfiles.append("/".join([top, self.fn]))
            for s1 in self.dtypes:
                p = "/".join([top, s1])
                allvols.append(p)
                allfiles.append(p + "/" + self.fn)
                allfiles.append(p + "/n/" + self.fn)
                for s2 in self.dtypes:
                    p = "/".join([top, s1, "n", s2])
                    os.makedirs(p)
                    allvols.append(p)
                    allfiles.append(p + "/" + self.fn)

        for fp in allfiles:
            with open(fp, "w") as f:
                f.write("ok {}\n".format(fp))

        for top in self.dtypes:
            vcfg = []
            for vol in allvols:
                if not vol.startswith(top):
                    continue

                mode = vol[-2]
                usr = vol[-1]
                if usr == "a":
                    usr = ""

                if "/" not in vol:
                    vol += "/"

                top, sub = vol.split("/", 1)
                vcfg.append("{0}/{1}:{1}:{2}{3}".format(top, sub, mode, usr))

            pprint.pprint(vcfg)

            self.args = Cfg(v=vcfg, a=["o:o", "x:x"])
            self.auth = AuthSrv(self.args, self.log)
            vfiles = [x for x in allfiles if x.startswith(top)]
            for fp in vfiles:
                rok, wok = self.can_rw(fp)
                furl = fp.split("/", 1)[1]
                durl = furl.rsplit("/", 1)[0] if "/" in furl else ""

                # file download
                h, ret = self.curl(furl)
                res = "ok " + fp in ret
                print("[{}] {} {} = {}".format(fp, rok, wok, res))
                if rok != res:
                    print("\033[33m{}\n# {}\033[0m".format(ret, furl))
                    self.fail()

                # file browser: html
                h, ret = self.curl(durl)
                res = "'{}'".format(self.fn) in ret
                print(res)
                if rok != res:
                    print("\033[33m{}\n# {}\033[0m".format(ret, durl))
                    self.fail()

                # file browser: json
                url = durl + "?ls"
                h, ret = self.curl(url)
                res = '"{}"'.format(self.fn) in ret
                print(res)
                if rok != res:
                    print("\033[33m{}\n# {}\033[0m".format(ret, url))
                    self.fail()

                # tar
                url = durl + "?tar"
                h, b = self.curl(url, True)
                # with open(os.path.join(td, "tar"), "wb") as f:
                #     f.write(b)
                try:
                    tar = tarfile.open(fileobj=io.BytesIO(b)).getnames()
                except:
                    tar = []
                tar = ["/".join([y for y in [top, durl, x] if y]) for x in tar]
                tar = [[x] + self.can_rw(x) for x in tar]
                tar_ok = [x[0] for x in tar if x[1]]
                tar_ng = [x[0] for x in tar if not x[1]]
                self.assertEqual([], tar_ng)

                if durl.split("/")[-1] in self.can_read:
                    ref = [x for x in vfiles if self.in_dive(top + "/" + durl, x)]
                    for f in ref:
                        print("{}: {}".format("ok" if f in tar_ok else "NG", f))
                    ref.sort()
                    tar_ok.sort()
                    self.assertEqual(ref, tar_ok)

                # stash
                h, ret = self.put(url)
                res = h.startswith("HTTP/1.1 200 ")
                self.assertEqual(res, wok)

    def can_rw(self, fp):
        # lowest non-neutral folder declares permissions
        expect = fp.split("/")[:-1]
        for x in reversed(expect):
            if x != "n":
                expect = x
                break

        return [expect in self.can_read, expect in self.can_write]

    def in_dive(self, top, fp):
        # archiver bails at first inaccessible subvolume
        top = top.strip("/").split("/")
        fp = fp.split("/")
        for f1, f2 in zip(top, fp):
            if f1 != f2:
                return False

        for f in fp[len(top) :]:
            if f == self.fn:
                return True
            if f not in self.can_read and f != "n":
                return False

        return True

    def put(self, url):
        buf = "PUT /{0} HTTP/1.1\r\nCookie: cppwd=o\r\nConnection: close\r\nContent-Length: {1}\r\n\r\nok {0}\n"
        buf = buf.format(url, len(url) + 4).encode("utf-8")
        conn = tu.VHttpConn(self.args, self.auth, self.log, buf)
        HttpCli(conn).run()
        return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)

    def curl(self, url, binary=False):
        conn = tu.VHttpConn(self.args, self.auth, self.log, hdr(url))
        HttpCli(conn).run()
        if binary:
            h, b = conn.s._reply.split(b"\r\n\r\n", 1)
            return [h.decode("utf-8"), b]

        return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)

    def log(self, src, msg, c=0):
        # print(repr(msg))
        pass
@@ -3,15 +3,14 @@
from __future__ import print_function, unicode_literals

import os
import time
import json
import shutil
import tempfile
import unittest
import subprocess as sp  # nosec

from textwrap import dedent
from argparse import Namespace

from tests import util as tu
from copyparty.authsrv import AuthSrv
from copyparty import util

@@ -25,6 +24,13 @@ class Cfg(Namespace):


class TestVFS(unittest.TestCase):
    def setUp(self):
        self.td = tu.get_ramdisk()

    def tearDown(self):
        os.chdir(tempfile.gettempdir())
        shutil.rmtree(self.td)

    def dump(self, vfs):
        print(json.dumps(vfs, indent=4, sort_keys=True, default=lambda o: o.__dict__))

@@ -51,57 +57,11 @@ class TestVFS(unittest.TestCase):
        real = [x[0] for x in real]
        return fsdir, real, virt

    def runcmd(self, *argv):
        p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
        stdout, stderr = p.communicate()
        stdout = stdout.decode("utf-8")
        stderr = stderr.decode("utf-8")
        return [p.returncode, stdout, stderr]

    def chkcmd(self, *argv):
        ok, sout, serr = self.runcmd(*argv)
        if ok != 0:
            raise Exception(serr)

        return sout, serr

    def get_ramdisk(self):
        for vol in ["/dev/shm", "/Volumes/cptd"]:  # nosec (singleton test)
            if os.path.exists(vol):
                return vol

        if os.path.exists("/Volumes"):
            devname, _ = self.chkcmd("hdiutil", "attach", "-nomount", "ram://8192")
            devname = devname.strip()
            print("devname: [{}]".format(devname))
            for _ in range(10):
                try:
                    _, _ = self.chkcmd(
                        "diskutil", "eraseVolume", "HFS+", "cptd", devname
                    )
                    return "/Volumes/cptd"
                except Exception as ex:
                    print(repr(ex))
                    time.sleep(0.25)

            raise Exception("ramdisk creation failed")

        ret = os.path.join(tempfile.gettempdir(), "copyparty-test")
        try:
            os.mkdir(ret)
        finally:
            return ret

    def log(self, src, msg, c=0):
        pass

    def test(self):
        td = os.path.join(self.get_ramdisk(), "vfs")
        try:
            shutil.rmtree(td)
        except OSError:
            pass

        td = os.path.join(self.td, "vfs")
        os.mkdir(td)
        os.chdir(td)

@@ -268,7 +228,7 @@ class TestVFS(unittest.TestCase):
        self.assertEqual(list(v1), list(v2))

        # config file parser
        cfg_path = os.path.join(self.get_ramdisk(), "test.cfg")
        cfg_path = os.path.join(self.td, "test.cfg")
        with open(cfg_path, "wb") as f:
            f.write(
                dedent(
@@ -301,6 +261,4 @@ class TestVFS(unittest.TestCase):
        self.assertEqual(n.uwrite, ["asd"])
        self.assertEqual(len(n.nodes), 0)

        os.chdir(tempfile.gettempdir())
        shutil.rmtree(td)
        os.unlink(cfg_path)

127  tests/util.py  Normal file
@@ -0,0 +1,127 @@
import os
import sys
import time
import shutil
import jinja2
import tempfile
import platform
import subprocess as sp


WINDOWS = platform.system() == "Windows"
ANYWIN = WINDOWS or sys.platform in ["msys"]
MACOS = platform.system() == "Darwin"

J2_ENV = jinja2.Environment(loader=jinja2.BaseLoader)
J2_FILES = J2_ENV.from_string("{{ files|join('\n') }}")


def nah(*a, **ka):
    return False


if MACOS:
    import posixpath

    posixpath.islink = nah
    os.path.islink = nah
    # 25% faster; until any tests do symlink stuff


from copyparty.util import Unrecv


def runcmd(*argv):
    p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
    stdout, stderr = p.communicate()
    stdout = stdout.decode("utf-8")
    stderr = stderr.decode("utf-8")
    return [p.returncode, stdout, stderr]


def chkcmd(*argv):
    ok, sout, serr = runcmd(*argv)
    if ok != 0:
        raise Exception(serr)

    return sout, serr


def get_ramdisk():
    def subdir(top):
        ret = os.path.join(top, "cptd-{}".format(os.getpid()))
        shutil.rmtree(ret, True)
        os.mkdir(ret)
        return ret

    for vol in ["/dev/shm", "/Volumes/cptd"]:  # nosec (singleton test)
        if os.path.exists(vol):
            return subdir(vol)

    if os.path.exists("/Volumes"):
        # hdiutil eject /Volumes/cptd/
        devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://65536")
        devname = devname.strip()
        print("devname: [{}]".format(devname))
        for _ in range(10):
            try:
                _, _ = chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
                return subdir("/Volumes/cptd")
            except Exception as ex:
                print(repr(ex))
                time.sleep(0.25)

        raise Exception("ramdisk creation failed")

    ret = os.path.join(tempfile.gettempdir(), "copyparty-test")
    try:
        os.mkdir(ret)
    finally:
        return subdir(ret)


class NullBroker(object):
    def put(*args):
        pass


class VSock(object):
    def __init__(self, buf):
        self._query = buf
        self._reply = b""
        self.sendall = self.send

    def recv(self, sz):
        ret = self._query[:sz]
        self._query = self._query[sz:]
        return ret

    def send(self, buf):
        self._reply += buf
        return len(buf)


class VHttpSrv(object):
    def __init__(self):
        self.broker = NullBroker()

        aliases = ["splash", "browser", "browser2", "msg", "md", "mde"]
        self.j2 = {x: J2_FILES for x in aliases}


class VHttpConn(object):
    def __init__(self, args, auth, log, buf):
        self.s = VSock(buf)
        self.sr = Unrecv(self.s)
        self.addr = ("127.0.0.1", "42069")
        self.args = args
        self.auth = auth
        self.log_func = log
        self.log_src = "a"
        self.lf_url = None
        self.hsrv = VHttpSrv()
        self.nbyte = 0
        self.workload = 0
        self.ico = None
        self.thumbcli = None
        self.t0 = time.time()
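(usage sketch, not part of the diff: this mirrors how test_httpcli.py drives a request end-to-end through the fake socket; args and auth would be a Cfg and AuthSrv as built there, and the url is made up)

conn = VHttpConn(args, auth, print, hdr("some/file.txt"))  # hdr() is the helper from test_httpcli.py
HttpCli(conn).run()
head, body = conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)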