Compare commits

...

64 Commits

Author  SHA1        Message  Date
ed  cd52dea488  v0.11.15  2021-06-15 00:01:11 +02:00
ed  6ea75df05d  add audio equalizer  2021-06-14 23:58:56 +02:00
ed  4846e1e8d6  mention num.clients for rproxy  2021-06-14 19:27:34 +02:00
ed  fc024f789d  v0.11.14  2021-06-14 03:05:50 +02:00
ed  473e773aea  fix deadlock  2021-06-14 00:55:11 +00:00
ed  48a2e1a353  add threadwatcher  2021-06-14 01:57:18 +02:00
ed  6da63fbd79  up2k-cli: recover from lost handshakes  2021-06-14 01:01:06 +02:00
ed  5bec37fcee  fix cosmetic login glitch  2021-06-14 00:28:08 +02:00
ed  3fd0ba0a31  oh right its the other way around  2021-06-13 22:49:55 +02:00
ed  241a143366  add --rproxy for explicit proxy level  2021-06-13 22:22:31 +02:00
ed  a537064da7  custom-css example to add filetype icons  2021-06-13 00:49:28 +02:00
ed  f3dfd24c92  v0.11.13  2021-06-12 20:37:05 +02:00
ed  fa0a7f50bb  add image gallery  2021-06-12 20:25:08 +02:00
ed  44a78a7e21  v0.11.12  2021-06-12 04:28:21 +02:00
ed  6b75cbf747  add readme  2021-06-12 04:26:53 +02:00
ed  e7b18ab9fe  custom css  2021-06-12 04:22:07 +02:00
ed  aa12830015  keep transparency in thumbnails  2021-06-12 03:32:06 +02:00
ed  f156e00064  s/cover/folder/g  2021-06-12 03:06:56 +02:00
ed  d53c212516  add mtp queue to status page  2021-06-12 02:23:48 +02:00
ed  ca27f8587c  add cygpath support for volume src too  2021-06-12 01:55:45 +02:00
ed  88ce008e16  more status on admin panel  2021-06-12 01:39:14 +02:00
ed  081d2cc5d7  add folder thumbnails (cover.jpg or png)  2021-06-11 23:54:54 +02:00
ed  60ac68d000  single authsrv instance per process  2021-06-11 23:01:13 +02:00
ed  fbe656957d  fix race  2021-06-11 18:12:06 +02:00
ed  5534c78c17  tests pass  2021-06-11 03:10:33 +02:00
ed  a45a53fdce  support macos ffmpeg  2021-06-11 03:05:42 +02:00
ed  972a56e738  fix stuff  2021-06-11 01:45:28 +02:00
ed  5e03b3ca38  use parent db/thumbs in jump-volumes  2021-06-10 20:43:19 +02:00
ed  1078d933b4  adding --no-hash  2021-06-10 18:08:30 +02:00
ed  d6bf300d80  option to store state out-of-volume (mostly untested)  2021-06-10 01:27:04 +02:00
ed  a359d64d44  v0.11.11  2021-06-08 23:43:00 +02:00
ed  22396e8c33  zopfli js/css  2021-06-08 23:19:35 +02:00
ed  5ded5a4516  alphabetical up2k indexing  2021-06-08 21:42:08 +02:00
ed  79c7639aaf  haha memes  2021-06-08 21:10:25 +02:00
ed  5bbf875385  fuse-client: print python version  2021-06-08 20:19:51 +02:00
ed  5e159432af  vscode: support running with -jN  2021-06-08 20:18:24 +02:00
ed  1d6ae409f6  count expenses when sending files  2021-06-08 20:17:53 +02:00
ed  9d729d3d1a  add thread names  2021-06-08 20:14:23 +02:00
ed  4dd5d4e1b7  when rootless, blank instead of block rootdir  2021-06-08 18:35:55 +02:00
ed  acd8149479  dont track workloads unless multiprocessing  2021-06-08 18:01:59 +02:00
ed  b97a1088fa  v0.11.10  2021-06-08 09:41:31 +02:00
ed  b77bed3324  fix terminating tls connections wow  2021-06-08 09:40:49 +02:00
ed  a2b7c85a1f  forgot what version was running on a box  2021-06-08 00:01:08 +02:00
ed  b28533f850  v0.11.9  2021-06-07 20:22:10 +02:00
ed  bd8c7e538a  sfx.sh: use system jinja2 when available  2021-06-07 20:09:45 +02:00
ed  89e48cff24  detect recursive symlinks  2021-06-07 20:09:18 +02:00
ed  ae90a7b7b6  mention firefox funny  2021-06-07 02:10:54 +02:00
ed  6fc1be04da  support windows-py3.5  2021-06-06 21:10:53 +02:00
ed  0061d29534  v0.11.8  2021-06-06 19:09:55 +02:00
ed  a891f34a93  update sharex example  2021-06-06 19:06:33 +02:00
ed  d6a1e62a95  append file-ext when avoiding name collisions  2021-06-06 18:53:32 +02:00
ed  cda36ea8b4  support json replies from bput  2021-06-06 18:47:21 +02:00
ed  909a76434a  a  2021-06-06 03:07:11 +02:00
ed  39348ef659  add sharex example  2021-06-06 02:53:01 +02:00
ed  99d30edef3  v0.11.7  2021-06-05 03:33:29 +02:00
ed  b63ab15bf9  gallery links in new tab if a selection is atcive  2021-06-05 03:27:44 +02:00
ed  485cb4495c  minify asmcrypto a bit  2021-06-05 03:25:54 +02:00
ed  df018eb1f2  add colors  2021-06-05 01:34:39 +02:00
ed  49aa47a9b8  way faster sha512 wasm fallback  2021-06-05 01:14:16 +02:00
ed  7d20eb202a  optimize  2021-06-04 19:35:08 +02:00
ed  c533da9129  fix single-threaded mtag  2021-06-04 19:00:24 +02:00
ed  5cba31a814  spin on thumbnails too  2021-06-04 17:38:57 +02:00
ed  1d824cb26c  add volume lister / containment checker  2021-06-04 02:23:46 +02:00
ed  83b903d60e  readme: update todos  2021-06-02 09:42:33 +02:00
50 changed files with 2579 additions and 395 deletions

.vscode/launch.json

@@ -16,12 +16,9 @@
             "-e2ts",
             "-mtp",
             ".bpm=f,bin/mtag/audio-bpm.py",
-            "-a",
-            "ed:wark",
-            "-v",
-            "srv::r:aed:cnodupe",
-            "-v",
-            "dist:dist:r"
+            "-aed:wark",
+            "-vsrv::r:aed:cnodupe",
+            "-vdist:dist:r"
         ]
     },
     {
@@ -43,5 +40,13 @@
             "${file}"
         ]
     },
+    {
+        "name": "Python: Current File",
+        "type": "python",
+        "request": "launch",
+        "program": "${file}",
+        "console": "integratedTerminal",
+        "justMyCode": false
+    },
 ]
}

.vscode/launch.py

@@ -3,14 +3,16 @@
 # launches 10x faster than mspython debugpy
 # and is stoppable with ^C

+import re
 import os
 import sys
+print(sys.executable)
 import shlex
-sys.path.insert(0, os.getcwd())
 import jstyleson
-from copyparty.__main__ import main as copyparty
+import subprocess as sp

 with open(".vscode/launch.json", "r", encoding="utf-8") as f:
     tj = f.read()
@@ -25,6 +27,14 @@ except:
     pass

 argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv]

+if re.search(" -j ?[0-9]", " ".join(argv)):
+    argv = [sys.executable, "-m", "copyparty"] + argv
+    sp.check_call(argv)
+else:
+    sys.path.insert(0, os.getcwd())
+    from copyparty.__main__ import main as copyparty

 try:
     copyparty(["a"] + argv)
 except SystemExit as ex:

README.md

@@ -37,6 +37,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * [other tricks](#other-tricks)
 * [searching](#searching)
   * [search configuration](#search-configuration)
+  * [database location](#database-location)
 * [metadata from audio files](#metadata-from-audio-files)
   * [file parser plugins](#file-parser-plugins)
 * [complete examples](#complete-examples)
@@ -68,12 +69,16 @@ you may also want these, especially on servers:

 ## notes

+general:
+* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
+  * because no browsers currently implement the media-query to do this properly orz
+
+browser-specific:
 * iPhone/iPad: use Firefox to download files
 * Android-Chrome: increase "parallel uploads" for higher speed (android bug)
 * Android-Firefox: takes a while to select files (their fix for ☝️)
 * Desktop-Firefox: ~~may use gigabytes of RAM if your files are massive~~ *seems to be OK now*
-* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
-  * because no browsers currently implement the media-query to do this properly orz
+* Desktop-Firefox: may stop you from deleting folders you've uploaded until you visit `about:memory` and click `Minimize memory usage`

 ## status
@@ -101,6 +106,7 @@ summary: all planned features work! now please enjoy the bloatening
 * ☑ images using Pillow
 * ☑ videos using FFmpeg
 * ☑ cache eviction (max-age; maybe max-size eventually)
+* ☑ image gallery
 * ☑ SPA (browse while uploading)
   * if you use the file-tree on the left only, not folders in the file list
 * server indexing
@@ -117,12 +123,12 @@ summary: all planned features work! now please enjoy the bloatening
 * Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
 * Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
 * Windows: python 2.7 cannot handle filenames with mojibake
+* MacOS: `--th-ff-jpg` may fix thumbnails using macports-FFmpeg

 ## general bugs

 * all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
 * cannot mount something at `/d1/d2/d3` unless `d2` exists inside `d1`
-* hiding the contents at url `/d1/d2/d3` using `-v :d1/d2/d3:cd2d` has the side-effect of creating databases (for files/tags) inside folders d1 and d2, and those databases take precedence over the main db at the top of the vfs - this means all files in d2 and below will be reindexed unless you already had a vfs entry at or below d2
 * probably more, pls let me know

 ## not my bugs
@@ -176,6 +182,8 @@ click `[-]` and `[+]` to adjust the size, and the `[a]` toggles if the tree shou
 it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how destructive your users are

+images named `folder.jpg` and `folder.png` become the thumbnail of the folder they're in

 ## zip downloads
@@ -292,9 +300,29 @@ the same arguments can be set as volume flags, in addition to `d2d` and `d2t` fo
 * `-v ~/music::r:cd2d` disables **all** indexing, even if any `-e2*` are on
 * `-v ~/music::r:cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`

-`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those
-
-the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
+note:
+* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those
+* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
+
+you can choose to only index filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash` or the volume-flag `cdhash`, this has the following consequences:
+* initial indexing is way faster, especially when the volume is on a networked disk
+* makes it impossible to [file-search](#file-search)
+* if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected
+
+if you set `--no-hash`, you can enable hashing for specific volumes using flag `cehash`
+
+
+## database location
+
+copyparty creates a subfolder named `.hist` inside each volume where it stores the database, thumbnails, and some other stuff
+
+this can instead be kept in a single place using the `--hist` argument, or the `hist=` volume flag, or a mix of both:
+* `--hist ~/.cache/copyparty -v ~/music::r:chist=-` sets `~/.cache/copyparty` as the default place to put volume info, but `~/music` gets the regular `.hist` subfolder (`-` restores default behavior)
+
+note:
+* markdown edits are always stored in a local `.hist` subdirectory
+* on windows the volflag path is cyglike, so `/c/temp` means `C:\temp` but use regular paths for `--hist`
+* you can use cygpaths for volumes too, `-v C:\Users::r` and `-v /c/users::r` both work

 ## metadata from audio files
@@ -397,6 +425,8 @@ quick summary of more eccentric web-browsers trying to view a directory index:
 * cross-platform python client available in [./bin/](bin/)
 * [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
+* sharex (screenshot utility): see [./contrib/sharex.sxcu](contrib/#sharexsxcu)

 copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uplaods:

     b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
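
the same checksum can also be reproduced with a few lines of python; a rough equivalent of the b512 shell helper above (reads the whole file into memory, illustration only):

    import base64, hashlib

    def b512(path):
        # sha512 of the file, base64, truncated to 43 chars -- matches the
        # value copyparty prints for PUT/POST uploads
        with open(path, "rb") as f:
            digest = hashlib.sha512(f.read()).digest()
        return base64.b64encode(digest).decode("ascii")[:43]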
@@ -520,20 +550,25 @@ in the `scripts` folder:

 roughly sorted by priority

-* separate sqlite table per tag
-* audio fingerprinting
 * readme.md as epilogue
-* single sha512 across all up2k chunks? maybe
 * reduce up2k roundtrips
   * start from a chunk index and just go
   * terminate client on bad data
-* `os.copy_file_range` for up2k cloning
+* single sha512 across all up2k chunks? maybe
+* figure out the deal with pixel3a not being connectable as hotspot
+  * pixel3a having unpredictable 3sec latency in general :||||

 discarded ideas

+* separate sqlite table per tag
+  * performance fixed by skipping some indexes (`+mt.k`)
+* audio fingerprinting
+  * only makes sense if there can be a wasm client and that doesn't exist yet (except for olaf which is agpl hence counts as not existing)
+* `os.copy_file_range` for up2k cloning
+  * almost never hit this path anyways
 * up2k partials ui
+  * feels like there isn't much point
 * cache sha512 chunks on client
+  * too dangerous
 * comment field
+  * nah
 * look into android thumbnail cache file format
+  * absolutely not

bin/copyparty-fuse.py

@@ -54,6 +54,12 @@ MACOS = platform.system() == "Darwin"

 info = log = dbg = None

+print("{} v{} @ {}".format(
+    platform.python_implementation(),
+    ".".join([str(x) for x in sys.version_info]),
+    sys.executable))
+
 try:
     from fuse import FUSE, FuseOSError, Operations
 except:

contrib/README.md

@@ -9,6 +9,16 @@
 * assumes the webserver and copyparty is running on the same server/IP
 * modify `10.13.1.1` as necessary if you wish to support browsers without javascript

+### [`sharex.sxcu`](sharex.sxcu)
+* sharex config file to upload screenshots and grab the URL
+  * `RequestURL`: full URL to the target folder
+  * `pw`: password (remove the `pw` line if anon-write)
+
+however if your copyparty is behind a reverse-proxy, you may want to use [`sharex-html.sxcu`](sharex-html.sxcu) instead:
+  * `RequestURL`: full URL to the target folder
+  * `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$`
+  * `pw`: password (remove `Parameters` if anon-write)
+
 ### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
 * disables thumbnails and folder-type detection in windows explorer
 * makes it way faster (especially for slow/networked locations (such as copyparty-fuse))

contrib/nginx/copyparty.conf

@@ -1,3 +1,8 @@
+# when running copyparty behind a reverse-proxy,
+# make sure that copyparty allows at least as many clients as the proxy does,
+# so run copyparty with -nc 512 if your nginx has the default limits
+# (worker_processes 1, worker_connections 512)
+
 upstream cpp {
     server 127.0.0.1:3923;
     keepalive 120;

contrib/sharex-html.sxcu (new file)

@@ -0,0 +1,19 @@
{
"Version": "13.5.0",
"Name": "copyparty-html",
"DestinationType": "ImageUploader",
"RequestMethod": "POST",
"RequestURL": "http://127.0.0.1:3923/sharex",
"Parameters": {
"pw": "wark"
},
"Body": "MultipartFormData",
"Arguments": {
"act": "bput"
},
"FileFormName": "f",
"RegexList": [
"bytes // <a href=\"/([^\"]+)\""
],
"URL": "http://127.0.0.1:3923/$regex:1|1$"
}

contrib/sharex.sxcu (new file)

@@ -0,0 +1,17 @@
{
"Version": "13.5.0",
"Name": "copyparty",
"DestinationType": "ImageUploader",
"RequestMethod": "POST",
"RequestURL": "http://127.0.0.1:3923/sharex",
"Parameters": {
"pw": "wark",
"j": null
},
"Body": "MultipartFormData",
"Arguments": {
"act": "bput"
},
"FileFormName": "f",
"URL": "$json:files[0].url$"
}
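
for scripting without sharex, the same endpoint can be driven from python; a minimal sketch assuming the example setup above (a writable volume at `/sharex`, password `wark`, and the third-party `requests` library installed):

    import requests

    def upload(path, url="http://127.0.0.1:3923/sharex", pw="wark"):
        # multipart POST with act=bput; ?j asks for the json reply
        # introduced in this changeset instead of the html message
        with open(path, "rb") as f:
            r = requests.post(
                url,
                params={"pw": pw, "j": ""},
                data={"act": "bput"},
                files={"f": f},  # "f" is the FileFormName used by the sxcu configs
            )
        r.raise_for_status()
        return r.json()["files"][0]["url"]  # same field sharex reads via $json:files[0].url$

    if __name__ == "__main__":
        print(upload("screenshot.png"))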

copyparty/__main__.py

@@ -23,7 +23,7 @@ from textwrap import dedent
 from .__init__ import E, WINDOWS, VT100, PY2
 from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
 from .svchub import SvcHub
-from .util import py_desc, align_tab, IMPLICATIONS
+from .util import py_desc, align_tab, IMPLICATIONS, alltrace

 HAVE_SSL = True
 try:
@@ -182,6 +182,16 @@ def sighandler(sig=None, frame=None):
     print("\n".join(msg))


+def stackmon(fp, ival):
+    ctr = 0
+    while True:
+        ctr += 1
+        time.sleep(ival)
+        st = "{}, {}\n{}".format(ctr, time.time(), alltrace())
+        with open(fp, "wb") as f:
+            f.write(st.encode("utf-8", "replace"))
+
+
 def run_argparse(argv, formatter):
     ap = argparse.ArgumentParser(
         formatter_class=formatter,
@@ -222,32 +232,63 @@ def run_argparse(argv, formatter):
           "print,get" prints the data in the log and returns GET
             (leave out the ",get" to return an error instead)

-        --ciphers help = available ssl/tls ciphers,
-        --ssl-ver help = available ssl/tls versions,
-            default is what python considers safe, usually >= TLS1
+        values for --ls:
+          "USR" is a user to browse as; * is anonymous, ** is all users
+          "VOL" is a single volume to scan, default is * (all vols)
+          "FLAG" is flags;
+            "v" in addition to realpaths, print usernames and vpaths
+            "ln" only prints symlinks leaving the volume mountpoint
+            "p" exits 1 if any such symlinks are found
+            "r" resumes startup after the listing
+
+        examples:
+          --ls '**'          # list all files which are possible to read
+          --ls '**,*,ln'     # check for dangerous symlinks
+          --ls '**,*,ln,p,r' # check, then start normally if safe
+        \033[0m
        """
        ),
    )

     # fmt: off
     ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
-    ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
-    ap.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
     ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
     ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
-    ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
-    ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
-    ap.add_argument("-q", action="store_true", help="quiet")
+    ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account, USER:PASS; example [ed:wark")
+    ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
     ap.add_argument("-ed", action="store_true", help="enable ?dots")
     ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
     ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
-    ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
-    ap.add_argument("-nih", action="store_true", help="no info hostname")
-    ap.add_argument("-nid", action="store_true", help="no info disk-usage")
     ap.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
-    ap.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
     ap.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
-    ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms")
-    ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
+    ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
+
+    ap2 = ap.add_argument_group('network options')
+    ap2.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
+    ap2.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
+    ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")
+
+    ap2 = ap.add_argument_group('SSL/TLS options')
+    ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
+    ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
+    ap2.add_argument("--ssl-ver", metavar="LIST", type=str, help="set allowed ssl/tls versions; [help] shows available versions; default is what your python version considers safe")
+    ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ssl/tls ciphers; [help] shows available ciphers")
+    ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
+    ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
+
+    ap2 = ap.add_argument_group('opt-outs')
+    ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
+    ap2.add_argument("-nih", action="store_true", help="no info hostname")
+    ap2.add_argument("-nid", action="store_true", help="no info disk-usage")
+    ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
+
+    ap2 = ap.add_argument_group('safety options')
+    ap2.add_argument("--ls", metavar="U[,V[,F]]", help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
+    ap2.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
+
+    ap2 = ap.add_argument_group('logging options')
+    ap2.add_argument("-q", action="store_true", help="quiet")
+    ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
+    ap2.add_argument("--ihead", metavar="HEADER", action='append', help="dump incoming header")
+    ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")

     ap2 = ap.add_argument_group('admin panel options')
     ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
@@ -260,6 +301,7 @@ def run_argparse(argv, formatter):
     ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
     ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
     ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
+    ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs")
     ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
     ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval")
     ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
@@ -271,6 +313,8 @@ def run_argparse(argv, formatter):
     ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
     ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
     ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
+    ap2.add_argument("--hist", metavar="PATH", type=str, help="where to store volume state")
+    ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
     ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead")
     ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
     ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
@@ -279,21 +323,14 @@ def run_argparse(argv, formatter):
     ap2.add_argument("-mtp", metavar="M=[f,]bin", action="append", type=str, help="read tag M using bin")
     ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")

-    ap2 = ap.add_argument_group('SSL/TLS options')
-    ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
-    ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
-    ap2.add_argument("--ssl-ver", metavar="LIST", type=str, help="ssl/tls versions to allow")
-    ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
-    ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
-    ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
+    ap2 = ap.add_argument_group('appearance options')
+    ap2.add_argument("--css-browser", metavar="L", help="URL to additional CSS to include")

     ap2 = ap.add_argument_group('debug options')
-    ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
     ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
     ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
     ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing")
-    ap2.add_argument("--ihead", metavar="HEADER", action='append', help="dump incoming header")
-    ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
+    ap2.add_argument("--stackmon", metavar="P,S", help="write stacktrace to Path every S second")

     return ap.parse_args(args=argv[1:])
     # fmt: on
@@ -333,6 +370,16 @@ def main(argv=None):
     except AssertionError:
         al = run_argparse(argv, Dodge11874)

+    if al.stackmon:
+        fp, f = al.stackmon.rsplit(",", 1)
+        f = int(f)
+        t = threading.Thread(
+            target=stackmon,
+            args=(fp, f),
+        )
+        t.daemon = True
+        t.start()
+
     # propagate implications
     for k1, k2 in IMPLICATIONS:
         if getattr(al, k1):
@@ -363,6 +410,9 @@ def main(argv=None):
             + " (if you crash with codec errors then that is why)"
         )

+    if WINDOWS and sys.version_info < (3, 6):
+        al.no_scandir = True
+
     # signal.signal(signal.SIGINT, sighandler)

     SvcHub(al).run()

copyparty/__version__.py

@@ -1,8 +1,8 @@
 # coding: utf-8

-VERSION = (0, 11, 6)
+VERSION = (0, 11, 15)
 CODENAME = "the grid"
-BUILD_DT = (2021, 6, 1)
+BUILD_DT = (2021, 6, 15)

 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

copyparty/authsrv.py

@@ -5,9 +5,11 @@ import re
 import os
 import sys
 import stat
+import base64
+import hashlib
 import threading

-from .__init__ import PY2, WINDOWS
+from .__init__ import WINDOWS
 from .util import IMPLICATIONS, undot, Pebkac, fsdec, fsenc, statdir, nuprint
@@ -22,7 +24,15 @@ class VFS(object):
         self.uadm = uadm  # users who are regular admins
         self.flags = flags  # config switches
         self.nodes = {}  # child nodes
-        self.all_vols = {vpath: self}  # flattened recursive
+        self.histtab = None  # all realpath->histpath
+        self.dbv = None  # closest full/non-jump parent
+
+        if realpath:
+            self.histpath = os.path.join(realpath, ".hist")  # db / thumbcache
+            self.all_vols = {vpath: self}  # flattened recursive
+        else:
+            self.histpath = None
+            self.all_vols = None

     def __repr__(self):
         return "VFS({})".format(
@@ -32,9 +42,12 @@
             )
         )

-    def _trk(self, vol):
-        self.all_vols[vol.vpath] = vol
-        return vol
+    def get_all_vols(self, outdict):
+        if self.realpath:
+            outdict[self.vpath] = self
+
+        for v in self.nodes.values():
+            v.get_all_vols(outdict)

     def add(self, src, dst):
         """get existing, or add new path to the vfs"""
@@ -46,19 +59,19 @@
             name, dst = dst.split("/", 1)
             if name in self.nodes:
                 # exists; do not manipulate permissions
-                return self._trk(self.nodes[name].add(src, dst))
+                return self.nodes[name].add(src, dst)

             vn = VFS(
-                "{}/{}".format(self.realpath, name),
+                os.path.join(self.realpath, name) if self.realpath else None,
                 "{}/{}".format(self.vpath, name).lstrip("/"),
                 self.uread,
                 self.uwrite,
                 self.uadm,
-                self.flags,
+                self._copy_flags(name),
             )
-            self._trk(vn)
+            vn.dbv = self.dbv or self
             self.nodes[name] = vn
-            return self._trk(vn.add(src, dst))
+            return vn.add(src, dst)

         if dst in self.nodes:
             # leaf exists; return as-is
@@ -67,8 +80,26 @@
         # leaf does not exist; create and keep permissions blank
         vp = "{}/{}".format(self.vpath, dst).lstrip("/")
         vn = VFS(src, vp)
+        vn.dbv = self.dbv or self
         self.nodes[dst] = vn
-        return self._trk(vn)
+        return vn
+
+    def _copy_flags(self, name):
+        flags = {k: v for k, v in self.flags.items()}
+        hist = flags.get("hist")
+        if hist and hist != "-":
+            flags["hist"] = "{}/{}".format(hist.rstrip("/"), name)
+
+        return flags
+
+    def bubble_flags(self):
+        if self.dbv:
+            for k, v in self.dbv.flags.items():
+                if k not in ["hist"]:
+                    self.flags[k] = v
+
+        for v in self.nodes.values():
+            v.bubble_flags()

     def _find(self, vpath):
         """return [vfs,remainder]"""
@@ -96,6 +127,7 @@
         ]

     def get(self, vpath, uname, will_read, will_write):
+        # type: (str, str, bool, bool) -> tuple[VFS, str]
        """returns [vfsnode,fs_remainder] if user has the requested permissions"""
         vn, rem = self._find(vpath)
@@ -107,6 +139,15 @@
         return vn, rem

+    def get_dbv(self, vrem):
+        dbv = self.dbv
+        if not dbv:
+            return self, vrem
+
+        vrem = [self.vpath[len(dbv.vpath) + 1 :], vrem]
+        vrem = "/".join([x for x in vrem if x])
+        return dbv, vrem
+
     def canonical(self, rem):
         """returns the canonical path (fully-resolved absolute fs path)"""
         rp = self.realpath
@@ -136,6 +177,7 @@
         return os.path.realpath(rp)

     def ls(self, rem, uname, scandir, incl_wo=False, lstat=False):
+        # type: (str, str, bool, bool, bool) -> tuple[str, str, dict[str, VFS]]
         """return user-readable [fsdir,real,virt] items at vpath"""
         virt_vis = {}  # nodes readable by user
         abspath = self.canonical(rem)
@@ -156,13 +198,21 @@
         return [abspath, real, virt_vis]

-    def walk(self, rel, rem, uname, dots, scandir, lstat=False):
+    def walk(self, rel, rem, seen, uname, dots, scandir, lstat):
         """
         recursively yields from ./rem;
         rel is a unix-style user-defined vpath (not vfs-related)
         """

-        fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, False, lstat)
+        fsroot, vfs_ls, vfs_virt = self.ls(
+            rem, uname, scandir, incl_wo=False, lstat=lstat
+        )
+
+        if seen and not fsroot.startswith(seen[-1]) and fsroot in seen:
+            print("bailing from symlink loop,\n  {}\n  {}".format(seen[-1], fsroot))
+            return
+
+        seen = seen[:] + [fsroot]
         rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
         rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
@@ -177,7 +227,7 @@
             wrel = (rel + "/" + rdir).lstrip("/")
             wrem = (rem + "/" + rdir).lstrip("/")
-            for x in self.walk(wrel, wrem, uname, scandir, lstat):
+            for x in self.walk(wrel, wrem, seen, uname, dots, scandir, lstat):
                 yield x

         for n, vfs in sorted(vfs_virt.items()):
@@ -185,14 +235,16 @@
                 continue

             wrel = (rel + "/" + n).lstrip("/")
-            for x in vfs.walk(wrel, "", uname, scandir, lstat):
+            for x in vfs.walk(wrel, "", seen, uname, dots, scandir, lstat):
                 yield x

     def zipgen(self, vrem, flt, uname, dots, scandir):
         if flt:
             flt = {k: True for k in flt}

-        for vpath, apath, files, rd, vd in self.walk("", vrem, uname, dots, scandir):
+        for vpath, apath, files, rd, vd in self.walk(
+            "", vrem, [], uname, dots, scandir, False
+        ):
             if flt:
                 files = [x for x in files if x[0] in flt]
@@ -228,21 +280,19 @@
         for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
             yield f

-    def user_tree(self, uname, readable=False, writable=False, admin=False):
-        ret = []
-        opt1 = readable and (uname in self.uread or "*" in self.uread)
-        opt2 = writable and (uname in self.uwrite or "*" in self.uwrite)
-        if admin:
-            if opt1 and opt2:
-                ret.append(self.vpath)
-        else:
-            if opt1 or opt2:
-                ret.append(self.vpath)
+    def user_tree(self, uname, readable, writable, admin):
+        is_readable = False
+        if uname in self.uread or "*" in self.uread:
+            readable.append(self.vpath)
+            is_readable = True
+
+        if uname in self.uwrite or "*" in self.uwrite:
+            writable.append(self.vpath)
+            if is_readable:
+                admin.append(self.vpath)

         for _, vn in sorted(self.nodes.items()):
-            ret.extend(vn.user_tree(uname, readable, writable, admin))
-
-        return ret
+            vn.user_tree(uname, readable, writable, admin)


 class AuthSrv(object):
@@ -263,6 +313,7 @@
         self.reload()

     def log(self, msg, c=0):
-        self.log_func("auth", msg, c)
+        if self.log_func:
+            self.log_func("auth", msg, c)

     def laggy_iter(self, iterable):
@@ -388,6 +439,9 @@
                 raise Exception("invalid -v argument: [{}]".format(v_str))

             src, dst, perms = m.groups()
+            if WINDOWS and src.startswith("/"):
+                src = "{}:\\{}".format(src[1], src[3:])
+
             # print("\n".join([src, dst, perms]))
             src = fsdec(os.path.abspath(fsenc(src)))
             dst = dst.strip("/")
@@ -420,7 +474,7 @@
             vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
         elif "" not in mount:
             # there's volumes but no root; make root inaccessible
-            vfs = VFS(os.path.abspath("."), "")
+            vfs = VFS(None, "")
             vfs.flags["d2d"] = True

         maxdepth = 0
@@ -441,6 +495,10 @@
                 v.uwrite = mwrite[dst]
                 v.uadm = madm[dst]
                 v.flags = mflags[dst]
+                v.dbv = None
+
+        vfs.all_vols = {}
+        vfs.get_all_vols(vfs.all_vols)

         missing_users = {}
         for d in [mread, mwrite]:
@@ -457,6 +515,69 @@ class AuthSrv(object):
             )
             raise Exception("invalid config")
promote = []
demote = []
for vol in vfs.all_vols.values():
hid = hashlib.sha512(fsenc(vol.realpath)).digest()
hid = base64.b32encode(hid).decode("ascii").lower()
vflag = vol.flags.get("hist")
if vflag == "-":
pass
elif vflag:
if WINDOWS and vflag.startswith("/"):
vflag = "{}:\\{}".format(vflag[1], vflag[3:])
vol.histpath = vflag
elif self.args.hist:
for nch in range(len(hid)):
hpath = os.path.join(self.args.hist, hid[: nch + 1])
try:
os.makedirs(hpath)
except:
pass
powner = os.path.join(hpath, "owner.txt")
try:
with open(powner, "rb") as f:
owner = f.read().rstrip()
except:
owner = None
me = fsenc(vol.realpath).rstrip()
if owner not in [None, me]:
continue
if owner is None:
with open(powner, "wb") as f:
f.write(me)
vol.histpath = hpath
break
vol.histpath = os.path.realpath(vol.histpath)
if vol.dbv:
if os.path.exists(os.path.join(vol.histpath, "up2k.db")):
promote.append(vol)
vol.dbv = None
else:
demote.append(vol)
# discard jump-vols
for v in demote:
vfs.all_vols.pop(v.vpath)
if promote:
msg = [
"\n the following jump-volumes were generated to assist the vfs.\n As they contain a database (probably from v0.11.11 or older),\n they are promoted to full volumes:"
]
for vol in promote:
msg.append(
" /{} ({}) ({})".format(vol.vpath, vol.realpath, vol.histpath)
)
self.log("\n\n".join(msg) + "\n", c=3)
vfs.histtab = {v.realpath: v.histpath for v in vfs.all_vols.values()}
         all_mte = {}
         errors = False
         for vol in vfs.all_vols.values():
@@ -466,6 +587,10 @@
             if self.args.e2d or "e2ds" in vol.flags:
                 vol.flags["e2d"] = True

+            if self.args.no_hash:
+                if "ehash" not in vol.flags:
+                    vol.flags["dhash"] = True
+
             for k in ["e2t", "e2ts", "e2tsr"]:
                 if getattr(self.args, k):
                     vol.flags[k] = True
@@ -543,6 +668,8 @@
         if errors:
             sys.exit(1)

+        vfs.bubble_flags()
+
         try:
             v, _ = vfs.get("/", "*", False, True)
             if self.warn_anonwrite and os.getcwd() == v.realpath:
@@ -559,3 +686,90 @@

         # import pprint
         # pprint.pprint({"usr": user, "rd": mread, "wr": mwrite, "mnt": mount})
def dbg_ls(self):
users = self.args.ls
vols = "*"
flags = []
try:
users, vols = users.split(",", 1)
except:
pass
try:
vols, flags = vols.split(",", 1)
flags = flags.split(",")
except:
pass
if users == "**":
users = list(self.user.keys()) + ["*"]
else:
users = [users]
for u in users:
if u not in self.user and u != "*":
raise Exception("user not found: " + u)
if vols == "*":
vols = ["/" + x for x in self.vfs.all_vols.keys()]
else:
vols = [vols]
for v in vols:
if not v.startswith("/"):
raise Exception("volumes must start with /")
if v[1:] not in self.vfs.all_vols:
raise Exception("volume not found: " + v)
self.log({"users": users, "vols": vols, "flags": flags})
for k, v in self.vfs.all_vols.items():
self.log("/{}: read({}) write({})".format(k, v.uread, v.uwrite))
flag_v = "v" in flags
flag_ln = "ln" in flags
flag_p = "p" in flags
flag_r = "r" in flags
n_bads = 0
for v in vols:
v = v[1:]
vtop = "/{}/".format(v) if v else "/"
for u in users:
self.log("checking /{} as {}".format(v, u))
try:
vn, _ = self.vfs.get(v, u, True, False)
except:
continue
atop = vn.realpath
g = vn.walk("", "", [], u, True, not self.args.no_scandir, False)
for vpath, apath, files, _, _ in g:
fnames = [n[0] for n in files]
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
vpaths = [vtop + x for x in vpaths]
apaths = [os.path.join(apath, n) for n in fnames]
files = [[vpath + "/", apath + os.sep]] + list(zip(vpaths, apaths))
if flag_ln:
files = [x for x in files if not x[1].startswith(atop + os.sep)]
n_bads += len(files)
if flag_v:
msg = [
'# user "{}", vpath "{}"\n{}'.format(u, vp, ap)
for vp, ap in files
]
else:
msg = [x[1] for x in files]
if msg:
nuprint("\n".join(msg))
if n_bads and flag_p:
raise Exception("found symlink leaving volume, and strict is set")
if not flag_r:
sys.exit(0)
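
to summarize the `--hist` placement logic in the reload() additions above: each volume gets a subfolder of the `--hist` root named after a prefix of the base32'd sha512 of its realpath, extending the prefix until an unclaimed (or already-owned) folder is found. a simplified standalone sketch, without the `hist=` volume-flag override and the py2-compatible error handling:

    import base64
    import hashlib
    import os

    def pick_histpath(histroot, realpath):
        hid = hashlib.sha512(realpath.encode("utf-8")).digest()
        hid = base64.b32encode(hid).decode("ascii").lower()
        me = realpath.encode("utf-8")
        for nch in range(1, len(hid) + 1):
            hpath = os.path.join(histroot, hid[:nch])
            os.makedirs(hpath, exist_ok=True)
            powner = os.path.join(hpath, "owner.txt")
            try:
                with open(powner, "rb") as f:
                    owner = f.read().rstrip()
            except OSError:
                owner = None

            if owner not in (None, me):
                continue  # prefix taken by another volume; try a longer one

            if owner is None:
                with open(powner, "wb") as f:
                    f.write(me)  # claim this folder for the volume

            return hpath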

copyparty/broker_mp.py

@@ -44,7 +44,9 @@ class BrokerMp(object):
             proc.clients = {}
             proc.workload = 0

-            thr = threading.Thread(target=self.collector, args=(proc,))
+            thr = threading.Thread(
+                target=self.collector, args=(proc,), name="mp-collector"
+            )
             thr.daemon = True
             thr.start()
@@ -52,14 +54,19 @@
             proc.start()

         if not self.args.q:
-            thr = threading.Thread(target=self.debug_load_balancer)
+            thr = threading.Thread(
+                target=self.debug_load_balancer, name="mp-dbg-loadbalancer"
+            )
             thr.daemon = True
             thr.start()

     def shutdown(self):
         self.log("broker", "shutting down")
-        for proc in self.procs:
-            thr = threading.Thread(target=proc.q_pend.put([0, "shutdown", []]))
+        for n, proc in enumerate(self.procs):
+            thr = threading.Thread(
+                target=proc.q_pend.put([0, "shutdown", []]),
+                name="mp-shutdown-{}-{}".format(n, len(self.procs)),
+            )
             thr.start()

         with self.mutex:

copyparty/broker_mpw.py

@@ -1,5 +1,6 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals
+from copyparty.authsrv import AuthSrv

 import sys
 import time
@@ -27,20 +28,23 @@ class MpWorker(object):
         self.retpend = {}
         self.retpend_mutex = threading.Lock()
         self.mutex = threading.Lock()
-        self.workload_thr_active = False
+        self.workload_thr_alive = False

         # we inherited signal_handler from parent,
         # replace it with something harmless
         if not FAKE_MP:
             signal.signal(signal.SIGINT, self.signal_handler)

+        # starting to look like a good idea
+        self.asrv = AuthSrv(args, None, False)
+
         # instantiate all services here (TODO: inheritance?)
-        self.httpsrv = HttpSrv(self)
+        self.httpsrv = HttpSrv(self, True)
         self.httpsrv.disconnect_func = self.httpdrop

         # on winxp and some other platforms,
         # use thr.join() to block all signals
-        thr = threading.Thread(target=self.main)
+        thr = threading.Thread(target=self.main, name="mpw-main")
         thr.daemon = True
         thr.start()
         thr.join()
@@ -79,9 +83,11 @@ class MpWorker(object):
                 self.httpsrv.accept(sck, addr)

                 with self.mutex:
-                    if not self.workload_thr_active:
+                    if not self.workload_thr_alive:
                         self.workload_thr_alive = True
-                        thr = threading.Thread(target=self.thr_workload)
+                        thr = threading.Thread(
+                            target=self.thr_workload, name="mpw-workload"
+                        )
                         thr.daemon = True
                         thr.start()

copyparty/broker_thr.py

@@ -3,6 +3,7 @@ from __future__ import print_function, unicode_literals

 import threading

+from .authsrv import AuthSrv
 from .httpsrv import HttpSrv
 from .broker_util import ExceptionalQueue, try_exec
@@ -14,6 +15,7 @@ class BrokerThr(object):
         self.hub = hub
         self.log = hub.log
         self.args = hub.args
+        self.asrv = hub.asrv

         self.mutex = threading.Lock()

copyparty/httpcli.py

@@ -16,6 +16,7 @@ import calendar

 from .__init__ import E, PY2, WINDOWS, ANYWIN
 from .util import *  # noqa  # pylint: disable=unused-wildcard-import
+from .authsrv import AuthSrv
 from .szip import StreamZip
 from .star import StreamTar
@@ -35,12 +36,13 @@ class HttpCli(object):
     def __init__(self, conn):
         self.t0 = time.time()
         self.conn = conn
-        self.s = conn.s
-        self.sr = conn.sr
+        self.s = conn.s  # type: socket
+        self.sr = conn.sr  # type: Unrecv
         self.ip = conn.addr[0]
-        self.addr = conn.addr
+        self.addr = conn.addr  # type: tuple[str, int]
         self.args = conn.args
-        self.auth = conn.auth
+        self.is_mp = conn.is_mp
+        self.asrv = conn.asrv  # type: AuthSrv
         self.ico = conn.ico
         self.thumbcli = conn.thumbcli
         self.log_func = conn.log_func
@@ -102,9 +104,20 @@
         v = self.headers.get("connection", "").lower()
         self.keepalive = not v.startswith("close") and self.http_ver != "HTTP/1.0"

-        v = self.headers.get("x-forwarded-for", None)
-        if v is not None and self.conn.addr[0] in ["127.0.0.1", "::1"]:
-            self.ip = v.split(",")[0]
+        n = self.args.rproxy
+        if n:
+            v = self.headers.get("x-forwarded-for")
+            if v and self.conn.addr[0] in ["127.0.0.1", "::1"]:
+                if n > 0:
+                    n -= 1
+
+                vs = v.split(",")
+                try:
+                    self.ip = vs[n].strip()
+                except:
+                    self.ip = vs[-1].strip()
+                    self.log("rproxy={} oob x-fwd {}".format(self.args.rproxy, v), c=3)

         self.log_src = self.conn.set_rproxy(self.ip)

         if self.args.ihead:
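
the `--rproxy` depth logic above picks one entry out of the x-forwarded-for chain; a standalone sketch of just the selection (the check that the tcp peer is actually a local reverse-proxy is omitted here):

    def pick_client_ip(xff, rproxy, tcp_ip):
        # xff: comma-separated x-forwarded-for, origin first, closest proxy last
        # rproxy: 0 = raw tcp source, 1 = origin, 2/3 = further down the chain,
        #         negative values count back from the closest proxy
        if not rproxy or not xff:
            return tcp_ip

        n = rproxy - 1 if rproxy > 0 else rproxy
        vs = xff.split(",")
        try:
            return vs[n].strip()
        except IndexError:
            return vs[-1].strip()

    # example: --rproxy 1 (the default) keeps the visitor's own address
    print(pick_client_ip("203.0.113.7, 198.51.100.2, 127.0.0.1", 1, "127.0.0.1"))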
@@ -152,11 +165,9 @@
         self.vpath = unquotep(vpath)

         pwd = uparam.get("pw")
-        self.uname = self.auth.iuser.get(pwd, "*")
-        if self.uname:
-            self.rvol = self.auth.vfs.user_tree(self.uname, readable=True)
-            self.wvol = self.auth.vfs.user_tree(self.uname, writable=True)
-            self.avol = self.auth.vfs.user_tree(self.uname, True, True, True)
+        self.uname = self.asrv.iuser.get(pwd, "*")
+        self.rvol, self.wvol, self.avol = [[], [], []]
+        self.asrv.vfs.user_tree(self.uname, self.rvol, self.wvol, self.avol)

         ua = self.headers.get("user-agent", "")
         self.is_rclone = ua.startswith("rclone/")
@@ -258,7 +269,14 @@
         return "?" + "&amp;".join(r)

     def redirect(
-        self, vpath, suf="", msg="aight", flavor="go to", click=True, use302=False
+        self,
+        vpath,
+        suf="",
+        msg="aight",
+        flavor="go to",
+        click=True,
+        status=200,
+        use302=False,
     ):
         html = self.j2(
             "msg",
@@ -273,7 +291,7 @@
             h = {"Location": "/" + vpath, "Cache-Control": "no-cache"}
             self.reply(html, status=302, headers=h)
         else:
-            self.reply(html)
+            self.reply(html, status=status)

     def handle_get(self):
         if self.do_log:
@@ -314,9 +332,7 @@
             self.redirect(vpath, flavor="redirecting to", use302=True)
             return True

-        self.readable, self.writable = self.conn.auth.vfs.can_access(
-            self.vpath, self.uname
-        )
+        self.readable, self.writable = self.asrv.vfs.can_access(self.vpath, self.uname)
         if not self.readable and not self.writable:
             if self.vpath:
                 self.log("inaccessible: [{}]".format(self.vpath))
@@ -433,7 +449,7 @@
     def dump_to_file(self):
         reader, remains = self.get_body_reader()
-        vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
+        vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
         fdir = os.path.join(vfs.realpath, rem)

         addr = self.ip.replace(":", ".")
@@ -443,8 +459,10 @@
         with open(fsenc(path), "wb", 512 * 1024) as f:
             post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)

+        vfs, vrem = vfs.get_dbv(rem)
         self.conn.hsrv.broker.put(
-            False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fn
+            False, "up2k.hash_file", vfs.realpath, vfs.flags, vrem, fn
         )

         return post_sz, sha_b64, remains, path
@@ -500,7 +518,7 @@
         if v is None:
             raise Pebkac(422, "need zip or tar keyword")

-        vn, rem = self.auth.vfs.get(self.vpath, self.uname, True, False)
+        vn, rem = self.asrv.vfs.get(self.vpath, self.uname, True, False)
         items = self.parser.require("files", 1024 * 1024)
         if not items:
             raise Pebkac(422, "need files list")
@@ -508,6 +526,7 @@
         items = items.replace("\r", "").split("\n")
         items = [unquotep(x) for x in items if items]

+        self.parser.drop()
         return self.tx_zip(k, v, vn, rem, items, self.args.ed)

     def handle_post_json(self):
@@ -549,13 +568,14 @@
             self.vpath = "/".join([self.vpath, sub]).strip("/")
             body["name"] = name

-        vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
+        vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
+        dbv, vrem = vfs.get_dbv(rem)

-        body["vtop"] = vfs.vpath
-        body["ptop"] = vfs.realpath
-        body["prel"] = rem
+        body["vtop"] = dbv.vpath
+        body["ptop"] = dbv.realpath
+        body["prel"] = vrem
         body["addr"] = self.ip
-        body["vcfg"] = vfs.flags
+        body["vcfg"] = dbv.flags

         if sub:
             try:
@@ -577,8 +597,14 @@
     def handle_search(self, body):
         vols = []
+        seen = {}
         for vtop in self.rvol:
-            vfs, _ = self.conn.auth.vfs.get(vtop, self.uname, True, False)
+            vfs, _ = self.asrv.vfs.get(vtop, self.uname, True, False)
+            vfs = vfs.dbv or vfs
+            if vfs in seen:
+                continue
+
+            seen[vfs] = True
             vols.append([vfs.vpath, vfs.realpath, vfs.flags])

         idx = self.conn.get_u2idx()
@@ -634,8 +660,8 @@
         except KeyError:
             raise Pebkac(400, "need hash and wark headers for binary POST")

-        vfs, _ = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
-        ptop = vfs.realpath
+        vfs, _ = self.asrv.vfs.get(self.vpath, self.uname, False, True)
+        ptop = (vfs.dbv or vfs).realpath

         x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", ptop, wark, chash)
         response = x.get()
@@ -707,7 +733,7 @@
         pwd = self.parser.require("cppwd", 64)
         self.parser.drop()

-        if pwd in self.auth.iuser:
+        if pwd in self.asrv.iuser:
             msg = "login ok"
             dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365)
             exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
@@ -726,7 +752,7 @@
         self.parser.drop()

         nullwrite = self.args.nw
-        vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
+        vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
         self._assert_safe_rem(rem)

         sanitized = sanitize_fn(new_dir)
@@ -755,7 +781,7 @@
         self.parser.drop()

         nullwrite = self.args.nw
-        vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
+        vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
         self._assert_safe_rem(rem)

         if not new_file.endswith(".md"):
@@ -779,7 +805,7 @@
     def handle_plain_upload(self):
         nullwrite = self.args.nw
-        vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
+        vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
         self._assert_safe_rem(rem)

         files = []
@@ -816,8 +842,14 @@
                     raise Pebkac(400, "empty files in post")

                 files.append([sz, sha512_hex, p_file, fname])
+                dbv, vrem = vfs.get_dbv(rem)
                 self.conn.hsrv.broker.put(
-                    False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fname
+                    False,
+                    "up2k.hash_file",
+                    dbv.realpath,
+                    dbv.flags,
+                    vrem,
+                    fname,
                 )
                 self.conn.nbyte += sz
@@ -847,18 +879,36 @@ class HttpCli(object):
status = "OK" status = "OK"
if errmsg: if errmsg:
self.log(errmsg) self.log(errmsg)
errmsg = "ERROR: " + errmsg
status = "ERROR" status = "ERROR"
msg = "{} // {} bytes // {:.3f} MiB/s\n".format(status, sz_total, spd) msg = "{} // {} bytes // {:.3f} MiB/s\n".format(status, sz_total, spd)
jmsg = {"status": status, "sz": sz_total, "mbps": round(spd, 3), "files": []}
if errmsg:
msg += errmsg + "\n"
jmsg["error"] = errmsg
errmsg = "ERROR: " + errmsg
for sz, sha512, ofn, lfn in files: for sz, sha512, ofn, lfn in files:
vpath = self.vpath + "/" + lfn vpath = (self.vpath + "/" if self.vpath else "") + lfn
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format( msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format(
sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True) sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True)
) )
# truncated SHA-512 prevents length extension attacks; # truncated SHA-512 prevents length extension attacks;
# using SHA-512/224, optionally SHA-512/256 = :64 # using SHA-512/224, optionally SHA-512/256 = :64
jpart = {
"url": "{}://{}/{}".format(
"https" if self.tls else "http",
self.headers.get("host", "copyparty"),
vpath,
),
"sha512": sha512[:56],
"sz": sz,
"fn": lfn,
"fn_orig": ofn,
"path": vpath,
}
jmsg["files"].append(jpart)
vspd = self._spd(sz_total, False) vspd = self._spd(sz_total, False)
self.log("{} {}".format(vspd, msg)) self.log("{} {}".format(vspd, msg))
@@ -870,7 +920,22 @@ class HttpCli(object):
ft = "{}\n{}\n{}\n".format(ft, msg.rstrip(), errmsg) ft = "{}\n{}\n{}\n".format(ft, msg.rstrip(), errmsg)
f.write(ft.encode("utf-8")) f.write(ft.encode("utf-8"))
self.redirect(self.vpath, msg=msg, flavor="return to", click=False) status = 400 if errmsg else 200
if "j" in self.uparam:
jtxt = json.dumps(jmsg, indent=2, sort_keys=True).encode("utf-8", "replace")
self.reply(jtxt, mime="application/json", status=status)
else:
self.redirect(
self.vpath,
msg=msg,
flavor="return to",
click=False,
status=status,
)
if errmsg:
return False
self.parser.drop() self.parser.drop()
return True return True
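
Editor's note: the hunk above adds a machine-readable reply to handle_plain_upload; when the request carries the `j` query parameter, the jmsg dict built earlier is returned as application/json (status 400 on error) instead of the usual redirect. A minimal client-side sketch, not part of the diff, using only the field names visible in jmsg/jpart above:

    import json

    def check_upload_reply(body):
        # body = raw bytes of the ?j response from a plain (bput) upload
        r = json.loads(body)
        if r["status"] != "OK":
            raise RuntimeError(r.get("error", "upload failed"))
        for f in r["files"]:
            # sha512 is truncated to 56 hex chars (see the length-extension note above)
            print("{} -> {} ({} bytes) {}".format(f["fn_orig"], f["url"], f["sz"], f["sha512"]))
        return r["sz"], r["mbps"]
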
@@ -881,7 +946,7 @@ class HttpCli(object):
raise Pebkac(400, "could not read lastmod from request") raise Pebkac(400, "could not read lastmod from request")
nullwrite = self.args.nw nullwrite = self.args.nw
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True) vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem) self._assert_safe_rem(rem)
# TODO: # TODO:
@@ -974,6 +1039,8 @@ class HttpCli(object):
cli_lastmod = self.headers.get("if-modified-since") cli_lastmod = self.headers.get("if-modified-since")
if cli_lastmod: if cli_lastmod:
try: try:
# some browser append "; length=573"
cli_lastmod = cli_lastmod.split(";")[0].strip()
cli_dt = time.strptime(cli_lastmod, HTTP_TS_FMT) cli_dt = time.strptime(cli_lastmod, HTTP_TS_FMT)
cli_ts = calendar.timegm(cli_dt) cli_ts = calendar.timegm(cli_dt)
return file_lastmod, int(file_ts) > int(cli_ts) return file_lastmod, int(file_ts) > int(cli_ts)
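
Editor's note: the two added lines make the If-Modified-Since check tolerant of the "; length=..." suffix that some browsers (old IE, notably) append to the timestamp. A tiny sketch of the same parsing, assuming HTTP_TS_FMT is the RFC-1123 strftime pattern used for the cookie expiry above:

    import calendar, time

    HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT"  # assumed to match the server's constant

    def parse_if_modified_since(value):
        # "Sat, 12 Jun 2021 20:25:08 GMT; length=573" -> unix timestamp
        value = value.split(";")[0].strip()
        return calendar.timegm(time.strptime(value, HTTP_TS_FMT))
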
@@ -1143,7 +1210,8 @@ class HttpCli(object):
if use_sendfile: if use_sendfile:
remains = sendfile_kern(lower, upper, f, self.s) remains = sendfile_kern(lower, upper, f, self.s)
else: else:
remains = sendfile_py(lower, upper, f, self.s) actor = self.conn if self.is_mp else None
remains = sendfile_py(lower, upper, f, self.s, actor)
if remains > 0: if remains > 0:
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m" logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
@@ -1316,11 +1384,13 @@ class HttpCli(object):
for y in [self.rvol, self.wvol, self.avol] for y in [self.rvol, self.wvol, self.avol]
] ]
vstate = {}
if self.avol and not self.args.no_rescan: if self.avol and not self.args.no_rescan:
x = self.conn.hsrv.broker.put(True, "up2k.get_volstate") x = self.conn.hsrv.broker.put(True, "up2k.get_state")
vstate = json.loads(x.get()) vs = json.loads(x.get())
vstate = {("/" + k).rstrip("/") + "/": v for k, v in vstate.items()} vstate = {("/" + k).rstrip("/") + "/": v for k, v in vs["volstate"].items()}
else:
vstate = {}
vs = {"scanning": None, "hashq": None, "tagq": None, "mtpq": None}
html = self.j2( html = self.j2(
"splash", "splash",
@@ -1329,6 +1399,10 @@ class HttpCli(object):
wvol=wvol, wvol=wvol,
avol=avol, avol=avol,
vstate=vstate, vstate=vstate,
scanning=vs["scanning"],
hashq=vs["hashq"],
tagq=vs["tagq"],
mtpq=vs["mtpq"],
url_suf=suf, url_suf=suf,
) )
self.reply(html.encode("utf-8"), headers=NO_STORE) self.reply(html.encode("utf-8"), headers=NO_STORE)
@@ -1341,9 +1415,10 @@ class HttpCli(object):
if self.args.no_rescan: if self.args.no_rescan:
raise Pebkac(403, "disabled by argv") raise Pebkac(403, "disabled by argv")
vn, _ = self.auth.vfs.get(self.vpath, self.uname, True, True) vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True)
args = [self.asrv.vfs.all_vols, [vn.vpath]]
args = [self.auth.vfs.all_vols, [vn.vpath]]
x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args) x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args)
x = x.get() x = x.get()
if not x: if not x:
@@ -1359,17 +1434,8 @@ class HttpCli(object):
if self.args.no_stack: if self.args.no_stack:
raise Pebkac(403, "disabled by argv") raise Pebkac(403, "disabled by argv")
ret = [] ret = "<pre>{}\n{}".format(time.time(), alltrace())
names = dict([(t.ident, t.name) for t in threading.enumerate()]) self.reply(ret.encode("utf-8"))
for tid, stack in sys._current_frames().items():
ret.append("\n\n# {} ({:x})".format(names.get(tid), tid))
for fn, lno, name, line in traceback.extract_stack(stack):
ret.append('File: "{}", line {}, in {}'.format(fn, lno, name))
if line:
ret.append(" " + str(line.strip()))
ret = ("<pre>" + "\n".join(ret)).encode("utf-8")
self.reply(ret)
def tx_tree(self): def tx_tree(self):
top = self.uparam["tree"] or "" top = self.uparam["tree"] or ""
@@ -1399,9 +1465,9 @@ class HttpCli(object):
ret["k" + quotep(excl)] = sub ret["k" + quotep(excl)] = sub
try: try:
vn, rem = self.auth.vfs.get(top, self.uname, True, False) vn, rem = self.asrv.vfs.get(top, self.uname, True, False)
fsroot, vfs_ls, vfs_virt = vn.ls( fsroot, vfs_ls, vfs_virt = vn.ls(
rem, self.uname, not self.args.no_scandir, True rem, self.uname, not self.args.no_scandir, incl_wo=True
) )
except: except:
vfs_ls = [] vfs_ls = []
@@ -1440,31 +1506,47 @@ class HttpCli(object):
vpnodes.append([quotep(vpath) + "/", html_escape(node, crlf=True)]) vpnodes.append([quotep(vpath) + "/", html_escape(node, crlf=True)])
vn, rem = self.auth.vfs.get( vn, rem = self.asrv.vfs.get(
self.vpath, self.uname, self.readable, self.writable self.vpath, self.uname, self.readable, self.writable
) )
abspath = vn.canonical(rem) abspath = vn.canonical(rem)
dbv, vrem = vn.get_dbv(rem)
try: try:
st = os.stat(fsenc(abspath)) st = os.stat(fsenc(abspath))
except: except:
raise Pebkac(404) raise Pebkac(404)
if self.readable and not stat.S_ISDIR(st.st_mode): if self.readable:
if rem.startswith(".hist/up2k."): if rem.startswith(".hist/up2k."):
raise Pebkac(403) raise Pebkac(403)
is_dir = stat.S_ISDIR(st.st_mode)
th_fmt = self.uparam.get("th") th_fmt = self.uparam.get("th")
if th_fmt is not None: if th_fmt is not None:
if is_dir:
for fn in ["folder.png", "folder.jpg"]:
fp = os.path.join(abspath, fn)
if os.path.exists(fp):
vrem = "{}/{}".format(vrem.rstrip("/"), fn)
is_dir = False
break
if is_dir:
return self.tx_ico("a.folder")
thp = None thp = None
if self.thumbcli: if self.thumbcli:
thp = self.thumbcli.get(vn.realpath, rem, int(st.st_mtime), th_fmt) thp = self.thumbcli.get(
dbv.realpath, vrem, int(st.st_mtime), th_fmt
)
if thp: if thp:
return self.tx_file(thp) return self.tx_file(thp)
return self.tx_ico(rem) return self.tx_ico(rem)
if not is_dir:
if abspath.endswith(".md") and "raw" not in self.uparam: if abspath.endswith(".md") and "raw" not in self.uparam:
return self.tx_md(abspath) return self.tx_md(abspath)
@@ -1568,7 +1650,7 @@ class HttpCli(object):
return self.tx_zip(k, v, vn, rem, [], self.args.ed) return self.tx_zip(k, v, vn, rem, [], self.args.ed)
fsroot, vfs_ls, vfs_virt = vn.ls( fsroot, vfs_ls, vfs_virt = vn.ls(
rem, self.uname, not self.args.no_scandir, True rem, self.uname, not self.args.no_scandir, incl_wo=True
) )
stats = {k: v for k, v in vfs_ls} stats = {k: v for k, v in vfs_ls}
vfs_ls = [x[0] for x in vfs_ls] vfs_ls = [x[0] for x in vfs_ls]
@@ -1601,7 +1683,7 @@ class HttpCli(object):
icur = None icur = None
if "e2t" in vn.flags: if "e2t" in vn.flags:
idx = self.conn.get_u2idx() idx = self.conn.get_u2idx()
icur = idx.get_cur(vn.realpath) icur = idx.get_cur(dbv.realpath)
dirs = [] dirs = []
files = [] files = []
@@ -1669,6 +1751,9 @@ class HttpCli(object):
rd = f["rd"] rd = f["rd"]
del f["rd"] del f["rd"]
if icur: if icur:
if vn != dbv:
_, rd = vn.get_dbv(rd)
q = "select w from up where rd = ? and fn = ?" q = "select w from up where rd = ? and fn = ?"
try: try:
r = icur.execute(q, (rd, fn)).fetchone() r = icur.execute(q, (rd, fn)).fetchone()
@@ -1709,9 +1794,13 @@ class HttpCli(object):
j2a["files"] = dirs + files j2a["files"] = dirs + files
j2a["logues"] = logues j2a["logues"] = logues
j2a["taglist"] = taglist j2a["taglist"] = taglist
if "mte" in vn.flags: if "mte" in vn.flags:
j2a["tag_order"] = json.dumps(vn.flags["mte"].split(",")) j2a["tag_order"] = json.dumps(vn.flags["mte"].split(","))
if self.args.css_browser:
j2a["css"] = self.args.css_browser
html = self.j2(tpl, **j2a) html = self.j2(tpl, **j2a)
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE) self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
return True return True

View File

@@ -34,7 +34,8 @@ class HttpConn(object):
self.hsrv = hsrv self.hsrv = hsrv
self.args = hsrv.args self.args = hsrv.args
self.auth = hsrv.auth self.asrv = hsrv.asrv
self.is_mp = hsrv.is_mp
self.cert_path = hsrv.cert_path self.cert_path = hsrv.cert_path
enth = HAVE_PIL and not self.args.no_thumb enth = HAVE_PIL and not self.args.no_thumb
@@ -70,7 +71,7 @@ class HttpConn(object):
def get_u2idx(self): def get_u2idx(self):
if not self.u2idx: if not self.u2idx:
self.u2idx = U2idx(self.args, self.log_func) self.u2idx = U2idx(self)
return self.u2idx return self.u2idx
@@ -174,6 +175,11 @@ class HttpConn(object):
self.sr = Unrecv(self.s) self.sr = Unrecv(self.s)
while True: while True:
if self.is_mp:
self.workload += 50
if self.workload >= 2 ** 31:
self.workload = 100
cli = HttpCli(self) cli = HttpCli(self)
if not cli.run(): if not cli.run():
return return

View File

@@ -25,8 +25,8 @@ except ImportError:
sys.exit(1) sys.exit(1)
from .__init__ import E, MACOS from .__init__ import E, MACOS
from .httpconn import HttpConn
from .authsrv import AuthSrv from .authsrv import AuthSrv
from .httpconn import HttpConn
class HttpSrv(object): class HttpSrv(object):
@@ -35,10 +35,12 @@ class HttpSrv(object):
relying on MpSrv for performance (HttpSrv is just plain threads) relying on MpSrv for performance (HttpSrv is just plain threads)
""" """
def __init__(self, broker): def __init__(self, broker, is_mp=False):
self.broker = broker self.broker = broker
self.is_mp = is_mp
self.args = broker.args self.args = broker.args
self.log = broker.log self.log = broker.log
self.asrv = broker.asrv
self.disconnect_func = None self.disconnect_func = None
self.mutex = threading.Lock() self.mutex = threading.Lock()
@@ -46,7 +48,6 @@ class HttpSrv(object):
self.clients = {} self.clients = {}
self.workload = 0 self.workload = 0
self.workload_thr_alive = False self.workload_thr_alive = False
self.auth = AuthSrv(self.args, self.log)
env = jinja2.Environment() env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web")) env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
@@ -66,7 +67,11 @@ class HttpSrv(object):
if self.args.log_conn: if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30") self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30")
thr = threading.Thread(target=self.thr_client, args=(sck, addr)) thr = threading.Thread(
target=self.thr_client,
args=(sck, addr),
name="httpsrv-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]),
)
thr.daemon = True thr.daemon = True
thr.start() thr.start()
@@ -84,11 +89,14 @@ class HttpSrv(object):
cli = HttpConn(sck, addr, self) cli = HttpConn(sck, addr, self)
with self.mutex: with self.mutex:
self.clients[cli] = 0 self.clients[cli] = 0
self.workload += 50
if self.is_mp:
self.workload += 50
if not self.workload_thr_alive: if not self.workload_thr_alive:
self.workload_thr_alive = True self.workload_thr_alive = True
thr = threading.Thread(target=self.thr_workload) thr = threading.Thread(
target=self.thr_workload, name="httpsrv-workload"
)
thr.daemon = True thr.daemon = True
thr.start() thr.start()
@@ -99,6 +107,7 @@ class HttpSrv(object):
cli.run() cli.run()
finally: finally:
sck = cli.s
if self.args.log_conn: if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30") self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30")

View File

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import hashlib import hashlib
import colorsys import colorsys

View File

@@ -1,7 +1,6 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import re
import os import os
import sys import sys
import json import json

View File

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os import os
import tarfile import tarfile
import threading import threading
@@ -42,7 +45,7 @@ class StreamTar(object):
fmt = tarfile.GNU_FORMAT fmt = tarfile.GNU_FORMAT
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt) self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt)
w = threading.Thread(target=self._gen) w = threading.Thread(target=self._gen, name="star-gen")
w.daemon = True w.daemon = True
w.start() w.start()

View File

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os import os
import time import time
import tempfile import tempfile

View File

@@ -37,12 +37,13 @@ class SvcHub(object):
self.log = self._log_disabled if args.q else self._log_enabled self.log = self._log_disabled if args.q else self._log_enabled
# jank goes here
auth = AuthSrv(self.args, self.log, False)
# initiate all services to manage # initiate all services to manage
self.asrv = AuthSrv(self.args, self.log, False)
if args.ls:
self.asrv.dbg_ls()
self.tcpsrv = TcpSrv(self) self.tcpsrv = TcpSrv(self)
self.up2k = Up2k(self, auth.vfs.all_vols) self.up2k = Up2k(self)
self.thumbsrv = None self.thumbsrv = None
if not args.no_thumb: if not args.no_thumb:
@@ -52,7 +53,7 @@ class SvcHub(object):
msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old" msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old"
self.log("thumb", msg, c=3) self.log("thumb", msg, c=3)
self.thumbsrv = ThumbSrv(self, auth.vfs.all_vols) self.thumbsrv = ThumbSrv(self)
else: else:
msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n" msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n"
self.log( self.log(
@@ -69,7 +70,7 @@ class SvcHub(object):
self.broker = Broker(self) self.broker = Broker(self)
def run(self): def run(self):
thr = threading.Thread(target=self.tcpsrv.run) thr = threading.Thread(target=self.tcpsrv.run, name="svchub-main")
thr.daemon = True thr.daemon = True
thr.start() thr.start()
@@ -93,9 +94,11 @@ class SvcHub(object):
break break
if n == 3: if n == 3:
print("waiting for thumbsrv...") print("waiting for thumbsrv (10sec)...")
print("nailed it") print("nailed it", end="")
finally:
print("\033[0m")
def _log_disabled(self, src, msg, c=0): def _log_disabled(self, src, msg, c=0):
pass pass

View File

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os import os
import time import time
import zlib import zlib

View File

@@ -1,5 +1,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os import os
import time
from .util import Cooldown from .util import Cooldown
from .th_srv import thumb_path, THUMBABLE, FMT_FF from .th_srv import thumb_path, THUMBABLE, FMT_FF
@@ -9,6 +11,7 @@ class ThumbCli(object):
def __init__(self, broker): def __init__(self, broker):
self.broker = broker self.broker = broker
self.args = broker.args self.args = broker.args
self.asrv = broker.asrv
# cache on both sides for less broker spam # cache on both sides for less broker spam
self.cooldown = Cooldown(self.args.th_poke) self.cooldown = Cooldown(self.args.th_poke)
@@ -18,16 +21,19 @@ class ThumbCli(object):
if ext not in THUMBABLE: if ext not in THUMBABLE:
return None return None
if self.args.no_vthumb and ext in FMT_FF: is_vid = ext in FMT_FF
if is_vid and self.args.no_vthumb:
return None return None
if fmt == "j" and self.args.th_no_jpg: if fmt == "j" and self.args.th_no_jpg:
fmt = "w" fmt = "w"
if fmt == "w" and self.args.th_no_webp: if fmt == "w":
if self.args.th_no_webp or (is_vid and self.args.th_ff_jpg):
fmt = "j" fmt = "j"
tpath = thumb_path(ptop, rem, mtime, fmt) histpath = self.asrv.vfs.histtab[ptop]
tpath = thumb_path(histpath, rem, mtime, fmt)
ret = None ret = None
try: try:
st = os.stat(tpath) st = os.stat(tpath)

View File

@@ -1,5 +1,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os import os
import sys
import time import time
import shutil import shutil
import base64 import base64
@@ -8,7 +10,7 @@ import threading
import subprocess as sp import subprocess as sp
from .__init__ import PY2 from .__init__ import PY2
from .util import fsenc, mchkcmd, Queue, Cooldown, BytesIO from .util import fsenc, runcmd, Queue, Cooldown, BytesIO, min_ex
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
@@ -51,7 +53,7 @@ except:
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html # https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# ffmpeg -formats # ffmpeg -formats
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm" FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv" FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
if HAVE_HEIF: if HAVE_HEIF:
FMT_PIL += " heif heifs heic heics" FMT_PIL += " heif heifs heic heics"
@@ -71,7 +73,7 @@ if HAVE_FFMPEG and HAVE_FFPROBE:
THUMBABLE.update(FMT_FF) THUMBABLE.update(FMT_FF)
def thumb_path(ptop, rem, mtime, fmt): def thumb_path(histpath, rem, mtime, fmt):
# base16 = 16 = 256 # base16 = 16 = 256
# b64-lc = 38 = 1444 # b64-lc = 38 = 1444
# base64 = 64 = 4096 # base64 = 64 = 4096
@@ -92,16 +94,15 @@ def thumb_path(ptop, rem, mtime, fmt):
h = hashlib.sha512(fsenc(fn)).digest()[:24] h = hashlib.sha512(fsenc(fn)).digest()[:24]
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24] fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
return "{}/.hist/th/{}/{}.{:x}.{}".format( return "{}/th/{}/{}.{:x}.{}".format(
ptop, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg" histpath, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
) )
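
Editor's note: with this change the thumbnail cache is keyed on a per-volume histpath from vfs.histtab rather than the volume root, so thumbnails can live outside the volume (the ".hist" segment moves into histpath). A rough illustration of the resulting layout; the rd/fn hashing above the shown context is assumed, and all values here are made up:

    # hypothetical inputs, for illustration only
    histpath = "/srv/.hist/music"        # from vfs.histtab[ptop]
    rd, fn = "a1/b2c3", "dGh1bWJuYW1l"   # already b64-hashed earlier in thumb_path
    mtime, fmt = 1623456789, "w"

    tpath = "{}/th/{}/{}.{:x}.{}".format(
        histpath, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
    )
    # -> /srv/.hist/music/th/a1/b2c3/dGh1bWJuYW1l.60c3fc15.webp
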
class ThumbSrv(object): class ThumbSrv(object):
def __init__(self, hub, vols): def __init__(self, hub):
self.hub = hub self.hub = hub
self.vols = [v.realpath for v in vols.values()] self.asrv = hub.asrv
self.args = hub.args self.args = hub.args
self.log_func = hub.log self.log_func = hub.log
@@ -114,8 +115,10 @@ class ThumbSrv(object):
self.stopping = False self.stopping = False
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4 self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
self.q = Queue(self.nthr * 4) self.q = Queue(self.nthr * 4)
for _ in range(self.nthr): for n in range(self.nthr):
t = threading.Thread(target=self.worker) t = threading.Thread(
target=self.worker, name="thumb-{}-{}".format(n, self.nthr)
)
t.daemon = True t.daemon = True
t.start() t.start()
@@ -129,9 +132,9 @@ class ThumbSrv(object):
msg = "cannot create video thumbnails because some of the required programs are not available: " msg = "cannot create video thumbnails because some of the required programs are not available: "
msg += ", ".join(missing) msg += ", ".join(missing)
self.log(msg, c=1) self.log(msg, c=3)
t = threading.Thread(target=self.cleaner) t = threading.Thread(target=self.cleaner, name="thumb-cleaner")
t.daemon = True t.daemon = True
t.start() t.start()
@@ -148,9 +151,11 @@ class ThumbSrv(object):
return not self.nthr return not self.nthr
def get(self, ptop, rem, mtime, fmt): def get(self, ptop, rem, mtime, fmt):
tpath = thumb_path(ptop, rem, mtime, fmt) histpath = self.asrv.vfs.histtab[ptop]
tpath = thumb_path(histpath, rem, mtime, fmt)
abspath = os.path.join(ptop, rem) abspath = os.path.join(ptop, rem)
cond = threading.Condition() cond = threading.Condition(self.mutex)
do_conv = False
with self.mutex: with self.mutex:
try: try:
self.busy[tpath].append(cond) self.busy[tpath].append(cond)
@@ -168,6 +173,9 @@ class ThumbSrv(object):
f.write(fsenc(os.path.dirname(abspath))) f.write(fsenc(os.path.dirname(abspath)))
self.busy[tpath] = [cond] self.busy[tpath] = [cond]
do_conv = True
if do_conv:
self.q.put([abspath, tpath]) self.q.put([abspath, tpath])
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6) self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
@@ -177,7 +185,7 @@ class ThumbSrv(object):
break break
with cond: with cond:
cond.wait() cond.wait(3)
try: try:
st = os.stat(tpath) st = os.stat(tpath)
@@ -206,9 +214,9 @@ class ThumbSrv(object):
if fun: if fun:
try: try:
fun(abspath, tpath) fun(abspath, tpath)
except Exception as ex: except:
msg = "{} failed on {}\n {!r}" msg = "{} failed on {}\n{}"
self.log(msg.format(fun.__name__, abspath, ex), 3) self.log(msg.format(fun.__name__, abspath, min_ex()), 3)
with open(tpath, "wb") as _: with open(tpath, "wb") as _:
pass pass
@@ -240,8 +248,8 @@ class ThumbSrv(object):
except: except:
im.thumbnail(self.res) im.thumbnail(self.res)
if im.mode not in ("RGB", "L"): fmts = ["RGB", "L"]
im = im.convert("RGB") args = {"quality": 40}
if tpath.endswith(".webp"): if tpath.endswith(".webp"):
# quality 80 = pillow-default # quality 80 = pillow-default
@@ -249,15 +257,27 @@ class ThumbSrv(object):
# method 0 = pillow-default, fast # method 0 = pillow-default, fast
# method 4 = ffmpeg-default # method 4 = ffmpeg-default
# method 6 = max, slow # method 6 = max, slow
im.save(tpath, quality=40, method=6) fmts += ["RGBA", "LA"]
args["method"] = 6
else: else:
im.save(tpath, quality=40) # default=75 pass # default q = 75
if im.mode not in fmts:
print("conv {}".format(im.mode))
im = im.convert("RGB")
im.save(tpath, quality=40, method=6)
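
Editor's note: conv_pil now keeps alpha when writing webp (RGBA/LA stay as-is) while jpg output is still flattened to RGB; quality 40 plus webp method 6 trades encode time for smaller files, per the comments above. A standalone sketch of the same idea, assuming a Pillow build with webp support and a hypothetical res tuple:

    from PIL import Image

    def save_thumb(im, tpath, res=(320, 256)):
        im.thumbnail(res)
        ok_modes = ["RGB", "L"]
        opts = {"quality": 40}
        if tpath.endswith(".webp"):
            ok_modes += ["RGBA", "LA"]   # keep transparency in webp thumbnails
            opts["method"] = 6           # slowest, smallest webp encode
        if im.mode not in ok_modes:
            im = im.convert("RGB")       # jpg (and exotic modes) lose alpha
        im.save(tpath, **opts)

    # usage: save_thumb(Image.open("cover.png"), "cover.webp")
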
def conv_ffmpeg(self, abspath, tpath): def conv_ffmpeg(self, abspath, tpath):
ret, _ = ffprobe(abspath) ret, _ = ffprobe(abspath)
ext = abspath.rsplit(".")[-1]
if ext in ["h264", "h265"]:
seek = []
else:
dur = ret[".dur"][1] if ".dur" in ret else 4 dur = ret[".dur"][1] if ".dur" in ret else 4
seek = "{:.0f}".format(dur / 3) seek = "{:.0f}".format(dur / 3)
seek = [b"-ss", seek.encode("utf-8")]
scale = "scale={0}:{1}:force_original_aspect_ratio=" scale = "scale={0}:{1}:force_original_aspect_ratio="
if self.args.th_no_crop: if self.args.th_no_crop:
@@ -266,19 +286,20 @@ class ThumbSrv(object):
scale += "increase,crop={0}:{1},setsar=1:1" scale += "increase,crop={0}:{1},setsar=1:1"
scale = scale.format(*list(self.res)).encode("utf-8") scale = scale.format(*list(self.res)).encode("utf-8")
# fmt: off
cmd = [ cmd = [
b"ffmpeg", b"ffmpeg",
b"-nostdin", b"-nostdin",
b"-hide_banner", b"-v", b"error",
b"-ss", b"-hide_banner"
seek,
b"-i",
fsenc(abspath),
b"-vf",
scale,
b"-vframes",
b"1",
] ]
cmd += seek
cmd += [
b"-i", fsenc(abspath),
b"-vf", scale,
b"-vframes", b"1",
]
# fmt: on
if tpath.endswith(".jpg"): if tpath.endswith(".jpg"):
cmd += [ cmd += [
@@ -295,7 +316,11 @@ class ThumbSrv(object):
cmd += [fsenc(tpath)] cmd += [fsenc(tpath)]
mchkcmd(cmd) ret, sout, serr = runcmd(*cmd)
if ret != 0:
msg = ["ff: {}".format(x) for x in serr.split("\n")]
self.log("FFmpeg failed:\n" + "\n".join(msg), c="1;30")
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
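
Editor's note: the reworked conv_ffmpeg skips the -ss seek for raw h264/h265 streams (no duration to probe), seeks a third of the way in otherwise, and now surfaces FFmpeg's stderr via runcmd instead of mchkcmd. A condensed sketch of the equivalent command (crop variant), with hypothetical helper names:

    import subprocess as sp

    def ffmpeg_thumb(abspath, tpath, dur=None, res=(320, 256)):
        # raw h264/h265 elementary streams have no known duration: start at 0
        seek = [] if dur is None else ["-ss", "{:.0f}".format(dur / 3)]
        vf = "scale={0}:{1}:force_original_aspect_ratio=increase,crop={0}:{1},setsar=1:1"
        cmd = ["ffmpeg", "-nostdin", "-v", "error", "-hide_banner"]
        cmd += seek
        cmd += ["-i", abspath, "-vf", vf.format(*res), "-vframes", "1", tpath]
        p = sp.run(cmd, capture_output=True, text=True)
        if p.returncode:
            raise RuntimeError("ffmpeg failed:\n" + p.stderr)
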
def poke(self, tdir): def poke(self, tdir):
if not self.poke_cd.poke(tdir): if not self.poke_cd.poke(tdir):
@@ -314,26 +339,29 @@ class ThumbSrv(object):
interval = self.args.th_clean interval = self.args.th_clean
while True: while True:
time.sleep(interval) time.sleep(interval)
for vol in self.vols: for vol, histpath in self.asrv.vfs.histtab.items():
vol += "/.hist/th" if histpath.startswith(vol):
self.log("\033[Jcln {}/\033[A".format(vol)) self.log("\033[Jcln {}/\033[A".format(histpath))
self.clean(vol) else:
self.log("\033[Jcln {} ({})/\033[A".format(histpath, vol))
self.clean(histpath)
self.log("\033[Jcln ok") self.log("\033[Jcln ok")
def clean(self, vol): def clean(self, histpath):
# self.log("cln {}".format(vol)) # self.log("cln {}".format(histpath))
maxage = self.args.th_maxage maxage = self.args.th_maxage
now = time.time() now = time.time()
prev_b64 = None prev_b64 = None
prev_fp = None prev_fp = None
try: try:
ents = os.listdir(vol) ents = os.listdir(histpath)
except: except:
return return
for f in sorted(ents): for f in sorted(ents):
fp = os.path.join(vol, f) fp = os.path.join(histpath, f)
cmp = fp.lower().replace("\\", "/") cmp = fp.lower().replace("\\", "/")
# "top" or b64 prefix/full (a folder) # "top" or b64 prefix/full (a folder)

View File

@@ -7,7 +7,7 @@ import time
import threading import threading
from datetime import datetime from datetime import datetime
from .util import u8safe, s3dec, html_escape, Pebkac from .util import s3dec, Pebkac, min_ex
from .up2k import up2k_wark_from_hashlist from .up2k import up2k_wark_from_hashlist
@@ -19,10 +19,11 @@ except:
class U2idx(object): class U2idx(object):
def __init__(self, args, log_func): def __init__(self, conn):
self.args = args self.log_func = conn.log_func
self.log_func = log_func self.asrv = conn.asrv
self.timeout = args.srch_time self.args = conn.args
self.timeout = self.args.srch_time
if not HAVE_SQLITE3: if not HAVE_SQLITE3:
self.log("could not load sqlite3; searching will be disabled")
@@ -52,18 +53,20 @@ class U2idx(object):
try: try:
return self.run_query(vols, uq, uv)[0] return self.run_query(vols, uq, uv)[0]
except Exception as ex: except:
raise Pebkac(500, repr(ex)) raise Pebkac(500, min_ex())
def get_cur(self, ptop): def get_cur(self, ptop):
cur = self.cur.get(ptop) cur = self.cur.get(ptop)
if cur: if cur:
return cur return cur
cur = _open(ptop) histpath = self.asrv.vfs.histtab[ptop]
if not cur: db_path = os.path.join(histpath, "up2k.db")
if not os.path.exists(db_path):
return None return None
cur = sqlite3.connect(db_path).cursor()
self.cur[ptop] = cur self.cur[ptop] = cur
return cur return cur
@@ -192,6 +195,7 @@ class U2idx(object):
self.active_id, self.active_id,
done_flag, done_flag,
), ),
name="u2idx-terminator",
) )
thr.daemon = True thr.daemon = True
thr.start() thr.start()
@@ -241,6 +245,7 @@ class U2idx(object):
hit["tags"] = tags hit["tags"] = tags
ret.extend(sret) ret.extend(sret)
# print("[{}] {}".format(ptop, sret))
done_flag.append(True) done_flag.append(True)
self.active_id = None self.active_id = None
@@ -261,9 +266,3 @@ class U2idx(object):
if identifier == self.active_id: if identifier == self.active_id:
self.active_cur.connection.interrupt() self.active_cur.connection.interrupt()
def _open(ptop):
db_path = os.path.join(ptop, ".hist", "up2k.db")
if os.path.exists(db_path):
return sqlite3.connect(db_path).cursor()

View File

@@ -48,8 +48,9 @@ class Up2k(object):
* ~/.config flatfiles for active jobs * ~/.config flatfiles for active jobs
""" """
def __init__(self, hub, all_vols): def __init__(self, hub):
self.hub = hub self.hub = hub
self.asrv = hub.asrv
self.args = hub.args self.args = hub.args
self.log_func = hub.log self.log_func = hub.log
@@ -60,6 +61,8 @@ class Up2k(object):
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.hashq = Queue() self.hashq = Queue()
self.tagq = Queue() self.tagq = Queue()
self.n_hashq = 0
self.n_tagq = 0
self.volstate = {} self.volstate = {}
self.registry = {} self.registry = {}
self.entags = {} self.entags = {}
@@ -83,7 +86,7 @@ class Up2k(object):
if ANYWIN: if ANYWIN:
# usually fails to set lastmod too quickly # usually fails to set lastmod too quickly
self.lastmod_q = Queue() self.lastmod_q = Queue()
thr = threading.Thread(target=self._lastmodder) thr = threading.Thread(target=self._lastmodder, name="up2k-lastmod")
thr.daemon = True thr.daemon = True
thr.start() thr.start()
@@ -94,45 +97,73 @@ class Up2k(object):
self.log("could not initialize sqlite3, will use in-memory registry only") self.log("could not initialize sqlite3, will use in-memory registry only")
if self.args.no_fastboot: if self.args.no_fastboot:
self.deferred_init(all_vols) self.deferred_init()
else: else:
t = threading.Thread(target=self.deferred_init, args=(all_vols,)) t = threading.Thread(
target=self.deferred_init,
name="up2k-deferred-init",
)
t.daemon = True t.daemon = True
t.start() t.start()
def deferred_init(self, all_vols): def deferred_init(self):
all_vols = self.asrv.vfs.all_vols
have_e2d = self.init_indexes(all_vols) have_e2d = self.init_indexes(all_vols)
if have_e2d: if have_e2d:
thr = threading.Thread(target=self._snapshot) thr = threading.Thread(target=self._snapshot, name="up2k-snapshot")
thr.daemon = True thr.daemon = True
thr.start() thr.start()
thr = threading.Thread(target=self._hasher) thr = threading.Thread(target=self._hasher, name="up2k-hasher")
thr.daemon = True thr.daemon = True
thr.start() thr.start()
if self.mtag: if self.mtag:
thr = threading.Thread(target=self._tagger) thr = threading.Thread(target=self._tagger, name="up2k-tagger")
thr.daemon = True thr.daemon = True
thr.start() thr.start()
thr = threading.Thread(target=self._run_all_mtp) thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-init")
thr.daemon = True thr.daemon = True
thr.start() thr.start()
def log(self, msg, c=0): def log(self, msg, c=0):
self.log_func("up2k", msg + "\033[K", c) self.log_func("up2k", msg + "\033[K", c)
def get_volstate(self): def get_state(self):
return json.dumps(self.volstate, indent=4) mtpq = 0
q = "select count(w) from mt where k = 't:mtp'"
got_lock = self.mutex.acquire(timeout=0.5)
if got_lock:
for cur in self.cur.values():
try:
mtpq += cur.execute(q).fetchone()[0]
except:
pass
self.mutex.release()
else:
mtpq = "?"
ret = {
"volstate": self.volstate,
"scanning": hasattr(self, "pp"),
"hashq": self.n_hashq,
"tagq": self.n_tagq,
"mtpq": mtpq,
}
return json.dumps(ret, indent=4)
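
Editor's note: get_volstate becomes get_state and now also reports queue depths for the admin page; mtpq is counted from the "t:mtp" rows in each volume's mt table and falls back to "?" if the mutex cannot be grabbed within 0.5s. The splash template receives roughly this shape (values made up):

    {
        "volstate": {"/music": "online (running mtp)"},
        "scanning": False,   # True while a filesystem scan (self.pp) is active
        "hashq": 0,          # files queued for hashing
        "tagq": 2,           # files queued for tag extraction
        "mtpq": 5,           # files still tagged t:mtp (pending mtp parsers)
    }
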
def rescan(self, all_vols, scan_vols): def rescan(self, all_vols, scan_vols):
if hasattr(self, "pp"): if hasattr(self, "pp"):
return "cannot initiate; scan is already in progress" return "cannot initiate; scan is already in progress"
args = (all_vols, scan_vols) args = (all_vols, scan_vols)
t = threading.Thread(target=self.init_indexes, args=args) t = threading.Thread(
target=self.init_indexes,
args=args,
name="up2k-rescan-{}".format(scan_vols[0]),
)
t.daemon = True t.daemon = True
t.start() t.start()
return None return None
@@ -178,6 +209,8 @@ class Up2k(object):
self.log(msg, c=3) self.log(msg, c=3)
live_vols = [] live_vols = []
with self.mutex:
# only need to protect register_vpath but all in one go feels right
for vol in vols: for vol in vols:
try: try:
os.listdir(vol.realpath) os.listdir(vol.realpath)
@@ -186,11 +219,13 @@ class Up2k(object):
self.log("cannot access " + vol.realpath, c=1) self.log("cannot access " + vol.realpath, c=1)
continue continue
if not self.register_vpath(vol.realpath, vol.flags): if scan_vols and vol.vpath not in scan_vols:
# self.log("db not enabled for {}".format(m, vol.realpath)) continue
if not self.register_vpath(vol.realpath, vol.flags):
# self.log("db not enable for {}".format(m, vol.realpath))
continue continue
if vol.vpath in scan_vols or not scan_vols:
live_vols.append(vol) live_vols.append(vol)
if vol.vpath not in self.volstate: if vol.vpath not in self.volstate:
@@ -271,7 +306,7 @@ class Up2k(object):
if self.mtag: if self.mtag:
m = "online (running mtp)" m = "online (running mtp)"
if scan_vols: if scan_vols:
thr = threading.Thread(target=self._run_all_mtp) thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-scan")
thr.daemon = True thr.daemon = True
else: else:
del self.pp del self.pp
@@ -286,9 +321,13 @@ class Up2k(object):
return have_e2d return have_e2d
def register_vpath(self, ptop, flags): def register_vpath(self, ptop, flags):
db_path = os.path.join(ptop, ".hist", "up2k.db") histpath = self.asrv.vfs.histtab[ptop]
db_path = os.path.join(histpath, "up2k.db")
if ptop in self.registry: if ptop in self.registry:
try:
return [self.cur[ptop], db_path] return [self.cur[ptop], db_path]
except:
return None
_, flags = self._expr_idx_filter(flags) _, flags = self._expr_idx_filter(flags)
@@ -303,7 +342,7 @@ class Up2k(object):
self.log(" ".join(sorted(a)) + "\033[0m") self.log(" ".join(sorted(a)) + "\033[0m")
reg = {} reg = {}
path = os.path.join(ptop, ".hist", "up2k.snap") path = os.path.join(histpath, "up2k.snap")
if "e2d" in flags and os.path.exists(path): if "e2d" in flags and os.path.exists(path):
with gzip.GzipFile(path, "rb") as f: with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8") j = f.read().decode("utf-8")
@@ -327,7 +366,7 @@ class Up2k(object):
return None return None
try: try:
os.mkdir(os.path.join(ptop, ".hist")) os.makedirs(histpath)
except: except:
pass pass
@@ -344,6 +383,7 @@ class Up2k(object):
def _build_file_index(self, vol, all_vols): def _build_file_index(self, vol, all_vols):
do_vac = False do_vac = False
top = vol.realpath top = vol.realpath
nohash = "dhash" in vol.flags
with self.mutex: with self.mutex:
cur, _ = self.register_vpath(top, vol.flags) cur, _ = self.register_vpath(top, vol.flags)
@@ -358,7 +398,7 @@ class Up2k(object):
if WINDOWS: if WINDOWS:
excl = [x.replace("/", "\\") for x in excl] excl = [x.replace("/", "\\") for x in excl]
n_add = self._build_dir(dbw, top, set(excl), top) n_add = self._build_dir(dbw, top, set(excl), top, nohash)
n_rm = self._drop_lost(dbw[0], top) n_rm = self._drop_lost(dbw[0], top)
if dbw[1]: if dbw[1]:
self.log("commit {} new files".format(dbw[1])) self.log("commit {} new files".format(dbw[1]))
@@ -366,23 +406,25 @@ class Up2k(object):
return True, n_add or n_rm or do_vac return True, n_add or n_rm or do_vac
def _build_dir(self, dbw, top, excl, cdir): def _build_dir(self, dbw, top, excl, cdir, nohash):
self.pp.msg = "a{} {}".format(self.pp.n, cdir) self.pp.msg = "a{} {}".format(self.pp.n, cdir)
histdir = os.path.join(top, ".hist") histpath = self.asrv.vfs.histtab[top]
ret = 0 ret = 0
for iname, inf in statdir(self.log, not self.args.no_scandir, False, cdir): g = statdir(self.log, not self.args.no_scandir, False, cdir)
for iname, inf in sorted(g):
abspath = os.path.join(cdir, iname) abspath = os.path.join(cdir, iname)
lmod = int(inf.st_mtime) lmod = int(inf.st_mtime)
sz = inf.st_size
if stat.S_ISDIR(inf.st_mode): if stat.S_ISDIR(inf.st_mode):
if abspath in excl or abspath == histdir: if abspath in excl or abspath == histpath:
continue continue
# self.log(" dir: {}".format(abspath)) # self.log(" dir: {}".format(abspath))
ret += self._build_dir(dbw, top, excl, abspath) ret += self._build_dir(dbw, top, excl, abspath, nohash)
else: else:
# self.log("file: {}".format(abspath)) # self.log("file: {}".format(abspath))
rp = abspath[len(top) :].replace("\\", "/").strip("/") rp = abspath[len(top) :].replace("\\", "/").strip("/")
rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp] rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp]
sql = "select * from up where rd = ? and fn = ?" sql = "select w, mt, sz from up where rd = ? and fn = ?"
try: try:
c = dbw[0].execute(sql, (rd, fn)) c = dbw[0].execute(sql, (rd, fn))
except: except:
@@ -391,18 +433,18 @@ class Up2k(object):
in_db = list(c.fetchall()) in_db = list(c.fetchall())
if in_db: if in_db:
self.pp.n -= 1 self.pp.n -= 1
_, dts, dsz, _, _ = in_db[0] dw, dts, dsz = in_db[0]
if len(in_db) > 1: if len(in_db) > 1:
m = "WARN: multiple entries: [{}] => [{}] |{}|\n{}" m = "WARN: multiple entries: [{}] => [{}] |{}|\n{}"
rep_db = "\n".join([repr(x) for x in in_db]) rep_db = "\n".join([repr(x) for x in in_db])
self.log(m.format(top, rp, len(in_db), rep_db)) self.log(m.format(top, rp, len(in_db), rep_db))
dts = -1 dts = -1
if dts == lmod and dsz == inf.st_size: if dts == lmod and dsz == sz and (nohash or dw[0] != "#"):
continue continue
m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format( m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
top, rp, dts, lmod, dsz, inf.st_size top, rp, dts, lmod, dsz, sz
) )
self.log(m) self.log(m)
self.db_rm(dbw[0], rd, fn) self.db_rm(dbw[0], rd, fn)
@@ -411,7 +453,11 @@ class Up2k(object):
in_db = None in_db = None
self.pp.msg = "a{} {}".format(self.pp.n, abspath) self.pp.msg = "a{} {}".format(self.pp.n, abspath)
if inf.st_size > 1024 * 1024:
if nohash:
wark = up2k_wark_from_metadata(self.salt, sz, lmod, rd, fn)
else:
if sz > 1024 * 1024:
self.log("file: {}".format(abspath)) self.log("file: {}".format(abspath))
try: try:
@@ -420,8 +466,9 @@ class Up2k(object):
self.log("hash: {} @ [{}]".format(repr(ex), abspath)) self.log("hash: {} @ [{}]".format(repr(ex), abspath))
continue continue
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes) wark = up2k_wark_from_hashlist(self.salt, sz, hashes)
self.db_add(dbw[0], wark, rd, fn, lmod, inf.st_size)
self.db_add(dbw[0], wark, rd, fn, lmod, sz)
dbw[1] += 1 dbw[1] += 1
ret += 1 ret += 1
td = time.time() - dbw[2] td = time.time() - dbw[2]
@@ -552,6 +599,7 @@ class Up2k(object):
last_write = time.time() last_write = time.time()
n_buf = 0 n_buf = 0
if mpool:
self._stop_mpool(mpool) self._stop_mpool(mpool)
with self.mutex: with self.mutex:
n_add += len(self._flush_mpool(c3)) n_add += len(self._flush_mpool(c3))
@@ -752,7 +800,9 @@ class Up2k(object):
mpool = Queue(nw) mpool = Queue(nw)
for _ in range(nw): for _ in range(nw):
thr = threading.Thread(target=self._tag_thr, args=(mpool,)) thr = threading.Thread(
target=self._tag_thr, args=(mpool,), name="up2k-mpool"
)
thr.daemon = True thr.daemon = True
thr.start() thr.start()
@@ -913,7 +963,7 @@ class Up2k(object):
def _create_v3(self, cur): def _create_v3(self, cur):
""" """
collision in 2^(n/2) files where n = bits (6 bits/ch) collision in 2^(n/2) files where n = bits (6 bits/ch)
10*6/2 = 2^30 = 1'073'741'824, 24.1mb idx 10*6/2 = 2^30 = 1'073'741'824, 24.1mb idx 1<<(3*10)
12*6/2 = 2^36 = 68'719'476'736, 24.8mb idx 12*6/2 = 2^36 = 68'719'476'736, 24.8mb idx
16*6/2 = 2^48 = 281'474'976'710'656, 26.1mb idx 16*6/2 = 2^48 = 281'474'976'710'656, 26.1mb idx
""" """
@@ -961,6 +1011,7 @@ class Up2k(object):
return self._orz(db_path) return self._orz(db_path)
def handle_json(self, cj): def handle_json(self, cj):
with self.mutex:
if not self.register_vpath(cj["ptop"], cj["vcfg"]): if not self.register_vpath(cj["ptop"], cj["vcfg"]):
if cj["ptop"] not in self.registry: if cj["ptop"] not in self.registry:
raise Pebkac(410, "location unavailable") raise Pebkac(410, "location unavailable")
@@ -971,7 +1022,7 @@ class Up2k(object):
now = time.time() now = time.time()
job = None job = None
with self.mutex: with self.mutex:
cur = self.cur.get(cj["ptop"], None) cur = self.cur.get(cj["ptop"])
reg = self.registry[cj["ptop"]] reg = self.registry[cj["ptop"]]
if cur: if cur:
if self.no_expr_idx: if self.no_expr_idx:
@@ -1129,7 +1180,7 @@ class Up2k(object):
def handle_chunk(self, ptop, wark, chash): def handle_chunk(self, ptop, wark, chash):
with self.mutex: with self.mutex:
job = self.registry[ptop].get(wark, None) job = self.registry[ptop].get(wark)
if not job: if not job:
known = " ".join([x for x in self.registry[ptop].keys()]) known = " ".join([x for x in self.registry[ptop].keys()])
self.log("unknown wark [{}], known: {}".format(wark, known)) self.log("unknown wark [{}], known: {}".format(wark, known))
@@ -1194,7 +1245,7 @@ class Up2k(object):
return ret, dst return ret, dst
def idx_wark(self, ptop, wark, rd, fn, lmod, sz): def idx_wark(self, ptop, wark, rd, fn, lmod, sz):
cur = self.cur.get(ptop, None) cur = self.cur.get(ptop)
if not cur: if not cur:
return False return False
@@ -1204,6 +1255,7 @@ class Up2k(object):
if "e2t" in self.flags[ptop]: if "e2t" in self.flags[ptop]:
self.tagq.put([ptop, wark, rd, fn]) self.tagq.put([ptop, wark, rd, fn])
self.n_tagq += 1
return True return True
@@ -1329,11 +1381,12 @@ class Up2k(object):
for k, reg in self.registry.items(): for k, reg in self.registry.items():
self._snap_reg(prev, k, reg, discard_interval) self._snap_reg(prev, k, reg, discard_interval)
def _snap_reg(self, prev, k, reg, discard_interval): def _snap_reg(self, prev, ptop, reg, discard_interval):
now = time.time() now = time.time()
histpath = self.asrv.vfs.histtab[ptop]
rm = [x for x in reg.values() if now - x["poke"] > discard_interval] rm = [x for x in reg.values() if now - x["poke"] > discard_interval]
if rm: if rm:
m = "dropping {} abandoned uploads in {}".format(len(rm), k) m = "dropping {} abandoned uploads in {}".format(len(rm), ptop)
vis = [self._vis_job_progress(x) for x in rm] vis = [self._vis_job_progress(x) for x in rm]
self.log("\n".join([m] + vis)) self.log("\n".join([m] + vis))
for job in rm: for job in rm:
@@ -1351,21 +1404,21 @@ class Up2k(object):
except: except:
pass pass
path = os.path.join(k, ".hist", "up2k.snap") path = os.path.join(histpath, "up2k.snap")
if not reg: if not reg:
if k not in prev or prev[k] is not None: if ptop not in prev or prev[ptop] is not None:
prev[k] = None prev[ptop] = None
if os.path.exists(fsenc(path)): if os.path.exists(fsenc(path)):
os.unlink(fsenc(path)) os.unlink(fsenc(path))
return return
newest = max(x["poke"] for _, x in reg.items()) if reg else 0 newest = max(x["poke"] for _, x in reg.items()) if reg else 0
etag = [len(reg), newest] etag = [len(reg), newest]
if etag == prev.get(k, None): if etag == prev.get(ptop):
return return
try: try:
os.mkdir(os.path.join(k, ".hist")) os.makedirs(histpath)
except: except:
pass pass
@@ -1377,14 +1430,21 @@ class Up2k(object):
atomic_move(path2, path) atomic_move(path2, path)
self.log("snap: {} |{}|".format(path, len(reg.keys()))) self.log("snap: {} |{}|".format(path, len(reg.keys())))
prev[k] = etag prev[ptop] = etag
def _tagger(self): def _tagger(self):
with self.mutex:
self.n_tagq += 1
while True: while True:
with self.mutex:
self.n_tagq -= 1
ptop, wark, rd, fn = self.tagq.get() ptop, wark, rd, fn = self.tagq.get()
if "e2t" not in self.flags[ptop]: if "e2t" not in self.flags[ptop]:
continue continue
# self.log("\n " + repr([ptop, rd, fn]))
abspath = os.path.join(ptop, rd, fn) abspath = os.path.join(ptop, rd, fn)
tags = self.mtag.get(abspath) tags = self.mtag.get(abspath)
ntags1 = len(tags) ntags1 = len(tags)
@@ -1410,8 +1470,16 @@ class Up2k(object):
self.log("tagged {} ({}+{})".format(abspath, ntags1, len(tags) - ntags1)) self.log("tagged {} ({}+{})".format(abspath, ntags1, len(tags) - ntags1))
def _hasher(self): def _hasher(self):
with self.mutex:
self.n_hashq += 1
while True: while True:
with self.mutex:
self.n_hashq -= 1
# self.log("hashq {}".format(self.n_hashq))
ptop, rd, fn = self.hashq.get() ptop, rd, fn = self.hashq.get()
# self.log("hashq {} pop {}/{}/{}".format(self.n_hashq, ptop, rd, fn))
if "e2d" not in self.flags[ptop]: if "e2d" not in self.flags[ptop]:
continue continue
@@ -1424,8 +1492,11 @@ class Up2k(object):
self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size) self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size)
def hash_file(self, ptop, flags, rd, fn): def hash_file(self, ptop, flags, rd, fn):
with self.mutex:
self.register_vpath(ptop, flags) self.register_vpath(ptop, flags)
self.hashq.put([ptop, rd, fn]) self.hashq.put([ptop, rd, fn])
self.n_hashq += 1
# self.log("hashq {} push {}/{}/{}".format(self.n_hashq, ptop, rd, fn))
def up2k_chunksize(filesize): def up2k_chunksize(filesize):
@@ -1447,9 +1518,12 @@ def up2k_wark_from_hashlist(salt, filesize, hashes):
ident.extend(hashes) ident.extend(hashes)
ident = "\n".join(ident) ident = "\n".join(ident)
hasher = hashlib.sha512() wark = hashlib.sha512(ident.encode("utf-8")).digest()
hasher.update(ident.encode("utf-8")) wark = base64.urlsafe_b64encode(wark)
digest = hasher.digest()[:32] return wark.decode("ascii")[:43]
wark = base64.urlsafe_b64encode(digest)
return wark.decode("utf-8").rstrip("=") def up2k_wark_from_metadata(salt, sz, lastmod, rd, fn):
ret = fsenc("{}\n{}\n{}\n{}\n{}".format(salt, lastmod, sz, rd, fn))
ret = base64.urlsafe_b64encode(hashlib.sha512(ret).digest())
return "#{}".format(ret[:42].decode("ascii"))
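
Editor's note: both wark flavors end up as 43-character url-safe base64 ids; the content wark hashes the salt, size and chunk hashes, while the new metadata wark (used when a volume is indexed without hashing) hashes salt/lastmod/size/path and gets a leading "#" so _build_dir can tell the two apart (the dw[0] != "#" check above). A condensed sketch; the exact fields at the start of ident are assumed, since they sit above the shown hunk:

    import base64, hashlib

    def wark_from_hashlist(salt, filesize, hashes):
        ident = "\n".join([salt, str(filesize)] + list(hashes))  # leading fields assumed
        dig = hashlib.sha512(ident.encode("utf-8")).digest()
        return base64.urlsafe_b64encode(dig).decode("ascii")[:43]

    def wark_from_metadata(salt, sz, lastmod, rd, fn):
        ident = "{}\n{}\n{}\n{}\n{}".format(salt, lastmod, sz, rd, fn)
        dig = hashlib.sha512(ident.encode("utf-8")).digest()
        return "#" + base64.urlsafe_b64encode(dig).decode("ascii")[:42]
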

View File

@@ -193,7 +193,7 @@ class ProgressPrinter(threading.Thread):
""" """
def __init__(self): def __init__(self):
threading.Thread.__init__(self) threading.Thread.__init__(self, name="pp")
self.daemon = True self.daemon = True
self.msg = None self.msg = None
self.end = False self.end = False
@@ -208,6 +208,8 @@ class ProgressPrinter(threading.Thread):
msg = self.msg msg = self.msg
uprint(" {}\033[K\r".format(msg)) uprint(" {}\033[K\r".format(msg))
if PY2:
sys.stdout.flush()
print("\033[K", end="") print("\033[K", end="")
sys.stdout.flush() # necessary on win10 even w/ stderr btw sys.stdout.flush() # necessary on win10 even w/ stderr btw
@@ -252,6 +254,45 @@ def trace(*args, **kwargs):
nuprint(msg) nuprint(msg)
def alltrace():
threads = {}
names = dict([(t.ident, t.name) for t in threading.enumerate()])
for tid, stack in sys._current_frames().items():
name = "{} ({:x})".format(names.get(tid), tid)
threads[name] = stack
rret = []
bret = []
for name, stack in sorted(threads.items()):
ret = ["\n\n# {}".format(name)]
pad = None
for fn, lno, name, line in traceback.extract_stack(stack):
fn = os.sep.join(fn.split(os.sep)[-3:])
ret.append('File: "{}", line {}, in {}'.format(fn, lno, name))
if line:
ret.append(" " + str(line.strip()))
if "self.not_empty.wait()" in line:
pad = " " * 4
if pad:
bret += [ret[0]] + [pad + x for x in ret[1:]]
else:
rret += ret
return "\n".join(rret + bret)
def min_ex():
et, ev, tb = sys.exc_info()
tb = traceback.extract_tb(tb, 2)
ex = [
"{} @ {} <{}>: {}".format(fp.split(os.sep)[-1], ln, fun, txt)
for fp, ln, fun, txt in tb
]
ex.append("{}: {}".format(et.__name__, ev))
return "\n".join(ex)
@contextlib.contextmanager @contextlib.contextmanager
def ren_open(fname, *args, **kwargs): def ren_open(fname, *args, **kwargs):
fdir = kwargs.pop("fdir", None) fdir = kwargs.pop("fdir", None)
@@ -262,6 +303,11 @@ def ren_open(fname, *args, **kwargs):
yield {"orz": [f, fname]} yield {"orz": [f, fname]}
return return
if suffix:
ext = fname.split(".")[-1]
if len(ext) < 7:
suffix += "." + ext
orig_name = fname orig_name = fname
bname = fname bname = fname
ext = "" ext = ""
@@ -561,8 +607,10 @@ def read_header(sr):
else: else:
continue continue
if len(ret) > ofs + 4:
sr.unrecv(ret[ofs + 4 :]) sr.unrecv(ret[ofs + 4 :])
return ret[:ofs].decode("utf-8", "surrogateescape").split("\r\n")
return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")
def humansize(sz, terse=False): def humansize(sz, terse=False):
@@ -847,13 +895,14 @@ def yieldfile(fn):
def hashcopy(actor, fin, fout): def hashcopy(actor, fin, fout):
u32_lim = int((2 ** 31) * 0.9) is_mp = actor.is_mp
hashobj = hashlib.sha512() hashobj = hashlib.sha512()
tlen = 0 tlen = 0
for buf in fin: for buf in fin:
if is_mp:
actor.workload += 1 actor.workload += 1
if actor.workload > u32_lim: if actor.workload > 2 ** 31:
actor.workload = 100 # prevent overflow actor.workload = 100
tlen += len(buf) tlen += len(buf)
hashobj.update(buf) hashobj.update(buf)
@@ -865,12 +914,17 @@ def hashcopy(actor, fin, fout):
return tlen, hashobj.hexdigest(), digest_b64 return tlen, hashobj.hexdigest(), digest_b64
def sendfile_py(lower, upper, f, s): def sendfile_py(lower, upper, f, s, actor=None):
remains = upper - lower remains = upper - lower
f.seek(lower) f.seek(lower)
while remains > 0: while remains > 0:
if actor:
actor.workload += 1
if actor.workload > 2 ** 31:
actor.workload = 100
# time.sleep(0.01) # time.sleep(0.01)
buf = f.read(min(4096, remains)) buf = f.read(min(1024 * 32, remains))
if not buf: if not buf:
return remains return remains
@@ -972,8 +1026,8 @@ def guess_mime(url, fallback="application/octet-stream"):
def runcmd(*argv): def runcmd(*argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE) p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
stdout, stderr = p.communicate() stdout, stderr = p.communicate()
stdout = stdout.decode("utf-8") stdout = stdout.decode("utf-8", "replace")
stderr = stderr.decode("utf-8") stderr = stderr.decode("utf-8", "replace")
return [p.returncode, stdout, stderr] return [p.returncode, stdout, stderr]

View File

@@ -0,0 +1,583 @@
/*!
* baguetteBox.js
* @author feimosi
* @version 1.11.1-mod
* @url https://github.com/feimosi/baguetteBox.js
*/
window.baguetteBox = (function () {
'use strict';
var options = {},
defaults = {
captions: true,
buttons: 'auto',
noScrollbars: false,
bodyClass: 'baguetteBox-open',
titleTag: false,
async: false,
preload: 2,
animation: 'slideIn',
afterShow: null,
afterHide: null,
onChange: null,
},
overlay, slider, previousButton, nextButton, closeButton,
currentGallery = [],
currentIndex = 0,
isOverlayVisible = false,
touch = {}, // start-pos
touchFlag = false, // busy
regex = /.+\.(gif|jpe?g|png|webp)/i,
data = {}, // all galleries
imagesElements = [],
documentLastFocus = null;
var overlayClickHandler = function (event) {
if (event.target.id.indexOf('baguette-img') !== -1) {
hideOverlay();
}
};
var touchstartHandler = function (event) {
touch.count++;
if (touch.count > 1) {
touch.multitouch = true;
}
touch.startX = event.changedTouches[0].pageX;
touch.startY = event.changedTouches[0].pageY;
};
var touchmoveHandler = function (event) {
if (touchFlag || touch.multitouch) {
return;
}
event.preventDefault ? event.preventDefault() : event.returnValue = false;
var touchEvent = event.touches[0] || event.changedTouches[0];
if (touchEvent.pageX - touch.startX > 40) {
touchFlag = true;
showPreviousImage();
} else if (touchEvent.pageX - touch.startX < -40) {
touchFlag = true;
showNextImage();
} else if (touch.startY - touchEvent.pageY > 100) {
hideOverlay();
}
};
var touchendHandler = function () {
touch.count--;
if (touch.count <= 0) {
touch.multitouch = false;
}
touchFlag = false;
};
var contextmenuHandler = function () {
touchendHandler();
};
var trapFocusInsideOverlay = function (event) {
if (overlay.style.display === 'block' && (overlay.contains && !overlay.contains(event.target))) {
event.stopPropagation();
initFocus();
}
};
function run(selector, userOptions) {
buildOverlay();
removeFromCache(selector);
return bindImageClickListeners(selector, userOptions);
}
function bindImageClickListeners(selector, userOptions) {
var galleryNodeList = document.querySelectorAll(selector);
var selectorData = {
galleries: [],
nodeList: galleryNodeList
};
data[selector] = selectorData;
[].forEach.call(galleryNodeList, function (galleryElement) {
if (userOptions && userOptions.filter) {
regex = userOptions.filter;
}
var tagsNodeList = [];
if (galleryElement.tagName === 'A') {
tagsNodeList = [galleryElement];
} else {
tagsNodeList = galleryElement.getElementsByTagName('a');
}
tagsNodeList = [].filter.call(tagsNodeList, function (element) {
if (element.className.indexOf(userOptions && userOptions.ignoreClass) === -1) {
return regex.test(element.href);
}
});
if (tagsNodeList.length === 0) {
return;
}
var gallery = [];
[].forEach.call(tagsNodeList, function (imageElement, imageIndex) {
var imageElementClickHandler = function (event) {
if (event && event.ctrlKey)
return true;
event.preventDefault ? event.preventDefault() : event.returnValue = false;
prepareOverlay(gallery, userOptions);
showOverlay(imageIndex);
};
var imageItem = {
eventHandler: imageElementClickHandler,
imageElement: imageElement
};
bind(imageElement, 'click', imageElementClickHandler);
gallery.push(imageItem);
});
selectorData.galleries.push(gallery);
});
return selectorData.galleries;
}
function clearCachedData() {
for (var selector in data) {
if (data.hasOwnProperty(selector)) {
removeFromCache(selector);
}
}
}
function removeFromCache(selector) {
if (!data.hasOwnProperty(selector)) {
return;
}
var galleries = data[selector].galleries;
[].forEach.call(galleries, function (gallery) {
[].forEach.call(gallery, function (imageItem) {
unbind(imageItem.imageElement, 'click', imageItem.eventHandler);
});
if (currentGallery === gallery) {
currentGallery = [];
}
});
delete data[selector];
}
function buildOverlay() {
overlay = ebi('baguetteBox-overlay');
if (overlay) {
slider = ebi('baguetteBox-slider');
previousButton = ebi('previous-button');
nextButton = ebi('next-button');
closeButton = ebi('close-button');
return;
}
overlay = mknod('div');
overlay.setAttribute('role', 'dialog');
overlay.id = 'baguetteBox-overlay';
document.getElementsByTagName('body')[0].appendChild(overlay);
slider = mknod('div');
slider.id = 'baguetteBox-slider';
overlay.appendChild(slider);
previousButton = mknod('button');
previousButton.setAttribute('type', 'button');
previousButton.id = 'previous-button';
previousButton.setAttribute('aria-label', 'Previous');
previousButton.innerHTML = '&lt;';
overlay.appendChild(previousButton);
nextButton = mknod('button');
nextButton.setAttribute('type', 'button');
nextButton.id = 'next-button';
nextButton.setAttribute('aria-label', 'Next');
nextButton.innerHTML = '&gt;';
overlay.appendChild(nextButton);
closeButton = mknod('button');
closeButton.setAttribute('type', 'button');
closeButton.id = 'close-button';
closeButton.setAttribute('aria-label', 'Close');
closeButton.innerHTML = '&times;';
overlay.appendChild(closeButton);
previousButton.className = nextButton.className = closeButton.className = 'baguetteBox-button';
bindEvents();
}
function keyDownHandler(event) {
switch (event.keyCode) {
case 37: // Left
showPreviousImage();
break;
case 39: // Right
showNextImage();
break;
case 27: // Esc
hideOverlay();
break;
case 36: // Home
showFirstImage(event);
break;
case 35: // End
showLastImage(event);
break;
}
}
var passiveSupp = false;
try {
var opts = {
get passive() {
passiveSupp = true;
return false;
}
};
window.addEventListener('test', null, opts);
window.removeEventListener('test', null, opts);
}
catch (ex) {
passiveSupp = false;
}
var passiveEvent = passiveSupp ? { passive: false } : null;
var nonPassiveEvent = passiveSupp ? { passive: true } : null;
function bindEvents() {
bind(overlay, 'click', overlayClickHandler);
bind(previousButton, 'click', showPreviousImage);
bind(nextButton, 'click', showNextImage);
bind(closeButton, 'click', hideOverlay);
bind(slider, 'contextmenu', contextmenuHandler);
bind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
bind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
bind(overlay, 'touchend', touchendHandler);
bind(document, 'focus', trapFocusInsideOverlay, true);
}
function unbindEvents() {
unbind(overlay, 'click', overlayClickHandler);
unbind(previousButton, 'click', showPreviousImage);
unbind(nextButton, 'click', showNextImage);
unbind(closeButton, 'click', hideOverlay);
unbind(slider, 'contextmenu', contextmenuHandler);
unbind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
unbind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
unbind(overlay, 'touchend', touchendHandler);
unbind(document, 'focus', trapFocusInsideOverlay, true);
}
function prepareOverlay(gallery, userOptions) {
if (currentGallery === gallery) {
return;
}
currentGallery = gallery;
setOptions(userOptions);
slider.innerHTML = '';
imagesElements.length = 0;
var imagesFiguresIds = [];
var imagesCaptionsIds = [];
for (var i = 0, fullImage; i < gallery.length; i++) {
fullImage = mknod('div');
fullImage.className = 'full-image';
fullImage.id = 'baguette-img-' + i;
imagesElements.push(fullImage);
imagesFiguresIds.push('baguetteBox-figure-' + i);
imagesCaptionsIds.push('baguetteBox-figcaption-' + i);
slider.appendChild(imagesElements[i]);
}
overlay.setAttribute('aria-labelledby', imagesFiguresIds.join(' '));
overlay.setAttribute('aria-describedby', imagesCaptionsIds.join(' '));
}
function setOptions(newOptions) {
if (!newOptions) {
newOptions = {};
}
for (var item in defaults) {
options[item] = defaults[item];
if (typeof newOptions[item] !== 'undefined') {
options[item] = newOptions[item];
}
}
slider.style.transition = (options.animation === 'fadeIn' ? 'opacity .4s ease' :
options.animation === 'slideIn' ? '' : 'none');
if (options.buttons === 'auto' && ('ontouchstart' in window || currentGallery.length === 1)) {
options.buttons = false;
}
previousButton.style.display = nextButton.style.display = (options.buttons ? '' : 'none');
}
function showOverlay(chosenImageIndex) {
if (options.noScrollbars) {
document.documentElement.style.overflowY = 'hidden';
document.body.style.overflowY = 'scroll';
}
if (overlay.style.display === 'block') {
return;
}
bind(document, 'keydown', keyDownHandler);
currentIndex = chosenImageIndex;
touch = {
count: 0,
startX: null,
startY: null
};
loadImage(currentIndex, function () {
preloadNext(currentIndex);
preloadPrev(currentIndex);
});
updateOffset();
overlay.style.display = 'block';
// Fade in overlay
setTimeout(function () {
overlay.className = 'visible';
if (options.bodyClass && document.body.classList) {
document.body.classList.add(options.bodyClass);
}
if (options.afterShow) {
options.afterShow();
}
}, 50);
if (options.onChange) {
options.onChange(currentIndex, imagesElements.length);
}
documentLastFocus = document.activeElement;
initFocus();
isOverlayVisible = true;
}
function initFocus() {
if (options.buttons) {
previousButton.focus();
} else {
closeButton.focus();
}
}
function hideOverlay(e) {
ev(e);
if (options.noScrollbars) {
document.documentElement.style.overflowY = 'auto';
document.body.style.overflowY = 'auto';
}
if (overlay.style.display === 'none') {
return;
}
unbind(document, 'keydown', keyDownHandler);
// Fade out and hide the overlay
overlay.className = '';
setTimeout(function () {
overlay.style.display = 'none';
if (options.bodyClass && document.body.classList) {
document.body.classList.remove(options.bodyClass);
}
if (options.afterHide) {
options.afterHide();
}
documentLastFocus && documentLastFocus.focus();
isOverlayVisible = false;
}, 500);
}
function loadImage(index, callback) {
var imageContainer = imagesElements[index];
var galleryItem = currentGallery[index];
if (typeof imageContainer === 'undefined' || typeof galleryItem === 'undefined') {
return; // out-of-bounds or gallery dirty
}
if (imageContainer.getElementsByTagName('img')[0]) {
// image is loaded, cb and bail
if (callback) {
callback();
}
return;
}
var imageElement = galleryItem.imageElement,
imageSrc = imageElement.href,
thumbnailElement = imageElement.getElementsByTagName('img')[0],
imageCaption = typeof options.captions === 'function' ?
options.captions.call(currentGallery, imageElement) :
imageElement.getAttribute('data-caption') || imageElement.title;
var figure = mknod('figure');
figure.id = 'baguetteBox-figure-' + index;
figure.innerHTML = '<div class="baguetteBox-spinner">' +
'<div class="baguetteBox-double-bounce1"></div>' +
'<div class="baguetteBox-double-bounce2"></div>' +
'</div>';
if (options.captions && imageCaption) {
var figcaption = mknod('figcaption');
figcaption.id = 'baguetteBox-figcaption-' + index;
figcaption.innerHTML = imageCaption;
figure.appendChild(figcaption);
}
imageContainer.appendChild(figure);
var image = mknod('img');
image.onload = function () {
// Remove loader element
var spinner = document.querySelector('#baguette-img-' + index + ' .baguetteBox-spinner');
figure.removeChild(spinner);
if (!options.async && callback) {
callback();
}
};
image.setAttribute('src', imageSrc);
image.alt = thumbnailElement ? thumbnailElement.alt || '' : '';
if (options.titleTag && imageCaption) {
image.title = imageCaption;
}
figure.appendChild(image);
if (options.async && callback) {
callback();
}
}
function showNextImage(e) {
ev(e);
return show(currentIndex + 1);
}
function showPreviousImage(e) {
ev(e);
return show(currentIndex - 1);
}
function showFirstImage(event) {
if (event) {
event.preventDefault();
}
return show(0);
}
function showLastImage(event) {
if (event) {
event.preventDefault();
}
return show(currentGallery.length - 1);
}
/**
* Move the gallery to a specific index
* @param `index` {number} - the position of the image
* @param `gallery` {array} - the gallery to open; if omitted, the currently opened one is used
* @return {boolean} - true on success or false if the index is invalid
*/
function show(index, gallery) {
gallery = gallery || currentGallery;
if (!isOverlayVisible && index >= 0 && index < gallery.length) {
prepareOverlay(gallery, options);
showOverlay(index);
return true;
}
if (index < 0) {
if (options.animation) {
bounceAnimation('left');
}
return false;
}
if (index >= imagesElements.length) {
if (options.animation) {
bounceAnimation('right');
}
return false;
}
currentIndex = index;
loadImage(currentIndex, function () {
preloadNext(currentIndex);
preloadPrev(currentIndex);
});
updateOffset();
if (options.onChange) {
options.onChange(currentIndex, imagesElements.length);
}
return true;
}
/**
* Triggers the bounce animation
* @param {('left'|'right')} direction - Direction of the movement
*/
function bounceAnimation(direction) {
slider.className = 'bounce-from-' + direction;
setTimeout(function () {
slider.className = '';
}, 400);
}
function updateOffset() {
var offset = -currentIndex * 100 + '%';
if (options.animation === 'fadeIn') {
slider.style.opacity = 0;
setTimeout(function () {
slider.style.transform = 'translate3d(' + offset + ',0,0)';
slider.style.opacity = 1;
}, 400);
} else {
slider.style.transform = 'translate3d(' + offset + ',0,0)';
}
}
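// recursively warm the cache up to options.preload images ahead of (and behind) the current index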
function preloadNext(index) {
if (index - currentIndex >= options.preload) {
return;
}
loadImage(index + 1, function () {
preloadNext(index + 1);
});
}
function preloadPrev(index) {
if (currentIndex - index >= options.preload) {
return;
}
loadImage(index - 1, function () {
preloadPrev(index - 1);
});
}
function bind(element, event, callback, options) {
element.addEventListener(event, callback, options);
}
function unbind(element, event, callback, options) {
element.removeEventListener(event, callback, options);
}
function destroyPlugin() {
unbindEvents();
clearCachedData();
unbind(document, 'keydown', keyDownHandler);
document.getElementsByTagName('body')[0].removeChild(ebi('baguetteBox-overlay'));
data = {};
currentGallery = [];
currentIndex = 0;
}
return {
run: run,
show: show,
showNext: showNextImage,
showPrevious: showPreviousImage,
hide: hideOverlay,
destroy: destroyPlugin
};
})();


@@ -497,6 +497,27 @@ input[type="checkbox"]+label {
input[type="checkbox"]:checked+label { input[type="checkbox"]:checked+label {
color: #fc5; color: #fc5;
} }
input.eq_gain {
width: 3em;
text-align: center;
margin: 0 .6em;
}
#audio_eq table {
border-collapse: collapse;
}
#audio_eq td {
text-align: center;
}
#audio_eq a.eq_step {
font-size: 1.5em;
display: block;
padding: 0;
}
#au_eq {
display: block;
margin-top: .5em;
padding: 1.3em .3em;
}
@@ -644,7 +665,6 @@ input[type="checkbox"]:checked+label {
} }
#treeul a+a { #treeul a+a {
width: calc(100% - 2em); width: calc(100% - 2em);
background: #333;
line-height: 1em; line-height: 1em;
} }
#treeul a+a:hover { #treeul a+a:hover {
@@ -751,9 +771,12 @@ input[type="checkbox"]:checked+label {
font-family: monospace, monospace; font-family: monospace, monospace;
line-height: 2em; line-height: 2em;
} }
#griden.on+#thumbs { #thumbs {
opacity: .3; opacity: .3;
} }
#griden.on+#thumbs {
opacity: 1;
}
#ghead { #ghead {
background: #3c3c3c; background: #3c3c3c;
border: 1px solid #444; border: 1px solid #444;
@@ -798,6 +821,12 @@ html.light #ghead {
padding: .2em .3em; padding: .2em .3em;
display: block; display: block;
} }
#ggrid span.dir:before {
content: '📂';
line-height: 0;
font-size: 2em;
margin: -.7em .1em -.5em -.3em;
}
#ggrid a:hover { #ggrid a:hover {
background: #444; background: #444;
border-color: #555; border-color: #555;
@@ -910,6 +939,7 @@ html.light #files {
} }
html.light #files thead th { html.light #files thead th {
background: #eee; background: #eee;
border-radius: 0;
} }
html.light #files tr td { html.light #files tr td {
border-top: 1px solid #ddd; border-top: 1px solid #ddd;
@@ -1022,3 +1052,160 @@ html.light #tree::-webkit-scrollbar {
#tree::-webkit-scrollbar-thumb { #tree::-webkit-scrollbar-thumb {
background: #da0; background: #da0;
} }
#baguetteBox-overlay {
display: none;
opacity: 0;
position: fixed;
overflow: hidden;
top: 0;
left: 0;
width: 100%;
height: 100%;
z-index: 1000000;
background: rgba(0, 0, 0, 0.8);
transition: opacity .3s ease;
}
#baguetteBox-overlay.visible {
opacity: 1;
}
#baguetteBox-overlay .full-image {
display: inline-block;
position: relative;
width: 100%;
height: 100%;
text-align: center;
}
#baguetteBox-overlay .full-image figure {
display: inline;
margin: 0;
height: 100%;
}
#baguetteBox-overlay .full-image img {
display: inline-block;
width: auto;
height: auto;
max-height: 100%;
max-width: 100%;
vertical-align: middle;
box-shadow: 0 0 8px rgba(0, 0, 0, 0.6);
}
#baguetteBox-overlay .full-image figcaption {
display: block;
position: absolute;
bottom: 0;
width: 100%;
text-align: center;
line-height: 1.8;
white-space: normal;
color: #ccc;
}
#baguetteBox-overlay figcaption a {
background: rgba(0, 0, 0, 0.6);
border-radius: .4em;
padding: .3em .6em;
}
#baguetteBox-overlay .full-image:before {
content: "";
display: inline-block;
height: 50%;
width: 1px;
margin-right: -1px;
}
#baguetteBox-slider {
position: absolute;
left: 0;
top: 0;
height: 100%;
width: 100%;
white-space: nowrap;
transition: left .2s ease, transform .2s ease;
}
#baguetteBox-slider.bounce-from-right {
animation: bounceFromRight .4s ease-out;
}
#baguetteBox-slider.bounce-from-left {
animation: bounceFromLeft .4s ease-out;
}
@keyframes bounceFromRight {
0% {margin-left: 0}
50% {margin-left: -30px}
100% {margin-left: 0}
}
@keyframes bounceFromLeft {
0% {margin-left: 0}
50% {margin-left: 30px}
100% {margin-left: 0}
}
.baguetteBox-button#next-button,
.baguetteBox-button#previous-button {
top: 50%;
top: calc(50% - 30px);
width: 44px;
height: 60px;
}
.baguetteBox-button {
position: absolute;
cursor: pointer;
outline: none;
padding: 0;
margin: 0;
border: 0;
border-radius: 15%;
background: rgba(50, 50, 50, 0.5);
color: #ddd;
font: 1.6em sans-serif;
transition: background-color .3s ease;
}
.baguetteBox-button:focus,
.baguetteBox-button:hover {
background: rgba(50, 50, 50, 0.9);
}
#next-button {
right: 2%;
}
#previous-button {
left: 2%;
}
#close-button {
top: 20px;
right: 2%;
width: 30px;
height: 30px;
}
.baguetteBox-button svg {
position: absolute;
left: 0;
top: 0;
}
.baguetteBox-spinner {
width: 40px;
height: 40px;
display: inline-block;
position: absolute;
top: 50%;
left: 50%;
margin-top: -20px;
margin-left: -20px;
}
.baguetteBox-double-bounce1,
.baguetteBox-double-bounce2 {
width: 100%;
height: 100%;
border-radius: 50%;
background-color: #fff;
opacity: .6;
position: absolute;
top: 0;
left: 0;
animation: bounce 2s infinite ease-in-out;
}
.baguetteBox-double-bounce2 {
animation-delay: -1s;
}
@keyframes bounce {
0%, 100% {transform: scale(0)}
50% {transform: scale(1)}
}


@@ -8,6 +8,9 @@
<meta name="viewport" content="width=device-width, initial-scale=0.8"> <meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css{{ ts }}"> <link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css{{ ts }}">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css{{ ts }}"> <link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css{{ ts }}">
{%- if css %}
<link rel="stylesheet" type="text/css" media="screen" href="{{ css }}{{ ts }}">
{%- endif %}
</head> </head>
<body> <body>
@@ -52,6 +55,8 @@
{%- endif %} {%- endif %}
<h3>key notation</h3> <h3>key notation</h3>
<div id="key_notation"></div> <div id="key_notation"></div>
<h3>audio equalizer</h3>
<div id="audio_eq"></div>
</div> </div>
<h1 id="path"> <h1 id="path">


@@ -38,7 +38,7 @@ var have_webp = null;
img.onerror = function () { img.onerror = function () {
have_webp = false; have_webp = false;
}; };
img.src = "data:image/webp;base64,UklGRiIAAABXRUJQVlA4IBYAAAAwAQCdASoBAAEADsD+JaQAA3AAAAAA"; img.src = "data:image/webp;base64,UklGRhoAAABXRUJQVlA4TA0AAAAvAAAAEAcQERGIiP4HAA==";
})(); })();
@@ -48,7 +48,6 @@ function MPlayer() {
this.au = null; this.au = null;
this.au_native = null; this.au_native = null;
this.au_ogvjs = null; this.au_ogvjs = null;
this.cover_url = '';
this.tracks = {}; this.tracks = {};
this.order = []; this.order = [];
@@ -509,6 +508,167 @@ try {
catch (ex) { } catch (ex) { }
var audio_eq = (function () {
var r = {
"en": false,
"bands": [31.25, 62.5, 125, 250, 500, 1000, 2000, 4000, 8000, 16000],
"gains": [0, -1, -2, -3, -4, -4, -3, -2, -1, 0],
"filters": [],
"last_au": null
};
try {
r.gains = jread('au_eq_gain', r.gains);
}
catch (ex) { }
r.draw = function () {
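// shift all gains so the loudest band sits at 0 dB (cut-only EQ avoids clipping), persist, then refresh the text fields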
var max = 0;
for (var a = 0; a < r.gains.length; a++)
if (max < r.gains[a])
max = r.gains[a];
if (max > 0)
for (var a = 0; a < r.gains.length; a++)
r.gains[a] -= max;
jwrite('au_eq_gain', r.gains);
var txt = QSA('input.eq_gain');
for (var a = 0; a < r.bands.length; a++)
txt[a].value = r.gains[a];
};
r.apply = function () {
r.draw();
var Ctx = window.AudioContext || window.webkitAudioContext;
if (!Ctx)
bcfg_set('au_eq', false);
if (!Ctx || !mp.au)
return;
if (!r.en && !mp.ac)
return;
if (mp.ac) {
for (var a = 0; a < r.filters.length; a++)
r.filters[a].disconnect();
mp.acs.disconnect();
}
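// an <audio> element can only be wrapped in a MediaElementSource once, so the AudioContext is rebuilt only when the track element changes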
if (!mp.ac || mp.au != r.last_au) {
if (mp.ac)
mp.ac.close();
r.last_au = mp.au;
mp.ac = new Ctx();
mp.acs = mp.ac.createMediaElementSource(mp.au);
}
if (!r.en) {
mp.acs.connect(mp.ac.destination);
return;
}
r.filters = [];
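// one biquad per band: lowshelf for the first, highshelf for the last, peaking in between; chained so audio flows source -> last filter -> ... -> first filter -> destination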
for (var a = 0; a < r.bands.length; a++) {
var fi = mp.ac.createBiquadFilter();
fi.frequency.value = r.bands[a];
fi.gain.value = r.gains[a];
fi.Q.value = a == 0 ? 0 : 1;
fi.type = a == 0 ? 'lowshelf' : a == r.bands.length - 1 ? 'highshelf' : 'peaking';
r.filters.push(fi);
}
for (var a = r.bands.length - 1; a >= 0; a--) {
r.filters[a].connect(a > 0 ? r.filters[a - 1] : mp.ac.destination);
}
mp.acs.connect(r.filters[r.filters.length - 1]);
}
function eq_step(e) {
ev(e);
var band = parseInt(this.getAttribute('band')),
step = parseFloat(this.getAttribute('step'));
r.gains[band] += step;
r.apply();
}
function adj_band(that, step) {
try {
var band = parseInt(that.getAttribute('band')),
v = parseFloat(that.value);
if (isNaN(v))
throw 42;
r.gains[band] = v + step;
}
catch (ex) {
return;
}
r.apply();
}
function eq_mod(e) {
ev(e);
adj_band(this, 0);
}
function eq_keydown(e) {
var step = e.key == 'ArrowUp' ? 0.25 : e.key == 'ArrowDown' ? -0.25 : 0;
if (step != 0)
adj_band(this, step);
}
var html = ['<table><tr><td rowspan="4">',
'<a id="au_eq" class="tgl btn" href="#">enable</a></td>'],
h2 = [], h3 = [], h4 = [];
for (var a = 0; a < r.bands.length; a++) {
var hz = r.bands[a];
if (hz >= 1000)
hz = (hz / 1000) + 'k';
hz = (hz + '').split('.')[0];
html.push('<td><a href="#" class="eq_step" step="0.5" band="' + a + '">+</a></td>');
h2.push('<td>' + hz + '</td>');
h4.push('<td><a href="#" class="eq_step" step="-0.5" band="' + a + '">&ndash;</a></td>');
h3.push('<td><input type="text" class="eq_gain" band="' + a + '" value="' + r.gains[a] + '" /></td>');
}
html.push('</tr><tr>');
html = html.join('\n');
html += h2.join('\n') + '</tr><tr>';
html += h3.join('\n') + '</tr><tr>';
html += h4.join('\n') + '</tr></table>';
ebi('audio_eq').innerHTML = html;
var stp = QSA('a.eq_step');
for (var a = 0, aa = stp.length; a < aa; a++)
stp[a].onclick = eq_step;
var txt = QSA('input.eq_gain');
for (var a = 0; a < r.gains.length; a++) {
txt[a].oninput = eq_mod;
txt[a].onkeydown = eq_keydown;
}
r.en = bcfg_get('au_eq', false);
ebi('au_eq').onclick = function (e) {
ev(e);
r.en = !r.en;
bcfg_set('au_eq', r.en);
r.apply();
};
r.draw();
return r;
})();
// plays the tid'th audio file on the page // plays the tid'th audio file on the page
function play(tid, seek, call_depth) { function play(tid, seek, call_depth) {
if (mp.order.length == 0) if (mp.order.length == 0)
@@ -569,6 +729,8 @@ function play(tid, seek, call_depth) {
mp.au = mp.au_native; mp.au = mp.au_native;
} }
audio_eq.apply();
mp.au.tid = tid; mp.au.tid = tid;
mp.au.src = url; mp.au.src = url;
mp.au.volume = mp.expvol(); mp.au.volume = mp.expvol();
@@ -710,8 +872,9 @@ function autoplay_blocked(seek) {
var thegrid = (function () { var thegrid = (function () {
var lfiles = ebi('files'); var lfiles = ebi('files'),
var gfiles = document.createElement('div'); gfiles = document.createElement('div');
gfiles.setAttribute('id', 'gfiles'); gfiles.setAttribute('id', 'gfiles');
gfiles.style.display = 'none'; gfiles.style.display = 'none';
gfiles.innerHTML = ( gfiles.innerHTML = (
@@ -733,7 +896,8 @@ var thegrid = (function () {
'en': bcfg_get('griden', false), 'en': bcfg_get('griden', false),
'sel': bcfg_get('gridsel', false), 'sel': bcfg_get('gridsel', false),
'sz': fcfg_get('gridsz', 10), 'sz': fcfg_get('gridsz', 10),
'isdirty': true 'isdirty': true,
'bbox': null
}; };
ebi('thumbs').onclick = function (e) { ebi('thumbs').onclick = function (e) {
@@ -803,8 +967,11 @@ var thegrid = (function () {
r.sz = v; r.sz = v;
swrite('gridsz', r.sz); swrite('gridsz', r.sz);
} }
try {
document.documentElement.style.setProperty('--grid-sz', r.sz + 'em'); document.documentElement.style.setProperty('--grid-sz', r.sz + 'em');
} }
catch (ex) { }
}
setsz(); setsz();
function seltgl(e) { function seltgl(e) {
@@ -820,10 +987,18 @@ var thegrid = (function () {
this.setAttribute('class', tr.getAttribute('class')); this.setAttribute('class', tr.getAttribute('class'));
} }
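// open a grid tile in a new tab instead of navigating; wired up below when the list view has selected rows (presumably to keep the selection intact)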
function bgopen(e) {
ev(e);
var url = this.getAttribute('href');
window.open(url, '_blank');
}
r.loadsel = function () { r.loadsel = function () {
var ths = QSA('#ggrid>a'); var ths = QSA('#ggrid>a'),
have_sel = !!QS('#files tr.sel');
for (var a = 0, aa = ths.length; a < aa; a++) { for (var a = 0, aa = ths.length; a < aa; a++) {
ths[a].onclick = r.sel ? seltgl : null; ths[a].onclick = r.sel ? seltgl : have_sel ? bgopen : null;
ths[a].setAttribute('class', ebi(ths[a].getAttribute('ref')).parentNode.parentNode.getAttribute('class')); ths[a].setAttribute('class', ebi(ths[a].getAttribute('ref')).parentNode.parentNode.getAttribute('class'));
} }
var uns = QS('#ggrid a[ref="unsearch"]'); var uns = QS('#ggrid a[ref="unsearch"]');
@@ -847,14 +1022,15 @@ var thegrid = (function () {
href = esc(ao.getAttribute('href')), href = esc(ao.getAttribute('href')),
ref = ao.getAttribute('id'), ref = ao.getAttribute('id'),
isdir = href.split('?')[0].slice(-1)[0] == '/', isdir = href.split('?')[0].slice(-1)[0] == '/',
ac = isdir ? ' class="dir"' : '',
ihref = href; ihref = href;
if (isdir) { if (r.thumbs) {
ihref = '/.cpr/ico/folder'
}
else if (r.thumbs) {
ihref += (ihref.indexOf('?') === -1 ? '?' : '&') + 'th=' + (have_webp ? 'w' : 'j'); ihref += (ihref.indexOf('?') === -1 ? '?' : '&') + 'th=' + (have_webp ? 'w' : 'j');
} }
else if (isdir) {
ihref = '/.cpr/ico/folder';
}
else { else {
var ar = href.split('?')[0].split('.'); var ar = href.split('?')[0].split('.');
if (ar.length > 1) if (ar.length > 1)
@@ -875,14 +1051,42 @@ var thegrid = (function () {
} }
html.push('<a href="' + href + '" ref="' + ref + '"><img src="' + html.push('<a href="' + href + '" ref="' + ref + '"><img src="' +
ihref + '" /><span>' + ao.innerHTML + '</span></a>'); ihref + '" /><span' + ac + '>' + ao.innerHTML + '</span></a>');
} }
lfiles.style.display = 'none'; lfiles.style.display = 'none';
gfiles.style.display = 'block'; gfiles.style.display = 'block';
ebi('ggrid').innerHTML = html.join('\n'); ebi('ggrid').innerHTML = html.join('\n');
r.bagit();
r.loadsel(); r.loadsel();
} }
r.bagit = function () {
if (!window.baguetteBox)
return;
if (r.bbox)
baguetteBox.destroy();
r.bbox = baguetteBox.run('#ggrid', {
captions: function (g) {
var idx = -1,
h = '' + g;
for (var a = 0; a < r.bbox.length; a++)
if (r.bbox[a].imageElement == g)
idx = a;
return '<a download href="' + h +
'">' + (idx + 1) + ' / ' + r.bbox.length + ' -- ' +
esc(uricom_dec(h.split('/').slice(-1)[0])[0]) + '</a>';
}
})[0];
};
setTimeout(function () {
import_js('/.cpr/baguettebox.js', r.bagit);
}, 1);
if (r.en) { if (r.en) {
loadgrid(); loadgrid();
} }
@@ -965,7 +1169,7 @@ document.onkeydown = function (e) {
if (k == 'KeyT') if (k == 'KeyT')
return ebi('thumbs').click(); return ebi('thumbs').click();
if (window['thegrid'] && thegrid.en) { if (thegrid.en) {
if (k == 'KeyS') if (k == 'KeyS')
return ebi('gridsel').click(); return ebi('gridsel').click();
@@ -1437,7 +1641,7 @@ var treectl = (function () {
if (hpush) if (hpush)
get_tree('.', xhr.top); get_tree('.', xhr.top);
enspin('#files'); enspin(thegrid.en ? '#gfiles' : '#files');
} }
function treegrow(e) { function treegrow(e) {
@@ -1517,6 +1721,7 @@ var treectl = (function () {
apply_perms(res.perms); apply_perms(res.perms);
despin('#files'); despin('#files');
despin('#gfiles');
ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : ""; ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : "";
ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : ""; ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : "";
@@ -2051,7 +2256,6 @@ var msel = (function () {
} }
function selui() { function selui() {
clmod(ebi('wtoggle'), 'sel', getsel().length); clmod(ebi('wtoggle'), 'sel', getsel().length);
if (window['thegrid'])
thegrid.loadsel(); thegrid.loadsel();
} }
function seltgl(e) { function seltgl(e) {


@@ -26,10 +26,23 @@ a {
border-radius: .2em; border-radius: .2em;
padding: .2em .8em; padding: .2em .8em;
} }
td, th { table {
border-collapse: collapse;
}
.vols td,
.vols th {
padding: .3em .6em; padding: .3em .6em;
text-align: left; text-align: left;
} }
.num {
border-right: 1px solid #bbb;
}
.num td {
padding: .1em .7em .1em 0;
}
.num td:first-child {
text-align: right;
}
.btns { .btns {
margin: 1em 0; margin: 1em 0;
} }
@@ -58,3 +71,6 @@ html.dark input {
padding: .5em .7em; padding: .5em .7em;
margin: 0 .5em 0 0; margin: 0 .5em 0 0;
} }
html.dark .num {
border-color: #777;
}


@@ -15,7 +15,15 @@
{%- if avol %} {%- if avol %}
<h1>admin panel:</h1> <h1>admin panel:</h1>
<table> <table><tr><td> <!-- hehehe -->
<table class="num">
<tr><td>scanning</td><td>{{ scanning }}</td></tr>
<tr><td>hash-q</td><td>{{ hashq }}</td></tr>
<tr><td>tag-q</td><td>{{ tagq }}</td></tr>
<tr><td>mtp-q</td><td>{{ mtpq }}</td></tr>
</table>
</td><td>
<table class="vols">
<thead><tr><th>vol</th><th>action</th><th>status</th></tr></thead> <thead><tr><th>vol</th><th>action</th><th>status</th></tr></thead>
<tbody> <tbody>
{% for mp in avol %} {% for mp in avol %}
@@ -25,6 +33,7 @@
{% endfor %} {% endfor %}
</tbody> </tbody>
</table> </table>
</td></tr></table>
<div class="btns"> <div class="btns">
<a href="{{ avol[0] }}?stack">dump stack</a> <a href="{{ avol[0] }}?stack">dump stack</a>
</div> </div>
@@ -50,7 +59,7 @@
<h1>login for more:</h1> <h1>login for more:</h1>
<ul> <ul>
<form method="post" enctype="multipart/form-data" action="/{{ url_suf }}"> <form method="post" enctype="multipart/form-data" action="/">
<input type="hidden" name="act" value="login" /> <input type="hidden" name="act" value="login" />
<input type="password" name="cppwd" /> <input type="password" name="cppwd" />
<input type="submit" value="Login" /> <input type="submit" value="Login" />


@@ -17,6 +17,7 @@ function goto_up2k() {
// chrome requires https to use crypto.subtle, // chrome requires https to use crypto.subtle,
// usually it's undefined but some chromes throw on invoke // usually it's undefined but some chromes throw on invoke
var up2k = null; var up2k = null;
var sha_js = window.WebAssembly ? 'hw' : 'ac'; // ff53,c57,sa11
try { try {
var cf = crypto.subtle || crypto.webkitSubtle; var cf = crypto.subtle || crypto.webkitSubtle;
cf.digest('SHA-512', new Uint8Array(1)).then( cf.digest('SHA-512', new Uint8Array(1)).then(
@@ -430,13 +431,15 @@ function up2k_init(subtle) {
// upload ui hidden by default, clicking the header shows it // upload ui hidden by default, clicking the header shows it
function init_deps() { function init_deps() {
if (!subtle && !window.asmCrypto) { if (!subtle && !window.asmCrypto) {
showmodal('<h1>loading sha512.js</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>'); var fn = 'sha512.' + sha_js + '.js';
import_js('/.cpr/deps/sha512.js', unmodal); showmodal('<h1>loading ' + fn + '</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
import_js('/.cpr/deps/' + fn, unmodal);
if (is_https) if (is_https)
ebi('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best'; ebi('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best';
else else
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance'; ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance <span style="color:#' +
(sha_js == 'ac' ? 'c84">(expecting 20' : '8a5">(but dont worry too much, expect 100') + ' MiB/s)</span>';
} }
} }
@@ -801,6 +804,14 @@ function up2k_init(subtle) {
var mou_ikkai = false; var mou_ikkai = false;
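// a handshake that has been in-flight for over 30 seconds is assumed lost; pull it out of busy and put it back on the todo queue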
if (st.busy.handshake.length > 0 &&
st.busy.handshake[0].busied < Date.now() - 30 * 1000
) {
console.log("retrying stuck handshake");
var t = st.busy.handshake.shift();
st.todo.handshake.unshift(t);
}
if (st.todo.handshake.length > 0 && if (st.todo.handshake.length > 0 &&
st.busy.handshake.length == 0 && ( st.busy.handshake.length == 0 && (
st.todo.handshake[0].t4 || ( st.todo.handshake[0].t4 || (
@@ -886,6 +897,10 @@ function up2k_init(subtle) {
return base64; return base64;
} }
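// hash-wasm returns a hex string; convert it to a Uint8Array like the other hash backends produce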
function hex2u8(txt) {
return new Uint8Array(txt.match(/.{2}/g).map(function (b) { return parseInt(b, 16); }));
}
function get_chunksize(filesize) { function get_chunksize(filesize) {
var chunksize = 1024 * 1024, var chunksize = 1024 * 1024,
stepsize = 512 * 1024; stepsize = 512 * 1024;
@@ -987,10 +1002,18 @@ function up2k_init(subtle) {
if (subtle) if (subtle)
subtle.digest('SHA-512', buf).then(hash_done); subtle.digest('SHA-512', buf).then(hash_done);
else setTimeout(function () { else setTimeout(function () {
var u8buf = new Uint8Array(buf);
if (sha_js == 'hw') {
hashwasm.sha512(u8buf).then(function (v) {
hash_done(hex2u8(v))
});
}
else {
var hasher = new asmCrypto.Sha512(); var hasher = new asmCrypto.Sha512();
hasher.process(new Uint8Array(buf)); hasher.process(u8buf);
hasher.finish(); hasher.finish();
hash_done(hasher.result); hash_done(hasher.result);
}
}, 1); }, 1);
}; };
@@ -1004,11 +1027,27 @@ function up2k_init(subtle) {
// //
function exec_handshake() { function exec_handshake() {
var t = st.todo.handshake.shift(); var t = st.todo.handshake.shift(),
me = Date.now();
st.busy.handshake.push(t); st.busy.handshake.push(t);
t.busied = me;
var xhr = new XMLHttpRequest(); var xhr = new XMLHttpRequest();
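// t.busied doubles as an attempt id; if it changed, this handshake was already requeued elsewhere and the response belongs to a zombie request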
xhr.onerror = function () {
if (t.busied != me) {
console.log('zombie handshake onerror,', t);
return;
}
console.log('handshake onerror, retrying');
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
st.todo.handshake.unshift(t);
};
xhr.onload = function (e) { xhr.onload = function (e) {
if (t.busied != me) {
console.log('zombie handshake onload,', t);
return;
}
if (xhr.status == 200) { if (xhr.status == 200) {
var response = JSON.parse(xhr.responseText); var response = JSON.parse(xhr.responseText);


@@ -238,6 +238,10 @@
color: #fff; color: #fff;
font-style: italic; font-style: italic;
} }
#u2foot span {
color: #999;
font-size: .9em;
}
#u2footfoot { #u2footfoot {
margin-bottom: -1em; margin-bottom: -1em;
} }

docs/README.md Normal file

@@ -0,0 +1,22 @@
# example `.epilogue.html`
save one of these as `.epilogue.html` inside a folder to customize it:
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
# example browser-css
point `--css-browser` to one of these by URL:
* [`browser.css`](browser.css) changes the background
* [`browser-icons.css`](browser-icons.css) adds filetype icons
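* for example, assuming this `docs/` folder is served at `/docs/` (path is just an assumption): `python3 -m copyparty --css-browser /docs/browser.css`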
# other stuff
## [`rclone.md`](rclone.md)
* notes on using rclone as a fuse client/server
## [`example.conf`](example.conf)
* example config file for `-c` which never really happened

docs/browser-icons.css Normal file

@@ -0,0 +1,68 @@
/* put filetype icons inline with text
#ggrid>a>span:before,
#ggrid>a>span.dir:before {
display: inline;
line-height: 0;
font-size: 1.7em;
margin: -.7em .1em -.5em -.6em;
}
*/
/* move folder icons top-left */
#ggrid>a>span.dir:before {
content: initial;
}
#ggrid>a[href$="/"]:before {
content: '📂';
display: block;
position: absolute;
margin: -.1em -.4em;
text-shadow: 0 0 .1em #000;
font-size: 2em;
}
/* put filetype icons top-left */
#ggrid>a:before {
display: block;
position: absolute;
margin: -.1em -.4em;
text-shadow: 0 0 .1em #000;
font-size: 2em;
}
/* video */
#ggrid>a:is(
[href$=".mkv"i],
[href$=".mp4"i],
[href$=".webm"i],
):before {
content: '📺';
}
/* audio */
#ggrid>a:is(
[href$=".mp3"i],
[href$=".ogg"i],
[href$=".opus"i],
[href$=".flac"i],
[href$=".m4a"i],
[href$=".aac"i],
):before {
content: '🎵';
}
/* image */
#ggrid>a:is(
[href$=".jpg"i],
[href$=".jpeg"i],
[href$=".png"i],
[href$=".gif"i],
[href$=".webp"i],
):before {
content: '🎨';
}

docs/browser.css Normal file

@@ -0,0 +1,29 @@
html {
background: #333 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed;
}
#files th {
background: rgba(32, 32, 32, 0.9) !important;
}
#ops,
#treeul,
#files td {
background: rgba(32, 32, 32, 0.3) !important;
}
html.light {
background: #eee url('/wp/wallhaven-dpxl6l.png') center / cover no-repeat fixed;
}
html.light #files th {
background: rgba(255, 255, 255, 0.9) !important;
}
html.light #ops,
html.light #treeul,
html.light #files td {
background: rgba(248, 248, 248, 0.8) !important;
}
#files * {
background: transparent !important;
}


@@ -86,6 +86,9 @@ var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.quer
# get the size and video-id of all youtube vids in folder, assuming filename ends with -id.ext, and create a copyparty search query # get the size and video-id of all youtube vids in folder, assuming filename ends with -id.ext, and create a copyparty search query
find -maxdepth 1 -printf '%s %p\n' | sort -n | awk '!/-([0-9a-zA-Z_-]{11})\.(mkv|mp4|webm)$/{next} {sub(/\.[^\.]+$/,"");n=length($0);v=substr($0,n-10);print $1, v}' | tee /dev/stderr | awk 'BEGIN {p="("} {printf("%s name like *-%s.* ",p,$2);p="or"} END {print ")\n"}' | cat >&2 find -maxdepth 1 -printf '%s %p\n' | sort -n | awk '!/-([0-9a-zA-Z_-]{11})\.(mkv|mp4|webm)$/{next} {sub(/\.[^\.]+$/,"");n=length($0);v=substr($0,n-10);print $1, v}' | tee /dev/stderr | awk 'BEGIN {p="("} {printf("%s name like *-%s.* ",p,$2);p="or"} END {print ")\n"}' | cat >&2
# unique stacks in a stackdump
f=a; rm -rf stacks; mkdir stacks; grep -E '^#' $f | while IFS= read -r n; do awk -v n="$n" '!$0{o=0} o; $0==n{o=1}' <$f >stacks/f; h=$(sha1sum <stacks/f | cut -c-16); mv stacks/f stacks/$h-"$n"; done ; find stacks/ | sort | uniq -cw24
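# (for each thread-header line in the dump, extract its stack up to the next blank line, name the file by a 16-char sha1 of that stack, then count how many threads share each hash)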
## ##
## sqlite3 stuff ## sqlite3 stuff
@@ -153,6 +156,9 @@ dbg.asyncStore.pendingBreakpoints = {}
# fix firefox phantom breakpoints # fix firefox phantom breakpoints
about:config >> devtools.debugger.prefs-schema-version = -1 about:config >> devtools.debugger.prefs-schema-version = -1
# determine server version
git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > /dev/shm/revs && cat /dev/shm/revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js 2>/dev/null | diff -wNarU0 - <(cat /mnt/Users/ed/Downloads/ref/{util,browser,up2k}.js) | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
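# (walks every commit, diffs its bundled util/browser/up2k.js against the files downloaded from the server, and prints the diff size per revision; the smallest diff points at the running version)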
## ##
## http 206 ## http 206

docs/tcp-debug.sh Normal file

@@ -0,0 +1,32 @@
(cd ~/dev/copyparty && strace -Tttyyvfs 256 -o strace.strace python3 -um copyparty -i 127.0.0.1 --http-only --stackmon /dev/shm/cpps,10 ) 2>&1 | tee /dev/stderr > ~/log-copyparty-$(date +%Y-%m%d-%H%M%S).txt
14/Jun/2021:16:34:02 1623688447.212405 death
14/Jun/2021:16:35:02 1623688502.420860 back
tcpdump -nni lo -w /home/ed/lo.pcap
# 16:35:25.324662 IP 127.0.0.1.48632 > 127.0.0.1.3920: Flags [F.], seq 849, ack 544, win 359, options [nop,nop,TS val 809396796 ecr 809396796], length 0
tcpdump -nnr /home/ed/lo.pcap | awk '/ > 127.0.0.1.3920: /{sub(/ > .*/,"");sub(/.*\./,"");print}' | sort -n | uniq | while IFS= read -r port; do echo; tcpdump -nnr /home/ed/lo.pcap 2>/dev/null | grep -E "\.$port( > |: F)" | sed -r 's/ > .*, /, /'; done | grep -E '^16:35:0.*length [^0]' -C50
16:34:02.441732 IP 127.0.0.1.48638, length 0
16:34:02.441738 IP 127.0.0.1.3920, length 0
16:34:02.441744 IP 127.0.0.1.48638, length 0
16:34:02.441756 IP 127.0.0.1.48638, length 791
16:34:02.441759 IP 127.0.0.1.3920, length 0
16:35:02.445529 IP 127.0.0.1.48638, length 0
16:35:02.489194 IP 127.0.0.1.3920, length 0
16:35:02.515595 IP 127.0.0.1.3920, length 216
16:35:02.515600 IP 127.0.0.1.48638, length 0
grep 48638 "$(find ~ -maxdepth 1 -name log-copyparty-\*.txt | sort | tail -n 1)"
1623688502.510380 48638 rh
1623688502.511291 48638 Unrecv direct ...
1623688502.511827 48638 rh = 791
16:35:02.518 127.0.0.1 48638 shut(8): [Errno 107] Socket not connected
Exception in thread httpsrv-0.1-48638:
grep 48638 ~/dev/copyparty/strace.strace
14561 16:35:02.506310 <... accept4 resumed> {sa_family=AF_INET, sin_port=htons(48638), sin_addr=inet_addr("127.0.0.1")}, [16], SOCK_CLOEXEC) = 8<TCP:[127.0.0.1:3920->127.0.0.1:48638]> <0.000012>
15230 16:35:02.510725 write(1<pipe:[256639555]>, "1623688502.510380 48638 rh\n", 27 <unfinished ...>


@@ -9,6 +9,12 @@ ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_zopfli=1.0.3 ver_zopfli=1.0.3
# TODO
# sha512.hw.js https://github.com/Daninet/hash-wasm
# sha512.kc.js https://github.com/chm-diederichs/sha3-wasm
# awk '/HMAC state/{o=1} /var HEAP/{o=0} /function hmac_reset/{o=1} /return \{/{o=0} /var __extends =/{o=1} /var Hash =/{o=0} /hmac_|pbkdf2_/{next} o{next} {gsub(/IllegalStateError/,"Exception")} {sub(/^ +/,"");sub(/^\/\/ .*/,"");sub(/;$/," ;")} 1' <sha512.ac.js.orig >sha512.ac.js; for fn in sha512.ac.js.orig sha512.ac.js; do wc -c <$fn; wc -c <$fn.gz ; for n in {1..9}; do printf '%8d %d bz\n' $(bzip2 -c$n <$fn | wc -c) $n; done; done
# download; # download;
# the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap # the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
RUN mkdir -p /z/dist/no-pk \ RUN mkdir -p /z/dist/no-pk \


@@ -32,6 +32,10 @@ gtar=$(command -v gtar || command -v gnutar) || true
[ -e /opt/local/bin/bzip2 ] && [ -e /opt/local/bin/bzip2 ] &&
bzip2() { /opt/local/bin/bzip2 "$@"; } bzip2() { /opt/local/bin/bzip2 "$@"; }
} }
gawk=$(command -v gawk || command -v gnuawk || command -v awk)
awk() { $gawk "$@"; }
pybin=$(command -v python3 || command -v python) || { pybin=$(command -v python3 || command -v python) || {
echo need python echo need python
exit 1 exit 1
@@ -194,17 +198,46 @@ tmv "$f"
# up2k goes from 28k to 22k laff # up2k goes from 28k to 22k laff
echo entabbening echo entabbening
find | grep -E '\.(js|css|html)$' | while IFS= read -r f; do find | grep -E '\.css$' | while IFS= read -r f; do
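# crude css minifier: strip indentation, tighten "prop : val ;" to "prop:val;", drop spaces after commas, and join each rule onto a single line (output is only flushed on lines ending with "}")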
awk '{
sub(/^[ \t]+/,"");
sub(/[ \t]+$/,"");
$0=gensub(/^([a-z-]+) *: *(.*[^ ]) *;$/,"\\1:\\2;","1");
sub(/ +\{$/,"{");
gsub(/, /,",")
}
!/\}$/ {printf "%s",$0;next}
1
' <$f | sed 's/;\}$/}/' >t
tmv "$f"
done
find | grep -E '\.(js|html)$' | while IFS= read -r f; do
unexpand -t 4 --first-only <"$f" >t unexpand -t 4 --first-only <"$f" >t
tmv "$f" tmv "$f"
done done
gzres() {
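# pre-compress every js/css asset (except bundled deps) with zopfli-grade gzip via pigz -11 when available, plain gzip otherwise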
command -v pigz &&
pk='pigz -11 -J 34 -I 100' ||
pk='gzip'
echo "$pk"
find | grep -E '\.(js|css)$' | grep -vF /deps/ | while IFS= read -r f; do
echo -n .
$pk "$f"
done
echo
}
gzres
echo gen tarlist echo gen tarlist
for d in copyparty dep-j2; do find $d -type f; done | for d in copyparty dep-j2; do find $d -type f; done |
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort | sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1 sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
(grep -vE 'gz$' list1; grep -E 'gz$' list1) >list (grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1) >list || true
echo creating tar echo creating tar
args=(--owner=1000 --group=1000) args=(--owner=1000 --group=1000)

scripts/profile.py Normal file

@@ -0,0 +1,34 @@
#!/usr/bin/env python3
import sys
sys.path.insert(0, ".")
cmd = sys.argv[1]
if cmd == "cpp":
from copyparty.__main__ import main
argv = ["__main__", "-v", "srv::r", "-v", "../../yt:yt:r"]
main(argv=argv)
elif cmd == "test":
from unittest import main
argv = ["__main__", "discover", "-s", "tests"]
main(module=None, argv=argv)
else:
raise Exception()
# import dis; print(dis.dis(main))
# macos:
# option1) python3.9 -m pip install --user -U vmprof==0.4.9
# option2) python3.9 -m pip install --user -U https://github.com/vmprof/vmprof-python/archive/refs/heads/master.zip
#
# python -m vmprof -o prof --lines ./scripts/profile.py test
# linux: ~/.local/bin/vmprofshow prof tree | grep -vF '[1m 0.'
# macos: ~/Library/Python/3.9/bin/vmprofshow prof tree | grep -vF '[1m 0.'
# win: %appdata%\..\Roaming\Python\Python39\Scripts\vmprofshow.exe prof tree


@@ -3,10 +3,13 @@ set -ex
pids=() pids=()
for py in python{2,3}; do for py in python{2,3}; do
$py -m unittest discover -s tests >/dev/null & nice $py -m unittest discover -s tests >/dev/null &
pids+=($!) pids+=($!)
done done
python3 scripts/test/smoketest.py &
pids+=($!)
for pid in ${pids[@]}; do for pid in ${pids[@]}; do
wait $pid wait $pid
done done


@@ -47,7 +47,7 @@ grep -E '/(python|pypy)[0-9\.-]*$' >$dir/pys || true
printf '\033[1;30mlooking for jinja2 in [%s]\033[0m\n' "$_py" >&2 printf '\033[1;30mlooking for jinja2 in [%s]\033[0m\n' "$_py" >&2
$_py -c 'import jinja2' 2>/dev/null || continue $_py -c 'import jinja2' 2>/dev/null || continue
printf '%s\n' "$_py" printf '%s\n' "$_py"
mv $dir/{,x.}jinja2 mv $dir/{,x.}dep-j2
break break
done)" done)"

scripts/test/smoketest.py Normal file

@@ -0,0 +1,209 @@
import os
import sys
import time
import shlex
import shutil
import signal
import tempfile
import requests
import threading
import subprocess as sp
CPP = []
class Cpp(object):
def __init__(self, args):
args = [sys.executable, "-m", "copyparty"] + args
print(" ".join([shlex.quote(x) for x in args]))
self.ls_pre = set(list(os.listdir()))
self.p = sp.Popen(args)
# , stdout=sp.PIPE, stderr=sp.PIPE)
self.t = threading.Thread(target=self._run)
self.t.daemon = True
self.t.start()
def _run(self):
self.so, self.se = self.p.communicate()
def stop(self, wait):
if wait:
os.kill(self.p.pid, signal.SIGINT)
self.t.join(timeout=2)
else:
self.p.kill() # macos py3.8
def clean(self):
t = os.listdir()
for f in t:
if f not in self.ls_pre and f.startswith("up."):
os.unlink(f)
def await_idle(self, ub, timeout):
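# poll the control panel until scanning / hash-q / tag-q all report idle, giving up after roughly `timeout` seconds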
req = ["scanning</td><td>False", "hash-q</td><td>0", "tag-q</td><td>0"]
lim = int(timeout * 10)
u = ub + "?h"
for n in range(lim):
try:
time.sleep(0.1)
r = requests.get(u, timeout=0.1)
for x in req:
if x not in r.text:
print("ST: {}/{} miss {}".format(n, lim, x))
raise Exception()
print("ST: idle")
return
except:
pass
def tc1():
ub = "http://127.0.0.1:4321/"
td = os.path.join("srv", "smoketest")
try:
shutil.rmtree(td)
except:
if os.path.exists(td):
raise
for _ in range(10):
try:
os.mkdir(td)
except:
time.sleep(0.1) # win10
assert os.path.exists(td)
vidp = os.path.join(tempfile.gettempdir(), "smoketest.h264")
if not os.path.exists(vidp):
cmd = "ffmpeg -f lavfi -i testsrc=48x32:3 -t 1 -c:v libx264 -tune animation -preset veryslow -crf 69"
sp.check_call(cmd.split(" ") + [vidp])
with open(vidp, "rb") as f:
ovid = f.read()
args = [
"-p4321",
"-e2dsa",
"-e2tsr",
"--no-mutagen",
"--th-ff-jpg",
"--hist",
os.path.join(td, "dbm"),
]
pdirs = []
hpaths = {}
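# volume layout: r/w/a at the top level, each containing a plain (non-volume) "j" folder with nested r/w/a volumes inside it, covering every permission combo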
for d1 in ["r", "w", "a"]:
pdirs.append("{}/{}".format(td, d1))
pdirs.append("{}/{}/j".format(td, d1))
for d2 in ["r", "w", "a"]:
d = os.path.join(td, d1, "j", d2)
pdirs.append(d)
os.makedirs(d)
pdirs = [x.replace("\\", "/") for x in pdirs]
udirs = [x.split("/", 2)[2] for x in pdirs]
perms = [x.rstrip("j/")[-1] for x in pdirs]
for pd, ud, p in zip(pdirs, udirs, perms):
if ud[-1] == "j":
continue
hp = None
if pd.endswith("st/a"):
hp = hpaths[ud] = os.path.join(td, "db1")
elif pd[:-1].endswith("a/j/"):
hpaths[ud] = os.path.join(td, "dbm")
hp = None
else:
hp = "-"
hpaths[ud] = os.path.join(pd, ".hist")
arg = "{}:{}:{}".format(pd, ud, p, hp)
if hp:
arg += ":chist=" + hp
args += ["-v", arg]
# return
cpp = Cpp(args)
CPP.append(cpp)
cpp.await_idle(ub, 3)
for d in udirs:
vid = ovid + "\n{}".format(d).encode("utf-8")
try:
requests.post(ub + d, data={"act": "bput"}, files={"f": ("a.h264", vid)})
except:
pass
cpp.clean()
# GET permission
for d, p in zip(udirs, perms):
u = "{}{}/a.h264".format(ub, d)
r = requests.get(u)
ok = bool(r)
if ok != (p in ["a"]):
raise Exception("get {} with perm {} at {}".format(ok, p, u))
# stat filesystem
for d, p in zip(pdirs, perms):
u = "{}/a.h264".format(d)
ok = os.path.exists(u)
if ok != (p in ["a", "w"]):
raise Exception("stat {} with perm {} at {}".format(ok, p, u))
# GET thumbnail, verify contents
for d, p in zip(udirs, perms):
u = "{}{}/a.h264?th=j".format(ub, d)
r = requests.get(u)
ok = bool(r and r.content[:3] == b"\xff\xd8\xff")
if ok != (p in ["a"]):
raise Exception("thumb {} with perm {} at {}".format(ok, p, u))
# check tags
cpp.await_idle(ub, 5)
for d, p in zip(udirs, perms):
u = "{}{}?ls".format(ub, d)
r = requests.get(u)
j = r.json() if r else False
tag = None
if j:
for f in j["files"]:
tag = tag or f["tags"].get("res")
r_ok = bool(j)
w_ok = bool(r_ok and j.get("files"))
if not r_ok or w_ok != (p in ["a"]):
raise Exception("ls {} with perm {} at {}".format(ok, p, u))
if (tag and p != "a") or (not tag and p == "a"):
raise Exception("tag {} with perm {} at {}".format(tag, p, u))
if tag is not None and tag != "48x32":
raise Exception("tag [{}] at {}".format(tag, u))
cpp.stop(True)
def run(tc):
try:
tc()
finally:
try:
CPP[0].stop(False)
except:
pass
def main():
run(tc1)
if __name__ == "__main__":
main()


@@ -10,12 +10,11 @@ import pprint
import tarfile import tarfile
import tempfile import tempfile
import unittest import unittest
from argparse import Namespace from argparse import Namespace
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
from tests import util as tu from tests import util as tu
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
def hdr(query): def hdr(query):
@@ -29,6 +28,7 @@ class Cfg(Namespace):
a=a, a=a,
v=v, v=v,
c=c, c=c,
rproxy=0,
ed=False, ed=False,
no_zip=False, no_zip=False,
no_scandir=False, no_scandir=False,
@@ -38,6 +38,9 @@ class Cfg(Namespace):
nih=True, nih=True,
mtp=[], mtp=[],
mte="a", mte="a",
hist=None,
no_hash=False,
css_browser=None,
**{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()} **{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
) )
@@ -100,7 +103,7 @@ class TestHttpCli(unittest.TestCase):
pprint.pprint(vcfg) pprint.pprint(vcfg)
self.args = Cfg(v=vcfg, a=["o:o", "x:x"]) self.args = Cfg(v=vcfg, a=["o:o", "x:x"])
self.auth = AuthSrv(self.args, self.log) self.asrv = AuthSrv(self.args, self.log)
vfiles = [x for x in allfiles if x.startswith(top)] vfiles = [x for x in allfiles if x.startswith(top)]
for fp in vfiles: for fp in vfiles:
rok, wok = self.can_rw(fp) rok, wok = self.can_rw(fp)
@@ -189,12 +192,12 @@ class TestHttpCli(unittest.TestCase):
def put(self, url): def put(self, url):
buf = "PUT /{0} HTTP/1.1\r\nCookie: cppwd=o\r\nConnection: close\r\nContent-Length: {1}\r\n\r\nok {0}\n" buf = "PUT /{0} HTTP/1.1\r\nCookie: cppwd=o\r\nConnection: close\r\nContent-Length: {1}\r\n\r\nok {0}\n"
buf = buf.format(url, len(url) + 4).encode("utf-8") buf = buf.format(url, len(url) + 4).encode("utf-8")
conn = tu.VHttpConn(self.args, self.auth, self.log, buf) conn = tu.VHttpConn(self.args, self.asrv, self.log, buf)
HttpCli(conn).run() HttpCli(conn).run()
return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1) return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
def curl(self, url, binary=False): def curl(self, url, binary=False):
conn = tu.VHttpConn(self.args, self.auth, self.log, hdr(url)) conn = tu.VHttpConn(self.args, self.asrv, self.log, hdr(url))
HttpCli(conn).run() HttpCli(conn).run()
if binary: if binary:
h, b = conn.s._reply.split(b"\r\n\r\n", 1) h, b = conn.s._reply.split(b"\r\n\r\n", 1)


@@ -7,20 +7,26 @@ import json
import shutil import shutil
import tempfile import tempfile
import unittest import unittest
from textwrap import dedent from textwrap import dedent
from argparse import Namespace from argparse import Namespace
from copyparty.authsrv import AuthSrv
from copyparty import util
from tests import util as tu from tests import util as tu
from copyparty.authsrv import AuthSrv, VFS
from copyparty import util
class Cfg(Namespace): class Cfg(Namespace):
def __init__(self, a=[], v=[], c=None): def __init__(self, a=[], v=[], c=None):
ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()} ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
ex["mtp"] = [] ex2 = {
ex["mte"] = "a" "mtp": [],
"mte": "a",
"hist": None,
"no_hash": False,
"css_browser": None,
"rproxy": 0,
}
ex.update(ex2)
super(Cfg, self).__init__(a=a, v=v, c=c, **ex) super(Cfg, self).__init__(a=a, v=v, c=c, **ex)
@@ -48,6 +54,7 @@ class TestVFS(unittest.TestCase):
self.assertEqual(util.undot(query), response) self.assertEqual(util.undot(query), response)
def ls(self, vfs, vpath, uname): def ls(self, vfs, vpath, uname):
# type: (VFS, str, str) -> tuple[str, str, str]
"""helper for resolving and listing a folder""" """helper for resolving and listing a folder"""
vn, rem = vfs.get(vpath, uname, True, False) vn, rem = vfs.get(vpath, uname, True, False)
r1 = vn.ls(rem, uname, False) r1 = vn.ls(rem, uname, False)
@@ -113,13 +120,13 @@ class TestVFS(unittest.TestCase):
n = vfs.nodes["a"] n = vfs.nodes["a"]
self.assertEqual(len(vfs.nodes), 1) self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a") self.assertEqual(n.vpath, "a")
self.assertEqual(n.realpath, td + "/a") self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertEqual(n.uread, ["*", "k"]) self.assertEqual(n.uread, ["*", "k"])
self.assertEqual(n.uwrite, ["k"]) self.assertEqual(n.uwrite, ["k"])
n = n.nodes["ac"] n = n.nodes["ac"]
self.assertEqual(len(vfs.nodes), 1) self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a/ac") self.assertEqual(n.vpath, "a/ac")
self.assertEqual(n.realpath, td + "/a/ac") self.assertEqual(n.realpath, os.path.join(td, "a", "ac"))
self.assertEqual(n.uread, ["*", "k"]) self.assertEqual(n.uread, ["*", "k"])
self.assertEqual(n.uwrite, ["k"]) self.assertEqual(n.uwrite, ["k"])
n = n.nodes["acb"] n = n.nodes["acb"]
@@ -251,7 +258,7 @@ class TestVFS(unittest.TestCase):
n = au.vfs n = au.vfs
# root was not defined, so PWD with no access to anyone # root was not defined, so PWD with no access to anyone
self.assertEqual(n.vpath, "") self.assertEqual(n.vpath, "")
self.assertEqual(n.realpath, td) self.assertEqual(n.realpath, None)
self.assertEqual(n.uread, []) self.assertEqual(n.uread, [])
self.assertEqual(n.uwrite, []) self.assertEqual(n.uwrite, [])
self.assertEqual(len(n.nodes), 1) self.assertEqual(len(n.nodes), 1)


@@ -1,17 +1,36 @@
import os import os
import sys
import time import time
import shutil import shutil
import jinja2 import jinja2
import tempfile import tempfile
import platform
import subprocess as sp import subprocess as sp
from copyparty.util import Unrecv
WINDOWS = platform.system() == "Windows"
ANYWIN = WINDOWS or sys.platform in ["msys"]
MACOS = platform.system() == "Darwin"
J2_ENV = jinja2.Environment(loader=jinja2.BaseLoader) J2_ENV = jinja2.Environment(loader=jinja2.BaseLoader)
J2_FILES = J2_ENV.from_string("{{ files|join('\n') }}") J2_FILES = J2_ENV.from_string("{{ files|join('\n') }}")
def nah(*a, **ka):
return False
if MACOS:
import posixpath
posixpath.islink = nah
os.path.islink = nah
# 25% faster; until any tests do symlink stuff
from copyparty.util import Unrecv
def runcmd(*argv): def runcmd(*argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE) p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
stdout, stderr = p.communicate() stdout, stderr = p.communicate()
@@ -41,7 +60,7 @@ def get_ramdisk():
if os.path.exists("/Volumes"): if os.path.exists("/Volumes"):
# hdiutil eject /Volumes/cptd/ # hdiutil eject /Volumes/cptd/
devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://65536") devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://131072")
devname = devname.strip() devname = devname.strip()
print("devname: [{}]".format(devname)) print("devname: [{}]".format(devname))
for _ in range(10): for _ in range(10):
@@ -91,12 +110,13 @@ class VHttpSrv(object):
class VHttpConn(object): class VHttpConn(object):
def __init__(self, args, auth, log, buf): def __init__(self, args, asrv, log, buf):
self.s = VSock(buf) self.s = VSock(buf)
self.sr = Unrecv(self.s) self.sr = Unrecv(self.s)
self.addr = ("127.0.0.1", "42069") self.addr = ("127.0.0.1", "42069")
self.args = args self.args = args
self.auth = auth self.asrv = asrv
self.is_mp = False
self.log_func = log self.log_func = log
self.log_src = "a" self.log_src = "a"
self.lf_url = None self.lf_url = None