Compare commits

...

99 Commits

Author SHA1 Message Date
ed 0a8e759fe6 v0.11.17 2021-06-17 00:31:38 +02:00
ed d70981cdd1 fix eq param input 2021-06-17 00:29:14 +02:00
ed e08c03b886 audio-filters: expose gain control 2021-06-16 22:25:29 +02:00
ed 56086e8984 ux: contrast tweaks + fix anchor-scroll 2021-06-16 21:38:30 +02:00
ed 1aa9033022 add play/pause hotkey 2021-06-16 19:19:29 +02:00
ed 076e103d53 ux: responsive settings layout 2021-06-16 19:10:32 +02:00
ed 38c00ea8fc print thumbnail cleanup summary 2021-06-16 18:57:10 +02:00
ed 415757af43 mention the symlink-scanner too 2021-06-16 18:37:23 +02:00
ed e72ed8c0ed mention some essentials 2021-06-16 18:29:29 +02:00
ed 32f9c6b5bb v0.11.16 2021-06-16 01:51:18 +02:00
ed 6251584ef6 fix .13dB clipping with all-zero eq 2021-06-15 23:37:44 +00:00
ed f3e413bc28 icons 2021-06-16 00:01:07 +02:00
ed 6f6cc8f3f8 move eq to the player settings tab 2021-06-15 22:26:39 +02:00
ed 8b081e9e69 media player: continue to next folder 2021-06-15 22:19:53 +02:00
ed c8a510d10e fully hide columns when minimized 2021-06-15 21:43:37 +02:00
ed 6f834f6679 sticky tree header 2021-06-15 21:07:27 +02:00
ed cf2d6650ac audio-eq: flatten frequency response 2021-06-15 21:06:00 +02:00
ed cd52dea488 v0.11.15 2021-06-15 00:01:11 +02:00
ed 6ea75df05d add audio equalizer 2021-06-14 23:58:56 +02:00
ed 4846e1e8d6 mention num.clients for rproxy 2021-06-14 19:27:34 +02:00
ed fc024f789d v0.11.14 2021-06-14 03:05:50 +02:00
ed 473e773aea fix deadlock 2021-06-14 00:55:11 +00:00
ed 48a2e1a353 add threadwatcher 2021-06-14 01:57:18 +02:00
ed 6da63fbd79 up2k-cli: recover from lost handshakes 2021-06-14 01:01:06 +02:00
ed 5bec37fcee fix cosmetic login glitch 2021-06-14 00:28:08 +02:00
ed 3fd0ba0a31 oh right its the other way around 2021-06-13 22:49:55 +02:00
ed 241a143366 add --rproxy for explicit proxy level 2021-06-13 22:22:31 +02:00
ed a537064da7 custom-css example to add filetype icons 2021-06-13 00:49:28 +02:00
ed f3dfd24c92 v0.11.13 2021-06-12 20:37:05 +02:00
ed fa0a7f50bb add image gallery 2021-06-12 20:25:08 +02:00
ed 44a78a7e21 v0.11.12 2021-06-12 04:28:21 +02:00
ed 6b75cbf747 add readme 2021-06-12 04:26:53 +02:00
ed e7b18ab9fe custom css 2021-06-12 04:22:07 +02:00
ed aa12830015 keep transparency in thumbnails 2021-06-12 03:32:06 +02:00
ed f156e00064 s/cover/folder/g 2021-06-12 03:06:56 +02:00
ed d53c212516 add mtp queue to status page 2021-06-12 02:23:48 +02:00
ed ca27f8587c add cygpath support for volume src too 2021-06-12 01:55:45 +02:00
ed 88ce008e16 more status on admin panel 2021-06-12 01:39:14 +02:00
ed 081d2cc5d7 add folder thumbnails (cover.jpg or png) 2021-06-11 23:54:54 +02:00
ed 60ac68d000 single authsrv instance per process 2021-06-11 23:01:13 +02:00
ed fbe656957d fix race 2021-06-11 18:12:06 +02:00
ed 5534c78c17 tests pass 2021-06-11 03:10:33 +02:00
ed a45a53fdce support macos ffmpeg 2021-06-11 03:05:42 +02:00
ed 972a56e738 fix stuff 2021-06-11 01:45:28 +02:00
ed 5e03b3ca38 use parent db/thumbs in jump-volumes 2021-06-10 20:43:19 +02:00
ed 1078d933b4 adding --no-hash 2021-06-10 18:08:30 +02:00
ed d6bf300d80 option to store state out-of-volume (mostly untested) 2021-06-10 01:27:04 +02:00
ed a359d64d44 v0.11.11 2021-06-08 23:43:00 +02:00
ed 22396e8c33 zopfli js/css 2021-06-08 23:19:35 +02:00
ed 5ded5a4516 alphabetical up2k indexing 2021-06-08 21:42:08 +02:00
ed 79c7639aaf haha memes 2021-06-08 21:10:25 +02:00
ed 5bbf875385 fuse-client: print python version 2021-06-08 20:19:51 +02:00
ed 5e159432af vscode: support running with -jN 2021-06-08 20:18:24 +02:00
ed 1d6ae409f6 count expenses when sending files 2021-06-08 20:17:53 +02:00
ed 9d729d3d1a add thread names 2021-06-08 20:14:23 +02:00
ed 4dd5d4e1b7 when rootless, blank instead of block rootdir 2021-06-08 18:35:55 +02:00
ed acd8149479 dont track workloads unless multiprocessing 2021-06-08 18:01:59 +02:00
ed b97a1088fa v0.11.10 2021-06-08 09:41:31 +02:00
ed b77bed3324 fix terminating tls connections wow 2021-06-08 09:40:49 +02:00
ed a2b7c85a1f forgot what version was running on a box 2021-06-08 00:01:08 +02:00
ed b28533f850 v0.11.9 2021-06-07 20:22:10 +02:00
ed bd8c7e538a sfx.sh: use system jinja2 when available 2021-06-07 20:09:45 +02:00
ed 89e48cff24 detect recursive symlinks 2021-06-07 20:09:18 +02:00
ed ae90a7b7b6 mention firefox funny 2021-06-07 02:10:54 +02:00
ed 6fc1be04da support windows-py3.5 2021-06-06 21:10:53 +02:00
ed 0061d29534 v0.11.8 2021-06-06 19:09:55 +02:00
ed a891f34a93 update sharex example 2021-06-06 19:06:33 +02:00
ed d6a1e62a95 append file-ext when avoiding name collisions 2021-06-06 18:53:32 +02:00
ed cda36ea8b4 support json replies from bput 2021-06-06 18:47:21 +02:00
ed 909a76434a a 2021-06-06 03:07:11 +02:00
ed 39348ef659 add sharex example 2021-06-06 02:53:01 +02:00
ed 99d30edef3 v0.11.7 2021-06-05 03:33:29 +02:00
ed b63ab15bf9 gallery links in new tab if a selection is atcive 2021-06-05 03:27:44 +02:00
ed 485cb4495c minify asmcrypto a bit 2021-06-05 03:25:54 +02:00
ed df018eb1f2 add colors 2021-06-05 01:34:39 +02:00
ed 49aa47a9b8 way faster sha512 wasm fallback 2021-06-05 01:14:16 +02:00
ed 7d20eb202a optimize 2021-06-04 19:35:08 +02:00
ed c533da9129 fix single-threaded mtag 2021-06-04 19:00:24 +02:00
ed 5cba31a814 spin on thumbnails too 2021-06-04 17:38:57 +02:00
ed 1d824cb26c add volume lister / containment checker 2021-06-04 02:23:46 +02:00
ed 83b903d60e readme: update todos 2021-06-02 09:42:33 +02:00
ed 9c8ccabe8e v0.11.6 2021-06-01 08:25:35 +02:00
ed b1f2c4e70d gain 1000x performance with one weird trick 2021-06-01 06:17:46 +00:00
ed 273ca0c8da run tests on commit 2021-06-01 05:49:41 +02:00
ed d6f516b34f pypi exclusive 2021-06-01 04:14:23 +02:00
ed 83127858ca v0.11.4 2021-06-01 03:55:51 +02:00
ed d89329757e fix permission check in tar/zip generator (gdi) 2021-06-01 03:55:31 +02:00
ed 49ffec5320 v0.11.3 2021-06-01 03:11:02 +02:00
ed 2eaae2b66a fix youtube query example 2021-06-01 02:53:54 +02:00
ed ea4441e25c v0.11.2 2021-06-01 02:47:37 +02:00
ed e5f34042f9 more precise volume state in admin panel 2021-06-01 02:32:53 +02:00
ed 271096874a fix adv and date handling in query lang 2021-06-01 02:10:17 +02:00
ed 8efd780a72 thumbnail cleaner too noisy 2021-06-01 01:51:03 +02:00
ed 41bcf7308d fix search results as thumbnails 2021-06-01 01:41:36 +02:00
ed d102bb3199 fix on-upload hasher (0.11.1 regression) 2021-06-01 01:20:34 +02:00
ed d0bed95415 search: add a query language 2021-06-01 01:16:40 +02:00
ed 2528729971 add dbtool 2021-05-30 16:49:08 +00:00
ed 292c18b3d0 v0.11.1 2021-05-29 23:39:39 +02:00
ed 0be7c5e2d8 live db/tags rescan 2021-05-29 23:35:07 +02:00
55 changed files with 3793 additions and 782 deletions

.vscode/launch.json (vendored, 17 changed lines)

@@ -16,12 +16,9 @@
"-e2ts",
"-mtp",
".bpm=f,bin/mtag/audio-bpm.py",
"-a",
"ed:wark",
"-v",
"srv::r:aed:cnodupe",
"-v",
"dist:dist:r"
"-aed:wark",
"-vsrv::r:aed:cnodupe",
"-vdist:dist:r"
]
},
{
@@ -43,5 +40,13 @@
"${file}"
]
},
{
"name": "Python: Current File",
"type": "python",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"justMyCode": false
},
]
}

.vscode/launch.py (vendored, 28 changed lines)

@@ -3,14 +3,16 @@
# launches 10x faster than mspython debugpy
# and is stoppable with ^C
import re
import os
import sys
print(sys.executable)
import shlex
sys.path.insert(0, os.getcwd())
import jstyleson
from copyparty.__main__ import main as copyparty
import subprocess as sp
with open(".vscode/launch.json", "r", encoding="utf-8") as f:
tj = f.read()
@@ -25,11 +27,19 @@ except:
pass
argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv]
try:
copyparty(["a"] + argv)
except SystemExit as ex:
if ex.code:
raise
if re.search(" -j ?[0-9]", " ".join(argv)):
argv = [sys.executable, "-m", "copyparty"] + argv
sp.check_call(argv)
else:
sys.path.insert(0, os.getcwd())
from copyparty.__main__ import main as copyparty
try:
copyparty(["a"] + argv)
except SystemExit as ex:
if ex.code:
raise
print("\n\033[32mokke\033[0m")
sys.exit(1)


@@ -37,6 +37,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [other tricks](#other-tricks)
* [searching](#searching)
* [search configuration](#search-configuration)
* [database location](#database-location)
* [metadata from audio files](#metadata-from-audio-files)
* [file parser plugins](#file-parser-plugins)
* [complete examples](#complete-examples)
@@ -61,6 +62,14 @@ download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/do
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone full access to the current folder; see `-h` for help if you want accounts and volumes etc
some recommended options:
* `-e2dsa` enables general file indexing, see [search configuration](#search-configuration)
* `-e2ts` enables audio metadata indexing (needs either FFprobe or mutagen), see [optional dependencies](#optional-dependencies)
* `-v /mnt/music:/music:r:afoo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, with user `foo` as `a`dmin (read/write), password `bar`
* replace `:r:afoo` with `:rfoo` to only make the folder readable by `foo` and nobody else
* in addition to `r`ead and `a`dmin, `w`rite makes a folder write-only, so cannot list/access files in it
* `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access
you may also want these, especially on servers:
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)
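the recommended options above can also be passed to copyparty in-process, the way `.vscode/launch.py` calls `copyparty.__main__.main`; a minimal sketch (paths, account name and password are placeholders):

```
# sketch: start copyparty from python with the recommended flags listed above
from copyparty.__main__ import main as copyparty

copyparty([
    "copyparty",                       # argv[0] placeholder; flag parsing starts at argv[1]
    "-e2dsa",                          # general file indexing
    "-e2ts",                           # audio metadata indexing
    "-a", "foo:bar",                   # account "foo" with password "bar"
    "-v", "/mnt/music:/music:r:afoo",  # share /mnt/music at /music, readable by all, foo is admin
])
```

note that this blocks until the server is stopped, same as running the sfx directly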
@@ -68,12 +77,16 @@ you may also want these, especially on servers:
## notes
general:
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
* because no browsers currently implement the media-query to do this properly orz
browser-specific:
* iPhone/iPad: use Firefox to download files
* Android-Chrome: increase "parallel uploads" for higher speed (android bug)
* Android-Firefox: takes a while to select files (their fix for ☝️)
* Desktop-Firefox: ~~may use gigabytes of RAM if your files are massive~~ *seems to be OK now*
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
* because no browsers currently implement the media-query to do this properly orz
* Desktop-Firefox: may stop you from deleting folders you've uploaded until you visit `about:memory` and click `Minimize memory usage`
## status
@@ -96,11 +109,12 @@ summary: all planned features work! now please enjoy the bloatening
* ☑ FUSE client (read-only)
* browser
* ☑ tree-view
*media player
*audio player
* ☑ thumbnails
* ☑ images using Pillow
* ☑ videos using FFmpeg
* ☑ cache eviction (max-age; maybe max-size eventually)
* ☑ image gallery
* ☑ SPA (browse while uploading)
* if you use the file-tree on the left only, not folders in the file list
* server indexing
@@ -117,12 +131,12 @@ summary: all planned features work! now please enjoy the bloatening
* Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* Windows: python 2.7 cannot handle filenames with mojibake
* MacOS: `--th-ff-jpg` may fix thumbnails using macports-FFmpeg
## general bugs
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
* cannot mount something at `/d1/d2/d3` unless `d2` exists inside `d1`
* hiding the contents at url `/d1/d2/d3` using `-v :d1/d2/d3:cd2d` has the side-effect of creating databases (for files/tags) inside folders d1 and d2, and those databases take precedence over the main db at the top of the vfs - this means all files in d2 and below will be reindexed unless you already had a vfs entry at or below d2
* probably more, pls let me know
## not my bugs
@@ -157,7 +171,7 @@ the browser has the following hotkeys
* `0..9` jump to 10%..90%
* `U/O` skip 10sec back/forward
* `J/L` prev/next song
* `J` also starts playing the folder
* `M` play/pause (also starts playing the folder)
* in the grid view:
* `S` toggle multiselect
* `A/D` zoom
@@ -176,6 +190,8 @@ click `[-]` and `[+]` to adjust the size, and the `[a]` toggles if the tree shou
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how destructive your users are
images named `folder.jpg` and `folder.png` become the thumbnail of the folder they're in
## zip downloads
@@ -292,7 +308,29 @@ the same arguments can be set as volume flags, in addition to `d2d` and `d2t` fo
* `-v ~/music::r:cd2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::r:cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`
`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those
note:
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
you can choose to only index filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash` or the volume-flag `cdhash`, this has the following consequences:
* initial indexing is way faster, especially when the volume is on a networked disk
* makes it impossible to [file-search](#file-search)
* if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected
if you set `--no-hash`, you can enable hashing for specific volumes using flag `cehash`
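for example (a sketch in the same style as the python launch example further up; paths are placeholders), indexing a slow NAS share without hashes while keeping hashing for a local volume:

```
# sketch: --no-hash globally, re-enabled for one volume with the cehash volflag
from copyparty.__main__ import main as copyparty

copyparty([
    "copyparty", "-e2dsa", "--no-hash",
    "-v", "/mnt/nas/media:/media:r",         # indexed without content-hashes (no file-search / dupe detection)
    "-v", "/home/ed/music:/music:r:cehash",  # hashed as usual
])
```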
## database location
copyparty creates a subfolder named `.hist` inside each volume where it stores the database, thumbnails, and some other stuff
this can instead be kept in a single place using the `--hist` argument, or the `hist=` volume flag, or a mix of both:
* `--hist ~/.cache/copyparty -v ~/music::r:chist=-` sets `~/.cache/copyparty` as the default place to put volume info, but `~/music` gets the regular `.hist` subfolder (`-` restores default behavior)
note:
* markdown edits are always stored in a local `.hist` subdirectory
* on windows the volflag path is cyglike, so `/c/temp` means `C:\temp` but use regular paths for `--hist`
* you can use cygpaths for volumes too, `-v C:\Users::r` and `-v /c/users::r` both work
## metadata from audio files
@@ -395,6 +433,8 @@ quick summary of more eccentric web-browsers trying to view a directory index:
* cross-platform python client available in [./bin/](bin/)
* [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
* sharex (screenshot utility): see [./contrib/sharex.sxcu](contrib/#sharexsxcu)
copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads:
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
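the same checksum in python, for reference (a sketch; reads the whole file into memory for brevity):

```
# compute the same truncated base64 sha512 as the b512() shell function above
import base64
import hashlib
import sys

digest = hashlib.sha512(open(sys.argv[1], "rb").read()).digest()
print(base64.b64encode(digest).decode("ascii")[:43])
```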
@@ -518,20 +558,25 @@ in the `scripts` folder:
roughly sorted by priority
* separate sqlite table per tag
* audio fingerprinting
* readme.md as epilogue
* single sha512 across all up2k chunks? maybe
* reduce up2k roundtrips
* start from a chunk index and just go
* terminate client on bad data
* `os.copy_file_range` for up2k cloning
* single sha512 across all up2k chunks? maybe
* figure out the deal with pixel3a not being connectable as hotspot
* pixel3a having unpredictable 3sec latency in general :||||
discarded ideas
* separate sqlite table per tag
* performance fixed by skipping some indexes (`+mt.k`)
* audio fingerprinting
* only makes sense if there can be a wasm client and that doesn't exist yet (except for olaf which is agpl hence counts as not existing)
* `os.copy_file_range` for up2k cloning
* almost never hit this path anyways
* up2k partials ui
* feels like there isn't much point
* cache sha512 chunks on client
* too dangerous
* comment field
* nah
* look into android thumbnail cache file format
* absolutely not


@@ -45,3 +45,18 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
# [`mtag/`](mtag/)
* standalone programs which perform misc. file analysis
* copyparty can Popen programs like these during file indexing to collect additional metadata
# [`dbtool.py`](dbtool.py)
upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty recommends to wipe the DB and reindex because it now collects additional metadata during analysis, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
for that example (upgrading to v0.11.0), first move the old db aside, launch copyparty, let it rebuild the db until the point where it starts running mtp (colored messages as it adds the mtp tags), then CTRL-C and patch in the old mtp tags from the old db instead
so assuming you have `-mtp` parsers to provide the tags `key` and `.bpm`:
```
~/bin/dbtool.py -ls up2k.db
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -cmp
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy key
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy .bpm -vac
```


@@ -54,6 +54,12 @@ MACOS = platform.system() == "Darwin"
info = log = dbg = None
print("{} v{} @ {}".format(
platform.python_implementation(),
".".join([str(x) for x in sys.version_info]),
sys.executable))
try:
from fuse import FUSE, FuseOSError, Operations
except:

bin/dbtool.py (new executable file, 198 lines)

@@ -0,0 +1,198 @@
#!/usr/bin/env python3
import os
import sys
import sqlite3
import argparse
DB_VER = 3
def die(msg):
print("\033[31m\n" + msg + "\n\033[0m")
sys.exit(1)
def read_ver(db):
for tab in ["ki", "kv"]:
try:
c = db.execute(r"select v from {} where k = 'sver'".format(tab))
except:
continue
rows = c.fetchall()
if rows:
return int(rows[0][0])
return "corrupt"
def ls(db):
nfiles = next(db.execute("select count(w) from up"))[0]
ntags = next(db.execute("select count(w) from mt"))[0]
print(f"{nfiles} files")
print(f"{ntags} tags\n")
print("number of occurences for each tag,")
print(" 'x' = file has no tags")
print(" 't:mtp' = the mtp flag (file not mtp processed yet)")
print()
for k, nk in db.execute("select k, count(k) from mt group by k order by k"):
print(f"{nk:9} {k}")
def compare(n1, d1, n2, d2, verbose):
nt = next(d1.execute("select count(w) from up"))[0]
n = 0
miss = 0
for w, rd, fn in d1.execute("select w, rd, fn from up"):
n += 1
if n % 25_000 == 0:
m = f"\033[36mchecked {n:,} of {nt:,} files in {n1} against {n2}\033[0m"
print(m)
q = "select w from up where substr(w,1,16) = ?"
hit = d2.execute(q, (w[:16],)).fetchone()
if not hit:
miss += 1
if verbose:
print(f"file in {n1} missing in {n2}: [{w}] {rd}/{fn}")
print(f" {miss} files in {n1} missing in {n2}\n")
nt = next(d1.execute("select count(w) from mt"))[0]
n = 0
miss = {}
nmiss = 0
for w, k, v in d1.execute("select * from mt"):
n += 1
if n % 100_000 == 0:
m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m"
print(m)
v2 = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
if v2:
v2 = v2[0]
# if v != v2 and v2 and k in [".bpm", "key"] and n2 == "src":
# print(f"{w} [{rd}/{fn}] {k} = [{v}] / [{v2}]")
if v2 is not None:
if k.startswith("."):
try:
diff = abs(float(v) - float(v2))
if diff > float(v) / 0.9:
v2 = None
else:
v2 = v
except:
pass
if v != v2:
v2 = None
if v2 is None:
nmiss += 1
try:
miss[k] += 1
except:
miss[k] = 1
if verbose:
q = "select rd, fn from up where substr(w,1,16) = ?"
rd, fn = d1.execute(q, (w,)).fetchone()
print(f"missing in {n2}: [{w}] [{rd}/{fn}] {k} = {v}")
for k, v in sorted(miss.items()):
if v:
print(f"{n1} has {v:6} more {k:<6} tags than {n2}")
print(f"in total, {nmiss} missing tags in {n2}\n")
def copy_mtp(d1, d2, tag, rm):
nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0]
n = 0
ndone = 0
for w, k, v in d1.execute("select * from mt where k = ?", (tag,)):
n += 1
if n % 25_000 == 0:
m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ndone} copied\033[0m"
print(m)
hit = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
if hit:
hit = hit[0]
if hit != v:
ndone += 1
if hit is not None:
d2.execute("delete from mt where w = ? and +k = ?", (w, k))
d2.execute("insert into mt values (?,?,?)", (w, k, v))
if rm:
d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w,))
d2.commit()
print(f"copied {ndone} {tag} tags over")
def main():
os.system("")
print()
ap = argparse.ArgumentParser()
ap.add_argument("db", help="database to work on")
ap.add_argument("-src", metavar="DB", type=str, help="database to copy from")
ap2 = ap.add_argument_group("informational / read-only stuff")
ap2.add_argument("-v", action="store_true", help="verbose")
ap2.add_argument("-ls", action="store_true", help="list summary for db")
ap2.add_argument("-cmp", action="store_true", help="compare databases")
ap2 = ap.add_argument_group("options which modify target db")
ap2.add_argument("-copy", metavar="TAG", type=str, help="mtp tag to copy over")
ap2.add_argument(
"-rm-mtp-flag",
action="store_true",
help="when an mtp tag is copied over, also mark that as done, so copyparty won't run mtp on it",
)
ap2.add_argument("-vac", action="store_true", help="optimize DB")
ar = ap.parse_args()
for v in [ar.db, ar.src]:
if v and not os.path.exists(v):
die("database must exist")
db = sqlite3.connect(ar.db)
ds = sqlite3.connect(ar.src) if ar.src else None
for d, n in [[ds, "src"], [db, "dst"]]:
if not d:
continue
ver = read_ver(d)
if ver == "corrupt":
die("{} database appears to be corrupt, sorry")
if ver != DB_VER:
m = f"{n} db is version {ver}, this tool only supports version {DB_VER}, please upgrade it with copyparty first"
die(m)
if ar.ls:
ls(db)
if ar.cmp:
if not ds:
die("need src db to compare against")
compare("src", ds, "dst", db, ar.v)
compare("dst", db, "src", ds, ar.v)
if ar.copy:
copy_mtp(ds, db, ar.copy, ar.rm_mtp_flag)
if __name__ == "__main__":
main()


@@ -9,6 +9,16 @@
* assumes the webserver and copyparty is running on the same server/IP
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript
### [`sharex.sxcu`](sharex.sxcu)
* sharex config file to upload screenshots and grab the URL
* `RequestURL`: full URL to the target folder
* `pw`: password (remove the `pw` line if anon-write)
however if your copyparty is behind a reverse-proxy, you may want to use [`sharex-html.sxcu`](sharex-html.sxcu) instead:
* `RequestURL`: full URL to the target folder
* `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$`
* `pw`: password (remove `Parameters` if anon-write)
### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
* disables thumbnails and folder-type detection in windows explorer
* makes it way faster (especially for slow/networked locations (such as copyparty-fuse))


@@ -1,3 +1,8 @@
# when running copyparty behind a reverse-proxy,
# make sure that copyparty allows at least as many clients as the proxy does,
# so run copyparty with -nc 512 if your nginx has the default limits
# (worker_processes 1, worker_connections 512)
upstream cpp {
server 127.0.0.1:3923;
keepalive 120;

contrib/sharex-html.sxcu (new file, 19 lines)

@@ -0,0 +1,19 @@
{
"Version": "13.5.0",
"Name": "copyparty-html",
"DestinationType": "ImageUploader",
"RequestMethod": "POST",
"RequestURL": "http://127.0.0.1:3923/sharex",
"Parameters": {
"pw": "wark"
},
"Body": "MultipartFormData",
"Arguments": {
"act": "bput"
},
"FileFormName": "f",
"RegexList": [
"bytes // <a href=\"/([^\"]+)\""
],
"URL": "http://127.0.0.1:3923/$regex:1|1$"
}

contrib/sharex.sxcu (new file, 17 lines)

@@ -0,0 +1,17 @@
{
"Version": "13.5.0",
"Name": "copyparty",
"DestinationType": "ImageUploader",
"RequestMethod": "POST",
"RequestURL": "http://127.0.0.1:3923/sharex",
"Parameters": {
"pw": "wark",
"j": null
},
"Body": "MultipartFormData",
"Arguments": {
"act": "bput"
},
"FileFormName": "f",
"URL": "$json:files[0].url$"
}
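outside of sharex, the same endpoint can be scripted; a sketch using the third-party `requests` package (host, password and filename are placeholders):

```
# multipart upload like the sharex config above, requesting a json reply with ?j
import requests

r = requests.post(
    "http://127.0.0.1:3923/sharex",
    params={"pw": "wark", "j": ""},             # password, and ask for json instead of html
    data={"act": "bput"},                       # same form arguments as the sxcu files
    files={"f": open("screenshot.png", "rb")},  # same form field name as FileFormName
)
print(r.json()["files"][0]["url"])              # what sharex grabs via $json:files[0].url$
```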


@@ -23,7 +23,7 @@ from textwrap import dedent
from .__init__ import E, WINDOWS, VT100, PY2
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
from .svchub import SvcHub
from .util import py_desc, align_tab, IMPLICATIONS
from .util import py_desc, align_tab, IMPLICATIONS, alltrace
HAVE_SSL = True
try:
@@ -182,6 +182,16 @@ def sighandler(sig=None, frame=None):
print("\n".join(msg))
def stackmon(fp, ival):
ctr = 0
while True:
ctr += 1
time.sleep(ival)
st = "{}, {}\n{}".format(ctr, time.time(), alltrace())
with open(fp, "wb") as f:
f.write(st.encode("utf-8", "replace"))
def run_argparse(argv, formatter):
ap = argparse.ArgumentParser(
formatter_class=formatter,
@@ -222,32 +232,67 @@ def run_argparse(argv, formatter):
"print,get" prints the data in the log and returns GET
(leave out the ",get" to return an error instead)
--ciphers help = available ssl/tls ciphers,
--ssl-ver help = available ssl/tls versions,
default is what python considers safe, usually >= TLS1
values for --ls:
"USR" is a user to browse as; * is anonymous, ** is all users
"VOL" is a single volume to scan, default is * (all vols)
"FLAG" is flags;
"v" in addition to realpaths, print usernames and vpaths
"ln" only prints symlinks leaving the volume mountpoint
"p" exits 1 if any such symlinks are found
"r" resumes startup after the listing
examples:
--ls '**' # list all files which are possible to read
--ls '**,*,ln' # check for dangerous symlinks
--ls '**,*,ln,p,r' # check, then start normally if safe
\033[0m
"""
),
)
# fmt: off
ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
ap.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
ap.add_argument("-q", action="store_true", help="quiet")
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account, USER:PASS; example [ed:wark")
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
ap.add_argument("-ed", action="store_true", help="enable ?dots")
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap.add_argument("-nih", action="store_true", help="no info hostname")
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
ap.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
ap.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
ap.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms")
ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
ap2 = ap.add_argument_group('network options')
ap2.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
ap2.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")
ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
ap2.add_argument("--ssl-ver", metavar="LIST", type=str, help="set allowed ssl/tls versions; [help] shows available versions; default is what your python version considers safe")
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ssl/tls ciphers; [help] shows available ciphers")
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
ap2 = ap.add_argument_group('opt-outs')
ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap2.add_argument("-nih", action="store_true", help="no info hostname")
ap2.add_argument("-nid", action="store_true", help="no info disk-usage")
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
ap2 = ap.add_argument_group('safety options')
ap2.add_argument("--ls", metavar="U[,V[,F]]", help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
ap2.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
ap2 = ap.add_argument_group('logging options')
ap2.add_argument("-q", action="store_true", help="quiet")
ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
ap2.add_argument("--ihead", metavar="HEADER", action='append', help="dump incoming header")
ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
ap2 = ap.add_argument_group('admin panel options')
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
ap2 = ap.add_argument_group('thumbnail options')
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
@@ -256,8 +301,9 @@ def run_argparse(argv, formatter):
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs")
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=1800, help="cleanup interval")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
ap2 = ap.add_argument_group('database options')
@@ -267,6 +313,8 @@ def run_argparse(argv, formatter):
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
ap2.add_argument("--hist", metavar="PATH", type=str, help="where to store volume state")
ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead")
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
@@ -275,20 +323,14 @@ def run_argparse(argv, formatter):
ap2.add_argument("-mtp", metavar="M=[f,]bin", action="append", type=str, help="read tag M using bin")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
ap2.add_argument("--ssl-ver", metavar="LIST", type=str, help="ssl/tls versions to allow")
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
ap2 = ap.add_argument_group('appearance options')
ap2.add_argument("--css-browser", metavar="L", help="URL to additional CSS to include")
ap2 = ap.add_argument_group('debug options')
ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
ap2.add_argument("--ihead", metavar="HEADER", action='append', help="dump incoming header")
ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing")
ap2.add_argument("--stackmon", metavar="P,S", help="write stacktrace to Path every S second")
return ap.parse_args(args=argv[1:])
# fmt: on
@@ -328,6 +370,16 @@ def main(argv=None):
except AssertionError:
al = run_argparse(argv, Dodge11874)
if al.stackmon:
fp, f = al.stackmon.rsplit(",", 1)
f = int(f)
t = threading.Thread(
target=stackmon,
args=(fp, f),
)
t.daemon = True
t.start()
# propagate implications
for k1, k2 in IMPLICATIONS:
if getattr(al, k1):
@@ -358,6 +410,9 @@ def main(argv=None):
+ " (if you crash with codec errors then that is why)"
)
if WINDOWS and sys.version_info < (3, 6):
al.no_scandir = True
# signal.signal(signal.SIGINT, sighandler)
SvcHub(al).run()


@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (0, 11, 0)
VERSION = (0, 11, 17)
CODENAME = "the grid"
BUILD_DT = (2021, 5, 29)
BUILD_DT = (2021, 6, 17)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)


@@ -5,35 +5,49 @@ import re
import os
import sys
import stat
import base64
import hashlib
import threading
from .__init__ import PY2, WINDOWS
from .__init__ import WINDOWS
from .util import IMPLICATIONS, undot, Pebkac, fsdec, fsenc, statdir, nuprint
class VFS(object):
"""single level in the virtual fs"""
def __init__(self, realpath, vpath, uread=[], uwrite=[], flags={}):
def __init__(self, realpath, vpath, uread=[], uwrite=[], uadm=[], flags={}):
self.realpath = realpath # absolute path on host filesystem
self.vpath = vpath # absolute path in the virtual filesystem
self.uread = uread # users who can read this
self.uwrite = uwrite # users who can write this
self.uadm = uadm # users who are regular admins
self.flags = flags # config switches
self.nodes = {} # child nodes
self.all_vols = {vpath: self} # flattened recursive
self.histtab = None # all realpath->histpath
self.dbv = None # closest full/non-jump parent
if realpath:
self.histpath = os.path.join(realpath, ".hist") # db / thumbcache
self.all_vols = {vpath: self} # flattened recursive
else:
self.histpath = None
self.all_vols = None
def __repr__(self):
return "VFS({})".format(
", ".join(
"{}={!r}".format(k, self.__dict__[k])
for k in "realpath vpath uread uwrite flags".split()
for k in "realpath vpath uread uwrite uadm flags".split()
)
)
def _trk(self, vol):
self.all_vols[vol.vpath] = vol
return vol
def get_all_vols(self, outdict):
if self.realpath:
outdict[self.vpath] = self
for v in self.nodes.values():
v.get_all_vols(outdict)
def add(self, src, dst):
"""get existing, or add new path to the vfs"""
@@ -45,18 +59,19 @@ class VFS(object):
name, dst = dst.split("/", 1)
if name in self.nodes:
# exists; do not manipulate permissions
return self._trk(self.nodes[name].add(src, dst))
return self.nodes[name].add(src, dst)
vn = VFS(
"{}/{}".format(self.realpath, name),
os.path.join(self.realpath, name) if self.realpath else None,
"{}/{}".format(self.vpath, name).lstrip("/"),
self.uread,
self.uwrite,
self.flags,
self.uadm,
self._copy_flags(name),
)
self._trk(vn)
vn.dbv = self.dbv or self
self.nodes[name] = vn
return self._trk(vn.add(src, dst))
return vn.add(src, dst)
if dst in self.nodes:
# leaf exists; return as-is
@@ -65,8 +80,26 @@ class VFS(object):
# leaf does not exist; create and keep permissions blank
vp = "{}/{}".format(self.vpath, dst).lstrip("/")
vn = VFS(src, vp)
vn.dbv = self.dbv or self
self.nodes[dst] = vn
return self._trk(vn)
return vn
def _copy_flags(self, name):
flags = {k: v for k, v in self.flags.items()}
hist = flags.get("hist")
if hist and hist != "-":
flags["hist"] = "{}/{}".format(hist.rstrip("/"), name)
return flags
def bubble_flags(self):
if self.dbv:
for k, v in self.dbv.flags.items():
if k not in ["hist"]:
self.flags[k] = v
for v in self.nodes.values():
v.bubble_flags()
def _find(self, vpath):
"""return [vfs,remainder]"""
@@ -94,6 +127,7 @@ class VFS(object):
]
def get(self, vpath, uname, will_read, will_write):
# type: (str, str, bool, bool) -> tuple[VFS, str]
"""returns [vfsnode,fs_remainder] if user has the requested permissions"""
vn, rem = self._find(vpath)
@@ -105,6 +139,15 @@ class VFS(object):
return vn, rem
def get_dbv(self, vrem):
dbv = self.dbv
if not dbv:
return self, vrem
vrem = [self.vpath[len(dbv.vpath) + 1 :], vrem]
vrem = "/".join([x for x in vrem if x])
return dbv, vrem
def canonical(self, rem):
"""returns the canonical path (fully-resolved absolute fs path)"""
rp = self.realpath
@@ -133,7 +176,8 @@ class VFS(object):
#
return os.path.realpath(rp)
def ls(self, rem, uname, scandir, lstat=False):
def ls(self, rem, uname, scandir, incl_wo=False, lstat=False):
# type: (str, str, bool, bool, bool) -> tuple[str, str, dict[str, VFS]]
"""return user-readable [fsdir,real,virt] items at vpath"""
virt_vis = {} # nodes readable by user
abspath = self.canonical(rem)
@@ -141,12 +185,12 @@ class VFS(object):
real.sort()
if not rem:
for name, vn2 in sorted(self.nodes.items()):
if (
uname in vn2.uread
or "*" in vn2.uread
or uname in vn2.uwrite
or "*" in vn2.uwrite
):
ok = uname in vn2.uread or "*" in vn2.uread
if not ok and incl_wo:
ok = uname in vn2.uwrite or "*" in vn2.uwrite
if ok:
virt_vis[name] = vn2
# no vfs nodes in the list of real inodes
@@ -154,13 +198,21 @@ class VFS(object):
return [abspath, real, virt_vis]
def walk(self, rel, rem, uname, dots, scandir, lstat=False):
def walk(self, rel, rem, seen, uname, dots, scandir, lstat):
"""
recursively yields from ./rem;
rel is a unix-style user-defined vpath (not vfs-related)
"""
fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, lstat)
fsroot, vfs_ls, vfs_virt = self.ls(
rem, uname, scandir, incl_wo=False, lstat=lstat
)
if seen and not fsroot.startswith(seen[-1]) and fsroot in seen:
print("bailing from symlink loop,\n {}\n {}".format(seen[-1], fsroot))
return
seen = seen[:] + [fsroot]
rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
@@ -175,7 +227,7 @@ class VFS(object):
wrel = (rel + "/" + rdir).lstrip("/")
wrem = (rem + "/" + rdir).lstrip("/")
for x in self.walk(wrel, wrem, uname, scandir, lstat):
for x in self.walk(wrel, wrem, seen, uname, dots, scandir, lstat):
yield x
for n, vfs in sorted(vfs_virt.items()):
@@ -183,14 +235,16 @@ class VFS(object):
continue
wrel = (rel + "/" + n).lstrip("/")
for x in vfs.walk(wrel, "", uname, scandir, lstat):
for x in vfs.walk(wrel, "", seen, uname, dots, scandir, lstat):
yield x
def zipgen(self, vrem, flt, uname, dots, scandir):
if flt:
flt = {k: True for k in flt}
for vpath, apath, files, rd, vd in self.walk("", vrem, uname, dots, scandir):
for vpath, apath, files, rd, vd in self.walk(
"", vrem, [], uname, dots, scandir, False
):
if flt:
files = [x for x in files if x[0] in flt]
@@ -226,17 +280,19 @@ class VFS(object):
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
yield f
def user_tree(self, uname, readable=False, writable=False):
ret = []
opt1 = readable and (uname in self.uread or "*" in self.uread)
opt2 = writable and (uname in self.uwrite or "*" in self.uwrite)
if opt1 or opt2:
ret.append(self.vpath)
def user_tree(self, uname, readable, writable, admin):
is_readable = False
if uname in self.uread or "*" in self.uread:
readable.append(self.vpath)
is_readable = True
if uname in self.uwrite or "*" in self.uwrite:
writable.append(self.vpath)
if is_readable:
admin.append(self.vpath)
for _, vn in sorted(self.nodes.items()):
ret.extend(vn.user_tree(uname, readable, writable))
return ret
vn.user_tree(uname, readable, writable, admin)
class AuthSrv(object):
@@ -257,7 +313,8 @@ class AuthSrv(object):
self.reload()
def log(self, msg, c=0):
self.log_func("auth", msg, c)
if self.log_func:
self.log_func("auth", msg, c)
def laggy_iter(self, iterable):
"""returns [value,isFinalValue]"""
@@ -269,7 +326,7 @@ class AuthSrv(object):
yield prev, True
def _parse_config_file(self, fd, user, mread, mwrite, mflags, mount):
def _parse_config_file(self, fd, user, mread, mwrite, madm, mflags, mount):
vol_src = None
vol_dst = None
self.line_ctr = 0
@@ -301,6 +358,7 @@ class AuthSrv(object):
mount[vol_dst] = vol_src
mread[vol_dst] = []
mwrite[vol_dst] = []
madm[vol_dst] = []
mflags[vol_dst] = {}
continue
@@ -311,10 +369,15 @@ class AuthSrv(object):
uname = "*"
self._read_vol_str(
lvl, uname, mread[vol_dst], mwrite[vol_dst], mflags[vol_dst]
lvl,
uname,
mread[vol_dst],
mwrite[vol_dst],
madm[vol_dst],
mflags[vol_dst],
)
def _read_vol_str(self, lvl, uname, mr, mw, mf):
def _read_vol_str(self, lvl, uname, mr, mw, ma, mf):
if lvl == "c":
cval = True
if "=" in uname:
@@ -332,6 +395,9 @@ class AuthSrv(object):
if lvl in "wa":
mw.append(uname)
if lvl == "a":
ma.append(uname)
def _read_volflag(self, flags, name, value, is_list):
if name not in ["mtp"]:
flags[name] = value
@@ -355,6 +421,7 @@ class AuthSrv(object):
user = {} # username:password
mread = {} # mountpoint:[username]
mwrite = {} # mountpoint:[username]
madm = {} # mountpoint:[username]
mflags = {} # mountpoint:[flag]
mount = {} # dst:src (mountpoint:realpath)
@@ -372,23 +439,31 @@ class AuthSrv(object):
raise Exception("invalid -v argument: [{}]".format(v_str))
src, dst, perms = m.groups()
if WINDOWS and src.startswith("/"):
src = "{}:\\{}".format(src[1], src[3:])
# print("\n".join([src, dst, perms]))
src = fsdec(os.path.abspath(fsenc(src)))
dst = dst.strip("/")
mount[dst] = src
mread[dst] = []
mwrite[dst] = []
madm[dst] = []
mflags[dst] = {}
perms = perms.split(":")
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
self._read_vol_str(lvl, uname, mread[dst], mwrite[dst], mflags[dst])
self._read_vol_str(
lvl, uname, mread[dst], mwrite[dst], madm[dst], mflags[dst]
)
if self.args.c:
for cfg_fn in self.args.c:
with open(cfg_fn, "rb") as f:
try:
self._parse_config_file(f, user, mread, mwrite, mflags, mount)
self._parse_config_file(
f, user, mread, mwrite, madm, mflags, mount
)
except:
m = "\n\033[1;31m\nerror in config file {} on line {}:\n\033[0m"
print(m.format(cfg_fn, self.line_ctr))
@@ -399,7 +474,7 @@ class AuthSrv(object):
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
elif "" not in mount:
# there's volumes but no root; make root inaccessible
vfs = VFS(os.path.abspath("."), "")
vfs = VFS(None, "")
vfs.flags["d2d"] = True
maxdepth = 0
@@ -410,13 +485,20 @@ class AuthSrv(object):
if dst == "":
# rootfs was mapped; fully replaces the default CWD vfs
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst], mflags[dst])
vfs = VFS(
mount[dst], dst, mread[dst], mwrite[dst], madm[dst], mflags[dst]
)
continue
v = vfs.add(mount[dst], dst)
v.uread = mread[dst]
v.uwrite = mwrite[dst]
v.uadm = madm[dst]
v.flags = mflags[dst]
v.dbv = None
vfs.all_vols = {}
vfs.get_all_vols(vfs.all_vols)
missing_users = {}
for d in [mread, mwrite]:
@@ -433,6 +515,69 @@ class AuthSrv(object):
)
raise Exception("invalid config")
promote = []
demote = []
for vol in vfs.all_vols.values():
hid = hashlib.sha512(fsenc(vol.realpath)).digest()
hid = base64.b32encode(hid).decode("ascii").lower()
vflag = vol.flags.get("hist")
if vflag == "-":
pass
elif vflag:
if WINDOWS and vflag.startswith("/"):
vflag = "{}:\\{}".format(vflag[1], vflag[3:])
vol.histpath = vflag
elif self.args.hist:
for nch in range(len(hid)):
hpath = os.path.join(self.args.hist, hid[: nch + 1])
try:
os.makedirs(hpath)
except:
pass
powner = os.path.join(hpath, "owner.txt")
try:
with open(powner, "rb") as f:
owner = f.read().rstrip()
except:
owner = None
me = fsenc(vol.realpath).rstrip()
if owner not in [None, me]:
continue
if owner is None:
with open(powner, "wb") as f:
f.write(me)
vol.histpath = hpath
break
vol.histpath = os.path.realpath(vol.histpath)
if vol.dbv:
if os.path.exists(os.path.join(vol.histpath, "up2k.db")):
promote.append(vol)
vol.dbv = None
else:
demote.append(vol)
# discard jump-vols
for v in demote:
vfs.all_vols.pop(v.vpath)
if promote:
msg = [
"\n the following jump-volumes were generated to assist the vfs.\n As they contain a database (probably from v0.11.11 or older),\n they are promoted to full volumes:"
]
for vol in promote:
msg.append(
" /{} ({}) ({})".format(vol.vpath, vol.realpath, vol.histpath)
)
self.log("\n\n".join(msg) + "\n", c=3)
vfs.histtab = {v.realpath: v.histpath for v in vfs.all_vols.values()}
all_mte = {}
errors = False
for vol in vfs.all_vols.values():
@@ -442,6 +587,10 @@ class AuthSrv(object):
if self.args.e2d or "e2ds" in vol.flags:
vol.flags["e2d"] = True
if self.args.no_hash:
if "ehash" not in vol.flags:
vol.flags["dhash"] = True
for k in ["e2t", "e2ts", "e2tsr"]:
if getattr(self.args, k):
vol.flags[k] = True
@@ -519,6 +668,8 @@ class AuthSrv(object):
if errors:
sys.exit(1)
vfs.bubble_flags()
try:
v, _ = vfs.get("/", "*", False, True)
if self.warn_anonwrite and os.getcwd() == v.realpath:
@@ -535,3 +686,90 @@ class AuthSrv(object):
# import pprint
# pprint.pprint({"usr": user, "rd": mread, "wr": mwrite, "mnt": mount})
def dbg_ls(self):
users = self.args.ls
vols = "*"
flags = []
try:
users, vols = users.split(",", 1)
except:
pass
try:
vols, flags = vols.split(",", 1)
flags = flags.split(",")
except:
pass
if users == "**":
users = list(self.user.keys()) + ["*"]
else:
users = [users]
for u in users:
if u not in self.user and u != "*":
raise Exception("user not found: " + u)
if vols == "*":
vols = ["/" + x for x in self.vfs.all_vols.keys()]
else:
vols = [vols]
for v in vols:
if not v.startswith("/"):
raise Exception("volumes must start with /")
if v[1:] not in self.vfs.all_vols:
raise Exception("volume not found: " + v)
self.log({"users": users, "vols": vols, "flags": flags})
for k, v in self.vfs.all_vols.items():
self.log("/{}: read({}) write({})".format(k, v.uread, v.uwrite))
flag_v = "v" in flags
flag_ln = "ln" in flags
flag_p = "p" in flags
flag_r = "r" in flags
n_bads = 0
for v in vols:
v = v[1:]
vtop = "/{}/".format(v) if v else "/"
for u in users:
self.log("checking /{} as {}".format(v, u))
try:
vn, _ = self.vfs.get(v, u, True, False)
except:
continue
atop = vn.realpath
g = vn.walk("", "", [], u, True, not self.args.no_scandir, False)
for vpath, apath, files, _, _ in g:
fnames = [n[0] for n in files]
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
vpaths = [vtop + x for x in vpaths]
apaths = [os.path.join(apath, n) for n in fnames]
files = [[vpath + "/", apath + os.sep]] + list(zip(vpaths, apaths))
if flag_ln:
files = [x for x in files if not x[1].startswith(atop + os.sep)]
n_bads += len(files)
if flag_v:
msg = [
'# user "{}", vpath "{}"\n{}'.format(u, vp, ap)
for vp, ap in files
]
else:
msg = [x[1] for x in files]
if msg:
nuprint("\n".join(msg))
if n_bads and flag_p:
raise Exception("found symlink leaving volume, and strict is set")
if not flag_r:
sys.exit(0)


@@ -44,7 +44,9 @@ class BrokerMp(object):
proc.clients = {}
proc.workload = 0
thr = threading.Thread(target=self.collector, args=(proc,))
thr = threading.Thread(
target=self.collector, args=(proc,), name="mp-collector"
)
thr.daemon = True
thr.start()
@@ -52,14 +54,19 @@ class BrokerMp(object):
proc.start()
if not self.args.q:
thr = threading.Thread(target=self.debug_load_balancer)
thr = threading.Thread(
target=self.debug_load_balancer, name="mp-dbg-loadbalancer"
)
thr.daemon = True
thr.start()
def shutdown(self):
self.log("broker", "shutting down")
for proc in self.procs:
thr = threading.Thread(target=proc.q_pend.put([0, "shutdown", []]))
for n, proc in enumerate(self.procs):
thr = threading.Thread(
target=proc.q_pend.put([0, "shutdown", []]),
name="mp-shutdown-{}-{}".format(n, len(self.procs)),
)
thr.start()
with self.mutex:


@@ -1,5 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
from copyparty.authsrv import AuthSrv
import sys
import time
@@ -27,20 +28,23 @@ class MpWorker(object):
self.retpend = {}
self.retpend_mutex = threading.Lock()
self.mutex = threading.Lock()
self.workload_thr_active = False
self.workload_thr_alive = False
# we inherited signal_handler from parent,
# replace it with something harmless
if not FAKE_MP:
signal.signal(signal.SIGINT, self.signal_handler)
# starting to look like a good idea
self.asrv = AuthSrv(args, None, False)
# instantiate all services here (TODO: inheritance?)
self.httpsrv = HttpSrv(self)
self.httpsrv = HttpSrv(self, True)
self.httpsrv.disconnect_func = self.httpdrop
# on winxp and some other platforms,
# use thr.join() to block all signals
thr = threading.Thread(target=self.main)
thr = threading.Thread(target=self.main, name="mpw-main")
thr.daemon = True
thr.start()
thr.join()
@@ -75,13 +79,15 @@ class MpWorker(object):
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
self.httpsrv.accept(sck, addr)
with self.mutex:
if not self.workload_thr_active:
if not self.workload_thr_alive:
self.workload_thr_alive = True
thr = threading.Thread(target=self.thr_workload)
thr = threading.Thread(
target=self.thr_workload, name="mpw-workload"
)
thr.daemon = True
thr.start()


@@ -3,6 +3,7 @@ from __future__ import print_function, unicode_literals
import threading
from .authsrv import AuthSrv
from .httpsrv import HttpSrv
from .broker_util import ExceptionalQueue, try_exec
@@ -14,6 +15,7 @@ class BrokerThr(object):
self.hub = hub
self.log = hub.log
self.args = hub.args
self.asrv = hub.asrv
self.mutex = threading.Lock()


@@ -10,11 +10,13 @@ import json
import string
import socket
import ctypes
import traceback
from datetime import datetime
import calendar
from .__init__ import E, PY2, WINDOWS, ANYWIN
from .util import * # noqa # pylint: disable=unused-wildcard-import
from .authsrv import AuthSrv
from .szip import StreamZip
from .star import StreamTar
@@ -34,12 +36,13 @@ class HttpCli(object):
def __init__(self, conn):
self.t0 = time.time()
self.conn = conn
self.s = conn.s
self.sr = conn.sr
self.s = conn.s # type: socket
self.sr = conn.sr # type: Unrecv
self.ip = conn.addr[0]
self.addr = conn.addr
self.addr = conn.addr # type: tuple[str, int]
self.args = conn.args
self.auth = conn.auth
self.is_mp = conn.is_mp
self.asrv = conn.asrv # type: AuthSrv
self.ico = conn.ico
self.thumbcli = conn.thumbcli
self.log_func = conn.log_func
@@ -101,10 +104,21 @@ class HttpCli(object):
v = self.headers.get("connection", "").lower()
self.keepalive = not v.startswith("close") and self.http_ver != "HTTP/1.0"
v = self.headers.get("x-forwarded-for", None)
if v is not None and self.conn.addr[0] in ["127.0.0.1", "::1"]:
self.ip = v.split(",")[0]
self.log_src = self.conn.set_rproxy(self.ip)
n = self.args.rproxy
if n:
v = self.headers.get("x-forwarded-for")
if v and self.conn.addr[0] in ["127.0.0.1", "::1"]:
if n > 0:
n -= 1
vs = v.split(",")
try:
self.ip = vs[n].strip()
except:
self.ip = vs[-1].strip()
self.log("rproxy={} oob x-fwd {}".format(self.args.rproxy, v), c=3)
self.log_src = self.conn.set_rproxy(self.ip)
if self.args.ihead:
keys = self.args.ihead
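a quick sketch of what the new `--rproxy` indexing above does, using a made-up `x-forwarded-for` chain:

```
# which address each --rproxy value picks from a made-up x-forwarded-for header;
# 0 means the header is ignored entirely and the tcp source address is kept
xff = "203.0.113.7, 10.0.0.2, 10.0.0.3"  # client, cloudflare, nginx (example values)
parts = xff.split(",")

def pick(rproxy):
    n = rproxy - 1 if rproxy > 0 else rproxy  # 1 = origin (first entry); negatives count from the proxy side
    try:
        return parts[n].strip()
    except IndexError:
        return parts[-1].strip()  # out of range: fall back to the closest proxy

print(pick(1))   # 203.0.113.7 (origin)
print(pick(3))   # 10.0.0.3 (nginx)
print(pick(-1))  # 10.0.0.3 (closest proxy)
```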
@@ -151,10 +165,9 @@ class HttpCli(object):
self.vpath = unquotep(vpath)
pwd = uparam.get("pw")
self.uname = self.auth.iuser.get(pwd, "*")
if self.uname:
self.rvol = self.auth.vfs.user_tree(self.uname, readable=True)
self.wvol = self.auth.vfs.user_tree(self.uname, writable=True)
self.uname = self.asrv.iuser.get(pwd, "*")
self.rvol, self.wvol, self.avol = [[], [], []]
self.asrv.vfs.user_tree(self.uname, self.rvol, self.wvol, self.avol)
ua = self.headers.get("user-agent", "")
self.is_rclone = ua.startswith("rclone/")
@@ -256,7 +269,14 @@ class HttpCli(object):
return "?" + "&amp;".join(r)
def redirect(
self, vpath, suf="", msg="aight", flavor="go to", click=True, use302=False
self,
vpath,
suf="",
msg="aight",
flavor="go to",
click=True,
status=200,
use302=False,
):
html = self.j2(
"msg",
@@ -271,7 +291,7 @@ class HttpCli(object):
h = {"Location": "/" + vpath, "Cache-Control": "no-cache"}
self.reply(html, status=302, headers=h)
else:
self.reply(html)
self.reply(html, status=status)
def handle_get(self):
if self.do_log:
@@ -312,9 +332,7 @@ class HttpCli(object):
self.redirect(vpath, flavor="redirecting to", use302=True)
return True
self.readable, self.writable = self.conn.auth.vfs.can_access(
self.vpath, self.uname
)
self.readable, self.writable = self.asrv.vfs.can_access(self.vpath, self.uname)
if not self.readable and not self.writable:
if self.vpath:
self.log("inaccessible: [{}]".format(self.vpath))
@@ -326,6 +344,12 @@ class HttpCli(object):
self.vpath = None
return self.tx_mounts()
if "scan" in self.uparam:
return self.scanvol()
if "stack" in self.uparam:
return self.tx_stack()
return self.tx_browser()
def handle_options(self):
@@ -425,7 +449,7 @@ class HttpCli(object):
def dump_to_file(self):
reader, remains = self.get_body_reader()
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
fdir = os.path.join(vfs.realpath, rem)
addr = self.ip.replace(":", ".")
@@ -435,8 +459,10 @@ class HttpCli(object):
with open(fsenc(path), "wb", 512 * 1024) as f:
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
vfs, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put(
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fn
False, "up2k.hash_file", vfs.realpath, vfs.flags, vrem, fn
)
return post_sz, sha_b64, remains, path
@@ -492,7 +518,7 @@ class HttpCli(object):
if v is None:
raise Pebkac(422, "need zip or tar keyword")
vn, rem = self.auth.vfs.get(self.vpath, self.uname, True, False)
vn, rem = self.asrv.vfs.get(self.vpath, self.uname, True, False)
items = self.parser.require("files", 1024 * 1024)
if not items:
raise Pebkac(422, "need files list")
@@ -500,6 +526,7 @@ class HttpCli(object):
items = items.replace("\r", "").split("\n")
items = [unquotep(x) for x in items if items]
self.parser.drop()
return self.tx_zip(k, v, vn, rem, items, self.args.ed)
def handle_post_json(self):
@@ -541,13 +568,14 @@ class HttpCli(object):
self.vpath = "/".join([self.vpath, sub]).strip("/")
body["name"] = name
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
dbv, vrem = vfs.get_dbv(rem)
body["vtop"] = vfs.vpath
body["ptop"] = vfs.realpath
body["prel"] = rem
body["vtop"] = dbv.vpath
body["ptop"] = dbv.realpath
body["prel"] = vrem
body["addr"] = self.ip
body["vcfg"] = vfs.flags
body["vcfg"] = dbv.flags
if sub:
try:
@@ -569,8 +597,14 @@ class HttpCli(object):
def handle_search(self, body):
vols = []
seen = {}
for vtop in self.rvol:
vfs, _ = self.conn.auth.vfs.get(vtop, self.uname, True, False)
vfs, _ = self.asrv.vfs.get(vtop, self.uname, True, False)
vfs = vfs.dbv or vfs
if vfs in seen:
continue
seen[vfs] = True
vols.append([vfs.vpath, vfs.realpath, vfs.flags])
idx = self.conn.get_u2idx()
@@ -592,8 +626,9 @@ class HttpCli(object):
taglist = {}
else:
# search by query params
self.log("qj: " + repr(body))
hits, taglist = idx.search(vols, body)
q = body["q"]
self.log("qj: " + q)
hits, taglist = idx.search(vols, q)
msg = len(hits)
idx.p_end = time.time()
@@ -625,8 +660,8 @@ class HttpCli(object):
except KeyError:
raise Pebkac(400, "need hash and wark headers for binary POST")
vfs, _ = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
ptop = vfs.realpath
vfs, _ = self.asrv.vfs.get(self.vpath, self.uname, False, True)
ptop = (vfs.dbv or vfs).realpath
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", ptop, wark, chash)
response = x.get()
@@ -698,7 +733,7 @@ class HttpCli(object):
pwd = self.parser.require("cppwd", 64)
self.parser.drop()
if pwd in self.auth.iuser:
if pwd in self.asrv.iuser:
msg = "login ok"
dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365)
exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
@@ -717,7 +752,7 @@ class HttpCli(object):
self.parser.drop()
nullwrite = self.args.nw
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem)
sanitized = sanitize_fn(new_dir)
@@ -746,7 +781,7 @@ class HttpCli(object):
self.parser.drop()
nullwrite = self.args.nw
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem)
if not new_file.endswith(".md"):
@@ -770,7 +805,7 @@ class HttpCli(object):
def handle_plain_upload(self):
nullwrite = self.args.nw
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem)
files = []
@@ -807,8 +842,14 @@ class HttpCli(object):
raise Pebkac(400, "empty files in post")
files.append([sz, sha512_hex, p_file, fname])
dbv, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put(
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fname
False,
"up2k.hash_file",
dbv.realpath,
dbv.flags,
vrem,
fname,
)
self.conn.nbyte += sz
@@ -838,18 +879,36 @@ class HttpCli(object):
status = "OK"
if errmsg:
self.log(errmsg)
errmsg = "ERROR: " + errmsg
status = "ERROR"
msg = "{} // {} bytes // {:.3f} MiB/s\n".format(status, sz_total, spd)
jmsg = {"status": status, "sz": sz_total, "mbps": round(spd, 3), "files": []}
if errmsg:
msg += errmsg + "\n"
jmsg["error"] = errmsg
errmsg = "ERROR: " + errmsg
for sz, sha512, ofn, lfn in files:
vpath = self.vpath + "/" + lfn
vpath = (self.vpath + "/" if self.vpath else "") + lfn
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format(
sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True)
)
# truncated SHA-512 prevents length extension attacks;
# using SHA-512/224, optionally SHA-512/256 = :64
jpart = {
"url": "{}://{}/{}".format(
"https" if self.tls else "http",
self.headers.get("host", "copyparty"),
vpath,
),
"sha512": sha512[:56],
"sz": sz,
"fn": lfn,
"fn_orig": ofn,
"path": vpath,
}
jmsg["files"].append(jpart)
vspd = self._spd(sz_total, False)
self.log("{} {}".format(vspd, msg))
@@ -861,7 +920,22 @@ class HttpCli(object):
ft = "{}\n{}\n{}\n".format(ft, msg.rstrip(), errmsg)
f.write(ft.encode("utf-8"))
self.redirect(self.vpath, msg=msg, flavor="return to", click=False)
status = 400 if errmsg else 200
if "j" in self.uparam:
jtxt = json.dumps(jmsg, indent=2, sort_keys=True).encode("utf-8", "replace")
self.reply(jtxt, mime="application/json", status=status)
else:
self.redirect(
self.vpath,
msg=msg,
flavor="return to",
click=False,
status=status,
)
if errmsg:
return False
self.parser.drop()
return True
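The new "?j" URL parameter makes this reply machine-readable: instead of the redirect, the client receives the jmsg structure assembled above as JSON. A rough illustration of a successful single-file upload, with every value below invented for the example:

# shape of the jmsg dict right before json.dumps (illustrative values only)
jmsg = {
    "status": "OK",
    "sz": 104857600,
    "mbps": 38.125,
    "files": [
        {
            "url": "https://example.com/music/song.flac",
            "sha512": "<first 56 hex chars of the file hash>",
            "sz": 104857600,
            "fn": "song.flac",
            "fn_orig": "song.flac",
            "path": "music/song.flac",
        }
    ],
}

On failure the status flips to "ERROR", an "error" key is added, and the HTTP status becomes 400 instead of 200.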
@@ -872,7 +946,7 @@ class HttpCli(object):
raise Pebkac(400, "could not read lastmod from request")
nullwrite = self.args.nw
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
self._assert_safe_rem(rem)
# TODO:
@@ -965,6 +1039,8 @@ class HttpCli(object):
cli_lastmod = self.headers.get("if-modified-since")
if cli_lastmod:
try:
# some browsers append "; length=573"
cli_lastmod = cli_lastmod.split(";")[0].strip()
cli_dt = time.strptime(cli_lastmod, HTTP_TS_FMT)
cli_ts = calendar.timegm(cli_dt)
return file_lastmod, int(file_ts) > int(cli_ts)
@@ -1134,7 +1210,8 @@ class HttpCli(object):
if use_sendfile:
remains = sendfile_kern(lower, upper, f, self.s)
else:
remains = sendfile_py(lower, upper, f, self.s)
actor = self.conn if self.is_mp else None
remains = sendfile_py(lower, upper, f, self.s, actor)
if remains > 0:
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
@@ -1302,12 +1379,64 @@ class HttpCli(object):
def tx_mounts(self):
suf = self.urlq(rm=["h"])
rvol = [x + "/" if x else x for x in self.rvol]
wvol = [x + "/" if x else x for x in self.wvol]
html = self.j2("splash", this=self, rvol=rvol, wvol=wvol, url_suf=suf)
rvol, wvol, avol = [
[("/" + x).rstrip("/") + "/" for x in y]
for y in [self.rvol, self.wvol, self.avol]
]
if self.avol and not self.args.no_rescan:
x = self.conn.hsrv.broker.put(True, "up2k.get_state")
vs = json.loads(x.get())
vstate = {("/" + k).rstrip("/") + "/": v for k, v in vs["volstate"].items()}
else:
vstate = {}
vs = {"scanning": None, "hashq": None, "tagq": None, "mtpq": None}
html = self.j2(
"splash",
this=self,
rvol=rvol,
wvol=wvol,
avol=avol,
vstate=vstate,
scanning=vs["scanning"],
hashq=vs["hashq"],
tagq=vs["tagq"],
mtpq=vs["mtpq"],
url_suf=suf,
)
self.reply(html.encode("utf-8"), headers=NO_STORE)
return True
def scanvol(self):
if not self.readable or not self.writable:
raise Pebkac(403, "not admin")
if self.args.no_rescan:
raise Pebkac(403, "disabled by argv")
vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True)
args = [self.asrv.vfs.all_vols, [vn.vpath]]
x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args)
x = x.get()
if not x:
self.redirect("", "?h")
return ""
raise Pebkac(500, x)
def tx_stack(self):
if not self.readable or not self.writable:
raise Pebkac(403, "not admin")
if self.args.no_stack:
raise Pebkac(403, "disabled by argv")
ret = "<pre>{}\n{}".format(time.time(), alltrace())
self.reply(ret.encode("utf-8"))
def tx_tree(self):
top = self.uparam["tree"] or ""
dst = self.vpath
@@ -1336,8 +1465,10 @@ class HttpCli(object):
ret["k" + quotep(excl)] = sub
try:
vn, rem = self.auth.vfs.get(top, self.uname, True, False)
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
vn, rem = self.asrv.vfs.get(top, self.uname, True, False)
fsroot, vfs_ls, vfs_virt = vn.ls(
rem, self.uname, not self.args.no_scandir, incl_wo=True
)
except:
vfs_ls = []
vfs_virt = {}
@@ -1375,35 +1506,51 @@ class HttpCli(object):
vpnodes.append([quotep(vpath) + "/", html_escape(node, crlf=True)])
vn, rem = self.auth.vfs.get(
vn, rem = self.asrv.vfs.get(
self.vpath, self.uname, self.readable, self.writable
)
abspath = vn.canonical(rem)
dbv, vrem = vn.get_dbv(rem)
try:
st = os.stat(fsenc(abspath))
except:
raise Pebkac(404)
if self.readable and not stat.S_ISDIR(st.st_mode):
if self.readable:
if rem.startswith(".hist/up2k."):
raise Pebkac(403)
is_dir = stat.S_ISDIR(st.st_mode)
th_fmt = self.uparam.get("th")
if th_fmt is not None:
if is_dir:
for fn in ["folder.png", "folder.jpg"]:
fp = os.path.join(abspath, fn)
if os.path.exists(fp):
vrem = "{}/{}".format(vrem.rstrip("/"), fn)
is_dir = False
break
if is_dir:
return self.tx_ico("a.folder")
thp = None
if self.thumbcli:
thp = self.thumbcli.get(vn.realpath, rem, int(st.st_mtime), th_fmt)
thp = self.thumbcli.get(
dbv.realpath, vrem, int(st.st_mtime), th_fmt
)
if thp:
return self.tx_file(thp)
return self.tx_ico(rem)
if abspath.endswith(".md") and "raw" not in self.uparam:
return self.tx_md(abspath)
if not is_dir:
if abspath.endswith(".md") and "raw" not in self.uparam:
return self.tx_md(abspath)
return self.tx_file(abspath)
return self.tx_file(abspath)
srv_info = []
@@ -1502,7 +1649,9 @@ class HttpCli(object):
if v is not None:
return self.tx_zip(k, v, vn, rem, [], self.args.ed)
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
fsroot, vfs_ls, vfs_virt = vn.ls(
rem, self.uname, not self.args.no_scandir, incl_wo=True
)
stats = {k: v for k, v in vfs_ls}
vfs_ls = [x[0] for x in vfs_ls]
vfs_ls.extend(vfs_virt.keys())
@@ -1534,7 +1683,7 @@ class HttpCli(object):
icur = None
if "e2t" in vn.flags:
idx = self.conn.get_u2idx()
icur = idx.get_cur(vn.realpath)
icur = idx.get_cur(dbv.realpath)
dirs = []
files = []
@@ -1602,6 +1751,9 @@ class HttpCli(object):
rd = f["rd"]
del f["rd"]
if icur:
if vn != dbv:
_, rd = vn.get_dbv(rd)
q = "select w from up where rd = ? and fn = ?"
try:
r = icur.execute(q, (rd, fn)).fetchone()
@@ -1642,9 +1794,13 @@ class HttpCli(object):
j2a["files"] = dirs + files
j2a["logues"] = logues
j2a["taglist"] = taglist
if "mte" in vn.flags:
j2a["tag_order"] = json.dumps(vn.flags["mte"].split(","))
if self.args.css_browser:
j2a["css"] = self.args.css_browser
html = self.j2(tpl, **j2a)
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
return True

View File

@@ -34,7 +34,8 @@ class HttpConn(object):
self.hsrv = hsrv
self.args = hsrv.args
self.auth = hsrv.auth
self.asrv = hsrv.asrv
self.is_mp = hsrv.is_mp
self.cert_path = hsrv.cert_path
enth = HAVE_PIL and not self.args.no_thumb
@@ -70,7 +71,7 @@ class HttpConn(object):
def get_u2idx(self):
if not self.u2idx:
self.u2idx = U2idx(self.args, self.log_func)
self.u2idx = U2idx(self)
return self.u2idx
@@ -174,6 +175,11 @@ class HttpConn(object):
self.sr = Unrecv(self.s)
while True:
if self.is_mp:
self.workload += 50
if self.workload >= 2 ** 31:
self.workload = 100
cli = HttpCli(self)
if not cli.run():
return

View File

@@ -25,8 +25,8 @@ except ImportError:
sys.exit(1)
from .__init__ import E, MACOS
from .httpconn import HttpConn
from .authsrv import AuthSrv
from .httpconn import HttpConn
class HttpSrv(object):
@@ -35,10 +35,12 @@ class HttpSrv(object):
relying on MpSrv for performance (HttpSrv is just plain threads)
"""
def __init__(self, broker):
def __init__(self, broker, is_mp=False):
self.broker = broker
self.is_mp = is_mp
self.args = broker.args
self.log = broker.log
self.asrv = broker.asrv
self.disconnect_func = None
self.mutex = threading.Lock()
@@ -46,7 +48,6 @@ class HttpSrv(object):
self.clients = {}
self.workload = 0
self.workload_thr_alive = False
self.auth = AuthSrv(self.args, self.log)
env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
@@ -66,7 +67,11 @@ class HttpSrv(object):
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30")
thr = threading.Thread(target=self.thr_client, args=(sck, addr))
thr = threading.Thread(
target=self.thr_client,
args=(sck, addr),
name="httpsrv-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]),
)
thr.daemon = True
thr.start()
@@ -84,13 +89,16 @@ class HttpSrv(object):
cli = HttpConn(sck, addr, self)
with self.mutex:
self.clients[cli] = 0
self.workload += 50
if not self.workload_thr_alive:
self.workload_thr_alive = True
thr = threading.Thread(target=self.thr_workload)
thr.daemon = True
thr.start()
if self.is_mp:
self.workload += 50
if not self.workload_thr_alive:
self.workload_thr_alive = True
thr = threading.Thread(
target=self.thr_workload, name="httpsrv-workload"
)
thr.daemon = True
thr.start()
try:
if self.args.log_conn:
@@ -99,6 +107,7 @@ class HttpSrv(object):
cli.run()
finally:
sck = cli.s
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30")

View File

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import hashlib
import colorsys

View File

@@ -1,7 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import re
import os
import sys
import json

View File

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import tarfile
import threading
@@ -42,7 +45,7 @@ class StreamTar(object):
fmt = tarfile.GNU_FORMAT
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt)
w = threading.Thread(target=self._gen)
w = threading.Thread(target=self._gen, name="star-gen")
w.daemon = True
w.start()

View File

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import time
import tempfile

View File

@@ -37,12 +37,13 @@ class SvcHub(object):
self.log = self._log_disabled if args.q else self._log_enabled
# jank goes here
auth = AuthSrv(self.args, self.log, False)
# initiate all services to manage
self.asrv = AuthSrv(self.args, self.log, False)
if args.ls:
self.asrv.dbg_ls()
self.tcpsrv = TcpSrv(self)
self.up2k = Up2k(self, auth.vfs.all_vols)
self.up2k = Up2k(self)
self.thumbsrv = None
if not args.no_thumb:
@@ -52,7 +53,7 @@ class SvcHub(object):
msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old"
self.log("thumb", msg, c=3)
self.thumbsrv = ThumbSrv(self, auth.vfs.all_vols)
self.thumbsrv = ThumbSrv(self)
else:
msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n"
self.log(
@@ -69,7 +70,7 @@ class SvcHub(object):
self.broker = Broker(self)
def run(self):
thr = threading.Thread(target=self.tcpsrv.run)
thr = threading.Thread(target=self.tcpsrv.run, name="svchub-main")
thr.daemon = True
thr.start()
@@ -93,9 +94,11 @@ class SvcHub(object):
break
if n == 3:
print("waiting for thumbsrv...")
print("waiting for thumbsrv (10sec)...")
print("nailed it")
print("nailed it", end="")
finally:
print("\033[0m")
def _log_disabled(self, src, msg, c=0):
pass

View File

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import time
import zlib

View File

@@ -1,5 +1,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import time
from .util import Cooldown
from .th_srv import thumb_path, THUMBABLE, FMT_FF
@@ -9,6 +11,7 @@ class ThumbCli(object):
def __init__(self, broker):
self.broker = broker
self.args = broker.args
self.asrv = broker.asrv
# cache on both sides for less broker spam
self.cooldown = Cooldown(self.args.th_poke)
@@ -18,16 +21,19 @@ class ThumbCli(object):
if ext not in THUMBABLE:
return None
if self.args.no_vthumb and ext in FMT_FF:
is_vid = ext in FMT_FF
if is_vid and self.args.no_vthumb:
return None
if fmt == "j" and self.args.th_no_jpg:
fmt = "w"
if fmt == "w" and self.args.th_no_webp:
fmt = "j"
if fmt == "w":
if self.args.th_no_webp or (is_vid and self.args.th_ff_jpg):
fmt = "j"
tpath = thumb_path(ptop, rem, mtime, fmt)
histpath = self.asrv.vfs.histtab[ptop]
tpath = thumb_path(histpath, rem, mtime, fmt)
ret = None
try:
st = os.stat(tpath)

View File

@@ -1,5 +1,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import sys
import time
import shutil
import base64
@@ -8,7 +10,7 @@ import threading
import subprocess as sp
from .__init__ import PY2
from .util import fsenc, mchkcmd, Queue, Cooldown, BytesIO
from .util import fsenc, runcmd, Queue, Cooldown, BytesIO, min_ex
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
@@ -51,7 +53,7 @@ except:
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# ffmpeg -formats
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
if HAVE_HEIF:
FMT_PIL += " heif heifs heic heics"
@@ -71,7 +73,7 @@ if HAVE_FFMPEG and HAVE_FFPROBE:
THUMBABLE.update(FMT_FF)
def thumb_path(ptop, rem, mtime, fmt):
def thumb_path(histpath, rem, mtime, fmt):
# base16 = 16 = 256
# b64-lc = 38 = 1444
# base64 = 64 = 4096
@@ -92,16 +94,15 @@ def thumb_path(ptop, rem, mtime, fmt):
h = hashlib.sha512(fsenc(fn)).digest()[:24]
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
return "{}/.hist/th/{}/{}.{:x}.{}".format(
ptop, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
return "{}/th/{}/{}.{:x}.{}".format(
histpath, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
)
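thumb_path now receives the volume's history directory instead of the volume root, so thumbnails land wherever .hist is mapped for that volume (asrv.vfs.histtab). A hypothetical call; the components under th/ are hash-derived, so only the prefix and suffix are predictable from the snippet above:

# made-up paths; histpath would come from asrv.vfs.histtab[ptop]
tpath = thumb_path("/data/music/.hist", "album/cover.jpg", 1623710000, "w")
# -> something like "/data/music/.hist/th/<hash-derived dirs>/<hash-derived name>.60c7d930.webp"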
class ThumbSrv(object):
def __init__(self, hub, vols):
def __init__(self, hub):
self.hub = hub
self.vols = [v.realpath for v in vols.values()]
self.asrv = hub.asrv
self.args = hub.args
self.log_func = hub.log
@@ -114,8 +115,10 @@ class ThumbSrv(object):
self.stopping = False
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
self.q = Queue(self.nthr * 4)
for _ in range(self.nthr):
t = threading.Thread(target=self.worker)
for n in range(self.nthr):
t = threading.Thread(
target=self.worker, name="thumb-{}-{}".format(n, self.nthr)
)
t.daemon = True
t.start()
@@ -129,9 +132,9 @@ class ThumbSrv(object):
msg = "cannot create video thumbnails because some of the required programs are not available: "
msg += ", ".join(missing)
self.log(msg, c=1)
self.log(msg, c=3)
t = threading.Thread(target=self.cleaner)
t = threading.Thread(target=self.cleaner, name="thumb-cleaner")
t.daemon = True
t.start()
@@ -148,9 +151,11 @@ class ThumbSrv(object):
return not self.nthr
def get(self, ptop, rem, mtime, fmt):
tpath = thumb_path(ptop, rem, mtime, fmt)
histpath = self.asrv.vfs.histtab[ptop]
tpath = thumb_path(histpath, rem, mtime, fmt)
abspath = os.path.join(ptop, rem)
cond = threading.Condition()
cond = threading.Condition(self.mutex)
do_conv = False
with self.mutex:
try:
self.busy[tpath].append(cond)
@@ -168,8 +173,11 @@ class ThumbSrv(object):
f.write(fsenc(os.path.dirname(abspath)))
self.busy[tpath] = [cond]
self.q.put([abspath, tpath])
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
do_conv = True
if do_conv:
self.q.put([abspath, tpath])
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
while not self.stopping:
with self.mutex:
@@ -177,7 +185,7 @@ class ThumbSrv(object):
break
with cond:
cond.wait()
cond.wait(3)
try:
st = os.stat(tpath)
@@ -206,9 +214,9 @@ class ThumbSrv(object):
if fun:
try:
fun(abspath, tpath)
except Exception as ex:
msg = "{} failed on {}\n {!r}"
self.log(msg.format(fun.__name__, abspath, ex), 3)
except:
msg = "{} failed on {}\n{}"
self.log(msg.format(fun.__name__, abspath, min_ex()), 3)
with open(tpath, "wb") as _:
pass
@@ -240,8 +248,8 @@ class ThumbSrv(object):
except:
im.thumbnail(self.res)
if im.mode not in ("RGB", "L"):
im = im.convert("RGB")
fmts = ["RGB", "L"]
args = {"quality": 40}
if tpath.endswith(".webp"):
# quality 80 = pillow-default
@@ -249,15 +257,27 @@ class ThumbSrv(object):
# method 0 = pillow-default, fast
# method 4 = ffmpeg-default
# method 6 = max, slow
im.save(tpath, quality=40, method=6)
fmts += ["RGBA", "LA"]
args["method"] = 6
else:
im.save(tpath, quality=40) # default=75
pass # default q = 75
if im.mode not in fmts:
print("conv {}".format(im.mode))
im = im.convert("RGB")
im.save(tpath, quality=40, method=6)
def conv_ffmpeg(self, abspath, tpath):
ret, _ = ffprobe(abspath)
dur = ret[".dur"][1] if ".dur" in ret else 4
seek = "{:.0f}".format(dur / 3)
ext = abspath.rsplit(".")[-1]
if ext in ["h264", "h265"]:
seek = []
else:
dur = ret[".dur"][1] if ".dur" in ret else 4
seek = "{:.0f}".format(dur / 3)
seek = [b"-ss", seek.encode("utf-8")]
scale = "scale={0}:{1}:force_original_aspect_ratio="
if self.args.th_no_crop:
@@ -266,19 +286,20 @@ class ThumbSrv(object):
scale += "increase,crop={0}:{1},setsar=1:1"
scale = scale.format(*list(self.res)).encode("utf-8")
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-hide_banner",
b"-ss",
seek,
b"-i",
fsenc(abspath),
b"-vf",
scale,
b"-vframes",
b"1",
b"-v", b"error",
b"-hide_banner"
]
cmd += seek
cmd += [
b"-i", fsenc(abspath),
b"-vf", scale,
b"-vframes", b"1",
]
# fmt: on
if tpath.endswith(".jpg"):
cmd += [
@@ -295,7 +316,11 @@ class ThumbSrv(object):
cmd += [fsenc(tpath)]
mchkcmd(cmd)
ret, sout, serr = runcmd(*cmd)
if ret != 0:
msg = ["ff: {}".format(x) for x in serr.split("\n")]
self.log("FFmpeg failed:\n" + "\n".join(msg), c="1;30")
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
def poke(self, tdir):
if not self.poke_cd.poke(tdir):
@@ -314,26 +339,32 @@ class ThumbSrv(object):
interval = self.args.th_clean
while True:
time.sleep(interval)
for vol in self.vols:
vol += "/.hist/th"
self.log("cln {}/".format(vol))
self.clean(vol)
ndirs = 0
for vol, histpath in self.asrv.vfs.histtab.items():
if histpath.startswith(vol):
self.log("\033[Jcln {}/\033[A".format(histpath))
else:
self.log("\033[Jcln {} ({})/\033[A".format(histpath, vol))
self.log("cln ok")
ndirs += self.clean(histpath)
def clean(self, vol):
# self.log("cln {}".format(vol))
self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
def clean(self, histpath):
thumbpath = os.path.join(histpath, "th")
# self.log("cln {}".format(thumbpath))
maxage = self.args.th_maxage
now = time.time()
prev_b64 = None
prev_fp = None
try:
ents = os.listdir(vol)
ents = os.listdir(thumbpath)
except:
return
return 0
ndirs = 0
for f in sorted(ents):
fp = os.path.join(vol, f)
fp = os.path.join(thumbpath, f)
cmp = fp.lower().replace("\\", "/")
# "top" or b64 prefix/full (a folder)
@@ -348,10 +379,11 @@ class ThumbSrv(object):
break
if safe:
ndirs += 1
self.log("rm -rf [{}]".format(fp))
shutil.rmtree(fp, ignore_errors=True)
else:
self.clean(fp)
ndirs += self.clean(fp)
continue
# thumb file
@@ -373,3 +405,5 @@ class ThumbSrv(object):
prev_b64 = b64
prev_fp = fp
return ndirs

View File

@@ -7,7 +7,7 @@ import time
import threading
from datetime import datetime
from .util import u8safe, s3dec, html_escape, Pebkac
from .util import s3dec, Pebkac, min_ex
from .up2k import up2k_wark_from_hashlist
@@ -19,10 +19,11 @@ except:
class U2idx(object):
def __init__(self, args, log_func):
self.args = args
self.log_func = log_func
self.timeout = args.srch_time
def __init__(self, conn):
self.log_func = conn.log_func
self.asrv = conn.asrv
self.args = conn.args
self.timeout = self.args.srch_time
if not HAVE_SQLITE3:
self.log("could not load sqlite3; searchign wqill be disabled")
@@ -47,57 +48,143 @@ class U2idx(object):
fhash = body["hash"]
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
uq = "substr(w,1,16) = ? and w = ?"
uq = "where substr(w,1,16) = ? and w = ?"
uv = [wark[:16], wark]
try:
return self.run_query(vols, uq, uv, {})[0]
except Exception as ex:
raise Pebkac(500, repr(ex))
return self.run_query(vols, uq, uv)[0]
except:
raise Pebkac(500, min_ex())
def get_cur(self, ptop):
cur = self.cur.get(ptop)
if cur:
return cur
cur = _open(ptop)
if not cur:
histpath = self.asrv.vfs.histtab[ptop]
db_path = os.path.join(histpath, "up2k.db")
if not os.path.exists(db_path):
return None
cur = sqlite3.connect(db_path).cursor()
self.cur[ptop] = cur
return cur
def search(self, vols, body):
def search(self, vols, uq):
"""search by query params"""
if not HAVE_SQLITE3:
return []
qobj = {}
_conv_sz(qobj, body, "sz_min", "up.sz >= ?")
_conv_sz(qobj, body, "sz_max", "up.sz <= ?")
_conv_dt(qobj, body, "dt_min", "up.mt >= ?")
_conv_dt(qobj, body, "dt_max", "up.mt <= ?")
for seg, dk in [["path", "up.rd"], ["name", "up.fn"]]:
if seg in body:
_conv_txt(qobj, body, seg, dk)
q = ""
va = []
joins = ""
is_key = True
is_size = False
is_date = False
kw_key = ["(", ")", "and ", "or ", "not "]
kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "]
ptn_mt = re.compile(r"^\.?[a-z]+$")
mt_ctr = 0
mt_keycmp = "substr(up.w,1,16)"
mt_keycmp2 = None
uq, uv = _sqlize(qobj)
while True:
uq = uq.strip()
if not uq:
break
qobj = {}
if "tags" in body:
_conv_txt(qobj, body, "tags", "mt.v")
ok = False
for kw in kw_key + kw_val:
if uq.startswith(kw):
is_key = kw in kw_key
uq = uq[len(kw) :]
ok = True
q += kw
break
if "adv" in body:
_conv_adv(qobj, body, "adv")
if ok:
continue
v, uq = (uq + " ").split(" ", 1)
if is_key:
is_key = False
if v == "size":
v = "up.sz"
is_size = True
elif v == "date":
v = "up.mt"
is_date = True
elif v == "path":
v = "up.rd"
elif v == "name":
v = "up.fn"
elif v == "tags" or ptn_mt.match(v):
mt_ctr += 1
mt_keycmp2 = "mt{}.w".format(mt_ctr)
joins += "inner join mt mt{} on {} = {} ".format(
mt_ctr, mt_keycmp, mt_keycmp2
)
mt_keycmp = mt_keycmp2
if v == "tags":
v = "mt{0}.v".format(mt_ctr)
else:
v = "+mt{0}.k = '{1}' and mt{0}.v".format(mt_ctr, v)
else:
raise Pebkac(400, "invalid key [" + v + "]")
q += v + " "
continue
head = ""
tail = ""
if is_date:
is_date = False
v = v.upper().rstrip("Z").replace(",", " ").replace("T", " ")
while " " in v:
v = v.replace(" ", " ")
for fmt in [
"%Y-%m-%d %H:%M:%S",
"%Y-%m-%d %H:%M",
"%Y-%m-%d %H",
"%Y-%m-%d",
]:
try:
v = datetime.strptime(v, fmt).timestamp()
break
except:
pass
elif is_size:
is_size = False
v = int(float(v) * 1024 * 1024)
else:
if v.startswith("*"):
head = "'%'||"
v = v[1:]
if v.endswith("*"):
tail = "||'%'"
v = v[:-1]
q += " {}?{} ".format(head, tail)
va.append(v)
is_key = True
try:
return self.run_query(vols, uq, uv, qobj)
return self.run_query(vols, joins + "where " + q, va)
except Exception as ex:
raise Pebkac(500, repr(ex))
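The per-field query parameters are gone; search() now compiles a small query language straight into SQL. Keys map to up.* columns (name, path, size, date) or to joined mt rows for tags and arbitrary metadata keys, a leading or trailing * becomes a LIKE wildcard, sizes are taken as MiB and converted to bytes, and dates are parsed into unix timestamps. A rough sketch of the translation, with an invented query:

# user query, e.g. typed into the search box:
#   name like *live* and size > 2 and tags like *rock*
#
# compiles into roughly this (whitespace aside), with the values bound as parameters:
#   select up.* from up
#     inner join mt mt1 on substr(up.w,1,16) = mt1.w
#   where up.fn like '%'||?||'%'
#     and up.sz > ?
#     and mt1.v like '%'||?||'%'
#
# argument list: ["live", 2097152, "rock"]   (2 MiB converted to bytes)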
def run_query(self, vols, uq, uv, targs):
self.log("qs: {} {} , {}".format(uq, repr(uv), repr(targs)))
def run_query(self, vols, uq, uv):
done_flag = []
self.active_id = "{:.6f}_{}".format(
time.time(), threading.current_thread().ident
@@ -108,39 +195,19 @@ class U2idx(object):
self.active_id,
done_flag,
),
name="u2idx-terminator",
)
thr.daemon = True
thr.start()
if not targs:
if not uq:
q = "select * from up"
v = ()
else:
q = "select * from up where " + uq
v = tuple(uv)
if not uq or not uv:
q = "select * from up"
v = ()
else:
q = "select up.* from up"
keycmp = "substr(up.w,1,16)"
where = []
v = []
ctr = 0
for tq, tv in sorted(targs.items()):
ctr += 1
tq = tq.split("\n")[0]
keycmp2 = "mt{}.w".format(ctr)
q += " inner join mt mt{} on {} = {}".format(ctr, keycmp, keycmp2)
keycmp = keycmp2
where.append(tq.replace("mt.", keycmp[:-1]))
v.append(tv)
q = "select up.* from up " + uq
v = tuple(uv)
if uq:
where.append(uq)
v.extend(uv)
q += " where " + (" and ".join(where))
# self.log("q2: {} {}".format(q, repr(v)))
self.log("qs: {!r} {!r}".format(q, v))
ret = []
lim = 1000
@@ -163,7 +230,7 @@ class U2idx(object):
if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn)
rp = "/".join([vtop, rd, fn])
rp = "/".join([x for x in [vtop, rd, fn] if x])
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
for hit in sret:
@@ -178,6 +245,7 @@ class U2idx(object):
hit["tags"] = tags
ret.extend(sret)
# print("[{}] {}".format(ptop, sret))
done_flag.append(True)
self.active_id = None
@@ -198,84 +266,3 @@ class U2idx(object):
if identifier == self.active_id:
self.active_cur.connection.interrupt()
def _open(ptop):
db_path = os.path.join(ptop, ".hist", "up2k.db")
if os.path.exists(db_path):
return sqlite3.connect(db_path).cursor()
def _conv_sz(q, body, k, sql):
if k in body:
q[sql] = int(float(body[k]) * 1024 * 1024)
def _conv_dt(q, body, k, sql):
if k not in body:
return
v = body[k].upper().rstrip("Z").replace(",", " ").replace("T", " ")
while " " in v:
v = v.replace(" ", " ")
for fmt in ["%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M", "%Y-%m-%d %H", "%Y-%m-%d"]:
try:
ts = datetime.strptime(v, fmt).timestamp()
break
except:
ts = None
if ts:
q[sql] = ts
def _conv_txt(q, body, k, sql):
for v in body[k].split(" "):
inv = ""
if v.startswith("-"):
inv = "not"
v = v[1:]
if not v:
continue
head = "'%'||"
if v.startswith("^"):
head = ""
v = v[1:]
tail = "||'%'"
if v.endswith("$"):
tail = ""
v = v[:-1]
qk = "{} {} like {}?{}".format(sql, inv, head, tail)
q[qk + "\n" + v] = u8safe(v)
def _conv_adv(q, body, k):
ptn = re.compile(r"^(\.?[a-z]+) *(==?|!=|<=?|>=?) *(.*)$")
parts = body[k].split(" ")
parts = [x.strip() for x in parts if x.strip()]
for part in parts:
m = ptn.match(part)
if not m:
p = html_escape(part)
raise Pebkac(400, "invalid argument [" + p + "]")
k, op, v = m.groups()
qk = "mt.k = '{}' and mt.v {} ?".format(k, op)
q[qk + "\n" + v] = u8safe(v)
def _sqlize(qobj):
keys = []
values = []
for k, v in sorted(qobj.items()):
keys.append(k.split("\n")[0])
values.append(v)
return " and ".join(keys), values

View File

@@ -48,11 +48,11 @@ class Up2k(object):
* ~/.config flatfiles for active jobs
"""
def __init__(self, hub, all_vols):
def __init__(self, hub):
self.hub = hub
self.asrv = hub.asrv
self.args = hub.args
self.log_func = hub.log
self.all_vols = all_vols
# config
self.salt = self.args.salt
@@ -61,12 +61,16 @@ class Up2k(object):
self.mutex = threading.Lock()
self.hashq = Queue()
self.tagq = Queue()
self.n_hashq = 0
self.n_tagq = 0
self.volstate = {}
self.registry = {}
self.entags = {}
self.flags = {}
self.cur = {}
self.mtag = None
self.pending_tags = None
self.mtp_parsers = {}
self.mem_cur = None
self.sqlite_ver = None
@@ -82,7 +86,7 @@ class Up2k(object):
if ANYWIN:
# usually fails to set lastmod too quickly
self.lastmod_q = Queue()
thr = threading.Thread(target=self._lastmodder)
thr = threading.Thread(target=self._lastmodder, name="up2k-lastmod")
thr.daemon = True
thr.start()
@@ -92,29 +96,78 @@ class Up2k(object):
if not HAVE_SQLITE3:
self.log("could not initialize sqlite3, will use in-memory registry only")
have_e2d = self.init_indexes()
if self.args.no_fastboot:
self.deferred_init()
else:
t = threading.Thread(
target=self.deferred_init,
name="up2k-deferred-init",
)
t.daemon = True
t.start()
def deferred_init(self):
all_vols = self.asrv.vfs.all_vols
have_e2d = self.init_indexes(all_vols)
if have_e2d:
thr = threading.Thread(target=self._snapshot)
thr = threading.Thread(target=self._snapshot, name="up2k-snapshot")
thr.daemon = True
thr.start()
thr = threading.Thread(target=self._hasher)
thr = threading.Thread(target=self._hasher, name="up2k-hasher")
thr.daemon = True
thr.start()
if self.mtag:
thr = threading.Thread(target=self._tagger)
thr = threading.Thread(target=self._tagger, name="up2k-tagger")
thr.daemon = True
thr.start()
thr = threading.Thread(target=self._run_all_mtp)
thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-init")
thr.daemon = True
thr.start()
def log(self, msg, c=0):
self.log_func("up2k", msg + "\033[K", c)
def get_state(self):
mtpq = 0
q = "select count(w) from mt where k = 't:mtp'"
got_lock = self.mutex.acquire(timeout=0.5)
if got_lock:
for cur in self.cur.values():
try:
mtpq += cur.execute(q).fetchone()[0]
except:
pass
self.mutex.release()
else:
mtpq = "?"
ret = {
"volstate": self.volstate,
"scanning": hasattr(self, "pp"),
"hashq": self.n_hashq,
"tagq": self.n_tagq,
"mtpq": mtpq,
}
return json.dumps(ret, indent=4)
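get_state is what tx_mounts fetches through the broker to render the splash/admin page: per-volume indexing state plus the current queue depths. A sketch of the dict handed to json.dumps, with invented volumes and numbers:

ret = {
    "volstate": {
        "": "online, idle",
        "music": "online (running mtp)",
    },
    "scanning": False,  # True while a scan owns self.pp
    "hashq": 0,         # files queued for hashing
    "tagq": 3,          # files queued for tag extraction
    "mtpq": 12,         # rows still marked t:mtp ("?" if the mutex was busy)
}

tx_mounts then re-keys the volstate entries into "/" and "/music/" form before handing them to the template.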
def rescan(self, all_vols, scan_vols):
if hasattr(self, "pp"):
return "cannot initiate; scan is already in progress"
args = (all_vols, scan_vols)
t = threading.Thread(
target=self.init_indexes,
args=args,
name="up2k-rescan-{}".format(scan_vols[0]),
)
t.daemon = True
t.start()
return None
def _vis_job_progress(self, job):
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
path = os.path.join(job["ptop"], job["prel"], job["name"])
@@ -137,9 +190,9 @@ class Up2k(object):
return True, ret
def init_indexes(self):
def init_indexes(self, all_vols, scan_vols=[]):
self.pp = ProgressPrinter()
vols = self.all_vols.values()
vols = all_vols.values()
t0 = time.time()
have_e2d = False
@@ -156,27 +209,42 @@ class Up2k(object):
self.log(msg, c=3)
live_vols = []
for vol in vols:
try:
os.listdir(vol.realpath)
with self.mutex:
# only need to protect register_vpath but all in one go feels right
for vol in vols:
try:
os.listdir(vol.realpath)
except:
self.volstate[vol.vpath] = "OFFLINE (cannot access folder)"
self.log("cannot access " + vol.realpath, c=1)
continue
if scan_vols and vol.vpath not in scan_vols:
continue
if not self.register_vpath(vol.realpath, vol.flags):
# self.log("db not enable for {}".format(m, vol.realpath))
continue
live_vols.append(vol)
except:
self.log("cannot access " + vol.realpath, c=1)
if vol.vpath not in self.volstate:
self.volstate[vol.vpath] = "OFFLINE (pending initialization)"
vols = live_vols
need_vac = {}
need_mtag = False
for vol in vols:
if "e2t" in vol.flags:
need_mtag = True
if need_mtag:
if need_mtag and not self.mtag:
self.mtag = MTag(self.log_func, self.args)
if not self.mtag.usable:
self.mtag = None
# e2ds(a) volumes first,
# also covers tags where e2ts is set
# e2ds(a) volumes first
for vol in vols:
en = {}
if "mte" in vol.flags:
@@ -188,26 +256,45 @@ class Up2k(object):
have_e2d = True
if "e2ds" in vol.flags:
r = self._build_file_index(vol, vols)
if not r:
needed_mutagen = True
self.volstate[vol.vpath] = "busy (hashing files)"
_, vac = self._build_file_index(vol, list(all_vols.values()))
if vac:
need_vac[vol] = True
if "e2ts" not in vol.flags:
m = "online, idle"
else:
m = "online (tags pending)"
self.volstate[vol.vpath] = m
# open the rest + do any e2ts(a)
needed_mutagen = False
for vol in vols:
r = self.register_vpath(vol.realpath, vol.flags)
if not r or "e2ts" not in vol.flags:
if "e2ts" not in vol.flags:
continue
cur, db_path, sz0 = r
n_add, n_rm, success = self._build_tags_index(vol.realpath)
m = "online (reading tags)"
self.volstate[vol.vpath] = m
self.log("{} [{}]".format(m, vol.realpath))
nadd, nrm, success = self._build_tags_index(vol)
if not success:
needed_mutagen = True
if n_add or n_rm:
self.vac(cur, db_path, n_add, n_rm, sz0)
if nadd or nrm:
need_vac[vol] = True
self.volstate[vol.vpath] = "online (mtp soon)"
for vol in need_vac:
cur, _ = self.register_vpath(vol.realpath, vol.flags)
with self.mutex:
cur.connection.commit()
cur.execute("vacuum")
self.pp.end = True
msg = "{} volumes in {:.2f} sec"
self.log(msg.format(len(vols), time.time() - t0))
@@ -215,128 +302,129 @@ class Up2k(object):
msg = "could not read tags because no backends are available (mutagen or ffprobe)"
self.log(msg, c=1)
thr = None
if self.mtag:
m = "online (running mtp)"
if scan_vols:
thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-scan")
thr.daemon = True
else:
del self.pp
m = "online, idle"
for vol in vols:
self.volstate[vol.vpath] = m
if thr:
thr.start()
return have_e2d
def register_vpath(self, ptop, flags):
with self.mutex:
if ptop in self.registry:
return None
_, flags = self._expr_idx_filter(flags)
ft = "\033[0;32m{}{:.0}"
ff = "\033[0;35m{}{:.0}"
fv = "\033[0;36m{}:\033[1;30m{}"
a = [
(ft if v is True else ff if v is False else fv).format(k, str(v))
for k, v in flags.items()
]
if a:
self.log(" ".join(sorted(a)) + "\033[0m")
reg = {}
path = os.path.join(ptop, ".hist", "up2k.snap")
if "e2d" in flags and os.path.exists(path):
with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8")
reg2 = json.loads(j)
for k, job in reg2.items():
path = os.path.join(job["ptop"], job["prel"], job["name"])
if os.path.exists(fsenc(path)):
reg[k] = job
job["poke"] = time.time()
else:
self.log("ign deleted file in snap: [{}]".format(path))
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
m = [m] + self._vis_reg_progress(reg)
self.log("\n".join(m))
self.flags[ptop] = flags
self.registry[ptop] = reg
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
return None
histpath = self.asrv.vfs.histtab[ptop]
db_path = os.path.join(histpath, "up2k.db")
if ptop in self.registry:
try:
os.mkdir(os.path.join(ptop, ".hist"))
return [self.cur[ptop], db_path]
except:
pass
db_path = os.path.join(ptop, ".hist", "up2k.db")
if ptop in self.cur:
return None
try:
sz0 = 0
if os.path.exists(db_path):
sz0 = os.path.getsize(db_path) // 1024
_, flags = self._expr_idx_filter(flags)
cur = self._open_db(db_path)
self.cur[ptop] = cur
return [cur, db_path, sz0]
except:
msg = "cannot use database at [{}]:\n{}"
self.log(msg.format(ptop, traceback.format_exc()))
ft = "\033[0;32m{}{:.0}"
ff = "\033[0;35m{}{:.0}"
fv = "\033[0;36m{}:\033[1;30m{}"
a = [
(ft if v is True else ff if v is False else fv).format(k, str(v))
for k, v in flags.items()
]
if a:
self.log(" ".join(sorted(a)) + "\033[0m")
reg = {}
path = os.path.join(histpath, "up2k.snap")
if "e2d" in flags and os.path.exists(path):
with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8")
reg2 = json.loads(j)
for k, job in reg2.items():
path = os.path.join(job["ptop"], job["prel"], job["name"])
if os.path.exists(fsenc(path)):
reg[k] = job
job["poke"] = time.time()
else:
self.log("ign deleted file in snap: [{}]".format(path))
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
m = [m] + self._vis_reg_progress(reg)
self.log("\n".join(m))
self.flags[ptop] = flags
self.registry[ptop] = reg
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
return None
try:
os.makedirs(histpath)
except:
pass
try:
cur = self._open_db(db_path)
self.cur[ptop] = cur
return [cur, db_path]
except:
msg = "cannot use database at [{}]:\n{}"
self.log(msg.format(ptop, traceback.format_exc()))
return None
def _build_file_index(self, vol, all_vols):
do_vac = False
top = vol.realpath
reg = self.register_vpath(top, vol.flags)
if not reg:
return
nohash = "dhash" in vol.flags
with self.mutex:
cur, _ = self.register_vpath(top, vol.flags)
_, db_path, sz0 = reg
dbw = [reg[0], 0, time.time()]
self.pp.n = next(dbw[0].execute("select count(w) from up"))[0]
dbw = [cur, 0, time.time()]
self.pp.n = next(dbw[0].execute("select count(w) from up"))[0]
excl = [
vol.realpath + "/" + d.vpath[len(vol.vpath) :].lstrip("/")
for d in all_vols
if d != vol and (d.vpath.startswith(vol.vpath + "/") or not vol.vpath)
]
n_add = self._build_dir(dbw, top, set(excl), top)
n_rm = self._drop_lost(dbw[0], top)
if dbw[1]:
self.log("commit {} new files".format(dbw[1]))
dbw[0].connection.commit()
excl = [
vol.realpath + "/" + d.vpath[len(vol.vpath) :].lstrip("/")
for d in all_vols
if d != vol and (d.vpath.startswith(vol.vpath + "/") or not vol.vpath)
]
if WINDOWS:
excl = [x.replace("/", "\\") for x in excl]
n_add, n_rm, success = self._build_tags_index(vol.realpath)
n_add = self._build_dir(dbw, top, set(excl), top, nohash)
n_rm = self._drop_lost(dbw[0], top)
if dbw[1]:
self.log("commit {} new files".format(dbw[1]))
dbw[0].connection.commit()
dbw[0].connection.commit()
if n_add or n_rm or do_vac:
self.vac(dbw[0], db_path, n_add, n_rm, sz0)
return True, n_add or n_rm or do_vac
return success
def vac(self, cur, db_path, n_add, n_rm, sz0):
sz1 = os.path.getsize(db_path) // 1024
cur.execute("vacuum")
sz2 = os.path.getsize(db_path) // 1024
msg = "{} new, {} del, {} kB vacced, {} kB gain, {} kB now".format(
n_add, n_rm, sz1 - sz2, sz2 - sz0, sz2
)
self.log(msg)
def _build_dir(self, dbw, top, excl, cdir):
def _build_dir(self, dbw, top, excl, cdir, nohash):
self.pp.msg = "a{} {}".format(self.pp.n, cdir)
histdir = os.path.join(top, ".hist")
histpath = self.asrv.vfs.histtab[top]
ret = 0
for iname, inf in statdir(self.log, not self.args.no_scandir, False, cdir):
g = statdir(self.log, not self.args.no_scandir, False, cdir)
for iname, inf in sorted(g):
abspath = os.path.join(cdir, iname)
lmod = int(inf.st_mtime)
sz = inf.st_size
if stat.S_ISDIR(inf.st_mode):
if abspath in excl or abspath == histdir:
if abspath in excl or abspath == histpath:
continue
# self.log(" dir: {}".format(abspath))
ret += self._build_dir(dbw, top, excl, abspath)
ret += self._build_dir(dbw, top, excl, abspath, nohash)
else:
# self.log("file: {}".format(abspath))
rp = abspath[len(top) :].replace("\\", "/").strip("/")
rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp]
sql = "select * from up where rd = ? and fn = ?"
sql = "select w, mt, sz from up where rd = ? and fn = ?"
try:
c = dbw[0].execute(sql, (rd, fn))
except:
@@ -345,18 +433,18 @@ class Up2k(object):
in_db = list(c.fetchall())
if in_db:
self.pp.n -= 1
_, dts, dsz, _, _ = in_db[0]
dw, dts, dsz = in_db[0]
if len(in_db) > 1:
m = "WARN: multiple entries: [{}] => [{}] |{}|\n{}"
rep_db = "\n".join([repr(x) for x in in_db])
self.log(m.format(top, rp, len(in_db), rep_db))
dts = -1
if dts == lmod and dsz == inf.st_size:
if dts == lmod and dsz == sz and (nohash or dw[0] != "#"):
continue
m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
top, rp, dts, lmod, dsz, inf.st_size
top, rp, dts, lmod, dsz, sz
)
self.log(m)
self.db_rm(dbw[0], rd, fn)
@@ -365,17 +453,22 @@ class Up2k(object):
in_db = None
self.pp.msg = "a{} {}".format(self.pp.n, abspath)
if inf.st_size > 1024 * 1024:
self.log("file: {}".format(abspath))
try:
hashes = self._hashlist_from_file(abspath)
except Exception as ex:
self.log("hash: {} @ [{}]".format(repr(ex), abspath))
continue
if nohash:
wark = up2k_wark_from_metadata(self.salt, sz, lmod, rd, fn)
else:
if sz > 1024 * 1024:
self.log("file: {}".format(abspath))
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
self.db_add(dbw[0], wark, rd, fn, lmod, inf.st_size)
try:
hashes = self._hashlist_from_file(abspath)
except Exception as ex:
self.log("hash: {} @ [{}]".format(repr(ex), abspath))
continue
wark = up2k_wark_from_hashlist(self.salt, sz, hashes)
self.db_add(dbw[0], wark, rd, fn, lmod, sz)
dbw[1] += 1
ret += 1
td = time.time() - dbw[2]
@@ -413,45 +506,53 @@ class Up2k(object):
return len(rm)
def _build_tags_index(self, ptop):
entags = self.entags[ptop]
flags = self.flags[ptop]
cur = self.cur[ptop]
def _build_tags_index(self, vol):
ptop = vol.realpath
with self.mutex:
_, db_path = self.register_vpath(ptop, vol.flags)
entags = self.entags[ptop]
flags = self.flags[ptop]
cur = self.cur[ptop]
n_add = 0
n_rm = 0
n_buf = 0
last_write = time.time()
if "e2tsr" in flags:
n_rm = cur.execute("select count(w) from mt").fetchone()[0]
if n_rm:
self.log("discarding {} media tags for a full rescan".format(n_rm))
cur.execute("delete from mt")
else:
self.log("volume has e2tsr but there are no media tags to discard")
with self.mutex:
n_rm = cur.execute("select count(w) from mt").fetchone()[0]
if n_rm:
self.log("discarding {} media tags for a full rescan".format(n_rm))
cur.execute("delete from mt")
# integrity: drop tags for tracks that were deleted
if "e2t" in flags:
drops = []
c2 = cur.connection.cursor()
up_q = "select w from up where substr(w,1,16) = ?"
for (w,) in cur.execute("select w from mt"):
if not c2.execute(up_q, (w,)).fetchone():
drops.append(w[:16])
c2.close()
with self.mutex:
drops = []
c2 = cur.connection.cursor()
up_q = "select w from up where substr(w,1,16) = ?"
for (w,) in cur.execute("select w from mt"):
if not c2.execute(up_q, (w,)).fetchone():
drops.append(w[:16])
c2.close()
if drops:
msg = "discarding media tags for {} deleted files"
self.log(msg.format(len(drops)))
n_rm += len(drops)
for w in drops:
cur.execute("delete from mt where w = ?", (w,))
if drops:
msg = "discarding media tags for {} deleted files"
self.log(msg.format(len(drops)))
n_rm += len(drops)
for w in drops:
cur.execute("delete from mt where w = ?", (w,))
# bail if a volume flag disables indexing
if "d2t" in flags or "d2d" in flags:
return n_add, n_rm, True
# add tags for new files
gcur = cur
with self.mutex:
gcur.connection.commit()
if "e2ts" in flags:
if not self.mtag:
return n_add, n_rm, False
@@ -460,8 +561,10 @@ class Up2k(object):
if self.mtag.prefer_mt and not self.args.no_mtag_mt:
mpool = self._start_mpool()
c2 = cur.connection.cursor()
c3 = cur.connection.cursor()
conn = sqlite3.connect(db_path, timeout=15)
cur = conn.cursor()
c2 = conn.cursor()
c3 = conn.cursor()
n_left = cur.execute("select count(w) from up").fetchone()[0]
for w, rd, fn in cur.execute("select w, rd, fn from up"):
n_left -= 1
@@ -483,7 +586,8 @@ class Up2k(object):
n_tags = self._tag_file(c3, *args)
else:
mpool.put(["mtag"] + args)
n_tags = len(self._flush_mpool(c3))
with self.mutex:
n_tags = len(self._flush_mpool(c3))
n_add += n_tags
n_buf += n_tags
@@ -495,26 +599,33 @@ class Up2k(object):
last_write = time.time()
n_buf = 0
self._stop_mpool(mpool, c3)
if mpool:
self._stop_mpool(mpool)
with self.mutex:
n_add += len(self._flush_mpool(c3))
conn.commit()
c3.close()
c2.close()
cur.close()
conn.close()
with self.mutex:
gcur.connection.commit()
return n_add, n_rm, True
def _flush_mpool(self, wcur):
with self.mutex:
ret = []
for x in self.pending_tags:
self._tag_file(wcur, *x)
ret.append(x[1])
ret = []
for x in self.pending_tags:
self._tag_file(wcur, *x)
ret.append(x[1])
self.pending_tags = []
return ret
self.pending_tags = []
return ret
def _run_all_mtp(self):
t0 = time.time()
self.mtp_parsers = {}
for ptop, flags in self.flags.items():
if "mtp" in flags:
self._run_one_mtp(ptop)
@@ -523,10 +634,12 @@ class Up2k(object):
msg = "mtp finished in {:.2f} sec ({})"
self.log(msg.format(td, s2hms(td, True)))
def _run_one_mtp(self, ptop):
db_path = os.path.join(ptop, ".hist", "up2k.db")
sz0 = os.path.getsize(db_path) // 1024
del self.pp
for k in list(self.volstate.keys()):
if "OFFLINE" not in self.volstate[k]:
self.volstate[k] = "online, idle"
def _run_one_mtp(self, ptop):
entags = self.entags[ptop]
parsers = {}
@@ -585,9 +698,8 @@ class Up2k(object):
jobs.append([parsers, None, w, abspath])
in_progress[w] = True
done = self._flush_mpool(wcur)
with self.mutex:
done = self._flush_mpool(wcur)
for w in done:
to_delete[w] = True
in_progress.pop(w)
@@ -628,15 +740,16 @@ class Up2k(object):
with self.mutex:
cur.connection.commit()
done = self._stop_mpool(mpool, wcur)
self._stop_mpool(mpool)
with self.mutex:
done = self._flush_mpool(wcur)
for w in done:
q = "delete from mt where w = ? and k = 't:mtp'"
cur.execute(q, (w,))
cur.connection.commit()
if n_done:
self.vac(cur, db_path, n_done, 0, sz0)
cur.execute("vacuum")
wcur.close()
cur.close()
@@ -687,13 +800,15 @@ class Up2k(object):
mpool = Queue(nw)
for _ in range(nw):
thr = threading.Thread(target=self._tag_thr, args=(mpool,))
thr = threading.Thread(
target=self._tag_thr, args=(mpool,), name="up2k-mpool"
)
thr.daemon = True
thr.start()
return mpool
def _stop_mpool(self, mpool, wcur):
def _stop_mpool(self, mpool):
if not mpool:
return
@@ -701,8 +816,6 @@ class Up2k(object):
mpool.put(None)
mpool.join()
done = self._flush_mpool(wcur)
return done
def _tag_thr(self, q):
while True:
@@ -850,7 +963,7 @@ class Up2k(object):
def _create_v3(self, cur):
"""
collision in 2^(n/2) files where n = bits (6 bits/ch)
10*6/2 = 2^30 = 1'073'741'824, 24.1mb idx
10*6/2 = 2^30 = 1'073'741'824, 24.1mb idx 1<<(3*10)
12*6/2 = 2^36 = 68'719'476'736, 24.8mb idx
16*6/2 = 2^48 = 281'474'976'710'656, 26.1mb idx
"""
@@ -898,9 +1011,10 @@ class Up2k(object):
return self._orz(db_path)
def handle_json(self, cj):
if not self.register_vpath(cj["ptop"], cj["vcfg"]):
if cj["ptop"] not in self.registry:
raise Pebkac(410, "location unavailable")
with self.mutex:
if not self.register_vpath(cj["ptop"], cj["vcfg"]):
if cj["ptop"] not in self.registry:
raise Pebkac(410, "location unavailable")
cj["name"] = sanitize_fn(cj["name"], bad=[".prologue.html", ".epilogue.html"])
cj["poke"] = time.time()
@@ -908,7 +1022,7 @@ class Up2k(object):
now = time.time()
job = None
with self.mutex:
cur = self.cur.get(cj["ptop"], None)
cur = self.cur.get(cj["ptop"])
reg = self.registry[cj["ptop"]]
if cur:
if self.no_expr_idx:
@@ -1066,7 +1180,7 @@ class Up2k(object):
def handle_chunk(self, ptop, wark, chash):
with self.mutex:
job = self.registry[ptop].get(wark, None)
job = self.registry[ptop].get(wark)
if not job:
known = " ".join([x for x in self.registry[ptop].keys()])
self.log("unknown wark [{}], known: {}".format(wark, known))
@@ -1131,7 +1245,7 @@ class Up2k(object):
return ret, dst
def idx_wark(self, ptop, wark, rd, fn, lmod, sz):
cur = self.cur.get(ptop, None)
cur = self.cur.get(ptop)
if not cur:
return False
@@ -1141,6 +1255,7 @@ class Up2k(object):
if "e2t" in self.flags[ptop]:
self.tagq.put([ptop, wark, rd, fn])
self.n_tagq += 1
return True
@@ -1181,12 +1296,15 @@ class Up2k(object):
return wark
def _hashlist_from_file(self, path):
pp = self.pp if hasattr(self, "pp") else None
fsz = os.path.getsize(fsenc(path))
csz = up2k_chunksize(fsz)
ret = []
with open(fsenc(path), "rb", 512 * 1024) as f:
while fsz > 0:
self.pp.msg = "{} MB, {}".format(int(fsz / 1024 / 1024), path)
if pp:
pp.msg = "{} MB, {}".format(int(fsz / 1024 / 1024), path)
hashobj = hashlib.sha512()
rem = min(csz, fsz)
fsz -= rem
@@ -1263,11 +1381,12 @@ class Up2k(object):
for k, reg in self.registry.items():
self._snap_reg(prev, k, reg, discard_interval)
def _snap_reg(self, prev, k, reg, discard_interval):
def _snap_reg(self, prev, ptop, reg, discard_interval):
now = time.time()
histpath = self.asrv.vfs.histtab[ptop]
rm = [x for x in reg.values() if now - x["poke"] > discard_interval]
if rm:
m = "dropping {} abandoned uploads in {}".format(len(rm), k)
m = "dropping {} abandoned uploads in {}".format(len(rm), ptop)
vis = [self._vis_job_progress(x) for x in rm]
self.log("\n".join([m] + vis))
for job in rm:
@@ -1285,21 +1404,21 @@ class Up2k(object):
except:
pass
path = os.path.join(k, ".hist", "up2k.snap")
path = os.path.join(histpath, "up2k.snap")
if not reg:
if k not in prev or prev[k] is not None:
prev[k] = None
if ptop not in prev or prev[ptop] is not None:
prev[ptop] = None
if os.path.exists(fsenc(path)):
os.unlink(fsenc(path))
return
newest = max(x["poke"] for _, x in reg.items()) if reg else 0
etag = [len(reg), newest]
if etag == prev.get(k, None):
if etag == prev.get(ptop):
return
try:
os.mkdir(os.path.join(k, ".hist"))
os.makedirs(histpath)
except:
pass
@@ -1311,14 +1430,21 @@ class Up2k(object):
atomic_move(path2, path)
self.log("snap: {} |{}|".format(path, len(reg.keys())))
prev[k] = etag
prev[ptop] = etag
def _tagger(self):
with self.mutex:
self.n_tagq += 1
while True:
with self.mutex:
self.n_tagq -= 1
ptop, wark, rd, fn = self.tagq.get()
if "e2t" not in self.flags[ptop]:
continue
# self.log("\n " + repr([ptop, rd, fn]))
abspath = os.path.join(ptop, rd, fn)
tags = self.mtag.get(abspath)
ntags1 = len(tags)
@@ -1344,8 +1470,16 @@ class Up2k(object):
self.log("tagged {} ({}+{})".format(abspath, ntags1, len(tags) - ntags1))
def _hasher(self):
with self.mutex:
self.n_hashq += 1
while True:
with self.mutex:
self.n_hashq -= 1
# self.log("hashq {}".format(self.n_hashq))
ptop, rd, fn = self.hashq.get()
# self.log("hashq {} pop {}/{}/{}".format(self.n_hashq, ptop, rd, fn))
if "e2d" not in self.flags[ptop]:
continue
@@ -1358,8 +1492,11 @@ class Up2k(object):
self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size)
def hash_file(self, ptop, flags, rd, fn):
self.register_vpath(ptop, flags)
self.hashq.put([ptop, rd, fn])
with self.mutex:
self.register_vpath(ptop, flags)
self.hashq.put([ptop, rd, fn])
self.n_hashq += 1
# self.log("hashq {} push {}/{}/{}".format(self.n_hashq, ptop, rd, fn))
def up2k_chunksize(filesize):
@@ -1381,9 +1518,12 @@ def up2k_wark_from_hashlist(salt, filesize, hashes):
ident.extend(hashes)
ident = "\n".join(ident)
hasher = hashlib.sha512()
hasher.update(ident.encode("utf-8"))
digest = hasher.digest()[:32]
wark = hashlib.sha512(ident.encode("utf-8")).digest()
wark = base64.urlsafe_b64encode(wark)
return wark.decode("ascii")[:43]
wark = base64.urlsafe_b64encode(digest)
return wark.decode("utf-8").rstrip("=")
def up2k_wark_from_metadata(salt, sz, lastmod, rd, fn):
ret = fsenc("{}\n{}\n{}\n{}\n{}".format(salt, lastmod, sz, rd, fn))
ret = base64.urlsafe_b64encode(hashlib.sha512(ret).digest())
return "#{}".format(ret[:42].decode("ascii"))

View File

@@ -193,7 +193,7 @@ class ProgressPrinter(threading.Thread):
"""
def __init__(self):
threading.Thread.__init__(self)
threading.Thread.__init__(self, name="pp")
self.daemon = True
self.msg = None
self.end = False
@@ -208,6 +208,8 @@ class ProgressPrinter(threading.Thread):
msg = self.msg
uprint(" {}\033[K\r".format(msg))
if PY2:
sys.stdout.flush()
print("\033[K", end="")
sys.stdout.flush() # necessary on win10 even w/ stderr btw
@@ -252,6 +254,45 @@ def trace(*args, **kwargs):
nuprint(msg)
def alltrace():
threads = {}
names = dict([(t.ident, t.name) for t in threading.enumerate()])
for tid, stack in sys._current_frames().items():
name = "{} ({:x})".format(names.get(tid), tid)
threads[name] = stack
rret = []
bret = []
for name, stack in sorted(threads.items()):
ret = ["\n\n# {}".format(name)]
pad = None
for fn, lno, name, line in traceback.extract_stack(stack):
fn = os.sep.join(fn.split(os.sep)[-3:])
ret.append('File: "{}", line {}, in {}'.format(fn, lno, name))
if line:
ret.append(" " + str(line.strip()))
if "self.not_empty.wait()" in line:
pad = " " * 4
if pad:
bret += [ret[0]] + [pad + x for x in ret[1:]]
else:
rret += ret
return "\n".join(rret + bret)
def min_ex():
et, ev, tb = sys.exc_info()
tb = traceback.extract_tb(tb, 2)
ex = [
"{} @ {} <{}>: {}".format(fp.split(os.sep)[-1], ln, fun, txt)
for fp, ln, fun, txt in tb
]
ex.append("{}: {}".format(et.__name__, ev))
return "\n".join(ex)
@contextlib.contextmanager
def ren_open(fname, *args, **kwargs):
fdir = kwargs.pop("fdir", None)
@@ -262,6 +303,11 @@ def ren_open(fname, *args, **kwargs):
yield {"orz": [f, fname]}
return
if suffix:
ext = fname.split(".")[-1]
if len(ext) < 7:
suffix += "." + ext
orig_name = fname
bname = fname
ext = ""
@@ -561,8 +607,10 @@ def read_header(sr):
else:
continue
sr.unrecv(ret[ofs + 4 :])
return ret[:ofs].decode("utf-8", "surrogateescape").split("\r\n")
if len(ret) > ofs + 4:
sr.unrecv(ret[ofs + 4 :])
return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")
def humansize(sz, terse=False):
@@ -847,13 +895,14 @@ def yieldfile(fn):
def hashcopy(actor, fin, fout):
u32_lim = int((2 ** 31) * 0.9)
is_mp = actor.is_mp
hashobj = hashlib.sha512()
tlen = 0
for buf in fin:
actor.workload += 1
if actor.workload > u32_lim:
actor.workload = 100 # prevent overflow
if is_mp:
actor.workload += 1
if actor.workload > 2 ** 31:
actor.workload = 100
tlen += len(buf)
hashobj.update(buf)
@@ -865,12 +914,17 @@ def hashcopy(actor, fin, fout):
return tlen, hashobj.hexdigest(), digest_b64
def sendfile_py(lower, upper, f, s):
def sendfile_py(lower, upper, f, s, actor=None):
remains = upper - lower
f.seek(lower)
while remains > 0:
if actor:
actor.workload += 1
if actor.workload > 2 ** 31:
actor.workload = 100
# time.sleep(0.01)
buf = f.read(min(4096, remains))
buf = f.read(min(1024 * 32, remains))
if not buf:
return remains
@@ -972,8 +1026,8 @@ def guess_mime(url, fallback="application/octet-stream"):
def runcmd(*argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
stdout, stderr = p.communicate()
stdout = stdout.decode("utf-8")
stderr = stderr.decode("utf-8")
stdout = stdout.decode("utf-8", "replace")
stderr = stderr.decode("utf-8", "replace")
return [p.returncode, stdout, stderr]

View File

@@ -0,0 +1,583 @@
/*!
* baguetteBox.js
* @author feimosi
* @version 1.11.1-mod
* @url https://github.com/feimosi/baguetteBox.js
*/
window.baguetteBox = (function () {
'use strict';
var options = {},
defaults = {
captions: true,
buttons: 'auto',
noScrollbars: false,
bodyClass: 'baguetteBox-open',
titleTag: false,
async: false,
preload: 2,
animation: 'slideIn',
afterShow: null,
afterHide: null,
onChange: null,
},
overlay, slider, previousButton, nextButton, closeButton,
currentGallery = [],
currentIndex = 0,
isOverlayVisible = false,
touch = {}, // start-pos
touchFlag = false, // busy
regex = /.+\.(gif|jpe?g|png|webp)/i,
data = {}, // all galleries
imagesElements = [],
documentLastFocus = null;
var overlayClickHandler = function (event) {
if (event.target.id.indexOf('baguette-img') !== -1) {
hideOverlay();
}
};
var touchstartHandler = function (event) {
touch.count++;
if (touch.count > 1) {
touch.multitouch = true;
}
touch.startX = event.changedTouches[0].pageX;
touch.startY = event.changedTouches[0].pageY;
};
var touchmoveHandler = function (event) {
if (touchFlag || touch.multitouch) {
return;
}
event.preventDefault ? event.preventDefault() : event.returnValue = false;
var touchEvent = event.touches[0] || event.changedTouches[0];
if (touchEvent.pageX - touch.startX > 40) {
touchFlag = true;
showPreviousImage();
} else if (touchEvent.pageX - touch.startX < -40) {
touchFlag = true;
showNextImage();
} else if (touch.startY - touchEvent.pageY > 100) {
hideOverlay();
}
};
var touchendHandler = function () {
touch.count--;
if (touch.count <= 0) {
touch.multitouch = false;
}
touchFlag = false;
};
var contextmenuHandler = function () {
touchendHandler();
};
var trapFocusInsideOverlay = function (event) {
if (overlay.style.display === 'block' && (overlay.contains && !overlay.contains(event.target))) {
event.stopPropagation();
initFocus();
}
};
function run(selector, userOptions) {
buildOverlay();
removeFromCache(selector);
return bindImageClickListeners(selector, userOptions);
}
function bindImageClickListeners(selector, userOptions) {
var galleryNodeList = document.querySelectorAll(selector);
var selectorData = {
galleries: [],
nodeList: galleryNodeList
};
data[selector] = selectorData;
[].forEach.call(galleryNodeList, function (galleryElement) {
if (userOptions && userOptions.filter) {
regex = userOptions.filter;
}
var tagsNodeList = [];
if (galleryElement.tagName === 'A') {
tagsNodeList = [galleryElement];
} else {
tagsNodeList = galleryElement.getElementsByTagName('a');
}
tagsNodeList = [].filter.call(tagsNodeList, function (element) {
if (element.className.indexOf(userOptions && userOptions.ignoreClass) === -1) {
return regex.test(element.href);
}
});
if (tagsNodeList.length === 0) {
return;
}
var gallery = [];
[].forEach.call(tagsNodeList, function (imageElement, imageIndex) {
var imageElementClickHandler = function (event) {
if (event && event.ctrlKey)
return true;
event.preventDefault ? event.preventDefault() : event.returnValue = false;
prepareOverlay(gallery, userOptions);
showOverlay(imageIndex);
};
var imageItem = {
eventHandler: imageElementClickHandler,
imageElement: imageElement
};
bind(imageElement, 'click', imageElementClickHandler);
gallery.push(imageItem);
});
selectorData.galleries.push(gallery);
});
return selectorData.galleries;
}
function clearCachedData() {
for (var selector in data) {
if (data.hasOwnProperty(selector)) {
removeFromCache(selector);
}
}
}
function removeFromCache(selector) {
if (!data.hasOwnProperty(selector)) {
return;
}
var galleries = data[selector].galleries;
[].forEach.call(galleries, function (gallery) {
[].forEach.call(gallery, function (imageItem) {
unbind(imageItem.imageElement, 'click', imageItem.eventHandler);
});
if (currentGallery === gallery) {
currentGallery = [];
}
});
delete data[selector];
}
function buildOverlay() {
overlay = ebi('baguetteBox-overlay');
if (overlay) {
slider = ebi('baguetteBox-slider');
previousButton = ebi('previous-button');
nextButton = ebi('next-button');
closeButton = ebi('close-button');
return;
}
overlay = mknod('div');
overlay.setAttribute('role', 'dialog');
overlay.id = 'baguetteBox-overlay';
document.getElementsByTagName('body')[0].appendChild(overlay);
slider = mknod('div');
slider.id = 'baguetteBox-slider';
overlay.appendChild(slider);
previousButton = mknod('button');
previousButton.setAttribute('type', 'button');
previousButton.id = 'previous-button';
previousButton.setAttribute('aria-label', 'Previous');
previousButton.innerHTML = '&lt;';
overlay.appendChild(previousButton);
nextButton = mknod('button');
nextButton.setAttribute('type', 'button');
nextButton.id = 'next-button';
nextButton.setAttribute('aria-label', 'Next');
nextButton.innerHTML = '&gt;';
overlay.appendChild(nextButton);
closeButton = mknod('button');
closeButton.setAttribute('type', 'button');
closeButton.id = 'close-button';
closeButton.setAttribute('aria-label', 'Close');
closeButton.innerHTML = '&times;';
overlay.appendChild(closeButton);
previousButton.className = nextButton.className = closeButton.className = 'baguetteBox-button';
bindEvents();
}
function keyDownHandler(event) {
switch (event.keyCode) {
case 37: // Left
showPreviousImage();
break;
case 39: // Right
showNextImage();
break;
case 27: // Esc
hideOverlay();
break;
case 36: // Home
showFirstImage(event);
break;
case 35: // End
showLastImage(event);
break;
}
}
var passiveSupp = false;
try {
var opts = {
get passive() {
passiveSupp = true;
return false;
}
};
window.addEventListener('test', null, opts);
window.removeEventListener('test', null, opts);
}
catch (ex) {
passiveSupp = false;
}
var passiveEvent = passiveSupp ? { passive: false } : null;
var nonPassiveEvent = passiveSupp ? { passive: true } : null;
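// note: passiveEvent above carries {passive:false} because touchmoveHandler calls preventDefault
// and must stay cancelable; touchstart only records coordinates, so it gets the passive option where supported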
function bindEvents() {
bind(overlay, 'click', overlayClickHandler);
bind(previousButton, 'click', showPreviousImage);
bind(nextButton, 'click', showNextImage);
bind(closeButton, 'click', hideOverlay);
bind(slider, 'contextmenu', contextmenuHandler);
bind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
bind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
bind(overlay, 'touchend', touchendHandler);
bind(document, 'focus', trapFocusInsideOverlay, true);
}
function unbindEvents() {
unbind(overlay, 'click', overlayClickHandler);
unbind(previousButton, 'click', showPreviousImage);
unbind(nextButton, 'click', showNextImage);
unbind(closeButton, 'click', hideOverlay);
unbind(slider, 'contextmenu', contextmenuHandler);
unbind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
unbind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
unbind(overlay, 'touchend', touchendHandler);
unbind(document, 'focus', trapFocusInsideOverlay, true);
}
function prepareOverlay(gallery, userOptions) {
if (currentGallery === gallery) {
return;
}
currentGallery = gallery;
setOptions(userOptions);
slider.innerHTML = '';
imagesElements.length = 0;
var imagesFiguresIds = [];
var imagesCaptionsIds = [];
for (var i = 0, fullImage; i < gallery.length; i++) {
fullImage = mknod('div');
fullImage.className = 'full-image';
fullImage.id = 'baguette-img-' + i;
imagesElements.push(fullImage);
imagesFiguresIds.push('baguetteBox-figure-' + i);
imagesCaptionsIds.push('baguetteBox-figcaption-' + i);
slider.appendChild(imagesElements[i]);
}
overlay.setAttribute('aria-labelledby', imagesFiguresIds.join(' '));
overlay.setAttribute('aria-describedby', imagesCaptionsIds.join(' '));
}
function setOptions(newOptions) {
if (!newOptions) {
newOptions = {};
}
for (var item in defaults) {
options[item] = defaults[item];
if (typeof newOptions[item] !== 'undefined') {
options[item] = newOptions[item];
}
}
slider.style.transition = (options.animation === 'fadeIn' ? 'opacity .4s ease' :
options.animation === 'slideIn' ? '' : 'none');
if (options.buttons === 'auto' && ('ontouchstart' in window || currentGallery.length === 1)) {
options.buttons = false;
}
previousButton.style.display = nextButton.style.display = (options.buttons ? '' : 'none');
}
function showOverlay(chosenImageIndex) {
if (options.noScrollbars) {
document.documentElement.style.overflowY = 'hidden';
document.body.style.overflowY = 'scroll';
}
if (overlay.style.display === 'block') {
return;
}
bind(document, 'keydown', keyDownHandler);
currentIndex = chosenImageIndex;
touch = {
count: 0,
startX: null,
startY: null
};
loadImage(currentIndex, function () {
preloadNext(currentIndex);
preloadPrev(currentIndex);
});
updateOffset();
overlay.style.display = 'block';
// Fade in overlay
setTimeout(function () {
overlay.className = 'visible';
if (options.bodyClass && document.body.classList) {
document.body.classList.add(options.bodyClass);
}
if (options.afterShow) {
options.afterShow();
}
}, 50);
if (options.onChange) {
options.onChange(currentIndex, imagesElements.length);
}
documentLastFocus = document.activeElement;
initFocus();
isOverlayVisible = true;
}
function initFocus() {
if (options.buttons) {
previousButton.focus();
} else {
closeButton.focus();
}
}
function hideOverlay(e) {
ev(e);
if (options.noScrollbars) {
document.documentElement.style.overflowY = 'auto';
document.body.style.overflowY = 'auto';
}
if (overlay.style.display === 'none') {
return;
}
unbind(document, 'keydown', keyDownHandler);
// Fade out and hide the overlay
overlay.className = '';
setTimeout(function () {
overlay.style.display = 'none';
if (options.bodyClass && document.body.classList) {
document.body.classList.remove(options.bodyClass);
}
if (options.afterHide) {
options.afterHide();
}
documentLastFocus && documentLastFocus.focus();
isOverlayVisible = false;
}, 500);
}
function loadImage(index, callback) {
var imageContainer = imagesElements[index];
var galleryItem = currentGallery[index];
if (typeof imageContainer === 'undefined' || typeof galleryItem === 'undefined') {
return; // out-of-bounds or gallery dirty
}
if (imageContainer.getElementsByTagName('img')[0]) {
// image is loaded, cb and bail
if (callback) {
callback();
}
return;
}
var imageElement = galleryItem.imageElement,
imageSrc = imageElement.href,
thumbnailElement = imageElement.getElementsByTagName('img')[0],
imageCaption = typeof options.captions === 'function' ?
options.captions.call(currentGallery, imageElement) :
imageElement.getAttribute('data-caption') || imageElement.title;
var figure = mknod('figure');
figure.id = 'baguetteBox-figure-' + index;
figure.innerHTML = '<div class="baguetteBox-spinner">' +
'<div class="baguetteBox-double-bounce1"></div>' +
'<div class="baguetteBox-double-bounce2"></div>' +
'</div>';
if (options.captions && imageCaption) {
var figcaption = mknod('figcaption');
figcaption.id = 'baguetteBox-figcaption-' + index;
figcaption.innerHTML = imageCaption;
figure.appendChild(figcaption);
}
imageContainer.appendChild(figure);
var image = mknod('img');
image.onload = function () {
// Remove loader element
var spinner = document.querySelector('#baguette-img-' + index + ' .baguetteBox-spinner');
figure.removeChild(spinner);
if (!options.async && callback) {
callback();
}
};
image.setAttribute('src', imageSrc);
image.alt = thumbnailElement ? thumbnailElement.alt || '' : '';
if (options.titleTag && imageCaption) {
image.title = imageCaption;
}
figure.appendChild(image);
if (options.async && callback) {
callback();
}
}
function showNextImage(e) {
ev(e);
return show(currentIndex + 1);
}
function showPreviousImage(e) {
ev(e);
return show(currentIndex - 1);
}
function showFirstImage(event) {
if (event) {
event.preventDefault();
}
return show(0);
}
function showLastImage(event) {
if (event) {
event.preventDefault();
}
return show(currentGallery.length - 1);
}
/**
* Move the gallery to a specific index
* @param `index` {number} - the position of the image
* @param `gallery` {array} - gallery which should be opened, if omitted assumes the currently opened one
* @return {boolean} - true on success or false if the index is invalid
*/
function show(index, gallery) {
gallery = gallery || currentGallery;
if (!isOverlayVisible && index >= 0 && index < gallery.length) {
prepareOverlay(gallery, options);
showOverlay(index);
return true;
}
if (index < 0) {
if (options.animation) {
bounceAnimation('left');
}
return false;
}
if (index >= imagesElements.length) {
if (options.animation) {
bounceAnimation('right');
}
return false;
}
currentIndex = index;
loadImage(currentIndex, function () {
preloadNext(currentIndex);
preloadPrev(currentIndex);
});
updateOffset();
if (options.onChange) {
options.onChange(currentIndex, imagesElements.length);
}
return true;
}
/**
* Triggers the bounce animation
* @param {('left'|'right')} direction - Direction of the movement
*/
function bounceAnimation(direction) {
slider.className = 'bounce-from-' + direction;
setTimeout(function () {
slider.className = '';
}, 400);
}
function updateOffset() {
var offset = -currentIndex * 100 + '%';
if (options.animation === 'fadeIn') {
slider.style.opacity = 0;
setTimeout(function () {
slider.style.transform = 'translate3d(' + offset + ',0,0)';
slider.style.opacity = 1;
}, 400);
} else {
slider.style.transform = 'translate3d(' + offset + ',0,0)';
}
}
function preloadNext(index) {
if (index - currentIndex >= options.preload) {
return;
}
loadImage(index + 1, function () {
preloadNext(index + 1);
});
}
function preloadPrev(index) {
if (currentIndex - index >= options.preload) {
return;
}
loadImage(index - 1, function () {
preloadPrev(index - 1);
});
}
function bind(element, event, callback, options) {
element.addEventListener(event, callback, options);
}
function unbind(element, event, callback, options) {
element.removeEventListener(event, callback, options);
}
function destroyPlugin() {
unbindEvents();
clearCachedData();
unbind(document, 'keydown', keyDownHandler);
document.getElementsByTagName('body')[0].removeChild(ebi('baguetteBox-overlay'));
data = {};
currentGallery = [];
currentIndex = 0;
}
return {
run: run,
show: show,
showNext: showNextImage,
showPrevious: showPreviousImage,
hide: hideOverlay,
destroy: destroyPlugin
};
})();
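A minimal sketch of how the API returned above can be driven, assuming the IIFE is assigned to a global `baguetteBox` (the grid code later in this compare calls `baguetteBox.run` and `baguetteBox.destroy` the same way); the `#ggrid` selector is borrowed from that code:

var galleries = baguetteBox.run('#ggrid');  // bind click handlers on matching anchors, returns the galleries
baguetteBox.show(0, galleries[0]);          // open the overlay on the first image
baguetteBox.showNext();                     // same as the right-arrow key or a swipe
baguetteBox.hide();                         // fade the overlay back out
baguetteBox.destroy();                      // unbind everything and remove the overlay node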

View File

@@ -53,6 +53,7 @@ body {
#files tbody a {
display: block;
padding: .3em 0;
scroll-margin-top: 45vh;
}
#files tbody div a {
color: #f5a;
@@ -68,7 +69,6 @@ a, #files tbody div a:last-child {
text-decoration: underline;
}
#files thead {
background: #333;
position: sticky;
top: 0;
}
@@ -76,29 +76,30 @@ a, #files tbody div a:last-child {
color: #999;
font-weight: normal;
}
#files tr:hover {
#files tr:hover td {
background: #1c1c1c;
}
#files thead th {
padding: .5em 1.3em .3em 1.3em;
padding: .5em .3em .3em .3em;
border-right: 2px solid #3c3c3c;
border-bottom: 2px solid #444;
background: #333;
cursor: pointer;
}
#files thead th+th {
border-left: 2px solid #2a2a2a;
}
#files thead th:last-child {
background: #444;
border-radius: .7em .7em 0 0;
border-right: none;
}
#files thead th:first-child {
#files tbody {
background: #222;
}
#files tbody,
#files thead th:nth-child(2) {
background: #222;
border-radius: 0 .7em 0 0;
}
#files td {
margin: 0;
padding: 0 .5em;
border-bottom: 1px solid #111;
border-left: 1px solid #2c2c2c;
}
#files td+td+td {
max-width: 30em;
@@ -185,7 +186,7 @@ a, #files tbody div a:last-child {
margin: -.2em;
}
#files tbody a.play.act {
color: #840;
color: #720;
text-shadow: 0 0 .3em #b80;
}
#files tbody tr.sel td,
@@ -483,20 +484,48 @@ html.light #ggrid a.sel {
margin: .5em;
}
.opview input[type=text] {
color: #fff;
background: #383838;
color: #fff;
border: none;
box-shadow: 0 0 .3em #222;
border-bottom: 1px solid #fc5;
border-radius: .2em;
padding: .2em .3em;
}
.opview input.err {
background: #a20;
border-color: #f00;
box-shadow: 0 0 .7em #f00;
text-shadow: 1px 1px 0 #500;
outline: none;
}
input[type="checkbox"]+label {
color: #f5a;
}
input[type="checkbox"]:checked+label {
color: #fc5;
}
input.eq_gain {
width: 3em;
text-align: center;
margin: 0 .6em;
}
#audio_eq table {
border-collapse: collapse;
}
#audio_eq td {
text-align: center;
}
#audio_eq a.eq_step {
font-size: 1.5em;
display: block;
padding: 0;
}
#au_eq {
display: block;
margin-top: .5em;
padding: 1.3em .3em;
}
@@ -529,6 +558,17 @@ input[type="checkbox"]:checked+label {
height: 1em;
margin: .2em 0 -1em 1.6em;
}
#tq_raw {
width: calc(100% - 2em);
margin: .3em 0 0 1.4em;
}
#tq_raw td+td {
width: 100%;
}
#op_search #q_raw {
width: 100%;
display: block;
}
#files td div span {
color: #fff;
padding: 0 .4em;
@@ -552,6 +592,7 @@ input[type="checkbox"]:checked+label {
}
#wrap {
margin-top: 2em;
min-height: 90vh;
}
#tree {
display: none;
@@ -564,6 +605,12 @@ input[type="checkbox"]:checked+label {
overscroll-behavior-y: none;
scrollbar-color: #eb0 #333;
}
#treeh {
background: #333;
position: sticky;
z-index: 1;
top: 0;
}
#thx_ff {
padding: 5em 0;
}
@@ -589,6 +636,7 @@ input[type="checkbox"]:checked+label {
box-shadow: 0 .1em .2em #222 inset;
border-radius: .3em;
margin: .2em;
white-space: pre;
position: relative;
top: -.2em;
}
@@ -633,7 +681,6 @@ input[type="checkbox"]:checked+label {
}
#treeul a+a {
width: calc(100% - 2em);
background: #333;
line-height: 1em;
}
#treeul a+a:hover {
@@ -657,34 +704,20 @@ input[type="checkbox"]:checked+label {
font-size: 2em;
white-space: nowrap;
}
#files th:hover .cfg,
#files th.min .cfg {
#files th:hover .cfg {
display: block;
width: 1em;
border-radius: .2em;
margin: -1.3em auto 0 auto;
background: #444;
}
#files th.min .cfg {
margin: -.6em;
}
#files>thead>tr>th.min span {
position: absolute;
transform: rotate(270deg);
background: linear-gradient(90deg, rgba(68,68,68,0), rgba(68,68,68,0.5) 70%, #444);
margin-left: -4.6em;
padding: .4em;
top: 5.4em;
width: 8em;
text-align: right;
letter-spacing: .04em;
#files>thead>tr>th.min,
#files td.min {
display: none;
}
#files td:nth-child(2n) {
color: #f5a;
}
#files td.min a {
display: none;
}
#files tr.play td,
#files tr.play div a {
background: #fc4;
@@ -699,18 +732,28 @@ input[type="checkbox"]:checked+label {
color: #300;
background: #fea;
}
#op_cfg {
.opwide {
max-width: none;
margin-right: 1.5em;
}
#op_cfg>div>a {
.opwide>div {
display: inline-block;
vertical-align: top;
border-left: .2em solid #444;
margin-left: .5em;
padding-left: .5em;
}
.opwide>div.fill {
display: block;
}
.opwide>div>div>a {
line-height: 2em;
}
#op_cfg>div>span {
#op_cfg>div>div>span {
display: inline-block;
padding: .2em .4em;
}
#op_cfg h3 {
.opbox h3 {
margin: .8em 0 0 .6em;
padding: 0;
border-bottom: 1px solid #555;
@@ -740,9 +783,12 @@ input[type="checkbox"]:checked+label {
font-family: monospace, monospace;
line-height: 2em;
}
#griden.on+#thumbs {
#thumbs {
opacity: .3;
}
#griden.on+#thumbs {
opacity: 1;
}
#ghead {
background: #3c3c3c;
border: 1px solid #444;
@@ -787,6 +833,12 @@ html.light #ghead {
padding: .2em .3em;
display: block;
}
#ggrid span.dir:before {
content: '📂';
line-height: 0;
font-size: 2em;
margin: -.7em .1em -.5em -.3em;
}
#ggrid a:hover {
background: #444;
border-color: #555;
@@ -899,12 +951,14 @@ html.light #files {
}
html.light #files thead th {
background: #eee;
border-right: 1px solid #ccc;
border-bottom: 1px solid #ccc;
}
html.light #files tr td {
border-top: 1px solid #ddd;
html.light #files thead th {
border-left: 1px solid #f7f7f7;
}
html.light #files td {
border-bottom: 1px solid #f7f7f7;
border-color: #ddd #fff #fff #ddd;
}
html.light #files tbody tr:last-child td {
border-bottom: .2em solid #ccc;
@@ -912,25 +966,25 @@ html.light #files tbody tr:last-child td {
html.light #files td:nth-child(2n) {
color: #d38;
}
html.light #files tr:hover td {
background: #fff;
html.light #files tr.play td:nth-child(2n) {
color: #c16;
}
html.light #files tbody a.play {
color: #c0f;
}
html.light tr.play td {
html.light #files tr.play td {
background: #fc5;
border-color: #eb1;
}
html.light #files tr:hover td {
background: #fff;
}
html.light tr.play a {
color: #406;
}
html.light #files th:hover .cfg,
html.light #files th.min .cfg {
html.light #files th:hover .cfg {
background: #ccc;
}
html.light #files > thead > tr > th.min span {
background: linear-gradient(90deg, rgba(204,204,204,0), rgba(204,204,204,0.5) 70%, #ccc);
}
html.light #blocked {
background: #eee;
}
@@ -940,7 +994,21 @@ html.light #blk_abrt a {
box-shadow: 0 .2em .4em #ddd;
}
html.light #widget a {
color: #fc5;
color: #06a;
}
html.light #wtoggle,
html.light #widgeti {
background: #eee;
}
html.light #wtoggle {
box-shadow: 0 0 .5em #bbb;
}
html.light #widget.open {
border-top: .2em solid #f7f7f7;
}
html.light #wzip,
html.light #wnp {
border-color: #ccc;
}
html.light #files tr.sel:hover td {
background: #c37;
@@ -957,6 +1025,9 @@ html.light #files tr.sel a.play.act {
html.light input[type="checkbox"] + label {
color: #333;
}
html.light .opwide>div {
border-color: #ddd;
}
html.light .opview input[type="text"] {
background: #fff;
color: #333;
@@ -1001,6 +1072,9 @@ html.light #files tr.sel a:hover {
color: #000;
background: #fff;
}
html.light #treeh {
background: #eee;
}
html.light #tree {
scrollbar-color: #a70 #ddd;
}
@@ -1010,4 +1084,161 @@ html.light #tree::-webkit-scrollbar {
}
#tree::-webkit-scrollbar-thumb {
background: #da0;
}
}
#baguetteBox-overlay {
display: none;
opacity: 0;
position: fixed;
overflow: hidden;
top: 0;
left: 0;
width: 100%;
height: 100%;
z-index: 1000000;
background: rgba(0, 0, 0, 0.8);
transition: opacity .3s ease;
}
#baguetteBox-overlay.visible {
opacity: 1;
}
#baguetteBox-overlay .full-image {
display: inline-block;
position: relative;
width: 100%;
height: 100%;
text-align: center;
}
#baguetteBox-overlay .full-image figure {
display: inline;
margin: 0;
height: 100%;
}
#baguetteBox-overlay .full-image img {
display: inline-block;
width: auto;
height: auto;
max-height: 100%;
max-width: 100%;
vertical-align: middle;
box-shadow: 0 0 8px rgba(0, 0, 0, 0.6);
}
#baguetteBox-overlay .full-image figcaption {
display: block;
position: absolute;
bottom: 0;
width: 100%;
text-align: center;
line-height: 1.8;
white-space: normal;
color: #ccc;
}
#baguetteBox-overlay figcaption a {
background: rgba(0, 0, 0, 0.6);
border-radius: .4em;
padding: .3em .6em;
}
#baguetteBox-overlay .full-image:before {
content: "";
display: inline-block;
height: 50%;
width: 1px;
margin-right: -1px;
}
#baguetteBox-slider {
position: absolute;
left: 0;
top: 0;
height: 100%;
width: 100%;
white-space: nowrap;
transition: left .2s ease, transform .2s ease;
}
#baguetteBox-slider.bounce-from-right {
animation: bounceFromRight .4s ease-out;
}
#baguetteBox-slider.bounce-from-left {
animation: bounceFromLeft .4s ease-out;
}
@keyframes bounceFromRight {
0% {margin-left: 0}
50% {margin-left: -30px}
100% {margin-left: 0}
}
@keyframes bounceFromLeft {
0% {margin-left: 0}
50% {margin-left: 30px}
100% {margin-left: 0}
}
.baguetteBox-button#next-button,
.baguetteBox-button#previous-button {
top: 50%;
top: calc(50% - 30px);
width: 44px;
height: 60px;
}
.baguetteBox-button {
position: absolute;
cursor: pointer;
outline: none;
padding: 0;
margin: 0;
border: 0;
border-radius: 15%;
background: rgba(50, 50, 50, 0.5);
color: #ddd;
font: 1.6em sans-serif;
transition: background-color .3s ease;
}
.baguetteBox-button:focus,
.baguetteBox-button:hover {
background: rgba(50, 50, 50, 0.9);
}
#next-button {
right: 2%;
}
#previous-button {
left: 2%;
}
#close-button {
top: 20px;
right: 2%;
width: 30px;
height: 30px;
}
.baguetteBox-button svg {
position: absolute;
left: 0;
top: 0;
}
.baguetteBox-spinner {
width: 40px;
height: 40px;
display: inline-block;
position: absolute;
top: 50%;
left: 50%;
margin-top: -20px;
margin-left: -20px;
}
.baguetteBox-double-bounce1,
.baguetteBox-double-bounce2 {
width: 100%;
height: 100%;
border-radius: 50%;
background-color: #fff;
opacity: .6;
position: absolute;
top: 0;
left: 0;
animation: bounce 2s infinite ease-in-out;
}
.baguetteBox-double-bounce2 {
animation-delay: -1s;
}
@keyframes bounce {
0%, 100% {transform: scale(0)}
50% {transform: scale(1)}
}

View File

@@ -8,6 +8,9 @@
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css{{ ts }}">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css{{ ts }}">
{%- if css %}
<link rel="stylesheet" type="text/css" media="screen" href="{{ css }}{{ ts }}">
{%- endif %}
</head>
<body>
@@ -23,6 +26,7 @@
<a href="#" data-perm="write" data-dest="mkdir" data-desc="mkdir: create a new directory">📂</a>
<a href="#" data-perm="read write" data-dest="new_md" data-desc="new-md: create a new markdown document">📝</a>
<a href="#" data-perm="write" data-dest="msg" data-desc="msg: send a message to the server log">📟</a>
<a href="#" data-dest="player" data-desc="media player options">🎺</a>
<a href="#" data-dest="cfg" data-desc="configuration options">⚙️</a>
<div id="opdesc"></div>
</div>
@@ -36,22 +40,25 @@
<div id="srch_q"></div>
</div>
<div id="op_player" class="opview opbox opwide"></div>
{%- include 'upload.html' %}
<div id="op_cfg" class="opview opbox">
<h3>switches</h3>
<div id="op_cfg" class="opview opbox opwide">
<div>
<a id="tooltips" class="tgl btn" href="#">tooltips</a>
<a id="lightmode" class="tgl btn" href="#">lightmode</a>
<a id="griden" class="tgl btn" href="#">the grid</a>
<a id="thumbs" class="tgl btn" href="#">thumbs</a>
<h3>switches</h3>
<div>
<a id="tooltips" class="tgl btn" href="#"> tooltips</a>
<a id="lightmode" class="tgl btn" href="#">☀️ lightmode</a>
<a id="griden" class="tgl btn" href="#">田 the grid</a>
<a id="thumbs" class="tgl btn" href="#">🖼️ thumbs</a>
</div>
</div>
{%- if have_zip %}
<h3>folder download</h3>
<div id="arc_fmt"></div>
<div><h3>folder download</h3><div id="arc_fmt"></div></div>
{%- endif %}
<h3>key notation</h3>
<div id="key_notation"></div>
<div><h3>key notation</h3><div id="key_notation"></div></div>
<div class="fill"><h3>hidden columns</h3><div id="hcols"></div></div>
</div>
<h1 id="path">
@@ -62,10 +69,12 @@
</h1>
<div id="tree">
<a href="#" id="detree">🍞...</a>
<a href="#" class="btn" step="2" id="twobytwo">+</a>
<a href="#" class="btn" step="-2" id="twig">&ndash;</a>
<a href="#" class="tgl btn" id="dyntree">a</a>
<div id="treeh">
<a href="#" id="detree">🍞...</a>
<a href="#" class="btn" step="2" id="twobytwo">+</a>
<a href="#" class="btn" step="-2" id="twig">&ndash;</a>
<a href="#" class="tgl btn" id="dyntree">a</a>
</div>
<ul id="treeul"></ul>
<div id="thx_ff">&nbsp;</div>
</div>

View File

@@ -38,7 +38,40 @@ var have_webp = null;
img.onerror = function () {
have_webp = false;
};
img.src = "data:image/webp;base64,UklGRiIAAABXRUJQVlA4IBYAAAAwAQCdASoBAAEADsD+JaQAA3AAAAAA";
img.src = "data:image/webp;base64,UklGRhoAAABXRUJQVlA4TA0AAAAvAAAAEAcQERGIiP4HAA==";
})();
var mpl = (function () {
ebi('op_player').innerHTML = (
'<div><h3>playback mode</h3><div id="pb_mode">' +
'<a href="#" class="tgl btn">🔁 loop-folder</a>' +
'<a href="#" class="tgl btn">📂 next-folder</a>' +
'</div></div>' +
'<div><h3>audio equalizer</h3><div id="audio_eq"></div></div>');
var r = {
"pb_mode": sread('pb_mode') || 'loop-folder'
};
function draw_pb_mode() {
var btns = QSA('#pb_mode>a');
for (var a = 0, aa = btns.length; a < aa; a++) {
clmod(btns[a], 'on', btns[a].textContent.indexOf(r.pb_mode) != -1);
btns[a].onclick = set_pb_mode;
}
}
draw_pb_mode();
function set_pb_mode(e) {
ev(e);
r.pb_mode = this.textContent.split(' ').slice(-1)[0];
swrite('pb_mode', r.pb_mode);
draw_pb_mode();
}
return r;
})();
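// r.pb_mode is persisted with swrite('pb_mode', ...) and consumed by play() further down:
// 'loop-folder' wraps the track index around, 'next-folder' asks treectl to load the
// neighbouring folder (via tree_neigh + ls_cb) before resuming playback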
@@ -48,7 +81,6 @@ function MPlayer() {
this.au = null;
this.au_native = null;
this.au_ogvjs = null;
this.cover_url = '';
this.tracks = {};
this.order = [];
@@ -163,8 +195,9 @@ var widget = (function () {
m = ck + 'np: ';
for (var a = 1, aa = th.length; a < aa; a++) {
var tk = a == 1 ? '' : th[a].getAttribute('name').split('/').slice(-1)[0];
var tv = tr[a].getAttribute('html') || tr[a].textContent;
var tv = tr[a].textContent,
tk = a == 1 ? '' : th[a].getAttribute('name').split('/').slice(-1)[0];
m += tk + '(' + cv + tv + ck + ') // ';
}
@@ -408,23 +441,26 @@ function song_skip(n) {
if (tid !== null)
play(mp.order.indexOf(tid) + n);
else
play(mp.order[0]);
play(mp.order[n == -1 ? mp.order.length - 1 : 0]);
}
function playpause(e) {
ev(e);
if (mp.au) {
if (mp.au.paused)
mp.au.play();
else
mp.au.pause();
}
else
play(0);
};
// hook up the widget buttons
(function () {
ebi('bplay').onclick = function (e) {
ev(e);
if (mp.au) {
if (mp.au.paused)
mp.au.play();
else
mp.au.pause();
}
else
play(0);
};
ebi('bplay').onclick = playpause;
ebi('bprev').onclick = function (e) {
ev(e);
song_skip(-1);
@@ -509,6 +545,219 @@ try {
catch (ex) { }
var audio_eq = (function () {
var r = {
"en": false,
"bands": [31.25, 62.5, 125, 250, 500, 1000, 2000, 4000, 8000, 16000],
"gains": [4, 3, 2, 1, 0, 0, 1, 2, 3, 4],
"filters": [],
"amp": 0,
"last_au": null
};
var cfg = [ // hz, q, g
[31.25 * 0.88, 0, 1.4], // shelf
[31.25 * 1.04, 0.7, 0.96], // peak
[62.5, 0.7, 1],
[125, 0.8, 1],
[250, 0.9, 1.03],
[500, 0.9, 1.1],
[1000, 0.9, 1.1],
[2000, 0.9, 1.105],
[4000, 0.88, 1.05],
[8000 * 1.006, 0.73, 1.24],
[16000 * 0.89, 0.7, 1.26], // peak
[16000 * 1.13, 0.82, 1.09], // peak
[16000 * 1.205, 0, 1.9] // shelf
];
try {
r.amp = fcfg_get('au_eq_amp', r.amp);
var gains = jread('au_eq_gain', r.gains);
if (r.gains.length == gains.length)
r.gains = gains;
}
catch (ex) { }
r.draw = function () {
jwrite('au_eq_gain', r.gains);
swrite('au_eq_amp', r.amp);
var txt = QSA('input.eq_gain');
for (var a = 0; a < r.bands.length; a++)
txt[a].value = r.gains[a];
QS('input.eq_gain[band="amp"]').value = r.amp;
};
r.apply = function () {
r.draw();
var Ctx = window.AudioContext || window.webkitAudioContext;
if (!Ctx)
bcfg_set('au_eq', false);
if (!Ctx || !mp.au)
return;
if (!r.en && !mp.ac)
return;
if (mp.ac) {
for (var a = 0; a < r.filters.length; a++)
r.filters[a].disconnect();
mp.acs.disconnect();
}
if (!mp.ac || mp.au != r.last_au) {
if (mp.ac)
mp.ac.close();
r.last_au = mp.au;
mp.ac = new Ctx();
mp.acs = mp.ac.createMediaElementSource(mp.au);
}
r.filters = [];
if (!r.en) {
mp.acs.connect(mp.ac.destination);
return;
}
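// shift every band down so the loudest one sits at 0 (attenuate-only, keeps headroom),
// then pad the list with one extra value in front and two at the end so it lines up
// with the 13 filter entries in cfg (low shelf + peaks + high-end shelf)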
var max = 0;
for (var a = 0; a < r.gains.length; a++)
if (max < r.gains[a])
max = r.gains[a];
var gains = [];
for (var a = 0; a < r.gains.length; a++)
gains.push(r.gains[a] - max);
var t = gains[gains.length - 1];
gains.push(t);
gains.push(t);
gains.unshift(gains[0]);
for (var a = 0; a < cfg.length; a++) {
var fi = mp.ac.createBiquadFilter();
fi.frequency.value = cfg[a][0];
fi.gain.value = cfg[a][2] * gains[a];
fi.Q.value = cfg[a][1];
fi.type = a == 0 ? 'lowshelf' : a == cfg.length - 1 ? 'highshelf' : 'peaking';
r.filters.push(fi);
}
// pregain, keep first in chain
fi = mp.ac.createGain();
fi.gain.value = r.amp + 0.94; // +.137 dB measured; now -.25 dB and almost bitperfect
r.filters.push(fi);
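// connect back-to-front: source -> pregain (pushed last) -> high shelf -> peaks -> low shelf -> destination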
for (var a = r.filters.length - 1; a >= 0; a--)
r.filters[a].connect(a > 0 ? r.filters[a - 1] : mp.ac.destination);
mp.acs.connect(r.filters[r.filters.length - 1]);
}
function eq_step(e) {
ev(e);
var band = parseInt(this.getAttribute('band')),
step = parseFloat(this.getAttribute('step'));
if (isNaN(band))
r.amp = Math.round((r.amp + step * 0.2) * 100) / 100;
else
r.gains[band] += step;
r.apply();
}
function adj_band(that, step) {
var err = false;
try {
var band = parseInt(that.getAttribute('band')),
vs = that.value,
v = parseFloat(vs);
if (isNaN(v) || v + '' != vs)
throw 42;
if (isNaN(band))
r.amp = Math.round((v + step * 0.2) * 100) / 100;
else
r.gains[band] = v + step;
r.apply();
}
catch (ex) {
err = true;
}
clmod(that, 'err', err);
}
function eq_mod(e) {
ev(e);
adj_band(this, 0);
}
function eq_keydown(e) {
var step = e.key == 'ArrowUp' ? 0.25 : e.key == 'ArrowDown' ? -0.25 : 0;
if (step != 0)
adj_band(this, step);
}
var html = ['<table><tr><td rowspan="4">',
'<a id="au_eq" class="tgl btn" href="#">enable</a></td>'],
h2 = [], h3 = [], h4 = [];
var vs = [];
for (var a = 0; a < r.bands.length; a++) {
var hz = r.bands[a];
if (hz >= 1000)
hz = (hz / 1000) + 'k';
hz = (hz + '').split('.')[0];
vs.push([a, hz, r.gains[a]]);
}
vs.push(["amp", "boost", r.amp]);
for (var a = 0; a < vs.length; a++) {
var b = vs[a][0];
html.push('<td><a href="#" class="eq_step" step="0.5" band="' + b + '">+</a></td>');
h2.push('<td>' + vs[a][1] + '</td>');
h4.push('<td><a href="#" class="eq_step" step="-0.5" band="' + b + '">&ndash;</a></td>');
h3.push('<td><input type="text" class="eq_gain" band="' + b + '" value="' + vs[a][2] + '" /></td>');
}
html = html.join('\n') + '</tr><tr>';
html += h2.join('\n') + '</tr><tr>';
html += h3.join('\n') + '</tr><tr>';
html += h4.join('\n') + '</tr></table>';
ebi('audio_eq').innerHTML = html;
var stp = QSA('a.eq_step');
for (var a = 0, aa = stp.length; a < aa; a++)
stp[a].onclick = eq_step;
var txt = QSA('input.eq_gain');
for (var a = 0; a < txt.length; a++) {
txt[a].oninput = eq_mod;
txt[a].onkeydown = eq_keydown;
}
r.en = bcfg_get('au_eq', false);
ebi('au_eq').onclick = function (e) {
ev(e);
r.en = !r.en;
bcfg_set('au_eq', r.en);
r.apply();
};
r.draw();
return r;
})();
// plays the tid'th audio file on the page
function play(tid, seek, call_depth) {
if (mp.order.length == 0)
@@ -518,11 +767,25 @@ function play(tid, seek, call_depth) {
if ((tn + '').indexOf('f-') === 0)
tn = mp.order.indexOf(tn);
while (tn >= mp.order.length)
tn -= mp.order.length;
if (tn >= mp.order.length) {
if (mpl.pb_mode == 'loop-folder') {
tn = 0;
}
else if (mpl.pb_mode == 'next-folder') {
treectl.ls_cb = function () { song_skip(1); };
return tree_neigh(1);
}
}
while (tn < 0)
tn += mp.order.length;
if (tn < 0) {
if (mpl.pb_mode == 'loop-folder') {
tn = mp.order.length - 1;
}
else if (mpl.pb_mode == 'next-folder') {
treectl.ls_cb = function () { song_skip(-1); };
return tree_neigh(-1);
}
}
tid = mp.order[tn];
@@ -569,6 +832,8 @@ function play(tid, seek, call_depth) {
mp.au = mp.au_native;
}
audio_eq.apply();
mp.au.tid = tid;
mp.au.src = url;
mp.au.volume = mp.expvol();
@@ -710,8 +975,9 @@ function autoplay_blocked(seek) {
var thegrid = (function () {
var lfiles = ebi('files');
var gfiles = document.createElement('div');
var lfiles = ebi('files'),
gfiles = document.createElement('div');
gfiles.setAttribute('id', 'gfiles');
gfiles.style.display = 'none';
gfiles.innerHTML = (
@@ -733,7 +999,8 @@ var thegrid = (function () {
'en': bcfg_get('griden', false),
'sel': bcfg_get('gridsel', false),
'sz': fcfg_get('gridsz', 10),
'isdirty': true
'isdirty': true,
'bbox': null
};
ebi('thumbs').onclick = function (e) {
@@ -803,7 +1070,10 @@ var thegrid = (function () {
r.sz = v;
swrite('gridsz', r.sz);
}
document.documentElement.style.setProperty('--grid-sz', r.sz + 'em');
try {
document.documentElement.style.setProperty('--grid-sz', r.sz + 'em');
}
catch (ex) { }
}
setsz();
@@ -820,12 +1090,25 @@ var thegrid = (function () {
this.setAttribute('class', tr.getAttribute('class'));
}
function bgopen(e) {
ev(e);
var url = this.getAttribute('href');
window.open(url, '_blank');
}
r.loadsel = function () {
var ths = QSA('#ggrid>a');
var ths = QSA('#ggrid>a'),
have_sel = !!QS('#files tr.sel');
for (var a = 0, aa = ths.length; a < aa; a++) {
ths[a].onclick = r.sel ? seltgl : null;
ths[a].onclick = r.sel ? seltgl : have_sel ? bgopen : null;
ths[a].setAttribute('class', ebi(ths[a].getAttribute('ref')).parentNode.parentNode.getAttribute('class'));
}
var uns = QS('#ggrid a[ref="unsearch"]');
if (uns)
uns.onclick = function () {
ebi('unsearch').click();
};
}
function loadgrid() {
@@ -836,20 +1119,21 @@ var thegrid = (function () {
return r.loadsel();
var html = [];
var tr = lfiles.tBodies[0].rows;
for (var a = 0; a < tr.length; a++) {
var ao = tr[a].cells[1].firstChild,
var files = QSA('#files>tbody>tr>td:nth-child(2) a[id]');
for (var a = 0, aa = files.length; a < aa; a++) {
var ao = files[a],
href = esc(ao.getAttribute('href')),
ref = ao.getAttribute('id'),
isdir = href.split('?')[0].slice(-1)[0] == '/',
ac = isdir ? ' class="dir"' : '',
ihref = href;
if (isdir) {
ihref = '/.cpr/ico/folder'
}
else if (r.thumbs) {
if (r.thumbs) {
ihref += (ihref.indexOf('?') === -1 ? '?' : '&') + 'th=' + (have_webp ? 'w' : 'j');
}
else if (isdir) {
ihref = '/.cpr/ico/folder';
}
else {
var ar = href.split('?')[0].split('.');
if (ar.length > 1)
@@ -870,14 +1154,42 @@ var thegrid = (function () {
}
html.push('<a href="' + href + '" ref="' + ref + '"><img src="' +
ihref + '" /><span>' + ao.innerHTML + '</span></a>');
ihref + '" /><span' + ac + '>' + ao.innerHTML + '</span></a>');
}
lfiles.style.display = 'none';
gfiles.style.display = 'block';
ebi('ggrid').innerHTML = html.join('\n');
r.bagit();
r.loadsel();
}
r.bagit = function () {
if (!window.baguetteBox)
return;
if (r.bbox)
baguetteBox.destroy();
r.bbox = baguetteBox.run('#ggrid', {
captions: function (g) {
var idx = -1,
h = '' + g;
for (var a = 0; a < r.bbox.length; a++)
if (r.bbox[a].imageElement == g)
idx = a;
return '<a download href="' + h +
'">' + (idx + 1) + ' / ' + r.bbox.length + ' -- ' +
esc(uricom_dec(h.split('/').slice(-1)[0])[0]) + '</a>';
}
})[0];
};
setTimeout(function () {
import_js('/.cpr/baguettebox.js', r.bagit);
}, 1);
if (r.en) {
loadgrid();
}
@@ -943,6 +1255,9 @@ document.onkeydown = function (e) {
if (n !== 0)
return song_skip(n);
if (k == 'KeyM')
return playpause();
n = k == 'KeyU' ? -10 : k == 'KeyO' ? 10 : 0;
if (n !== 0)
return mp.au ? seek_au_sec(mp.au.currentTime + n) : true;
@@ -960,7 +1275,7 @@ document.onkeydown = function (e) {
if (k == 'KeyT')
return ebi('thumbs').click();
if (window['thegrid'] && thegrid.en) {
if (thegrid.en) {
if (k == 'KeyS')
return ebi('gridsel').click();
@@ -1026,6 +1341,7 @@ document.onkeydown = function (e) {
for (var a = 0; a < trs.length; a += 2) {
html.push('<table>' + (trs[a].concat(trs[a + 1])).join('\n') + '</table>');
}
html.push('<table id="tq_raw"><tr><td>raw</td><td><input id="q_raw" type="text" name="q" /></td></tr></table>');
ebi('srch_form').innerHTML = html.join('\n');
var o = QSA('#op_search input');
@@ -1050,33 +1366,83 @@ document.onkeydown = function (e) {
var chk = ebi(id.slice(0, -1) + 'c');
chk.checked = ((v + '').length > 0);
}
if (id != "q_raw")
encode_query();
clearTimeout(search_timeout);
if (Date.now() - search_in_progress > 30 * 1000)
search_timeout = setTimeout(do_search, 200);
}
function encode_query() {
var q = '';
for (var a = 0; a < sconf.length; a++) {
for (var b = 1; b < sconf[a].length; b++) {
var k = sconf[a][b][0],
chk = 'srch_' + k + 'c',
tvs = ebi('srch_' + k + 'v').value.split(/ /g);
if (!ebi(chk).checked)
continue;
for (var c = 0; c < tvs.length; c++) {
var tv = tvs[c];
if (!tv.length)
break;
q += ' and ';
if (k == 'adv') {
q += tv.replace(/ /g, " and ").replace(/([=!><]=?)/, " $1 ");
continue;
}
if (k.length == 3) {
q += k.replace(/sz/, 'size').replace(/dt/, 'date').replace(/l$/, ' >= ').replace(/u$/, ' <= ') + tv;
continue;
}
if (k == 'path' || k == 'name' || k == 'tags') {
var not = ' ';
if (tv.slice(0, 1) == '-') {
tv = tv.slice(1);
not = ' not ';
}
if (tv.slice(0, 1) == '^') {
tv = tv.slice(1);
}
else {
tv = '*' + tv;
}
if (tv.slice(-1) == '$') {
tv = tv.slice(0, -1);
}
else {
tv += '*';
}
q += k + not + 'like ' + tv;
}
}
}
}
ebi('q_raw').value = q.slice(5);
}
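// example: with the name filter set to '-cover ^track' and a minimum-size field (one of
// the 3-letter l/u keys handled above, names assumed) holding 1024, q_raw comes out roughly as
//   name not like *cover* and name like track* and size >= 1024
// with the term order following the form layout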
function do_search() {
search_in_progress = Date.now();
srch_msg(false, "searching...");
clearTimeout(search_timeout);
var params = {},
o = QSA('#op_search input[type="text"]');
for (var a = 0; a < o.length; a++) {
var chk = ebi(o[a].getAttribute('id').slice(0, -1) + 'c');
if (!chk.checked)
continue;
params[o[a].getAttribute('name')] = o[a].value;
}
// ebi('srch_q').textContent = JSON.stringify(params, null, 4);
var xhr = new XMLHttpRequest();
xhr.open('POST', '/?srch', true);
xhr.setRequestHeader('Content-Type', 'text/plain');
xhr.onreadystatechange = xhr_search_results;
xhr.ts = Date.now();
xhr.send(JSON.stringify(params));
xhr.send(JSON.stringify({ "q": ebi('q_raw').value }));
}
function xhr_search_results() {
@@ -1179,7 +1545,8 @@ document.onkeydown = function (e) {
var treectl = (function () {
var treectl = {
"hidden": false
"hidden": false,
"ls_cb": null
},
entreed = false,
fixedpos = false,
@@ -1279,6 +1646,11 @@ var treectl = (function () {
onscroll();
}
treectl.goto = function (url, push) {
get_tree("", url, true);
reqls(url, push);
}
function get_tree(top, dst, rst) {
var xhr = new XMLHttpRequest();
xhr.top = top;
@@ -1381,7 +1753,7 @@ var treectl = (function () {
if (hpush)
get_tree('.', xhr.top);
enspin('#files');
enspin(thegrid.en ? '#gfiles' : '#files');
}
function treegrow(e) {
@@ -1461,6 +1833,7 @@ var treectl = (function () {
apply_perms(res.perms);
despin('#files');
despin('#gfiles');
ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : "";
ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : "";
@@ -1472,6 +1845,12 @@ var treectl = (function () {
msel.render();
reload_tree();
reload_browser();
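// ls_cb is a one-shot hook: the media player stores a callback here before calling
// tree_neigh(), so playback can continue in the neighbouring folder once its listing
// has been rendered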
var fun = treectl.ls_cb;
if (fun) {
treectl.ls_cb = null;
fun();
}
}
function parsetree(res, top) {
@@ -1536,9 +1915,7 @@ var treectl = (function () {
return;
var url = new URL(e.state, "https://" + document.location.host);
url = url.pathname;
get_tree("", url, true);
reqls(url);
treectl.goto(url.pathname);
};
if (window.history && history.pushState) {
@@ -1663,17 +2040,34 @@ var filecols = (function () {
var add_btns = function () {
var ths = QSA('#files th>span');
for (var a = 0, aa = ths.length; a < aa; a++) {
var th = ths[a].parentElement,
is_hidden = has(hidden, ths[a].textContent);
th.innerHTML = '<div class="cfg"><a href="#">' +
(is_hidden ? '+' : '-') + '</a></div>' + ths[a].outerHTML;
var th = ths[a].parentElement;
th.innerHTML = '<div class="cfg"><a href="#">-</a></div>' + ths[a].outerHTML;
th.getElementsByTagName('a')[0].onclick = ev_row_tgl;
}
};
function hcols_click(e) {
ev(e);
var t = e.target;
if (t.tagName != 'A')
return;
toggle(t.textContent);
}
var set_style = function () {
hidden.sort();
var html = [],
hcols = ebi('hcols');
for (var a = 0; a < hidden.length; a++) {
html.push('<a href="#" class="btn">' + esc(hidden[a]) + '</a>');
}
hcols.previousSibling.style.display = html.length ? 'block' : 'none';
hcols.innerHTML = html.join('\n');
hcols.onclick = hcols_click;
add_btns();
var ohidden = [],
@@ -1698,22 +2092,8 @@ var filecols = (function () {
var cls = has(ohidden, a) ? 'min' : '',
tds = QSA('#files>tbody>tr>td:nth-child(' + (a + 1) + ')');
for (var b = 0, bb = tds.length; b < bb; b++) {
for (var b = 0, bb = tds.length; b < bb; b++)
tds[b].setAttribute('class', cls);
if (a < 2)
continue;
if (cls) {
if (!tds[b].hasAttribute('html')) {
tds[b].setAttribute('html', tds[b].innerHTML);
tds[b].innerHTML = '...';
}
}
else if (tds[b].hasAttribute('html')) {
tds[b].innerHTML = tds[b].getAttribute('html');
tds[b].removeAttribute('html');
}
}
}
};
set_style();
@@ -1732,15 +2112,13 @@ var filecols = (function () {
try {
var ci = find_file_col('dur'),
i = ci[0],
min = ci[1],
rows = ebi('files').tBodies[0].rows;
if (!min)
for (var a = 0, aa = rows.length; a < aa; a++) {
var c = rows[a].cells[i];
if (c && c.textContent)
c.textContent = s2ms(c.textContent);
}
for (var a = 0, aa = rows.length; a < aa; a++) {
var c = rows[a].cells[i];
if (c && c.textContent)
c.textContent = s2ms(c.textContent);
}
}
catch (ex) { }
@@ -1995,8 +2373,7 @@ var msel = (function () {
}
function selui() {
clmod(ebi('wtoggle'), 'sel', getsel().length);
if (window['thegrid'])
thegrid.loadsel();
thegrid.loadsel();
}
function seltgl(e) {
ev(e);

View File

@@ -0,0 +1,61 @@
var ofun = audio_eq.apply.bind(audio_eq);
audio_eq.apply = function () {
var ac1 = mp.ac;
ofun();
var ac = mp.ac,
w = 2048,
h = 256;
if (!audio_eq.filters.length) {
audio_eq.ana = null;
return;
}
var can = ebi('fft_can');
if (!can) {
can = mknod('canvas');
can.setAttribute('id', 'fft_can');
can.style.cssText = 'position:absolute;left:0;bottom:5em;width:' + w + 'px;height:' + h + 'px;z-index:9001';
document.body.appendChild(can);
can.width = w;
can.height = h;
}
var cc = can.getContext('2d');
if (!ac)
return;
var ana = ac.createAnalyser();
ana.smoothingTimeConstant = 0;
ana.fftSize = 8192;
audio_eq.filters[0].connect(ana);
audio_eq.ana = ana;
var buf = new Uint8Array(ana.frequencyBinCount),
colw = can.width / buf.length;
cc.fillStyle = '#fc0';
function draw() {
if (ana == audio_eq.ana)
requestAnimationFrame(draw);
ana.getByteFrequencyData(buf);
cc.clearRect(0, 0, can.width, can.height);
/*var x = 0, w = 1;
for (var a = 0; a < buf.length; a++) {
cc.fillRect(x, h - buf[a], w, h);
x += w;
}*/
var mul = Math.pow(w, 4) / buf.length;
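// map canvas column to fft bin with an x^4 curve so the low end gets most of the width (a rough log scale)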
for (var x = 0; x < w; x++) {
var a = Math.floor(Math.pow(x, 4) / mul),
v = buf[a];
cc.fillRect(x, h - v, 1, v);
}
}
draw();
};
audio_eq.apply();

View File

@@ -26,6 +26,26 @@ a {
border-radius: .2em;
padding: .2em .8em;
}
table {
border-collapse: collapse;
}
.vols td,
.vols th {
padding: .3em .6em;
text-align: left;
}
.num {
border-right: 1px solid #bbb;
}
.num td {
padding: .1em .7em .1em 0;
}
.num td:first-child {
text-align: right;
}
.btns {
margin: 1em 0;
}
html.dark,
@@ -50,4 +70,7 @@ html.dark input {
border-radius: .5em;
padding: .5em .7em;
margin: 0 .5em 0 0;
}
html.dark .num {
border-color: #777;
}

View File

@@ -13,11 +13,37 @@
<div id="wrap">
<p>hello {{ this.uname }}</p>
{%- if avol %}
<h1>admin panel:</h1>
<table><tr><td> <!-- hehehe -->
<table class="num">
<tr><td>scanning</td><td>{{ scanning }}</td></tr>
<tr><td>hash-q</td><td>{{ hashq }}</td></tr>
<tr><td>tag-q</td><td>{{ tagq }}</td></tr>
<tr><td>mtp-q</td><td>{{ mtpq }}</td></tr>
</table>
</td><td>
<table class="vols">
<thead><tr><th>vol</th><th>action</th><th>status</th></tr></thead>
<tbody>
{% for mp in avol %}
{%- if mp in vstate and vstate[mp] %}
<tr><td><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a href="{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
{%- endif %}
{% endfor %}
</tbody>
</table>
</td></tr></table>
<div class="btns">
<a href="{{ avol[0] }}?stack">dump stack</a>
</div>
{%- endif %}
{%- if rvol %}
<h1>you can browse these:</h1>
<ul>
{% for mp in rvol %}
<li><a href="/{{ mp }}{{ url_suf }}">/{{ mp }}</a></li>
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
{% endfor %}
</ul>
{%- endif %}
@@ -26,14 +52,14 @@
<h1>you can upload to:</h1>
<ul>
{% for mp in wvol %}
<li><a href="/{{ mp }}{{ url_suf }}">/{{ mp }}</a></li>
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
{% endfor %}
</ul>
{%- endif %}
<h1>login for more:</h1>
<ul>
<form method="post" enctype="multipart/form-data" action="/{{ url_suf }}">
<form method="post" enctype="multipart/form-data" action="/">
<input type="hidden" name="act" value="login" />
<input type="password" name="cppwd" />
<input type="submit" value="Login" />

View File

@@ -17,6 +17,7 @@ function goto_up2k() {
// chrome requires https to use crypto.subtle,
// usually it's undefined but some chromes throw on invoke
var up2k = null;
var sha_js = window.WebAssembly ? 'hw' : 'ac'; // ff53,c57,sa11
try {
var cf = crypto.subtle || crypto.webkitSubtle;
cf.digest('SHA-512', new Uint8Array(1)).then(
@@ -430,13 +431,15 @@ function up2k_init(subtle) {
// upload ui hidden by default, clicking the header shows it
function init_deps() {
if (!subtle && !window.asmCrypto) {
showmodal('<h1>loading sha512.js</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
import_js('/.cpr/deps/sha512.js', unmodal);
var fn = 'sha512.' + sha_js + '.js';
showmodal('<h1>loading ' + fn + '</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
import_js('/.cpr/deps/' + fn, unmodal);
if (is_https)
ebi('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best';
else
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance';
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance <span style="color:#' +
(sha_js == 'ac' ? 'c84">(expecting 20' : '8a5">(but dont worry too much, expect 100') + ' MiB/s)</span>';
}
}
@@ -801,6 +804,14 @@ function up2k_init(subtle) {
var mou_ikkai = false;
if (st.busy.handshake.length > 0 &&
st.busy.handshake[0].busied < Date.now() - 30 * 1000
) {
console.log("retrying stuck handshake");
var t = st.busy.handshake.shift();
st.todo.handshake.unshift(t);
}
if (st.todo.handshake.length > 0 &&
st.busy.handshake.length == 0 && (
st.todo.handshake[0].t4 || (
@@ -886,6 +897,10 @@ function up2k_init(subtle) {
return base64;
}
function hex2u8(txt) {
return new Uint8Array(txt.match(/.{2}/g).map(function (b) { return parseInt(b, 16); }));
}
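// hash-wasm resolves with a hex string, so hex2u8 turns it back into the binary form
// that hash_done receives from the other hashers (subtle / asmCrypto)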
function get_chunksize(filesize) {
var chunksize = 1024 * 1024,
stepsize = 512 * 1024;
@@ -987,10 +1002,18 @@ function up2k_init(subtle) {
if (subtle)
subtle.digest('SHA-512', buf).then(hash_done);
else setTimeout(function () {
var hasher = new asmCrypto.Sha512();
hasher.process(new Uint8Array(buf));
hasher.finish();
hash_done(hasher.result);
var u8buf = new Uint8Array(buf);
if (sha_js == 'hw') {
hashwasm.sha512(u8buf).then(function (v) {
hash_done(hex2u8(v))
});
}
else {
var hasher = new asmCrypto.Sha512();
hasher.process(u8buf);
hasher.finish();
hash_done(hasher.result);
}
}, 1);
};
@@ -1004,11 +1027,27 @@ function up2k_init(subtle) {
//
function exec_handshake() {
var t = st.todo.handshake.shift();
var t = st.todo.handshake.shift(),
me = Date.now();
st.busy.handshake.push(t);
t.busied = me;
var xhr = new XMLHttpRequest();
xhr.onerror = function () {
if (t.busied != me) {
console.log('zombie handshake onerror,', t);
return;
}
console.log('handshake onerror, retrying');
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
st.todo.handshake.unshift(t);
};
xhr.onload = function (e) {
if (t.busied != me) {
console.log('zombie handshake onload,', t);
return;
}
if (xhr.status == 200) {
var response = JSON.parse(xhr.responseText);

View File

@@ -238,6 +238,10 @@
color: #fff;
font-style: italic;
}
#u2foot span {
color: #999;
font-size: .9em;
}
#u2footfoot {
margin-bottom: -1em;
}

22
docs/README.md Normal file
View File

@@ -0,0 +1,22 @@
# example `.epilogue.html`
save one of these as `.epilogue.html` inside a folder to customize it:
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
# example browser-css
point `--css-browser` to one of these by URL:
* [`browser.css`](browser.css) changes the background
* [`browser-icons.css`](browser-icons.css) adds filetype icons
# other stuff
## [`rclone.md`](rclone.md)
* notes on using rclone as a fuse client/server
## [`example.conf`](example.conf)
* example config file for `-c` which never really happened

95
docs/biquad.html Normal file
View File

@@ -0,0 +1,95 @@
<!DOCTYPE html><html><head></head><body><script>
setTimeout(location.reload.bind(location), 700);
document.documentElement.scrollLeft = 0;
var can = document.createElement('canvas'),
cc = can.getContext('2d'),
w = 2048,
h = 1024;
can.width = w;
can.height = h;
document.body.appendChild(can);
can.style.cssText = 'width:' + w + 'px;height:' + h + 'px';
cc.fillStyle = '#000';
cc.fillRect(0, 0, w, h);
var cfg = [ // hz, q, g
[31.25 * 0.88, 0, 1.4], // shelf
[31.25 * 1.04, 0.7, 0.96], // peak
[62.5, 0.7, 1],
[125, 0.8, 1],
[250, 0.9, 1.03],
[500, 0.9, 1.1],
[1000, 0.9, 1.1],
[2000, 0.9, 1.105],
[4000, 0.88, 1.05],
[8000 * 1.006, 0.73, 1.24],
//[16000 * 1.00, 0.5, 1.75], // peak.v1
//[16000 * 1.19, 0, 1.8] // shelf.v1
[16000 * 0.89, 0.7, 1.26], // peak
[16000 * 1.13, 0.82, 1.09], // peak
[16000 * 1.205, 0, 1.9] // shelf
];
var freqs = new Float32Array(22000),
sum = new Float32Array(freqs.length),
ac = new AudioContext(),
step = w / freqs.length,
colors = [
'rgba(255, 0, 0, 0.7)',
'rgba(0, 224, 0, 0.7)',
'rgba(0, 64, 255, 0.7)'
];
var order = [];
for (var a = 0; a < cfg.length; a += 2)
order.push(a);
for (var a = 1; a < cfg.length; a += 2)
order.push(a);
for (var ia = 0; ia < order.length; ia++) {
var a = order[ia],
fi = ac.createBiquadFilter(),
mag = new Float32Array(freqs.length),
phase = new Float32Array(freqs.length);
for (var b = 0; b < freqs.length; b++)
freqs[b] = b;
fi.type = a == 0 ? 'lowshelf' : a == cfg.length - 1 ? 'highshelf' : 'peaking';
fi.frequency.value = cfg[a][0];
fi.Q.value = cfg[a][1];
fi.gain.value = 1;
fi.getFrequencyResponse(freqs, mag, phase);
cc.fillStyle = colors[a % colors.length];
for (var b = 0; b < sum.length; b++) {
mag[b] -= 1;
sum[b] += mag[b] * cfg[a][2];
var y = h - (mag[b] * h * 3);
cc.fillRect(b * step, y, step, h - y);
cc.fillRect(b * step - 1, y - 1, 3, 3);
}
}
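// the colored traces are each filter's individual response; the white trace drawn next is
// the gain-weighted sum, so a flat(ish) white line means the combined default curve is
// close to neutral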
var min = 999999, max = 0;
for (var a = 0; a < sum.length; a++) {
min = Math.min(min, sum[a]);
max = Math.max(max, sum[a]);
}
cc.fillStyle = 'rgba(255,255,255,1)';
for (var a = 0; a < sum.length; a++) {
var v = (sum[a] - min) / (max - min);
cc.fillRect(a * step, 0, step, v * h / 2);
}
cc.fillRect(0, 460, w, 1);
</script></body></html>

68
docs/browser-icons.css Normal file
View File

@@ -0,0 +1,68 @@
/* put filetype icons inline with text
#ggrid>a>span:before,
#ggrid>a>span.dir:before {
display: inline;
line-height: 0;
font-size: 1.7em;
margin: -.7em .1em -.5em -.6em;
}
*/
/* move folder icons top-left */
#ggrid>a>span.dir:before {
content: initial;
}
#ggrid>a[href$="/"]:before {
content: '📂';
display: block;
position: absolute;
margin: -.1em -.4em;
text-shadow: 0 0 .1em #000;
font-size: 2em;
}
/* put filetype icons top-left */
#ggrid>a:before {
display: block;
position: absolute;
margin: -.1em -.4em;
text-shadow: 0 0 .1em #000;
font-size: 2em;
}
/* video */
#ggrid>a:is(
[href$=".mkv"i],
[href$=".mp4"i],
[href$=".webm"i],
):before {
content: '📺';
}
/* audio */
#ggrid>a:is(
[href$=".mp3"i],
[href$=".ogg"i],
[href$=".opus"i],
[href$=".flac"i],
[href$=".m4a"i],
[href$=".aac"i],
):before {
content: '🎵';
}
/* image */
#ggrid>a:is(
[href$=".jpg"i],
[href$=".jpeg"i],
[href$=".png"i],
[href$=".gif"i],
[href$=".webp"i],
):before {
content: '🎨';
}

29
docs/browser.css Normal file
View File

@@ -0,0 +1,29 @@
html {
background: #333 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed;
}
#files th {
background: rgba(32, 32, 32, 0.9) !important;
}
#ops,
#treeul,
#files td {
background: rgba(32, 32, 32, 0.3) !important;
}
html.light {
background: #eee url('/wp/wallhaven-dpxl6l.png') center / cover no-repeat fixed;
}
html.light #files th {
background: rgba(255, 255, 255, 0.9) !important;
}
html.light #ops,
html.light #treeul,
html.light #files td {
background: rgba(248, 248, 248, 0.8) !important;
}
#files * {
background: transparent !important;
}

View File

@@ -80,6 +80,16 @@ command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (ti
var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n"));
##
## bash oneliners
# get the size and video-id of all youtube vids in folder, assuming filename ends with -id.ext, and create a copyparty search query
find -maxdepth 1 -printf '%s %p\n' | sort -n | awk '!/-([0-9a-zA-Z_-]{11})\.(mkv|mp4|webm)$/{next} {sub(/\.[^\.]+$/,"");n=length($0);v=substr($0,n-10);print $1, v}' | tee /dev/stderr | awk 'BEGIN {p="("} {printf("%s name like *-%s.* ",p,$2);p="or"} END {print ")\n"}' | cat >&2
# unique stacks in a stackdump
f=a; rm -rf stacks; mkdir stacks; grep -E '^#' $f | while IFS= read -r n; do awk -v n="$n" '!$0{o=0} o; $0==n{o=1}' <$f >stacks/f; h=$(sha1sum <stacks/f | cut -c-16); mv stacks/f stacks/$h-"$n"; done ; find stacks/ | sort | uniq -cw24
##
## sqlite3 stuff
@@ -146,6 +156,9 @@ dbg.asyncStore.pendingBreakpoints = {}
# fix firefox phantom breakpoints
about:config >> devtools.debugger.prefs-schema-version = -1
# determine server version
git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > /dev/shm/revs && cat /dev/shm/revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js 2>/dev/null | diff -wNarU0 - <(cat /mnt/Users/ed/Downloads/ref/{util,browser,up2k}.js) | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
##
## http 206

32
docs/tcp-debug.sh Normal file
View File

@@ -0,0 +1,32 @@
(cd ~/dev/copyparty && strace -Tttyyvfs 256 -o strace.strace python3 -um copyparty -i 127.0.0.1 --http-only --stackmon /dev/shm/cpps,10 ) 2>&1 | tee /dev/stderr > ~/log-copyparty-$(date +%Y-%m%d-%H%M%S).txt
14/Jun/2021:16:34:02 1623688447.212405 death
14/Jun/2021:16:35:02 1623688502.420860 back
tcpdump -nni lo -w /home/ed/lo.pcap
# 16:35:25.324662 IP 127.0.0.1.48632 > 127.0.0.1.3920: Flags [F.], seq 849, ack 544, win 359, options [nop,nop,TS val 809396796 ecr 809396796], length 0
tcpdump -nnr /home/ed/lo.pcap | awk '/ > 127.0.0.1.3920: /{sub(/ > .*/,"");sub(/.*\./,"");print}' | sort -n | uniq | while IFS= read -r port; do echo; tcpdump -nnr /home/ed/lo.pcap 2>/dev/null | grep -E "\.$port( > |: F)" | sed -r 's/ > .*, /, /'; done | grep -E '^16:35:0.*length [^0]' -C50
16:34:02.441732 IP 127.0.0.1.48638, length 0
16:34:02.441738 IP 127.0.0.1.3920, length 0
16:34:02.441744 IP 127.0.0.1.48638, length 0
16:34:02.441756 IP 127.0.0.1.48638, length 791
16:34:02.441759 IP 127.0.0.1.3920, length 0
16:35:02.445529 IP 127.0.0.1.48638, length 0
16:35:02.489194 IP 127.0.0.1.3920, length 0
16:35:02.515595 IP 127.0.0.1.3920, length 216
16:35:02.515600 IP 127.0.0.1.48638, length 0
grep 48638 "$(find ~ -maxdepth 1 -name log-copyparty-\*.txt | sort | tail -n 1)"
1623688502.510380 48638 rh
1623688502.511291 48638 Unrecv direct ...
1623688502.511827 48638 rh = 791
16:35:02.518 127.0.0.1 48638 shut(8): [Errno 107] Socket not connected
Exception in thread httpsrv-0.1-48638:
grep 48638 ~/dev/copyparty/strace.strace
14561 16:35:02.506310 <... accept4 resumed> {sa_family=AF_INET, sin_port=htons(48638), sin_addr=inet_addr("127.0.0.1")}, [16], SOCK_CLOEXEC) = 8<TCP:[127.0.0.1:3920->127.0.0.1:48638]> <0.000012>
15230 16:35:02.510725 write(1<pipe:[256639555]>, "1623688502.510380 48638 rh\n", 27 <unfinished ...>

View File

@@ -9,6 +9,12 @@ ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_zopfli=1.0.3
# TODO
# sha512.hw.js https://github.com/Daninet/hash-wasm
# sha512.kc.js https://github.com/chm-diederichs/sha3-wasm
# awk '/HMAC state/{o=1} /var HEAP/{o=0} /function hmac_reset/{o=1} /return \{/{o=0} /var __extends =/{o=1} /var Hash =/{o=0} /hmac_|pbkdf2_/{next} o{next} {gsub(/IllegalStateError/,"Exception")} {sub(/^ +/,"");sub(/^\/\/ .*/,"");sub(/;$/," ;")} 1' <sha512.ac.js.orig >sha512.ac.js; for fn in sha512.ac.js.orig sha512.ac.js; do wc -c <$fn; wc -c <$fn.gz ; for n in {1..9}; do printf '%8d %d bz\n' $(bzip2 -c$n <$fn | wc -c) $n; done; done
# download;
# the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
RUN mkdir -p /z/dist/no-pk \

12
scripts/install-githooks.sh Executable file
View File

@@ -0,0 +1,12 @@
#!/bin/bash
set -ex
[ -e setup.py ] || cd ..
[ -e setup.py ] || {
echo u wot
exit 1
}
cd .git/hooks
rm -f pre-commit
ln -s ../../scripts/run-tests.sh pre-commit

View File

@@ -32,6 +32,10 @@ gtar=$(command -v gtar || command -v gnutar) || true
[ -e /opt/local/bin/bzip2 ] &&
bzip2() { /opt/local/bin/bzip2 "$@"; }
}
gawk=$(command -v gawk || command -v gnuawk || command -v awk)
awk() { $gawk "$@"; }
pybin=$(command -v python3 || command -v python) || {
echo need python
exit 1
@@ -163,7 +167,7 @@ find .. -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
find .. -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
echo use smol web deps
rm -f copyparty/web/deps/*.full.* copyparty/web/Makefile
rm -f copyparty/web/deps/*.full.* copyparty/web/dbg-* copyparty/web/Makefile
# it's fine dw
grep -lE '\.full\.(js|css)' copyparty/web/* |
@@ -194,17 +198,46 @@ tmv "$f"
# up2k goes from 28k to 22k laff
echo entabbening
find | grep -E '\.(js|css|html)$' | while IFS= read -r f; do
find | grep -E '\.css$' | while IFS= read -r f; do
awk '{
sub(/^[ \t]+/,"");
sub(/[ \t]+$/,"");
$0=gensub(/^([a-z-]+) *: *(.*[^ ]) *;$/,"\\1:\\2;","1");
sub(/ +\{$/,"{");
gsub(/, /,",")
}
!/\}$/ {printf "%s",$0;next}
1
' <$f | sed 's/;\}$/}/' >t
tmv "$f"
done
find | grep -E '\.(js|html)$' | while IFS= read -r f; do
unexpand -t 4 --first-only <"$f" >t
tmv "$f"
done
gzres() {
command -v pigz &&
pk='pigz -11 -J 34 -I 100' ||
pk='gzip'
echo "$pk"
find | grep -E '\.(js|css)$' | grep -vF /deps/ | while IFS= read -r f; do
echo -n .
$pk "$f"
done
echo
}
gzres
echo gen tarlist
for d in copyparty dep-j2; do find $d -type f; done |
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
(grep -vE 'gz$' list1; grep -E 'gz$' list1) >list
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1) >list || true
echo creating tar
args=(--owner=1000 --group=1000)

34
scripts/profile.py Normal file
View File

@@ -0,0 +1,34 @@
#!/usr/bin/env python3

import sys

sys.path.insert(0, ".")

cmd = sys.argv[1]
if cmd == "cpp":
    from copyparty.__main__ import main

    argv = ["__main__", "-v", "srv::r", "-v", "../../yt:yt:r"]
    main(argv=argv)
elif cmd == "test":
    from unittest import main

    argv = ["__main__", "discover", "-s", "tests"]
    main(module=None, argv=argv)
else:
    raise Exception()

# import dis; print(dis.dis(main))

# macos:
# option1) python3.9 -m pip install --user -U vmprof==0.4.9
# option2) python3.9 -m pip install --user -U https://github.com/vmprof/vmprof-python/archive/refs/heads/master.zip
#
# python -m vmprof -o prof --lines ./scripts/profile.py test
# linux: ~/.local/bin/vmprofshow prof tree | grep -vF '[1m 0.'
# macos: ~/Library/Python/3.9/bin/vmprofshow prof tree | grep -vF '[1m 0.'
# win: %appdata%\..\Roaming\Python\Python39\Scripts\vmprofshow.exe prof tree
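putting the comments above together, a typical linux session could look like this (assuming a plain pip install of vmprof works on your setup; macos/windows only change where vmprofshow ends up, as noted above):

  python3 -m pip install --user -U vmprof
  python3 -m vmprof -o prof --lines ./scripts/profile.py test   # "test" profiles the unittests, "cpp" the server itself
  ~/.local/bin/vmprofshow prof tree | grep -vF '[1m 0.'         # drop the near-zero rows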

15
scripts/run-tests.sh Executable file
View File

@@ -0,0 +1,15 @@
#!/bin/bash
set -ex

pids=()
for py in python{2,3}; do
    nice $py -m unittest discover -s tests >/dev/null &
    pids+=($!)
done

python3 scripts/test/smoketest.py &
pids+=($!)

for pid in ${pids[@]}; do
    wait $pid
done

View File

@@ -47,7 +47,7 @@ grep -E '/(python|pypy)[0-9\.-]*$' >$dir/pys || true
printf '\033[1;30mlooking for jinja2 in [%s]\033[0m\n' "$_py" >&2
$_py -c 'import jinja2' 2>/dev/null || continue
printf '%s\n' "$_py"
mv $dir/{,x.}jinja2
mv $dir/{,x.}dep-j2
break
done)"

209
scripts/test/smoketest.py Normal file
View File

@@ -0,0 +1,209 @@
import os
import sys
import time
import shlex
import shutil
import signal
import tempfile
import requests
import threading
import subprocess as sp


CPP = []


class Cpp(object):
    def __init__(self, args):
        args = [sys.executable, "-m", "copyparty"] + args
        print(" ".join([shlex.quote(x) for x in args]))

        self.ls_pre = set(list(os.listdir()))
        self.p = sp.Popen(args)
        # , stdout=sp.PIPE, stderr=sp.PIPE)

        self.t = threading.Thread(target=self._run)
        self.t.daemon = True
        self.t.start()

    def _run(self):
        self.so, self.se = self.p.communicate()

    def stop(self, wait):
        if wait:
            os.kill(self.p.pid, signal.SIGINT)
            self.t.join(timeout=2)
        else:
            self.p.kill()  # macos py3.8

    def clean(self):
        # remove any upload temporaries that appeared since startup
        t = os.listdir()
        for f in t:
            if f not in self.ls_pre and f.startswith("up."):
                os.unlink(f)

    def await_idle(self, ub, timeout):
        # poll the ?h status page until the scanner and the hash/tag queues are done
        req = ["scanning</td><td>False", "hash-q</td><td>0", "tag-q</td><td>0"]
        lim = int(timeout * 10)
        u = ub + "?h"
        for n in range(lim):
            try:
                time.sleep(0.1)
                r = requests.get(u, timeout=0.1)
                for x in req:
                    if x not in r.text:
                        print("ST: {}/{} miss {}".format(n, lim, x))
                        raise Exception()

                print("ST: idle")
                return
            except:
                pass


def tc1():
    ub = "http://127.0.0.1:4321/"
    td = os.path.join("srv", "smoketest")
    try:
        shutil.rmtree(td)
    except:
        if os.path.exists(td):
            raise

    for _ in range(10):
        try:
            os.mkdir(td)
        except:
            time.sleep(0.1)  # win10

    assert os.path.exists(td)

    vidp = os.path.join(tempfile.gettempdir(), "smoketest.h264")
    if not os.path.exists(vidp):
        cmd = "ffmpeg -f lavfi -i testsrc=48x32:3 -t 1 -c:v libx264 -tune animation -preset veryslow -crf 69"
        sp.check_call(cmd.split(" ") + [vidp])

    with open(vidp, "rb") as f:
        ovid = f.read()

    args = [
        "-p4321",
        "-e2dsa",
        "-e2tsr",
        "--no-mutagen",
        "--th-ff-jpg",
        "--hist",
        os.path.join(td, "dbm"),
    ]

    # volumes: r/w/a at the top level, each with a nested j/r, j/w, j/a
    pdirs = []
    hpaths = {}
    for d1 in ["r", "w", "a"]:
        pdirs.append("{}/{}".format(td, d1))
        pdirs.append("{}/{}/j".format(td, d1))
        for d2 in ["r", "w", "a"]:
            d = os.path.join(td, d1, "j", d2)
            pdirs.append(d)
            os.makedirs(d)

    pdirs = [x.replace("\\", "/") for x in pdirs]
    udirs = [x.split("/", 2)[2] for x in pdirs]
    perms = [x.rstrip("j/")[-1] for x in pdirs]
    for pd, ud, p in zip(pdirs, udirs, perms):
        if ud[-1] == "j":
            continue

        # decide where each volume keeps its db/thumbs
        hp = None
        if pd.endswith("st/a"):
            hp = hpaths[ud] = os.path.join(td, "db1")
        elif pd[:-1].endswith("a/j/"):
            hpaths[ud] = os.path.join(td, "dbm")
            hp = None
        else:
            hp = "-"
            hpaths[ud] = os.path.join(pd, ".hist")

        arg = "{}:{}:{}".format(pd, ud, p)
        if hp:
            arg += ":chist=" + hp

        args += ["-v", arg]

    # return
    cpp = Cpp(args)
    CPP.append(cpp)
    cpp.await_idle(ub, 3)

    for d in udirs:
        vid = ovid + "\n{}".format(d).encode("utf-8")
        try:
            requests.post(ub + d, data={"act": "bput"}, files={"f": ("a.h264", vid)})
        except:
            pass

    cpp.clean()

    # GET permission
    for d, p in zip(udirs, perms):
        u = "{}{}/a.h264".format(ub, d)
        r = requests.get(u)
        ok = bool(r)
        if ok != (p in ["a"]):
            raise Exception("get {} with perm {} at {}".format(ok, p, u))

    # stat filesystem
    for d, p in zip(pdirs, perms):
        u = "{}/a.h264".format(d)
        ok = os.path.exists(u)
        if ok != (p in ["a", "w"]):
            raise Exception("stat {} with perm {} at {}".format(ok, p, u))

    # GET thumbnail, verify contents
    for d, p in zip(udirs, perms):
        u = "{}{}/a.h264?th=j".format(ub, d)
        r = requests.get(u)
        ok = bool(r and r.content[:3] == b"\xff\xd8\xff")
        if ok != (p in ["a"]):
            raise Exception("thumb {} with perm {} at {}".format(ok, p, u))

    # check tags
    cpp.await_idle(ub, 5)
    for d, p in zip(udirs, perms):
        u = "{}{}?ls".format(ub, d)
        r = requests.get(u)
        j = r.json() if r else False
        tag = None
        if j:
            for f in j["files"]:
                tag = tag or f["tags"].get("res")

        r_ok = bool(j)
        w_ok = bool(r_ok and j.get("files"))
        if not r_ok or w_ok != (p in ["a"]):
            raise Exception("ls {} with perm {} at {}".format(r_ok, p, u))

        if (tag and p != "a") or (not tag and p == "a"):
            raise Exception("tag {} with perm {} at {}".format(tag, p, u))

        if tag is not None and tag != "48x32":
            raise Exception("tag [{}] at {}".format(tag, u))

    cpp.stop(True)


def run(tc):
    try:
        tc()
    finally:
        try:
            CPP[0].stop(False)
        except:
            pass


def main():
    run(tc1)


if __name__ == "__main__":
    main()
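run-tests.sh already launches this next to the unittests; to run it on its own it needs the requests module plus an ffmpeg on PATH (a tiny test video is generated on first run), roughly:

  python3 -m pip install --user requests   # assuming it is not installed already
  python3 scripts/test/smoketest.py        # run from the repo root; starts copyparty on port 4321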

View File

@@ -8,13 +8,13 @@ import time
import shutil
import pprint
import tarfile
import tempfile
import unittest
from argparse import Namespace
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
from tests import util as tu
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
def hdr(query):
@@ -28,25 +28,33 @@ class Cfg(Namespace):
a=a,
v=v,
c=c,
rproxy=0,
ed=False,
no_zip=False,
no_scandir=False,
no_sendfile=True,
no_rescan=True,
ihead=False,
nih=True,
mtp=[],
mte="a",
hist=None,
no_hash=False,
css_browser=None,
**{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
)
class TestHttpCli(unittest.TestCase):
def test(self):
td = os.path.join(tu.get_ramdisk(), "vfs")
try:
shutil.rmtree(td)
except OSError:
pass
def setUp(self):
self.td = tu.get_ramdisk()
def tearDown(self):
os.chdir(tempfile.gettempdir())
shutil.rmtree(self.td)
def test(self):
td = os.path.join(self.td, "vfs")
os.mkdir(td)
os.chdir(td)
@@ -95,7 +103,7 @@ class TestHttpCli(unittest.TestCase):
pprint.pprint(vcfg)
self.args = Cfg(v=vcfg, a=["o:o", "x:x"])
self.auth = AuthSrv(self.args, self.log)
self.asrv = AuthSrv(self.args, self.log)
vfiles = [x for x in allfiles if x.startswith(top)]
for fp in vfiles:
rok, wok = self.can_rw(fp)
@@ -184,12 +192,12 @@ class TestHttpCli(unittest.TestCase):
def put(self, url):
buf = "PUT /{0} HTTP/1.1\r\nCookie: cppwd=o\r\nConnection: close\r\nContent-Length: {1}\r\n\r\nok {0}\n"
buf = buf.format(url, len(url) + 4).encode("utf-8")
conn = tu.VHttpConn(self.args, self.auth, self.log, buf)
conn = tu.VHttpConn(self.args, self.asrv, self.log, buf)
HttpCli(conn).run()
return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
def curl(self, url, binary=False):
conn = tu.VHttpConn(self.args, self.auth, self.log, hdr(url))
conn = tu.VHttpConn(self.args, self.asrv, self.log, hdr(url))
HttpCli(conn).run()
if binary:
h, b = conn.s._reply.split(b"\r\n\r\n", 1)

View File

@@ -7,24 +7,37 @@ import json
import shutil
import tempfile
import unittest
from textwrap import dedent
from argparse import Namespace
from copyparty.authsrv import AuthSrv
from copyparty import util
from tests import util as tu
from copyparty.authsrv import AuthSrv, VFS
from copyparty import util
class Cfg(Namespace):
def __init__(self, a=[], v=[], c=None):
ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
ex["mtp"] = []
ex["mte"] = "a"
ex2 = {
"mtp": [],
"mte": "a",
"hist": None,
"no_hash": False,
"css_browser": None,
"rproxy": 0,
}
ex.update(ex2)
super(Cfg, self).__init__(a=a, v=v, c=c, **ex)
class TestVFS(unittest.TestCase):
def setUp(self):
self.td = tu.get_ramdisk()
def tearDown(self):
os.chdir(tempfile.gettempdir())
shutil.rmtree(self.td)
def dump(self, vfs):
print(json.dumps(vfs, indent=4, sort_keys=True, default=lambda o: o.__dict__))
@@ -41,6 +54,7 @@ class TestVFS(unittest.TestCase):
self.assertEqual(util.undot(query), response)
def ls(self, vfs, vpath, uname):
# type: (VFS, str, str) -> tuple[str, str, str]
"""helper for resolving and listing a folder"""
vn, rem = vfs.get(vpath, uname, True, False)
r1 = vn.ls(rem, uname, False)
@@ -55,12 +69,7 @@ class TestVFS(unittest.TestCase):
pass
def test(self):
td = os.path.join(tu.get_ramdisk(), "vfs")
try:
shutil.rmtree(td)
except OSError:
pass
td = os.path.join(self.td, "vfs")
os.mkdir(td)
os.chdir(td)
@@ -111,13 +120,13 @@ class TestVFS(unittest.TestCase):
n = vfs.nodes["a"]
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a")
self.assertEqual(n.realpath, td + "/a")
self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertEqual(n.uread, ["*", "k"])
self.assertEqual(n.uwrite, ["k"])
n = n.nodes["ac"]
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a/ac")
self.assertEqual(n.realpath, td + "/a/ac")
self.assertEqual(n.realpath, os.path.join(td, "a", "ac"))
self.assertEqual(n.uread, ["*", "k"])
self.assertEqual(n.uwrite, ["k"])
n = n.nodes["acb"]
@@ -227,7 +236,7 @@ class TestVFS(unittest.TestCase):
self.assertEqual(list(v1), list(v2))
# config file parser
cfg_path = os.path.join(tu.get_ramdisk(), "test.cfg")
cfg_path = os.path.join(self.td, "test.cfg")
with open(cfg_path, "wb") as f:
f.write(
dedent(
@@ -249,7 +258,7 @@ class TestVFS(unittest.TestCase):
n = au.vfs
# root was not defined, so PWD with no access to anyone
self.assertEqual(n.vpath, "")
self.assertEqual(n.realpath, td)
self.assertEqual(n.realpath, None)
self.assertEqual(n.uread, [])
self.assertEqual(n.uwrite, [])
self.assertEqual(len(n.nodes), 1)
@@ -260,6 +269,4 @@ class TestVFS(unittest.TestCase):
self.assertEqual(n.uwrite, ["asd"])
self.assertEqual(len(n.nodes), 0)
os.chdir(tempfile.gettempdir())
shutil.rmtree(td)
os.unlink(cfg_path)

View File

@@ -1,16 +1,36 @@
import os
import sys
import time
import shutil
import jinja2
import tempfile
import platform
import subprocess as sp
from copyparty.util import Unrecv
WINDOWS = platform.system() == "Windows"
ANYWIN = WINDOWS or sys.platform in ["msys"]
MACOS = platform.system() == "Darwin"
J2_ENV = jinja2.Environment(loader=jinja2.BaseLoader)
J2_FILES = J2_ENV.from_string("{{ files|join('\n') }}")
def nah(*a, **ka):
return False
if MACOS:
import posixpath
posixpath.islink = nah
os.path.islink = nah
# 25% faster; until any tests do symlink stuff
from copyparty.util import Unrecv
def runcmd(*argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
stdout, stderr = p.communicate()
@@ -28,18 +48,25 @@ def chkcmd(*argv):
def get_ramdisk():
def subdir(top):
ret = os.path.join(top, "cptd-{}".format(os.getpid()))
shutil.rmtree(ret, True)
os.mkdir(ret)
return ret
for vol in ["/dev/shm", "/Volumes/cptd"]: # nosec (singleton test)
if os.path.exists(vol):
return vol
return subdir(vol)
if os.path.exists("/Volumes"):
devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://32768")
# hdiutil eject /Volumes/cptd/
devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://131072")
devname = devname.strip()
print("devname: [{}]".format(devname))
for _ in range(10):
try:
_, _ = chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
return "/Volumes/cptd"
return subdir("/Volumes/cptd")
except Exception as ex:
print(repr(ex))
time.sleep(0.25)
@@ -50,7 +77,7 @@ def get_ramdisk():
try:
os.mkdir(ret)
finally:
return ret
return subdir(ret)
class NullBroker(object):
@@ -83,15 +110,19 @@ class VHttpSrv(object):
class VHttpConn(object):
def __init__(self, args, auth, log, buf):
def __init__(self, args, asrv, log, buf):
self.s = VSock(buf)
self.sr = Unrecv(self.s)
self.addr = ("127.0.0.1", "42069")
self.args = args
self.auth = auth
self.asrv = asrv
self.is_mp = False
self.log_func = log
self.log_src = "a"
self.lf_url = None
self.hsrv = VHttpSrv()
self.nbyte = 0
self.workload = 0
self.ico = None
self.thumbcli = None
self.t0 = time.time()