Compare commits

..

30 Commits

Author SHA1 Message Date
ed
482dd7a938 v0.9.3 2021-03-05 00:00:22 +01:00
ed
bddcc69438 v0.9.2 2021-03-04 22:58:22 +01:00
ed
19d4540630 good 2021-03-04 22:38:12 +01:00
ed
4f5f6c81f5 add buttons to adjust tree width 2021-03-04 22:34:09 +01:00
ed
7e4c1238ba oh 2021-03-04 21:12:54 +01:00
ed
f7196ac773 dodge pushstate size limit 2021-03-04 21:06:59 +01:00
ed
7a7c832000 sfx-builder: support ancient git versions 2021-03-04 20:30:28 +01:00
ed
2b4ccdbebb multithread the slow mtag backends 2021-03-04 20:28:03 +01:00
ed
0d16b49489 broke this too 2021-03-04 01:35:09 +01:00
ed
768405b691 tree broke 2021-03-04 01:32:44 +01:00
ed
da01413b7b remove speedbumps 2021-03-04 01:21:04 +01:00
ed
914e22c53e async tagging of incoming files 2021-03-03 18:36:05 +01:00
ed
43a23bf733 v0.9.1 2021-03-03 01:28:32 +01:00
ed
92bb00c6d2 faster sorting 2021-03-03 01:27:41 +01:00
ed
b0b97a2648 fix bugs 2021-03-03 00:46:15 +01:00
ed
2c452fe323 readme nitpicks 2021-03-02 01:02:13 +01:00
ed
ad73d0c77d update feature list in readme 2021-03-02 00:31:08 +01:00
ed
7f9bf1c78c v0.9.0 2021-03-02 00:12:15 +01:00
ed
61a6bc3a65 make browser columns compactable 2021-03-02 00:07:04 +01:00
ed
46e10b0e9f yab 2021-03-01 03:15:41 +01:00
ed
8441206e26 read media-tags from files (for display/searching) 2021-03-01 02:50:10 +01:00
ed
9fdc5ee748 use one sqlite3 cursor, closes #1 2021-02-25 22:30:40 +01:00
ed
00ff133387 support receiving chunked PUT 2021-02-25 22:26:03 +01:00
ed
96164cb934 v0.8.3 2021-02-22 21:58:37 +01:00
ed
82fb21ae69 v0.8.2 2021-02-22 21:40:55 +01:00
ed
89d4a2b4c4 hide up2k mode-toggle in read-only folders 2021-02-22 21:27:44 +01:00
ed
fc0c7ff374 correct up2k mode in mixed-r/w 2021-02-22 21:11:30 +01:00
ed
5148c4f2e9 include pro/epilogues in ?ls 2021-02-22 21:09:57 +01:00
ed
c3b59f7bcf restore win8/7/xp support 2021-02-22 20:59:44 +01:00
ed
61e148202b too much 2021-02-22 20:56:19 +01:00
21 changed files with 1700 additions and 445 deletions

5
.vscode/launch.json vendored
View File

@@ -13,10 +13,13 @@
"-ed", "-ed",
"-emp", "-emp",
"-e2dsa", "-e2dsa",
"-e2ts",
"-a", "-a",
"ed:wark", "ed:wark",
"-v", "-v",
"srv::r:aed:cnodupe" "srv::r:aed:cnodupe",
"-v",
"dist:dist:r"
] ]
}, },
{ {

2
.vscode/tasks.json vendored
View File

@@ -8,7 +8,7 @@
}, },
{ {
"label": "no_dbg", "label": "no_dbg",
"command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -a ed:wark -v srv::r:aed:cnodupe ;exit 1", "command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -e2ts -a ed:wark -v srv::r:aed:cnodupe -v dist:dist:r ;exit 1",
"type": "shell" "type": "shell"
} }
] ]

View File

@@ -59,7 +59,7 @@ you may also want these, especially on servers:
* server indexing * server indexing
* ☑ locate files by contents * ☑ locate files by contents
* ☑ search by name/path/date/size * ☑ search by name/path/date/size
* search by ID3-tags etc. * search by ID3-tags etc.
* markdown * markdown
* ☑ viewer * ☑ viewer
* ☑ editor (sure why not) * ☑ editor (sure why not)
@@ -82,7 +82,42 @@ path/name queries are space-separated, AND'ed together, and words are negated wi
* path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path * path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
* name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9) * name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9)
other metadata (like song tags etc) are not yet indexed for searching add `-e2ts` to also scan/index tags from music files:
## search configuration
searching relies on two databases, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`). Configuration can be done through arguments, volume flags, or a mix of both.
through arguments:
* `-e2d` enables file indexing on upload
* `-e2ds` scans writable folders on startup
* `-e2dsa` scans all mounted volumes (including readonly ones)
* `-e2t` enables metadata indexing on upload
* `-e2ts` scans for tags in all files that don't have tags yet
* `-e2tsr` deletes all existing tags, so a full reindex
the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
* `-v ~/music::ce2dsa:ce2tsr` does a full reindex of everything on startup
* `-v ~/music::cd2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`
`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those
`-mte` decides which tags to index and display in the browser (and also the display order), this can be changed per-volume:
* `-v ~/music::cmte=title,artist` indexes and displays *title* followed by *artist*
if you add/remove a tag from `mte` you will need to run with `-e2tsr` once to rebuild the database, otherwise only new files will be affected
`-mtm` can be used to add or redefine a metadata mapping, say you have media files with `foo` and `bar` tags and you want them to display as `qux` in the browser (preferring `foo` if both are present), then do `-mtm qux=foo,bar` and now you can `-mte artist,title,qux`
see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif)
`--no-mutagen` disables mutagen and uses ffprobe instead, which...
* is about 20x slower than mutagen
* catches a few tags that mutagen doesn't
* avoids pulling any GPL code into copyparty
* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve
# client examples # client examples
@@ -91,16 +126,33 @@ other metadata (like song tags etc) are not yet indexed for searching
* `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});` * `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
* `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');` * `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
* curl/wget: upload some files (post=file, chunk=stdin)
* `post(){ curl -b cppwd=wark http://127.0.0.1:3923/ -F act=bput -F f=@"$1";}`
`post movie.mkv`
* `post(){ wget --header='Cookie: cppwd=wark' http://127.0.0.1:3923/?raw --post-file="$1" -O-;}`
`post movie.mkv`
* `chunk(){ curl -b cppwd=wark http://127.0.0.1:3923/ -T-;}`
`chunk <movie.mkv`
* FUSE: mount a copyparty server as a local filesystem * FUSE: mount a copyparty server as a local filesystem
* cross-platform python client available in [./bin/](bin/) * cross-platform python client available in [./bin/](bin/)
* [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md) * [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads:
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
b512 <movie.mkv
# dependencies # dependencies
* `jinja2` * `jinja2` (is built into the SFX)
optional, will eventually enable thumbnails: **optional,** enables music tags:
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
* or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
**optional,** will eventually enable thumbnails:
* `Pillow` (requires py2.7 or py3.5+) * `Pillow` (requires py2.7 or py3.5+)

View File

@@ -198,7 +198,7 @@ def main():
and "cflag" is config flags to set on this volume and "cflag" is config flags to set on this volume
list of cflags: list of cflags:
cnodupe rejects existing files (instead of symlinking them) "cnodupe" rejects existing files (instead of symlinking them)
example:\033[35m example:\033[35m
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m -a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
@@ -239,17 +239,28 @@ def main():
ap.add_argument("-q", action="store_true", help="quiet") ap.add_argument("-q", action="store_true", help="quiet")
ap.add_argument("-ed", action="store_true", help="enable ?dots") ap.add_argument("-ed", action="store_true", help="enable ?dots")
ap.add_argument("-emp", action="store_true", help="enable markdown plugins") ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap.add_argument("-e2d", action="store_true", help="enable up2k database")
ap.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
ap.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate") ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)") ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap.add_argument("-nih", action="store_true", help="no info hostname") ap.add_argument("-nih", action="store_true", help="no info hostname")
ap.add_argument("-nid", action="store_true", help="no info disk-usage") ap.add_argument("-nid", action="store_true", help="no info disk-usage")
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile") ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile (for debugging)")
ap.add_argument("--no-scandir", action="store_true", help="disable scandir (for debugging)")
ap.add_argument("--urlform", type=str, default="print,get", help="how to handle url-forms") ap.add_argument("--urlform", type=str, default="print,get", help="how to handle url-forms")
ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt") ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
ap2 = ap.add_argument_group('database options')
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead")
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q")
ap2 = ap.add_argument_group('SSL/TLS options') ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls") ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
ap2.add_argument("--https-only", action="store_true", help="disable plaintext") ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
@@ -257,14 +268,20 @@ def main():
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers") ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info") ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets") ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
al = ap.parse_args() al = ap.parse_args()
# fmt: on # fmt: on
if al.e2dsa: # propagate implications
al.e2ds = True for k1, k2 in [
["e2dsa", "e2ds"],
if al.e2ds: ["e2ds", "e2d"],
al.e2d = True ["e2tsr", "e2ts"],
["e2ts", "e2t"],
["e2t", "e2d"],
]:
if getattr(al, k1):
setattr(al, k2, True)
al.i = al.i.split(",") al.i = al.i.split(",")
try: try:

View File

@@ -1,8 +1,8 @@
# coding: utf-8 # coding: utf-8
VERSION = (0, 8, 1) VERSION = (0, 9, 3)
CODENAME = "keeping track" CODENAME = "the strongest music server"
BUILD_DT = (2021, 2, 22) BUILD_DT = (2021, 3, 4)
S_VERSION = ".".join(map(str, VERSION)) S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT) S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -6,7 +6,7 @@ import re
import threading import threading
from .__init__ import PY2, WINDOWS from .__init__ import PY2, WINDOWS
from .util import undot, Pebkac, fsdec, fsenc from .util import undot, Pebkac, fsdec, fsenc, statdir
class VFS(object): class VFS(object):
@@ -102,12 +102,11 @@ class VFS(object):
return fsdec(os.path.realpath(fsenc(rp))) return fsdec(os.path.realpath(fsenc(rp)))
def ls(self, rem, uname): def ls(self, rem, uname, scandir, lstat=False):
"""return user-readable [fsdir,real,virt] items at vpath""" """return user-readable [fsdir,real,virt] items at vpath"""
virt_vis = {} # nodes readable by user virt_vis = {} # nodes readable by user
abspath = self.canonical(rem) abspath = self.canonical(rem)
items = os.listdir(fsenc(abspath)) real = list(statdir(print, scandir, lstat, abspath))
real = [fsdec(x) for x in items]
real.sort() real.sort()
if not rem: if not rem:
for name, vn2 in sorted(self.nodes.items()): for name, vn2 in sorted(self.nodes.items()):
@@ -115,7 +114,7 @@ class VFS(object):
virt_vis[name] = vn2 virt_vis[name] = vn2
# no vfs nodes in the list of real inodes # no vfs nodes in the list of real inodes
real = [x for x in real if x not in self.nodes] real = [x for x in real if x[0] not in self.nodes]
return [abspath, real, virt_vis] return [abspath, real, virt_vis]
@@ -206,8 +205,11 @@ class AuthSrv(object):
if lvl in "wa": if lvl in "wa":
mwrite[vol_dst].append(uname) mwrite[vol_dst].append(uname)
if lvl == "c": if lvl == "c":
# config option, currently switches only cval = True
mflags[vol_dst][uname] = True if "=" in uname:
uname, cval = uname.split("=", 1)
mflags[vol_dst][uname] = cval
def reload(self): def reload(self):
""" """
@@ -248,12 +250,19 @@ class AuthSrv(object):
perms = perms.split(":") perms = perms.split(":")
for (lvl, uname) in [[x[0], x[1:]] for x in perms]: for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
if lvl == "c": if lvl == "c":
# config option, currently switches only cval = True
mflags[dst][uname] = True if "=" in uname:
uname, cval = uname.split("=", 1)
mflags[dst][uname] = cval
continue
if uname == "": if uname == "":
uname = "*" uname = "*"
if lvl in "ra": if lvl in "ra":
mread[dst].append(uname) mread[dst].append(uname)
if lvl in "wa": if lvl in "wa":
mwrite[dst].append(uname) mwrite[dst].append(uname)
@@ -268,6 +277,7 @@ class AuthSrv(object):
elif "" not in mount: elif "" not in mount:
# there's volumes but no root; make root inaccessible # there's volumes but no root; make root inaccessible
vfs = VFS(os.path.abspath("."), "") vfs = VFS(os.path.abspath("."), "")
vfs.flags["d2d"] = True
maxdepth = 0 maxdepth = 0
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))): for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
@@ -300,15 +310,27 @@ class AuthSrv(object):
) )
raise Exception("invalid config") raise Exception("invalid config")
for vol in vfs.all_vols.values():
if (self.args.e2ds and vol.uwrite) or self.args.e2dsa:
vol.flags["e2ds"] = True
if self.args.e2d or "e2ds" in vol.flags:
vol.flags["e2d"] = True
for k in ["e2t", "e2ts", "e2tsr"]:
if getattr(self.args, k):
vol.flags[k] = True
# default tag-list if unset
if "mte" not in vol.flags:
vol.flags["mte"] = self.args.mte
try: try:
v, _ = vfs.get("/", "*", False, True) v, _ = vfs.get("/", "*", False, True)
if self.warn_anonwrite and os.getcwd() == v.realpath: if self.warn_anonwrite and os.getcwd() == v.realpath:
self.warn_anonwrite = False self.warn_anonwrite = False
self.log( msg = "\033[31manyone can read/write the current directory: {}\033[0m"
"\033[31manyone can read/write the current directory: {}\033[0m".format( self.log(msg.format(v.realpath))
v.realpath
)
)
except Pebkac: except Pebkac:
self.warn_anonwrite = True self.warn_anonwrite = True

View File

@@ -222,6 +222,9 @@ class HttpCli(object):
static_path = os.path.join(E.mod, "web/", self.vpath[5:]) static_path = os.path.join(E.mod, "web/", self.vpath[5:])
return self.tx_file(static_path) return self.tx_file(static_path)
if "tree" in self.uparam:
return self.tx_tree()
# conditional redirect to single volumes # conditional redirect to single volumes
if self.vpath == "" and not self.uparam: if self.vpath == "" and not self.uparam:
nread = len(self.rvol) nread = len(self.rvol)
@@ -246,9 +249,6 @@ class HttpCli(object):
self.vpath = None self.vpath = None
return self.tx_mounts() return self.tx_mounts()
if "tree" in self.uparam:
return self.tx_tree()
return self.tx_browser() return self.tx_browser()
def handle_options(self): def handle_options(self):
@@ -323,8 +323,11 @@ class HttpCli(object):
raise Pebkac(405, "don't know how to handle POST({})".format(ctype)) raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
def get_body_reader(self): def get_body_reader(self):
remains = int(self.headers.get("content-length", None)) chunked = "chunked" in self.headers.get("transfer-encoding", "").lower()
if remains is None: remains = int(self.headers.get("content-length", -1))
if chunked:
return read_socket_chunked(self.sr), remains
elif remains == -1:
self.keepalive = False self.keepalive = False
return read_socket_unbounded(self.sr), remains return read_socket_unbounded(self.sr), remains
else: else:
@@ -342,6 +345,10 @@ class HttpCli(object):
with open(path, "wb", 512 * 1024) as f: with open(path, "wb", 512 * 1024) as f:
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f) post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
self.conn.hsrv.broker.put(
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fn
)
return post_sz, sha_b64, remains, path return post_sz, sha_b64, remains, path
def handle_stash(self): def handle_stash(self):
@@ -425,7 +432,7 @@ class HttpCli(object):
body["ptop"] = vfs.realpath body["ptop"] = vfs.realpath
body["prel"] = rem body["prel"] = rem
body["addr"] = self.ip body["addr"] = self.ip
body["flag"] = vfs.flags body["vcfg"] = vfs.flags
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body) x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
response = x.get() response = x.get()
@@ -442,20 +449,31 @@ class HttpCli(object):
vols.append([vfs.vpath, vfs.realpath, vfs.flags]) vols.append([vfs.vpath, vfs.realpath, vfs.flags])
idx = self.conn.get_u2idx() idx = self.conn.get_u2idx()
t0 = time.time()
if "srch" in body: if "srch" in body:
# search by up2k hashlist # search by up2k hashlist
vbody = copy.deepcopy(body) vbody = copy.deepcopy(body)
vbody["hash"] = len(vbody["hash"]) vbody["hash"] = len(vbody["hash"])
self.log("qj: " + repr(vbody)) self.log("qj: " + repr(vbody))
hits = idx.fsearch(vols, body) hits = idx.fsearch(vols, body)
self.log("q#: " + repr(hits)) self.log("q#: {} ({:.2f}s)".format(repr(hits), time.time() - t0))
taglist = []
else: else:
# search by query params # search by query params
self.log("qj: " + repr(body)) self.log("qj: " + repr(body))
hits = idx.search(vols, body) hits, taglist = idx.search(vols, body)
self.log("q#: " + str(len(hits))) self.log("q#: {} ({:.2f}s)".format(len(hits), time.time() - t0))
r = json.dumps(hits).encode("utf-8") order = []
cfg = self.args.mte.split(",")
for t in cfg:
if t in taglist:
order.append(t)
for t in taglist:
if t not in order:
order.append(t)
r = json.dumps({"hits": hits, "tag_order": order}).encode("utf-8")
self.reply(r, mime="application/json") self.reply(r, mime="application/json")
return True return True
@@ -661,6 +679,9 @@ class HttpCli(object):
raise Pebkac(400, "empty files in post") raise Pebkac(400, "empty files in post")
files.append([sz, sha512_hex]) files.append([sz, sha512_hex])
self.conn.hsrv.broker.put(
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fname
)
self.conn.nbyte += sz self.conn.nbyte += sz
except Pebkac: except Pebkac:
@@ -1098,7 +1119,7 @@ class HttpCli(object):
try: try:
vn, rem = self.auth.vfs.get(top, self.uname, True, False) vn, rem = self.auth.vfs.get(top, self.uname, True, False)
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname) fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
except: except:
vfs_ls = [] vfs_ls = []
vfs_virt = {} vfs_virt = {}
@@ -1109,13 +1130,13 @@ class HttpCli(object):
dirs = [] dirs = []
vfs_ls = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
if not self.args.ed or "dots" not in self.uparam: if not self.args.ed or "dots" not in self.uparam:
vfs_ls = exclude_dotfiles(vfs_ls) vfs_ls = exclude_dotfiles(vfs_ls)
for fn in [x for x in vfs_ls if x != excl]: for fn in [x for x in vfs_ls if x != excl]:
abspath = os.path.join(fsroot, fn) dirs.append(fn)
if os.path.isdir(abspath):
dirs.append(fn)
for x in vfs_virt.keys(): for x in vfs_virt.keys():
if x != excl: if x != excl:
@@ -1154,7 +1175,9 @@ class HttpCli(object):
return self.tx_file(abspath) return self.tx_file(abspath)
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname) fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
stats = {k: v for k, v in vfs_ls}
vfs_ls = [x[0] for x in vfs_ls]
vfs_ls.extend(vfs_virt.keys()) vfs_ls.extend(vfs_virt.keys())
# check for old versions of files, # check for old versions of files,
@@ -1183,6 +1206,11 @@ class HttpCli(object):
is_ls = "ls" in self.uparam is_ls = "ls" in self.uparam
icur = None
if "e2t" in vn.flags:
idx = self.conn.get_u2idx()
icur = idx.get_cur(vn.realpath)
dirs = [] dirs = []
files = [] files = []
for fn in vfs_ls: for fn in vfs_ls:
@@ -1200,7 +1228,7 @@ class HttpCli(object):
fspath = fsroot + "/" + fn fspath = fsroot + "/" + fn
try: try:
inf = os.stat(fsenc(fspath)) inf = stats.get(fn) or os.stat(fsenc(fspath))
except: except:
self.log("broken symlink: {}".format(repr(fspath))) self.log("broken symlink: {}".format(repr(fspath)))
continue continue
@@ -1232,12 +1260,38 @@ class HttpCli(object):
"sz": sz, "sz": sz,
"ext": ext, "ext": ext,
"dt": dt, "dt": dt,
"ts": inf.st_mtime, "ts": int(inf.st_mtime),
} }
if is_dir: if is_dir:
dirs.append(item) dirs.append(item)
else: else:
files.append(item) files.append(item)
item["rd"] = rem
taglist = {}
for f in files:
fn = f["name"]
rd = f["rd"]
del f["rd"]
if icur:
q = "select w from up where rd = ? and fn = ?"
r = icur.execute(q, (rd, fn)).fetchone()
if not r:
continue
w = r[0][:16]
tags = {}
q = "select k, v from mt where w = ? and k != 'x'"
for k, v in icur.execute(q, (w,)):
taglist[k] = True
tags[k] = v
f["tags"] = tags
if icur:
taglist = [k for k in self.args.mte.split(",") if k in taglist]
for f in dirs:
f["tags"] = {}
srv_info = [] srv_info = []
@@ -1275,20 +1329,27 @@ class HttpCli(object):
if self.writable: if self.writable:
perms.append("write") perms.append("write")
if is_ls: logues = ["", ""]
[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
ret = {"dirs": dirs, "files": files, "srvinf": srv_info, "perms": perms}
ret = json.dumps(ret)
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
return True
logues = [None, None]
for n, fn in enumerate([".prologue.html", ".epilogue.html"]): for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
fn = os.path.join(abspath, fn) fn = os.path.join(abspath, fn)
if os.path.exists(fsenc(fn)): if os.path.exists(fsenc(fn)):
with open(fsenc(fn), "rb") as f: with open(fsenc(fn), "rb") as f:
logues[n] = f.read().decode("utf-8") logues[n] = f.read().decode("utf-8")
if is_ls:
[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
ret = {
"dirs": dirs,
"files": files,
"srvinf": srv_info,
"perms": perms,
"logues": logues,
"taglist": taglist,
}
ret = json.dumps(ret)
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
return True
ts = "" ts = ""
# ts = "?{}".format(time.time()) # ts = "?{}".format(time.time())
@@ -1300,9 +1361,11 @@ class HttpCli(object):
files=dirs, files=dirs,
ts=ts, ts=ts,
perms=json.dumps(perms), perms=json.dumps(perms),
have_up2k_idx=self.args.e2d, taglist=taglist,
prologue=logues[0], tag_order=json.dumps(self.args.mte.split(",")),
epilogue=logues[1], have_up2k_idx=("e2d" in vn.flags),
have_tags_idx=("e2t" in vn.flags),
logues=logues,
title=html_escape(self.vpath), title=html_escape(self.vpath),
srv_info=srv_info, srv_info=srv_info,
) )

View File

@@ -20,10 +20,12 @@ except ImportError:
you do not have jinja2 installed,\033[33m you do not have jinja2 installed,\033[33m
choose one of these:\033[0m choose one of these:\033[0m
* apt install python-jinja2 * apt install python-jinja2
* python3 -m pip install --user jinja2 * {} -m pip install --user jinja2
* (try another python version, if you have one) * (try another python version, if you have one)
* (try copyparty.sfx instead) * (try copyparty.sfx instead)
""" """.format(
os.path.basename(sys.executable)
)
) )
sys.exit(1) sys.exit(1)

306
copyparty/mtag.py Normal file
View File

@@ -0,0 +1,306 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import re
import os
import sys
import shutil
import subprocess as sp
from .__init__ import PY2, WINDOWS
from .util import fsenc, fsdec
class MTag(object):
def __init__(self, log_func, args):
self.log_func = log_func
self.usable = True
self.prefer_mt = False
mappings = args.mtm
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
if self.backend == "mutagen":
self.get = self.get_mutagen
try:
import mutagen
except:
self.log("\033[33mcould not load mutagen, trying ffprobe instead")
self.backend = "ffprobe"
if self.backend == "ffprobe":
self.get = self.get_ffprobe
self.prefer_mt = True
# about 20x slower
if PY2:
cmd = ["ffprobe", "-version"]
try:
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
except:
self.usable = False
else:
if not shutil.which("ffprobe"):
self.usable = False
if not self.usable:
msg = "\033[31mneed mutagen or ffprobe to read media tags so please run this:\n {} -m pip install --user mutagen \033[0m"
self.log(msg.format(os.path.basename(sys.executable)))
return
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
tagmap = {
"album": ["album", "talb", "\u00a9alb", "original-album", "toal"],
"artist": [
"artist",
"tpe1",
"\u00a9art",
"composer",
"performer",
"arranger",
"\u00a9wrt",
"tcom",
"tpe3",
"original-artist",
"tope",
],
"title": ["title", "tit2", "\u00a9nam"],
"circle": [
"album-artist",
"tpe2",
"aart",
"conductor",
"organization",
"band",
],
".tn": ["tracknumber", "trck", "trkn", "track"],
"genre": ["genre", "tcon", "\u00a9gen"],
"date": [
"original-release-date",
"release-date",
"date",
"tdrc",
"\u00a9day",
"original-date",
"original-year",
"tyer",
"tdor",
"tory",
"year",
"creation-time",
],
".bpm": ["bpm", "tbpm", "tmpo", "tbp"],
"key": ["initial-key", "tkey", "key"],
"comment": ["comment", "comm", "\u00a9cmt", "comments", "description"],
}
if mappings:
for k, v in [x.split("=") for x in mappings]:
tagmap[k] = v.split(",")
self.tagmap = {}
for k, vs in tagmap.items():
vs2 = []
for v in vs:
if "-" not in v:
vs2.append(v)
continue
vs2.append(v.replace("-", " "))
vs2.append(v.replace("-", "_"))
vs2.append(v.replace("-", ""))
self.tagmap[k] = vs2
self.rmap = {
v: [n, k] for k, vs in self.tagmap.items() for n, v in enumerate(vs)
}
# self.get = self.compare
def log(self, msg):
self.log_func("mtag", msg)
def normalize_tags(self, ret, md):
for k, v in dict(md).items():
if not v:
continue
k = k.lower().split("::")[0].strip()
mk = self.rmap.get(k)
if not mk:
continue
pref, mk = mk
if mk not in ret or ret[mk][0] > pref:
ret[mk] = [pref, v[0]]
# take first value
ret = {k: str(v[1]).strip() for k, v in ret.items()}
# track 3/7 => track 3
for k, v in ret.items():
if k[0] == ".":
v = v.split("/")[0].strip().lstrip("0")
ret[k] = v or 0
return ret
def compare(self, abspath):
if abspath.endswith(".au"):
return {}
print("\n" + abspath)
r1 = self.get_mutagen(abspath)
r2 = self.get_ffprobe(abspath)
keys = {}
for d in [r1, r2]:
for k in d.keys():
keys[k] = True
diffs = []
l1 = []
l2 = []
for k in sorted(keys.keys()):
if k in [".q", ".dur"]:
continue # lenient
v1 = r1.get(k)
v2 = r2.get(k)
if v1 == v2:
print(" ", k, v1)
elif v1 != "0000": # ffprobe date=0
diffs.append(k)
print(" 1", k, v1)
print(" 2", k, v2)
if v1:
l1.append(k)
if v2:
l2.append(k)
if diffs:
raise Exception()
return r1
def get_mutagen(self, abspath):
import mutagen
try:
md = mutagen.File(abspath, easy=True)
x = md.info.length
except Exception as ex:
return {}
ret = {}
try:
dur = int(md.info.length)
try:
q = int(md.info.bitrate / 1024)
except:
q = int((os.path.getsize(abspath) / dur) / 128)
ret[".dur"] = [0, dur]
ret[".q"] = [0, q]
except:
pass
return self.normalize_tags(ret, md)
def get_ffprobe(self, abspath):
cmd = ["ffprobe", "-hide_banner", "--", fsenc(abspath)]
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
r = p.communicate()
txt = r[1].decode("utf-8", "replace")
txt = [x.rstrip("\r") for x in txt.split("\n")]
"""
note:
tags which contain newline will be truncated on first \n,
ffmpeg emits \n and spacepads the : to align visually
note:
the Stream ln always mentions Audio: if audio
the Stream ln usually has kb/s, is more accurate
the Duration ln always has kb/s
the Metadata: after Chapter may contain BPM info,
title : Tempo: 126.0
Input #0, wav,
Metadata:
date : <OK>
Duration:
Chapter #
Metadata:
title : <NG>
Input #0, mp3,
Metadata:
album : <OK>
Duration:
Stream #0:0: Audio:
Stream #0:1: Video:
Metadata:
comment : <NG>
"""
ptn_md_beg = re.compile("^( +)Metadata:$")
ptn_md_kv = re.compile("^( +)([^:]+) *: (.*)")
ptn_dur = re.compile("^ *Duration: ([^ ]+)(, |$)")
ptn_br1 = re.compile("^ *Duration: .*, bitrate: ([0-9]+) kb/s(, |$)")
ptn_br2 = re.compile("^ *Stream.*: Audio:.* ([0-9]+) kb/s(, |$)")
ptn_audio = re.compile("^ *Stream .*: Audio: ")
ptn_au_parent = re.compile("^ *(Input #|Stream .*: Audio: )")
ret = {}
md = {}
in_md = False
is_audio = False
au_parent = False
for ln in txt:
m = ptn_md_kv.match(ln)
if m and in_md and len(m.group(1)) == in_md:
_, k, v = [x.strip() for x in m.groups()]
if k != "" and v != "":
md[k] = [v]
continue
else:
in_md = False
m = ptn_md_beg.match(ln)
if m and au_parent:
in_md = len(m.group(1)) + 2
continue
au_parent = bool(ptn_au_parent.search(ln))
if ptn_audio.search(ln):
is_audio = True
m = ptn_dur.search(ln)
if m:
sec = 0
tstr = m.group(1)
if tstr.lower() != "n/a":
try:
tf = tstr.split(",")[0].split(".")[0].split(":")
for f in tf:
sec *= 60
sec += int(f)
except:
self.log(
"\033[33minvalid timestr from ffmpeg: [{}]".format(tstr)
)
ret[".dur"] = sec
m = ptn_br1.search(ln)
if m:
ret[".q"] = m.group(1)
m = ptn_br2.search(ln)
if m:
ret[".q"] = m.group(1)
if not is_audio:
return {}
ret = {k: [0, v] for k, v in ret.items()}
return self.normalize_tags(ret, md)

View File

@@ -39,14 +39,6 @@ class SvcHub(object):
self.tcpsrv = TcpSrv(self) self.tcpsrv = TcpSrv(self)
self.up2k = Up2k(self) self.up2k = Up2k(self)
if self.args.e2ds:
auth = AuthSrv(self.args, self.log, False)
vols = auth.vfs.all_vols.values()
if not self.args.e2dsa:
vols = [x for x in vols if x.uwrite]
self.up2k.build_indexes(vols)
# decide which worker impl to use # decide which worker impl to use
if self.check_mp_enable(): if self.check_mp_enable():
from .broker_mp import BrokerMp as Broker from .broker_mp import BrokerMp as Broker
@@ -95,7 +87,7 @@ class SvcHub(object):
fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n" fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n"
if not VT100: if not VT100:
fmt = "{} {:21} {}" fmt = "{} {:21} {}\n"
if "\033" in msg: if "\033" in msg:
msg = self.ansi_re.sub("", msg) msg = self.ansi_re.sub("", msg)
if "\033" in src: if "\033" in src:

View File

@@ -24,7 +24,7 @@ class U2idx(object):
self.log("could not load sqlite3; searchign wqill be disabled") self.log("could not load sqlite3; searchign wqill be disabled")
return return
self.dbs = {} self.cur = {}
def log(self, msg): def log(self, msg):
self.log_func("u2idx", msg) self.log_func("u2idx", msg)
@@ -37,7 +37,19 @@ class U2idx(object):
fsize = body["size"] fsize = body["size"]
fhash = body["hash"] fhash = body["hash"]
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash) wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
return self.run_query(vols, "select * from up where w = ?", [wark]) return self.run_query(vols, "w = ?", [wark], "", [])[0]
def get_cur(self, ptop):
cur = self.cur.get(ptop)
if cur:
return cur
cur = _open(ptop)
if not cur:
return None
self.cur[ptop] = cur
return cur
def search(self, vols, body): def search(self, vols, body):
"""search by query params""" """search by query params"""
@@ -45,59 +57,80 @@ class U2idx(object):
return [] return []
qobj = {} qobj = {}
_conv_sz(qobj, body, "sz_min", "sz >= ?") _conv_sz(qobj, body, "sz_min", "up.sz >= ?")
_conv_sz(qobj, body, "sz_max", "sz <= ?") _conv_sz(qobj, body, "sz_max", "up.sz <= ?")
_conv_dt(qobj, body, "dt_min", "mt >= ?") _conv_dt(qobj, body, "dt_min", "up.mt >= ?")
_conv_dt(qobj, body, "dt_max", "mt <= ?") _conv_dt(qobj, body, "dt_max", "up.mt <= ?")
for seg, dk in [["path", "rd"], ["name", "fn"]]: for seg, dk in [["path", "up.rd"], ["name", "up.fn"]]:
if seg in body: if seg in body:
_conv_txt(qobj, body, seg, dk) _conv_txt(qobj, body, seg, dk)
qstr = "select * from up" uq, uv = _sqlize(qobj)
qv = []
if qobj:
qk = []
for k, v in sorted(qobj.items()):
qk.append(k.split("\n")[0])
qv.append(v)
qstr = " and ".join(qk) tq = ""
qstr = "select * from up where " + qstr tv = []
qobj = {}
if "tags" in body:
_conv_txt(qobj, body, "tags", "mt.v")
tq, tv = _sqlize(qobj)
return self.run_query(vols, qstr, qv) return self.run_query(vols, uq, uv, tq, tv)
def run_query(self, vols, qstr, qv): def run_query(self, vols, uq, uv, tq, tv):
qv = tuple(qv) self.log("qs: {} {} , {} {}".format(uq, repr(uv), tq, repr(tv)))
self.log("qs: {} {}".format(qstr, repr(qv)))
ret = [] ret = []
lim = 100 lim = 1000
taglist = {}
for (vtop, ptop, flags) in vols: for (vtop, ptop, flags) in vols:
db = self.dbs.get(ptop) cur = self.get_cur(ptop)
if not db: if not cur:
db = _open(ptop) continue
if not db:
continue
self.dbs[ptop] = db if not tq:
# self.log("idx /{} @ {} {}".format(vtop, ptop, flags)) if not uq:
q = "select * from up"
v = ()
else:
q = "select * from up where " + uq
v = tuple(uv)
else:
# naive assumption: tags first
q = "select up.* from up inner join mt on substr(up.w,1,16) = mt.w where {}"
q = q.format(" and ".join([tq, uq]) if uq else tq)
v = tuple(tv + uv)
c = db.execute(qstr, qv) sret = []
for _, ts, sz, rd, fn in c: c = cur.execute(q, v)
for hit in c:
w, ts, sz, rd, fn = hit
lim -= 1 lim -= 1
if lim <= 0: if lim <= 0:
break break
rp = os.path.join(vtop, rd, fn).replace("\\", "/") rp = os.path.join(vtop, rd, fn).replace("\\", "/")
ret.append({"ts": int(ts), "sz": sz, "rp": rp}) sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
return ret for hit in sret:
w = hit["w"]
del hit["w"]
tags = {}
q = "select k, v from mt where w = ? and k != 'x'"
for k, v in cur.execute(q, (w,)):
taglist[k] = True
tags[k] = v
hit["tags"] = tags
ret.extend(sret)
return ret, list(taglist.keys())
def _open(ptop): def _open(ptop):
db_path = os.path.join(ptop, ".hist", "up2k.db") db_path = os.path.join(ptop, ".hist", "up2k.db")
if os.path.exists(db_path): if os.path.exists(db_path):
return sqlite3.connect(db_path) return sqlite3.connect(db_path).cursor()
def _conv_sz(q, body, k, sql): def _conv_sz(q, body, k, sql):
@@ -146,3 +179,13 @@ def _conv_txt(q, body, k, sql):
qk = "{} {} like {}?{}".format(sql, inv, head, tail) qk = "{} {} like {}?{}".format(sql, inv, head, tail)
q[qk + "\n" + v] = u8safe(v) q[qk + "\n" + v] = u8safe(v)
def _sqlize(qobj):
keys = []
values = []
for k, v in sorted(qobj.items()):
keys.append(k.split("\n")[0])
values.append(v)
return " and ".join(keys), values

View File

@@ -12,6 +12,7 @@ import shutil
import base64 import base64
import hashlib import hashlib
import threading import threading
import traceback
from copy import deepcopy from copy import deepcopy
from .__init__ import WINDOWS from .__init__ import WINDOWS
@@ -26,7 +27,10 @@ from .util import (
atomic_move, atomic_move,
w8b64enc, w8b64enc,
w8b64dec, w8b64dec,
statdir,
) )
from .mtag import MTag
from .authsrv import AuthSrv
try: try:
HAVE_SQLITE3 = True HAVE_SQLITE3 = True
@@ -47,22 +51,27 @@ class Up2k(object):
self.broker = broker self.broker = broker
self.args = broker.args self.args = broker.args
self.log_func = broker.log self.log_func = broker.log
self.persist = self.args.e2d
# config # config
self.salt = broker.args.salt self.salt = broker.args.salt
# state # state
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.hashq = Queue()
self.tagq = Queue()
self.registry = {} self.registry = {}
self.db = {} self.entags = {}
self.flags = {}
self.cur = {}
self.mtag = None
self.n_mtag_thr_alive = 0
self.n_mtag_tags_added = 0
self.mem_db = None self.mem_cur = None
if HAVE_SQLITE3: if HAVE_SQLITE3:
# mojibake detector # mojibake detector
self.mem_db = sqlite3.connect(":memory:", check_same_thread=False) self.mem_cur = self._orz(":memory:")
self.mem_db.execute(r"create table a (b text)") self.mem_cur.execute(r"create table a (b text)")
self.mem_db.commit()
if WINDOWS: if WINDOWS:
# usually fails to set lastmod too quickly # usually fails to set lastmod too quickly
@@ -71,25 +80,37 @@ class Up2k(object):
thr.daemon = True thr.daemon = True
thr.start() thr.start()
if self.persist: # static
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
if not HAVE_SQLITE3:
self.log("could not initialize sqlite3, will use in-memory registry only")
# this is kinda jank
auth = AuthSrv(self.args, self.log, False)
have_e2d = self.init_indexes(auth)
if have_e2d:
thr = threading.Thread(target=self._snapshot) thr = threading.Thread(target=self._snapshot)
thr.daemon = True thr.daemon = True
thr.start() thr.start()
# static thr = threading.Thread(target=self._tagger)
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$") thr.daemon = True
thr.start()
if self.persist and not HAVE_SQLITE3: thr = threading.Thread(target=self._hasher)
self.log("could not initialize sqlite3, will use in-memory registry only") thr.daemon = True
thr.start()
def log(self, msg): def log(self, msg):
self.log_func("up2k", msg + "\033[K") self.log_func("up2k", msg + "\033[K")
def w8enc(self, rd, fn): def w8enc(self, rd, fn):
ret = [] ret = []
for k, v in [["d", rd], ["f", fn]]: for v in [rd, fn]:
try: try:
self.mem_db.execute("select * from a where b = ?", (v,)) self.mem_cur.execute("select * from a where b = ?", (v,))
ret.append(v) ret.append(v)
except: except:
ret.append("//" + w8b64enc(v)) ret.append("//" + w8b64enc(v))
@@ -120,14 +141,82 @@ class Up2k(object):
return ret return ret
def register_vpath(self, ptop): def init_indexes(self, auth):
self.pp = ProgressPrinter()
vols = auth.vfs.all_vols.values()
t0 = time.time()
have_e2d = False
live_vols = []
for vol in vols:
try:
os.listdir(vol.realpath)
live_vols.append(vol)
except:
self.log("\033[31mcannot access " + vol.realpath)
vols = live_vols
need_mtag = False
for vol in auth.vfs.all_vols.values():
if "e2t" in vol.flags:
need_mtag = True
if need_mtag:
self.mtag = MTag(self.log_func, self.args)
if not self.mtag.usable:
self.mtag = None
# e2ds(a) volumes first,
# also covers tags where e2ts is set
for vol in vols:
en = {}
if "mte" in vol.flags:
en = {k: True for k in vol.flags["mte"].split(",")}
self.entags[vol.realpath] = en
if "e2d" in vol.flags:
have_e2d = True
if "e2ds" in vol.flags:
r = self._build_file_index(vol, vols)
if not r:
needed_mutagen = True
# open the rest + do any e2ts(a)
needed_mutagen = False
for vol in vols:
r = self.register_vpath(vol.realpath, vol.flags)
if not r or "e2ts" not in vol.flags:
continue
cur, db_path, sz0 = r
n_add, n_rm, success = self._build_tags_index(vol.realpath)
if not success:
needed_mutagen = True
if n_add or n_rm:
self.vac(cur, db_path, n_add, n_rm, sz0)
self.pp.end = True
msg = "{} volumes in {:.2f} sec"
self.log(msg.format(len(vols), time.time() - t0))
if needed_mutagen:
msg = "\033[31mcould not read tags because no backends are available (mutagen or ffprobe)\033[0m"
self.log(msg)
return have_e2d
def register_vpath(self, ptop, flags):
with self.mutex: with self.mutex:
if ptop in self.registry: if ptop in self.registry:
return None return None
reg = {} reg = {}
path = os.path.join(ptop, ".hist", "up2k.snap") path = os.path.join(ptop, ".hist", "up2k.snap")
if self.persist and os.path.exists(path): if "e2d" in flags and os.path.exists(path):
with gzip.GzipFile(path, "rb") as f: with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8") j = f.read().decode("utf-8")
@@ -139,8 +228,9 @@ class Up2k(object):
m = [m] + self._vis_reg_progress(reg) m = [m] + self._vis_reg_progress(reg)
self.log("\n".join(m)) self.log("\n".join(m))
self.flags[ptop] = flags
self.registry[ptop] = reg self.registry[ptop] = reg
if not self.persist or not HAVE_SQLITE3: if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
return None return None
try: try:
@@ -149,73 +239,66 @@ class Up2k(object):
pass pass
db_path = os.path.join(ptop, ".hist", "up2k.db") db_path = os.path.join(ptop, ".hist", "up2k.db")
if ptop in self.db: if ptop in self.cur:
# self.db[ptop].close()
return None return None
try: try:
db = self._open_db(db_path) sz0 = 0
self.db[ptop] = db if os.path.exists(db_path):
return db sz0 = os.path.getsize(db_path) // 1024
except Exception as ex:
self.log("cannot use database at [{}]: {}".format(ptop, repr(ex))) cur = self._open_db(db_path)
self.cur[ptop] = cur
return [cur, db_path, sz0]
except:
msg = "cannot use database at [{}]:\n{}"
self.log(msg.format(ptop, traceback.format_exc()))
return None return None
def build_indexes(self, writeables): def _build_file_index(self, vol, all_vols):
tops = [d.realpath for d in writeables] do_vac = False
self.pp = ProgressPrinter() top = vol.realpath
t0 = time.time() reg = self.register_vpath(top, vol.flags)
for top in tops: if not reg:
db = self.register_vpath(top) return
if not db:
continue
self.pp.n = next(db.execute("select count(w) from up"))[0] _, db_path, sz0 = reg
db_path = os.path.join(top, ".hist", "up2k.db") dbw = [reg[0], 0, time.time()]
sz0 = os.path.getsize(db_path) // 1024 self.pp.n = next(dbw[0].execute("select count(w) from up"))[0]
# can be symlink so don't `and d.startswith(top)`` # can be symlink so don't `and d.startswith(top)``
excl = set([d for d in tops if d != top]) excl = set([d.realpath for d in all_vols if d != vol])
dbw = [db, 0, time.time()] n_add = self._build_dir(dbw, top, excl, top)
n_rm = self._drop_lost(dbw[0], top)
if dbw[1]:
self.log("commit {} new files".format(dbw[1]))
dbw[0].connection.commit()
n_add = self._build_dir(dbw, top, excl, top) n_add, n_rm, success = self._build_tags_index(vol.realpath)
n_rm = self._drop_lost(db, top)
if dbw[1]:
self.log("commit {} new files".format(dbw[1]))
db.commit() dbw[0].connection.commit()
if n_add or n_rm: if n_add or n_rm or do_vac:
db_path = os.path.join(top, ".hist", "up2k.db") self.vac(dbw[0], db_path, n_add, n_rm, sz0)
sz1 = os.path.getsize(db_path) // 1024
db.execute("vacuum")
sz2 = os.path.getsize(db_path) // 1024
msg = "{} new, {} del, {} kB vacced, {} kB gain, {} kB now".format(
n_add, n_rm, sz1 - sz2, sz2 - sz0, sz2
)
self.log(msg)
self.pp.end = True return success
self.log("{} volumes in {:.2f} sec".format(len(tops), time.time() - t0))
def vac(self, cur, db_path, n_add, n_rm, sz0):
sz1 = os.path.getsize(db_path) // 1024
cur.execute("vacuum")
sz2 = os.path.getsize(db_path) // 1024
msg = "{} new, {} del, {} kB vacced, {} kB gain, {} kB now".format(
n_add, n_rm, sz1 - sz2, sz2 - sz0, sz2
)
self.log(msg)
def _build_dir(self, dbw, top, excl, cdir): def _build_dir(self, dbw, top, excl, cdir):
try:
inodes = [fsdec(x) for x in os.listdir(fsenc(cdir))]
except Exception as ex:
self.log("listdir: {} @ [{}]".format(repr(ex), cdir))
return 0
self.pp.msg = "a{} {}".format(self.pp.n, cdir) self.pp.msg = "a{} {}".format(self.pp.n, cdir)
histdir = os.path.join(top, ".hist") histdir = os.path.join(top, ".hist")
ret = 0 ret = 0
for inode in inodes: for iname, inf in statdir(self.log, not self.args.no_scandir, False, cdir):
abspath = os.path.join(cdir, inode) abspath = os.path.join(cdir, iname)
try: lmod = int(inf.st_mtime)
inf = os.stat(fsenc(abspath))
except Exception as ex:
self.log("stat: {} @ [{}]".format(repr(ex), abspath))
continue
if stat.S_ISDIR(inf.st_mode): if stat.S_ISDIR(inf.st_mode):
if abspath in excl or abspath == histdir: if abspath in excl or abspath == histdir:
continue continue
@@ -241,11 +324,11 @@ class Up2k(object):
self.log(m.format(top, rp, len(in_db), rep_db)) self.log(m.format(top, rp, len(in_db), rep_db))
dts = -1 dts = -1
if dts == inf.st_mtime and dsz == inf.st_size: if dts == lmod and dsz == inf.st_size:
continue continue
m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format( m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
top, rp, dts, inf.st_mtime, dsz, inf.st_size top, rp, dts, lmod, dsz, inf.st_size
) )
self.log(m) self.log(m)
self.db_rm(dbw[0], rd, fn) self.db_rm(dbw[0], rd, fn)
@@ -264,22 +347,22 @@ class Up2k(object):
continue continue
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes) wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
self.db_add(dbw[0], wark, rd, fn, inf.st_mtime, inf.st_size) self.db_add(dbw[0], wark, rd, fn, lmod, inf.st_size)
dbw[1] += 1 dbw[1] += 1
ret += 1 ret += 1
td = time.time() - dbw[2] td = time.time() - dbw[2]
if dbw[1] >= 4096 or td >= 60: if dbw[1] >= 4096 or td >= 60:
self.log("commit {} new files".format(dbw[1])) self.log("commit {} new files".format(dbw[1]))
dbw[0].commit() dbw[0].connection.commit()
dbw[1] = 0 dbw[1] = 0
dbw[2] = time.time() dbw[2] = time.time()
return ret return ret
def _drop_lost(self, db, top): def _drop_lost(self, cur, top):
rm = [] rm = []
nchecked = 0 nchecked = 0
nfiles = next(db.execute("select count(w) from up"))[0] nfiles = next(cur.execute("select count(w) from up"))[0]
c = db.execute("select * from up") c = cur.execute("select * from up")
for dwark, dts, dsz, drd, dfn in c: for dwark, dts, dsz, drd, dfn in c:
nchecked += 1 nchecked += 1
if drd.startswith("//") or dfn.startswith("//"): if drd.startswith("//") or dfn.startswith("//"):
@@ -298,49 +381,210 @@ class Up2k(object):
self.log("forgetting {} deleted files".format(len(rm))) self.log("forgetting {} deleted files".format(len(rm)))
for rd, fn in rm: for rd, fn in rm:
# self.log("{} / {}".format(rd, fn)) # self.log("{} / {}".format(rd, fn))
self.db_rm(db, rd, fn) self.db_rm(cur, rd, fn)
return len(rm) return len(rm)
def _build_tags_index(self, ptop):
entags = self.entags[ptop]
flags = self.flags[ptop]
cur = self.cur[ptop]
n_add = 0
n_rm = 0
n_buf = 0
last_write = time.time()
if "e2tsr" in flags:
n_rm = cur.execute("select count(w) from mt").fetchone()[0]
if n_rm:
self.log("discarding {} media tags for a full rescan".format(n_rm))
cur.execute("delete from mt")
else:
self.log("volume has e2tsr but there are no media tags to discard")
# integrity: drop tags for tracks that were deleted
if "e2t" in flags:
drops = []
c2 = cur.connection.cursor()
up_q = "select w from up where substr(w,1,16) = ?"
for (w,) in cur.execute("select w from mt"):
if not c2.execute(up_q, (w,)).fetchone():
drops.append(w[:16])
c2.close()
if drops:
msg = "discarding media tags for {} deleted files"
self.log(msg.format(len(drops)))
n_rm += len(drops)
for w in drops:
cur.execute("delete from mt where w = ?", (w,))
# bail if a volume flag disables indexing
if "d2t" in flags or "d2d" in flags:
return n_add, n_rm, True
# add tags for new files
if "e2ts" in flags:
if not self.mtag:
return n_add, n_rm, False
mpool = False
if self.mtag.prefer_mt and not self.args.no_mtag_mt:
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
# both do crazy runahead so lets reinvent another wheel
nw = os.cpu_count()
if not self.n_mtag_thr_alive:
msg = 'using {} cores for tag reader "{}"'
self.log(msg.format(nw, self.mtag.backend))
self.n_mtag_thr_alive = nw
mpool = Queue(nw)
for _ in range(nw):
thr = threading.Thread(target=self._tag_thr, args=(mpool,))
thr.daemon = True
thr.start()
c2 = cur.connection.cursor()
c3 = cur.connection.cursor()
n_left = cur.execute("select count(w) from up").fetchone()[0]
for w, rd, fn in cur.execute("select w, rd, fn from up"):
n_left -= 1
q = "select w from mt where w = ?"
if c2.execute(q, (w[:16],)).fetchone():
continue
abspath = os.path.join(ptop, rd, fn)
self.pp.msg = "c{} {}".format(n_left, abspath)
args = c3, entags, w, abspath
if not mpool:
n_tags = self._tag_file(*args)
else:
mpool.put(args)
with self.mutex:
n_tags = self.n_mtag_tags_added
self.n_mtag_tags_added = 0
n_add += n_tags
n_buf += n_tags
td = time.time() - last_write
if n_buf >= 4096 or td >= 60:
self.log("commit {} new tags".format(n_buf))
cur.connection.commit()
last_write = time.time()
n_buf = 0
if self.n_mtag_thr_alive:
mpool.join()
for _ in range(self.n_mtag_thr_alive):
mpool.put(None)
c3.close()
c2.close()
return n_add, n_rm, True
def _tag_thr(self, q):
while True:
task = q.get()
if not task:
break
try:
write_cur, entags, wark, abspath = task
tags = self.mtag.get(abspath)
with self.mutex:
n = self._tag_file(write_cur, entags, wark, abspath, tags)
self.n_mtag_tags_added += n
except:
with self.mutex:
self.n_mtag_thr_alive -= 1
raise
finally:
q.task_done()
def _tag_file(self, write_cur, entags, wark, abspath, tags=None):
tags = tags or self.mtag.get(abspath)
tags = {k: v for k, v in tags.items() if k in entags}
if not tags:
# indicate scanned without tags
tags = {"x": 0}
ret = 0
for k, v in tags.items():
q = "insert into mt values (?,?,?)"
write_cur.execute(q, (wark[:16], k, v))
ret += 1
return ret
def _orz(self, db_path):
return sqlite3.connect(db_path, check_same_thread=False).cursor()
def _open_db(self, db_path): def _open_db(self, db_path):
existed = os.path.exists(db_path) existed = os.path.exists(db_path)
conn = sqlite3.connect(db_path, check_same_thread=False) cur = self._orz(db_path)
try: ver = self._read_ver(cur)
ver = self._read_ver(conn) if not existed and ver is None:
return self._create_db(db_path, cur)
if ver == 1: orig_ver = ver
conn = self._upgrade_v1(conn, db_path) if not ver or ver < 3:
ver = self._read_ver(conn) bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
db = cur.connection
cur.close()
db.close()
msg = "creating new DB (old is bad); backup: {}"
if ver:
msg = "creating backup before upgrade: {}"
if ver == 2: self.log(msg.format(bak))
try: shutil.copy2(db_path, bak)
nfiles = next(conn.execute("select count(w) from up"))[0] cur = self._orz(db_path)
self.log("found DB at {} |{}|".format(db_path, nfiles))
return conn
except Exception as ex:
self.log("WARN: could not list files, DB corrupt?\n " + repr(ex))
if ver is not None: if ver == 1:
self.log("REPLACING unsupported DB (v.{}) at {}".format(ver, db_path)) cur = self._upgrade_v1(cur, db_path)
elif not existed: if cur:
raise Exception("whatever") ver = 2
conn.close() if ver == 2:
os.unlink(db_path) cur = self._create_v3(cur)
conn = sqlite3.connect(db_path, check_same_thread=False) ver = self._read_ver(cur) if cur else None
except:
pass
# sqlite is variable-width only, no point in using char/nchar/varchar if ver == 3:
self._create_v2(conn) if orig_ver != ver:
conn.commit() cur.connection.commit()
cur.execute("vacuum")
cur.connection.commit()
try:
nfiles = next(cur.execute("select count(w) from up"))[0]
self.log("OK: {} |{}|".format(db_path, nfiles))
return cur
except Exception as ex:
self.log("WARN: could not list files, DB corrupt?\n " + repr(ex))
if cur:
db = cur.connection
cur.close()
db.close()
return self._create_db(db_path, None)
def _create_db(self, db_path, cur):
if not cur:
cur = self._orz(db_path)
self._create_v2(cur)
self._create_v3(cur)
cur.connection.commit()
self.log("created DB at {}".format(db_path)) self.log("created DB at {}".format(db_path))
return conn return cur
def _read_ver(self, conn): def _read_ver(self, cur):
for tab in ["ki", "kv"]: for tab in ["ki", "kv"]:
try: try:
c = conn.execute(r"select v from {} where k = 'sver'".format(tab)) c = cur.execute(r"select v from {} where k = 'sver'".format(tab))
except: except:
continue continue
@@ -348,26 +592,47 @@ class Up2k(object):
if rows: if rows:
return int(rows[0][0]) return int(rows[0][0])
def _create_v2(self, conn): def _create_v2(self, cur):
for cmd in [ for cmd in [
r"create table ks (k text, v text)",
r"create table ki (k text, v int)",
r"create table up (w text, mt int, sz int, rd text, fn text)", r"create table up (w text, mt int, sz int, rd text, fn text)",
r"insert into ki values ('sver', 2)",
r"create index up_w on up(w)",
r"create index up_rd on up(rd)", r"create index up_rd on up(rd)",
r"create index up_fn on up(fn)", r"create index up_fn on up(fn)",
]: ]:
conn.execute(cmd) cur.execute(cmd)
return cur
def _create_v3(self, cur):
"""
collision in 2^(n/2) files where n = bits (6 bits/ch)
10*6/2 = 2^30 = 1'073'741'824, 24.1mb idx
12*6/2 = 2^36 = 68'719'476'736, 24.8mb idx
16*6/2 = 2^48 = 281'474'976'710'656, 26.1mb idx
"""
for c, ks in [["drop table k", "isv"], ["drop index up_", "w"]]:
for k in ks:
try:
cur.execute(c + k)
except:
pass
for cmd in [
r"create index up_w on up(substr(w,1,16))",
r"create table mt (w text, k text, v int)",
r"create index mt_w on mt(w)",
r"create index mt_k on mt(k)",
r"create index mt_v on mt(v)",
r"create table kv (k text, v int)",
r"insert into kv values ('sver', 3)",
]:
cur.execute(cmd)
return cur
def _upgrade_v1(self, odb, db_path): def _upgrade_v1(self, odb, db_path):
self.log("\033[33mupgrading v1 to v2:\033[0m {}".format(db_path))
npath = db_path + ".next" npath = db_path + ".next"
if os.path.exists(npath): if os.path.exists(npath):
os.unlink(npath) os.unlink(npath)
ndb = sqlite3.connect(npath, check_same_thread=False) ndb = self._orz(npath)
self._create_v2(ndb) self._create_v2(ndb)
c = odb.execute("select * from up") c = odb.execute("select * from up")
@@ -376,27 +641,29 @@ class Up2k(object):
v = (wark, ts, sz, rd, fn) v = (wark, ts, sz, rd, fn)
ndb.execute("insert into up values (?,?,?,?,?)", v) ndb.execute("insert into up values (?,?,?,?,?)", v)
ndb.commit() ndb.connection.commit()
ndb.close() ndb.connection.close()
odb.close() odb.connection.close()
bpath = db_path + ".bak.v1"
self.log("success; backup at: " + bpath)
atomic_move(db_path, bpath)
atomic_move(npath, db_path) atomic_move(npath, db_path)
return sqlite3.connect(db_path, check_same_thread=False) return self._orz(db_path)
def handle_json(self, cj): def handle_json(self, cj):
self.register_vpath(cj["ptop"]) if not self.register_vpath(cj["ptop"], cj["vcfg"]):
if cj["ptop"] not in self.registry:
raise Pebkac(410, "location unavailable")
cj["name"] = sanitize_fn(cj["name"]) cj["name"] = sanitize_fn(cj["name"])
cj["poke"] = time.time() cj["poke"] = time.time()
wark = self._get_wark(cj) wark = self._get_wark(cj)
now = time.time() now = time.time()
job = None job = None
with self.mutex: with self.mutex:
db = self.db.get(cj["ptop"], None) cur = self.cur.get(cj["ptop"], None)
reg = self.registry[cj["ptop"]] reg = self.registry[cj["ptop"]]
if db: if cur:
cur = db.execute(r"select * from up where w = ?", (wark,)) q = r"select * from up where substr(w,1,16) = ? and w = ?"
argv = (wark[:16], wark)
cur = cur.execute(q, argv)
for _, dtime, dsize, dp_dir, dp_fn in cur: for _, dtime, dsize, dp_dir, dp_fn in cur:
if dp_dir.startswith("//") or dp_fn.startswith("//"): if dp_dir.startswith("//") or dp_fn.startswith("//"):
dp_dir, dp_fn = self.w8dec(dp_dir, dp_fn) dp_dir, dp_fn = self.w8dec(dp_dir, dp_fn)
@@ -409,7 +676,6 @@ class Up2k(object):
"prel": dp_dir, "prel": dp_dir,
"vtop": cj["vtop"], "vtop": cj["vtop"],
"ptop": cj["ptop"], "ptop": cj["ptop"],
"flag": cj["flag"],
"size": dsize, "size": dsize,
"lmod": dtime, "lmod": dtime,
"hash": [], "hash": [],
@@ -446,7 +712,7 @@ class Up2k(object):
err = "partial upload exists at a different location; please resume uploading here instead:\n" err = "partial upload exists at a different location; please resume uploading here instead:\n"
err += "/" + vsrc + " " err += "/" + vsrc + " "
raise Pebkac(400, err) raise Pebkac(400, err)
elif "nodupe" in job["flag"]: elif "nodupe" in self.flags[job["ptop"]]:
self.log("dupe-reject:\n {0}\n {1}".format(src, dst)) self.log("dupe-reject:\n {0}\n {1}".format(src, dst))
err = "upload rejected, file already exists:\n/" + vsrc + " " err = "upload rejected, file already exists:\n/" + vsrc + " "
raise Pebkac(400, err) raise Pebkac(400, err)
@@ -476,7 +742,6 @@ class Up2k(object):
"vtop", "vtop",
"ptop", "ptop",
"prel", "prel",
"flag",
"name", "name",
"size", "size",
"lmod", "lmod",
@@ -584,16 +849,33 @@ class Up2k(object):
if WINDOWS: if WINDOWS:
self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))]) self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))])
db = self.db.get(job["ptop"], None) # legit api sware 2 me mum
if db: if self.idx_wark(
j = job job["ptop"],
self.db_rm(db, j["prel"], j["name"]) job["wark"],
self.db_add(db, j["wark"], j["prel"], j["name"], j["lmod"], j["size"]) job["prel"],
db.commit() job["name"],
job["lmod"],
job["size"],
):
del self.registry[ptop][wark] del self.registry[ptop][wark]
# in-memory registry is reserved for unfinished uploads # in-memory registry is reserved for unfinished uploads
return ret, dst return ret, dst
def idx_wark(self, ptop, wark, rd, fn, lmod, sz):
cur = self.cur.get(ptop, None)
if not cur:
return False
self.db_rm(cur, rd, fn)
self.db_add(cur, wark, rd, fn, int(lmod), sz)
cur.connection.commit()
if "e2t" in self.flags[ptop]:
self.tagq.put([ptop, wark, rd, fn])
return True
def db_rm(self, db, rd, fn): def db_rm(self, db, rd, fn):
sql = "delete from up where rd = ? and fn = ?" sql = "delete from up where rd = ? and fn = ?"
@@ -604,7 +886,7 @@ class Up2k(object):
def db_add(self, db, wark, rd, fn, ts, sz): def db_add(self, db, wark, rd, fn, ts, sz):
sql = "insert into up values (?,?,?,?,?)" sql = "insert into up values (?,?,?,?,?)"
v = (wark, ts, sz, rd, fn) v = (wark, int(ts), sz, rd, fn)
try: try:
db.execute(sql, v) db.execute(sql, v)
except: except:
@@ -635,10 +917,9 @@ class Up2k(object):
fsz = os.path.getsize(path) fsz = os.path.getsize(path)
csz = up2k_chunksize(fsz) csz = up2k_chunksize(fsz)
ret = [] ret = []
last_print = time.time()
with open(path, "rb", 512 * 1024) as f: with open(path, "rb", 512 * 1024) as f:
while fsz > 0: while fsz > 0:
self.pp.msg = msg = "{} MB".format(int(fsz / 1024 / 1024)) self.pp.msg = "{} MB".format(int(fsz / 1024 / 1024))
hashobj = hashlib.sha512() hashobj = hashlib.sha512()
rem = min(csz, fsz) rem = min(csz, fsz)
fsz -= rem fsz -= rem
@@ -745,6 +1026,45 @@ class Up2k(object):
self.log("snap: {} |{}|".format(path, len(reg.keys()))) self.log("snap: {} |{}|".format(path, len(reg.keys())))
prev[k] = etag prev[k] = etag
def _tagger(self):
while True:
ptop, wark, rd, fn = self.tagq.get()
abspath = os.path.join(ptop, rd, fn)
self.log("tagging " + abspath)
with self.mutex:
cur = self.cur[ptop]
if not cur:
self.log("\033[31mno cursor to write tags with??")
continue
entags = self.entags[ptop]
if not entags:
self.log("\033[33mno entags okay.jpg")
continue
if "e2t" in self.flags[ptop]:
self._tag_file(cur, entags, wark, abspath)
cur.connection.commit()
def _hasher(self):
while True:
ptop, rd, fn = self.hashq.get()
if "e2d" not in self.flags[ptop]:
continue
abspath = os.path.join(ptop, rd, fn)
self.log("hashing " + abspath)
inf = os.stat(fsenc(abspath))
hashes = self._hashlist_from_file(abspath)
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
with self.mutex:
self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size)
def hash_file(self, ptop, flags, rd, fn):
self.register_vpath(ptop, flags)
self.hashq.put([ptop, rd, fn])
def up2k_chunksize(filesize): def up2k_chunksize(filesize):
chunksize = 1024 * 1024 chunksize = 1024 * 1024

View File

@@ -521,9 +521,7 @@ def u8safe(txt):
def exclude_dotfiles(filepaths): def exclude_dotfiles(filepaths):
for fpath in filepaths: return [x for x in filepaths if not x.split("/")[-1].startswith(".")]
if not fpath.split("/")[-1].startswith("."):
yield fpath
def html_escape(s, quote=False): def html_escape(s, quote=False):
@@ -633,6 +631,40 @@ def read_socket_unbounded(sr):
yield buf yield buf
def read_socket_chunked(sr, log=None):
err = "expected chunk length, got [{}] |{}| instead"
while True:
buf = b""
while b"\r" not in buf:
rbuf = sr.recv(2)
if not rbuf or len(buf) > 16:
err = err.format(buf.decode("utf-8", "replace"), len(buf))
raise Pebkac(400, err)
buf += rbuf
if not buf.endswith(b"\n"):
sr.recv(1)
try:
chunklen = int(buf.rstrip(b"\r\n"), 16)
except:
err = err.format(buf.decode("utf-8", "replace"), len(buf))
raise Pebkac(400, err)
if chunklen == 0:
sr.recv(2) # \r\n after final chunk
return
if log:
log("receiving {} byte chunk".format(chunklen))
for chunk in read_socket(sr, chunklen):
yield chunk
sr.recv(2) # \r\n after each chunk too
def hashcopy(actor, fin, fout): def hashcopy(actor, fin, fout):
u32_lim = int((2 ** 31) * 0.9) u32_lim = int((2 ** 31) * 0.9)
hashobj = hashlib.sha512() hashobj = hashlib.sha512()
@@ -692,6 +724,30 @@ def sendfile_kern(lower, upper, f, s):
return 0 return 0
def statdir(logger, scandir, lstat, top):
try:
btop = fsenc(top)
if scandir and hasattr(os, "scandir"):
src = "scandir"
with os.scandir(btop) as dh:
for fh in dh:
try:
yield [fsdec(fh.name), fh.stat(follow_symlinks=not lstat)]
except Exception as ex:
logger("scan-stat: {} @ {}".format(repr(ex), fsdec(fh.path)))
else:
src = "listdir"
fun = os.lstat if lstat else os.stat
for name in os.listdir(btop):
abspath = os.path.join(btop, name)
try:
yield [fsdec(name), fun(abspath)]
except Exception as ex:
logger("list-stat: {} @ {}".format(repr(ex), fsdec(abspath)))
except Exception as ex:
logger("{}: {} @ {}".format(src, repr(ex), top))
def unescape_cookie(orig): def unescape_cookie(orig):
# mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn # mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn
ret = "" ret = ""

View File

@@ -46,7 +46,7 @@ body {
display: none; display: none;
} }
#files { #files {
border-collapse: collapse; border-spacing: 0;
margin-top: 2em; margin-top: 2em;
z-index: 1; z-index: 1;
position: relative; position: relative;
@@ -67,16 +67,18 @@ a,
#files a:hover { #files a:hover {
color: #fff; color: #fff;
background: #161616; background: #161616;
text-decoration: underline;
} }
#files thead a { #files thead a {
color: #999; color: #999;
font-weight: normal; font-weight: normal;
} }
#files tr:hover { #files tr+tr:hover {
background: #1c1c1c; background: #1c1c1c;
} }
#files thead th { #files thead th {
padding: .5em 1.3em .3em 1.3em; padding: .5em 1.3em .3em 1.3em;
cursor: pointer;
} }
#files thead th:last-child { #files thead th:last-child {
background: #444; background: #444;
@@ -94,6 +96,16 @@ a,
margin: 0; margin: 0;
padding: 0 .5em; padding: 0 .5em;
} }
#files td {
border-bottom: 1px solid #111;
}
#files td+td+td {
max-width: 30em;
overflow: hidden;
}
#files tr+tr td {
border-top: 1px solid #383838;
}
#files tbody td:nth-child(3) { #files tbody td:nth-child(3) {
font-family: monospace; font-family: monospace;
font-size: 1.3em; font-size: 1.3em;
@@ -112,6 +124,9 @@ a,
padding-bottom: 1.3em; padding-bottom: 1.3em;
border-bottom: .5em solid #444; border-bottom: .5em solid #444;
} }
#files tbody tr td:last-child {
white-space: nowrap;
}
#files thead th[style] { #files thead th[style] {
width: auto !important; width: auto !important;
} }
@@ -160,7 +175,8 @@ a,
margin: -.2em; margin: -.2em;
} }
#files tbody a.play.act { #files tbody a.play.act {
color: #af0; color: #840;
text-shadow: 0 0 .3em #b80;
} }
#blocked { #blocked {
position: fixed; position: fixed;
@@ -291,6 +307,20 @@ a,
width: calc(100% - 10.5em); width: calc(100% - 10.5em);
background: rgba(0,0,0,0.2); background: rgba(0,0,0,0.2);
} }
@media (min-width: 90em) {
#barpos,
#barbuf {
width: calc(100% - 24em);
left: 9.8em;
top: .7em;
height: 1.6em;
bottom: auto;
}
#widget {
bottom: -3.2em;
height: 3.2em;
}
}
@@ -400,14 +430,13 @@ input[type="checkbox"]:checked+label {
color: #fff; color: #fff;
} }
#files td div a { #files td div a {
display: table-cell; display: inline-block;
white-space: nowrap; white-space: nowrap;
} }
#files td div a:last-child { #files td div a:last-child {
width: 100%; width: 100%;
} }
#files td div { #files td div {
display: table;
border-collapse: collapse; border-collapse: collapse;
width: 100%; width: 100%;
} }
@@ -421,12 +450,27 @@ input[type="checkbox"]:checked+label {
#tree { #tree {
padding-top: 2em; padding-top: 2em;
} }
#tree>a+a {
padding: .2em .4em;
font-size: 1.2em;
background: #2a2a2a;
box-shadow: 0 .1em .2em #222 inset;
border-radius: .3em;
margin: .2em;
position: relative;
top: -.2em;
}
#tree>a+a:hover {
background: #805;
}
#tree>a+a.on {
background: #fc4;
color: #400;
text-shadow: none;
}
#detree { #detree {
padding: .3em .5em; padding: .3em .5em;
font-size: 1.5em; font-size: 1.5em;
display: inline-block;
min-width: 12em;
width: 100%;
} }
#treefiles #files tbody { #treefiles #files tbody {
border-radius: 0 .7em 0 .7em; border-radius: 0 .7em 0 .7em;
@@ -447,20 +491,20 @@ input[type="checkbox"]:checked+label {
list-style: none; list-style: none;
white-space: nowrap; white-space: nowrap;
} }
#tree a.hl { #treeul a.hl {
color: #400; color: #400;
background: #fc4; background: #fc4;
border-radius: .3em; border-radius: .3em;
text-shadow: none; text-shadow: none;
} }
#tree a { #treeul a {
display: inline-block; display: inline-block;
} }
#tree a+a { #treeul a+a {
width: calc(100% - 2em); width: calc(100% - 2em);
background: #333; background: #333;
} }
#tree a+a:hover { #treeul a+a:hover {
background: #222; background: #222;
color: #fff; color: #fff;
} }
@@ -480,13 +524,6 @@ input[type="checkbox"]:checked+label {
#treeul a:first-child { #treeul a:first-child {
font-family: monospace, monospace; font-family: monospace, monospace;
} }
#treefiles {
opacity: 1;
transition: opacity 0.2s ease-in-out;
}
#tree:hover+#treefiles {
opacity: .8;
}
.dumb_loader_thing { .dumb_loader_thing {
display: inline-block; display: inline-block;
margin: 1em .3em 1em 1em; margin: 1em .3em 1em 1em;
@@ -496,3 +533,49 @@ input[type="checkbox"]:checked+label {
position: absolute; position: absolute;
z-index: 9; z-index: 9;
} }
#files .cfg {
display: none;
font-size: 2em;
white-space: nowrap;
}
#files th:hover .cfg,
#files th.min .cfg {
display: block;
width: 1em;
border-radius: .2em;
margin: -1.3em auto 0 auto;
background: #444;
}
#files th.min .cfg {
margin: -.6em;
}
#files>thead>tr>th.min span {
position: absolute;
transform: rotate(270deg);
background: linear-gradient(90deg, rgba(68,68,68,0), rgba(68,68,68,0.5) 70%, #444);
margin-left: -4.6em;
padding: .4em;
top: 5.4em;
width: 8em;
text-align: right;
letter-spacing: .04em;
}
#files td:nth-child(2n) {
color: #f5a;
}
#files td.min a {
display: none;
}
#files tr.play td {
background: #fc4;
border-color: transparent;
color: #400;
text-shadow: none;
}
#files tr.play a {
color: inherit;
}
#files tr.play a:hover {
color: #300;
background: #fea;
}

View File

@@ -26,7 +26,11 @@
</div> </div>
<div id="op_search" class="opview"> <div id="op_search" class="opview">
{%- if have_tags_idx %}
<table id="srch_form" class="tags"></table>
{%- else %}
<table id="srch_form"></table> <table id="srch_form"></table>
{%- endif %}
<div id="srch_q"></div> <div id="srch_q"></div>
</div> </div>
{%- include 'upload.html' %} {%- include 'upload.html' %}
@@ -38,14 +42,15 @@
{%- endfor %} {%- endfor %}
</h1> </h1>
{%- if prologue %} <div id="pro" class="logue">{{ logues[0] }}</div>
<div id="pro" class="logue">{{ prologue }}</div>
{%- endif %}
<table id="treetab"> <table id="treetab">
<tr> <tr>
<td id="tree"> <td id="tree">
<a href="#" id="detree">🍞...</a> <a href="#" id="detree">🍞...</a>
<a href="#" step="2" id="twobytwo">+</a>
<a href="#" step="-2" id="twig">&ndash;</a>
<a href="#" id="dyntree">a</a>
<ul id="treeul"></ul> <ul id="treeul"></ul>
</td> </td>
<td id="treefiles"></td> <td id="treefiles"></td>
@@ -56,24 +61,35 @@
<thead> <thead>
<tr> <tr>
<th></th> <th></th>
<th>File Name</th> <th><span>File Name</span></th>
<th sort="int">File Size</th> <th sort="int"><span>Size</span></th>
<th>T</th> {%- for k in taglist %}
<th>Date</th> {%- if k.startswith('.') %}
<th sort="int"><span>{{ k[1:] }}</span></th>
{%- else %}
<th><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
{%- endif %}
{%- endfor %}
<th><span>T</span></th>
<th><span>Date</span></th>
</tr> </tr>
</thead> </thead>
<tbody> <tbody>
{%- for f in files %} {%- for f in files %}
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr> <tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
{%- if f.tags is defined %}
{%- for k in taglist %}
<td>{{ f.tags[k] }}</td>
{%- endfor %}
{%- endif %}
<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
{%- endfor %} {%- endfor %}
</tbody> </tbody>
</table> </table>
{%- if epilogue %} <div id="epi" class="logue">{{ logues[1] }}</div>
<div id="epi" class="logue">{{ epilogue }}</div>
{%- endif %}
<h2><a href="?h">control-panel</a></h2> <h2><a href="?h">control-panel</a></h2>
@@ -91,6 +107,9 @@
</div> </div>
</div> </div>
<script>
var tag_order_cfg = {{ tag_order }};
</script>
<script src="/.cpr/util.js{{ ts }}"></script> <script src="/.cpr/util.js{{ ts }}"></script>
<script src="/.cpr/browser.js{{ ts }}"></script> <script src="/.cpr/browser.js{{ ts }}"></script>
<script src="/.cpr/up2k.js{{ ts }}"></script> <script src="/.cpr/up2k.js{{ ts }}"></script>

View File

@@ -6,21 +6,6 @@ function dbg(msg) {
ebi('path').innerHTML = msg; ebi('path').innerHTML = msg;
} }
function ev(e) {
e = e || window.event;
if (!e)
return;
if (e.preventDefault)
e.preventDefault()
if (e.stopPropagation)
e.stopPropagation();
e.returnValue = false;
return e;
}
makeSortable(ebi('files')); makeSortable(ebi('files'));
@@ -55,7 +40,7 @@ function init_mp() {
for (var a = 0, aa = tracks.length; a < aa; a++) for (var a = 0, aa = tracks.length; a < aa; a++)
ebi('trk' + a).onclick = ev_play; ebi('trk' + a).onclick = ev_play;
ret.vol = localStorage.getItem('vol'); ret.vol = sread('vol');
if (ret.vol !== null) if (ret.vol !== null)
ret.vol = parseFloat(ret.vol); ret.vol = parseFloat(ret.vol);
else else
@@ -67,7 +52,7 @@ function init_mp() {
ret.setvol = function (vol) { ret.setvol = function (vol) {
ret.vol = Math.max(Math.min(vol, 1), 0); ret.vol = Math.max(Math.min(vol, 1), 0);
localStorage.setItem('vol', vol); swrite('vol', vol);
if (ret.au) if (ret.au)
ret.au.volume = ret.expvol(); ret.au.volume = ret.expvol();
@@ -153,6 +138,9 @@ var pbar = (function () {
var grad = null; var grad = null;
r.drawbuf = function () { r.drawbuf = function () {
if (!mp.au)
return;
var cs = getComputedStyle(r.bcan); var cs = getComputedStyle(r.bcan);
var sw = parseInt(cs['width']); var sw = parseInt(cs['width']);
var sh = parseInt(cs['height']); var sh = parseInt(cs['height']);
@@ -179,6 +167,9 @@ var pbar = (function () {
} }
}; };
r.drawpos = function () { r.drawpos = function () {
if (!mp.au)
return;
var cs = getComputedStyle(r.bcan); var cs = getComputedStyle(r.bcan);
var sw = parseInt(cs['width']); var sw = parseInt(cs['width']);
var sh = parseInt(cs['height']); var sh = parseInt(cs['height']);
@@ -460,6 +451,11 @@ function play(tid, call_depth) {
mp.au.volume = mp.expvol(); mp.au.volume = mp.expvol();
var oid = 'trk' + tid; var oid = 'trk' + tid;
setclass(oid, 'play act'); setclass(oid, 'play act');
var trs = ebi('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
for (var a = 0, aa = trs.length; a < aa; a++) {
trs[a].className = trs[a].className.replace(/ *play */, "");
}
ebi(oid).parentElement.parentElement.className += ' play';
try { try {
if (hack_attempt_play) if (hack_attempt_play)
@@ -472,7 +468,7 @@ function play(tid, call_depth) {
o.setAttribute('id', 'thx_js'); o.setAttribute('id', 'thx_js');
if (window.history && history.replaceState) { if (window.history && history.replaceState) {
var nurl = (document.location + '').split('#')[0] + '#' + oid; var nurl = (document.location + '').split('#')[0] + '#' + oid;
history.replaceState(ebi('files').tBodies[0].innerHTML, nurl, nurl); hist_replace(ebi('files').innerHTML, nurl);
} }
else { else {
document.location.hash = oid; document.location.hash = oid;
@@ -591,6 +587,12 @@ function autoplay_blocked() {
["name", "name", "name contains &nbsp; (negate with -nope)", "46"] ["name", "name", "name contains &nbsp; (negate with -nope)", "46"]
] ]
]; ];
if (document.querySelector('#srch_form.tags'))
sconf.push(["tags",
["tags", "tags", "tags contains", "46"]
]);
var html = []; var html = [];
var orig_html = null; var orig_html = null;
for (var a = 0; a < sconf.length; a++) { for (var a = 0; a < sconf.length; a++) {
@@ -610,7 +612,7 @@ function autoplay_blocked() {
} }
ebi('srch_form').innerHTML = html.join('\n'); ebi('srch_form').innerHTML = html.join('\n');
var o = document.querySelectorAll('#op_search input[type="text"]'); var o = document.querySelectorAll('#op_search input');
for (var a = 0; a < o.length; a++) { for (var a = 0; a < o.length; a++) {
o[a].oninput = ev_search_input; o[a].oninput = ev_search_input;
} }
@@ -619,8 +621,11 @@ function autoplay_blocked() {
function ev_search_input() { function ev_search_input() {
var v = this.value; var v = this.value;
var chk = ebi(this.getAttribute('id').slice(0, -1) + 'c'); var id = this.getAttribute('id');
chk.checked = ((v + '').length > 0); if (id.slice(-1) == 'v') {
var chk = ebi(id.slice(0, -1) + 'c');
chk.checked = ((v + '').length > 0);
}
clearTimeout(search_timeout); clearTimeout(search_timeout);
search_timeout = setTimeout(do_search, 100); search_timeout = setTimeout(do_search, 100);
} }
@@ -653,6 +658,9 @@ function autoplay_blocked() {
return; return;
} }
var res = JSON.parse(this.responseText),
tagord = res.tag_order;
var ofiles = ebi('files'); var ofiles = ebi('files');
if (ofiles.getAttribute('ts') > this.ts) if (ofiles.getAttribute('ts') > this.ts)
return; return;
@@ -660,10 +668,11 @@ function autoplay_blocked() {
ebi('path').style.display = 'none'; ebi('path').style.display = 'none';
ebi('tree').style.display = 'none'; ebi('tree').style.display = 'none';
var html = ['<tr><td>-</td><td colspan="4"><a href="#" id="unsearch">close search results</a></td></tr>']; var html = mk_files_header(tagord);
var res = JSON.parse(this.responseText); html.push('<tbody>');
for (var a = 0; a < res.length; a++) { html.push('<tr><td>-</td><td colspan="42"><a href="#" id="unsearch">close search results</a></td></tr>');
var r = res[a], for (var a = 0; a < res.hits.length; a++) {
var r = res.hits[a],
ts = parseInt(r.ts), ts = parseInt(r.ts),
sz = esc(r.sz + ''), sz = esc(r.sz + ''),
rp = esc(r.rp + ''), rp = esc(r.rp + ''),
@@ -674,15 +683,31 @@ function autoplay_blocked() {
ext = '%'; ext = '%';
links = links.join(''); links = links.join('');
html.push('<tr><td>-</td><td><div>' + links + '</div></td><td>' + sz + var nodes = ['<tr><td>-</td><td><div>' + links + '</div>', sz];
'</td><td>' + ext + '</td><td>' + unix2iso(ts) + '</td></tr>'); for (var b = 0; b < tagord.length; b++) {
var k = tagord[b],
v = r.tags[k] || "";
if (k == "dur") {
var sv = s2ms(v);
nodes[nodes.length - 1] += '</td><td sortv="' + v + '">' + sv;
continue;
}
nodes.push(v);
}
nodes = nodes.concat([ext, unix2iso(ts)]);
html.push(nodes.join('</td><td>'));
html.push('</td></tr>');
} }
if (!orig_html) if (!orig_html)
orig_html = ebi('files').tBodies[0].innerHTML; orig_html = ebi('files').innerHTML;
ofiles.tBodies[0].innerHTML = html.join('\n'); ofiles.innerHTML = html.join('\n');
ofiles.setAttribute("ts", this.ts); ofiles.setAttribute("ts", this.ts);
filecols.set_style();
reload_browser(); reload_browser();
ebi('unsearch').onclick = unsearch; ebi('unsearch').onclick = unsearch;
@@ -692,7 +717,7 @@ function autoplay_blocked() {
ev(e); ev(e);
ebi('path').style.display = 'inline-block'; ebi('path').style.display = 'inline-block';
ebi('tree').style.display = 'block'; ebi('tree').style.display = 'block';
ebi('files').tBodies[0].innerHTML = orig_html; ebi('files').innerHTML = orig_html;
orig_html = null; orig_html = null;
reload_browser(); reload_browser();
} }
@@ -702,6 +727,10 @@ function autoplay_blocked() {
// tree // tree
(function () { (function () {
var treedata = null; var treedata = null;
var dyn = bcfg_get('dyntree', true);
var treesz = icfg_get('treesz', 16);
treesz = isNaN(treesz) ? 16 : Math.min(Math.max(treesz, 4), 50);
console.log('treesz [' + treesz + ']');
function entree(e) { function entree(e) {
ev(e); ev(e);
@@ -712,17 +741,11 @@ function autoplay_blocked() {
treetab.style.display = 'table'; treetab.style.display = 'table';
var pro = ebi('pro'); treefiles.appendChild(ebi('pro'));
if (pro)
treefiles.appendChild(pro);
treefiles.appendChild(ebi('files')); treefiles.appendChild(ebi('files'));
treefiles.appendChild(ebi('epi'));
var epi = ebi('epi'); swrite('entreed', 'tree');
if (epi)
treefiles.appendChild(epi);
localStorage.setItem('entreed', 'tree');
get_tree("", get_vpath()); get_tree("", get_vpath());
} }
@@ -766,7 +789,7 @@ function autoplay_blocked() {
esc(top) + '">' + esc(name) + esc(top) + '">' + esc(name) +
"</a>\n<ul>\n" + html + "</ul>"; "</a>\n<ul>\n" + html + "</ul>";
var links = document.querySelectorAll('#tree a+a'); var links = document.querySelectorAll('#treeul a+a');
for (var a = 0, aa = links.length; a < aa; a++) { for (var a = 0, aa = links.length; a < aa; a++) {
if (links[a].getAttribute('href') == top) { if (links[a].getAttribute('href') == top) {
var o = links[a].parentNode; var o = links[a].parentNode;
@@ -780,7 +803,10 @@ function autoplay_blocked() {
document.querySelector('#treeul>li>a+a').textContent = '[root]'; document.querySelector('#treeul>li>a+a').textContent = '[root]';
despin('#tree'); despin('#tree');
reload_tree(); reload_tree();
rescale_tree();
}
function rescale_tree() {
var q = '#tree'; var q = '#tree';
var nq = 0; var nq = 0;
while (true) { while (true) {
@@ -789,18 +815,19 @@ function autoplay_blocked() {
if (!document.querySelector(q)) if (!document.querySelector(q))
break; break;
} }
ebi('treeul').style.width = (24 + nq) + 'em'; var w = treesz + (dyn ? nq : 0);
ebi('treeul').style.width = w + 'em';
} }
function reload_tree() { function reload_tree() {
var cdir = get_vpath(); var cdir = get_vpath();
var links = document.querySelectorAll('#tree a+a'); var links = document.querySelectorAll('#treeul a+a');
for (var a = 0, aa = links.length; a < aa; a++) { for (var a = 0, aa = links.length; a < aa; a++) {
var href = links[a].getAttribute('href'); var href = links[a].getAttribute('href');
links[a].setAttribute('class', href == cdir ? 'hl' : ''); links[a].setAttribute('class', href == cdir ? 'hl' : '');
links[a].onclick = treego; links[a].onclick = treego;
} }
links = document.querySelectorAll('#tree li>a:first-child'); links = document.querySelectorAll('#treeul li>a:first-child');
for (var a = 0, aa = links.length; a < aa; a++) { for (var a = 0, aa = links.length; a < aa; a++) {
links[a].setAttribute('dst', links[a].nextSibling.getAttribute('href')); links[a].setAttribute('dst', links[a].nextSibling.getAttribute('href'));
links[a].onclick = treegrow; links[a].onclick = treegrow;
@@ -831,6 +858,7 @@ function autoplay_blocked() {
rm.parentNode.removeChild(rm); rm.parentNode.removeChild(rm);
} }
this.textContent = '+'; this.textContent = '+';
rescale_tree();
return; return;
} }
var dst = this.getAttribute('dst'); var dst = this.getAttribute('dst');
@@ -857,27 +885,42 @@ function autoplay_blocked() {
ebi('srv_info').innerHTML = '<span>' + res.srvinf + '</span>'; ebi('srv_info').innerHTML = '<span>' + res.srvinf + '</span>';
var nodes = res.dirs.concat(res.files); var nodes = res.dirs.concat(res.files);
var top = this.top; var top = this.top;
var html = []; var html = mk_files_header(res.taglist);
html.push('<tbody>');
for (var a = 0; a < nodes.length; a++) { for (var a = 0; a < nodes.length; a++) {
var r = nodes[a], var r = nodes[a],
ln = '<tr><td>' + r.lead + '</td><td><a href="' + ln = ['<tr><td>' + r.lead + '</td><td><a href="' +
top + r.href + '">' + esc(decodeURIComponent(r.href)) + '</a>'; top + r.href + '">' + esc(decodeURIComponent(r.href)) + '</a>', r.sz];
ln = [ln, r.sz, r.ext, unix2iso(r.ts)].join('</td><td>'); for (var b = 0; b < res.taglist.length; b++) {
var k = res.taglist[b],
v = (r.tags || {})[k] || "";
if (k[0] == '.')
k = k.slice(1);
if (k == "dur") {
var sv = s2ms(v);
ln[ln.length - 1] += '</td><td sortv="' + v + '">' + sv;
continue;
}
ln.push(v);
}
ln = ln.concat([r.ext, unix2iso(r.ts)]).join('</td><td>');
html.push(ln + '</td></tr>'); html.push(ln + '</td></tr>');
} }
html.push('</tbody>');
html = html.join('\n'); html = html.join('\n');
ebi('files').tBodies[0].innerHTML = html; ebi('files').innerHTML = html;
history.pushState(html, this.top, this.top);
hist_push(html, this.top);
apply_perms(res.perms); apply_perms(res.perms);
despin('#files'); despin('#files');
var o = ebi('pro'); ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : "";
if (o) o.parentNode.removeChild(o); ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : "";
o = ebi('epi');
if (o) o.parentNode.removeChild(o);
filecols.set_style();
reload_tree(); reload_tree();
reload_browser(); reload_browser();
} }
@@ -915,39 +958,55 @@ function autoplay_blocked() {
ev(e); ev(e);
var treetab = ebi('treetab'); var treetab = ebi('treetab');
var pro = ebi('pro'); treetab.parentNode.insertBefore(ebi('pro'), treetab);
if (pro)
treetab.parentNode.insertBefore(pro, treetab);
treetab.parentNode.insertBefore(ebi('files'), treetab.nextSibling); treetab.parentNode.insertBefore(ebi('files'), treetab.nextSibling);
treetab.parentNode.insertBefore(ebi('epi'), ebi('files').nextSibling);
var epi = ebi('epi');
if (epi)
treetab.parentNode.insertBefore(epi, ebi('files').nextSibling);
ebi('path').style.display = 'inline-block'; ebi('path').style.display = 'inline-block';
treetab.style.display = 'none'; treetab.style.display = 'none';
localStorage.setItem('entreed', 'na'); swrite('entreed', 'na');
}
function dyntree(e) {
ev(e);
dyn = !dyn;
bcfg_set('dyntree', dyn);
rescale_tree();
}
function scaletree(e) {
ev(e);
treesz += parseInt(this.getAttribute("step"));
if (isNaN(treesz))
treesz = 16;
swrite('treesz', treesz);
rescale_tree();
} }
ebi('entree').onclick = entree; ebi('entree').onclick = entree;
ebi('detree').onclick = detree; ebi('detree').onclick = detree;
if (window.localStorage && localStorage.getItem('entreed') == 'tree') ebi('dyntree').onclick = dyntree;
ebi('twig').onclick = scaletree;
ebi('twobytwo').onclick = scaletree;
if (sread('entreed') == 'tree')
entree(); entree();
window.onpopstate = function (e) { window.onpopstate = function (e) {
console.log(e.url + ' ,, ' + ((e.state + '').slice(0, 64))); console.log(e.url + ' ,, ' + ((e.state + '').slice(0, 64)));
if (e.state) { var html = sessionStorage.getItem(e.state || 1);
ebi('files').tBodies[0].innerHTML = e.state; if (!html)
reload_tree(); return;
reload_browser();
} ebi('files').innerHTML = html;
reload_tree();
reload_browser();
}; };
if (window.history && history.pushState) { if (window.history && history.pushState) {
var u = get_vpath(); var u = get_vpath() + window.location.hash;
history.replaceState(ebi('files').tBodies[0].innerHTML, u, u); hist_replace(ebi('files').innerHTML, u);
} }
})(); })();
@@ -998,12 +1057,125 @@ function apply_perms(perms) {
(have_write || tds[a].getAttribute('data-perm') == 'read') ? (have_write || tds[a].getAttribute('data-perm') == 'read') ?
'table-cell' : 'none'; 'table-cell' : 'none';
} }
if (!have_write && up2k)
if (window['up2k'])
up2k.set_fsearch(); up2k.set_fsearch();
} }
function mk_files_header(taglist) {
var html = [
'<thead>',
'<th></th>',
'<th><span>File Name</span></th>',
'<th sort="int"><span>Size</span></th>'
];
for (var a = 0; a < taglist.length; a++) {
var tag = taglist[a];
var c1 = tag.slice(0, 1).toUpperCase();
tag = c1 + tag.slice(1);
if (c1 == '.')
tag = '<th sort="int"><span>' + tag.slice(1);
else
tag = '<th><span>' + tag;
html.push(tag + '</span></th>');
}
html = html.concat([
'<th><span>T</span></th>',
'<th><span>Date</span></th>',
'</thead>',
]);
return html;
}
var filecols = (function () {
var hidden = jread('filecols', []);
var add_btns = function () {
var ths = document.querySelectorAll('#files th>span');
for (var a = 0, aa = ths.length; a < aa; a++) {
var th = ths[a].parentElement;
var is_hidden = has(hidden, ths[a].textContent);
th.innerHTML = '<div class="cfg"><a href="#">' +
(is_hidden ? '+' : '-') + '</a></div>' + ths[a].outerHTML;
th.getElementsByTagName('a')[0].onclick = ev_row_tgl;
}
};
var set_style = function () {
add_btns();
var ohidden = [],
ths = document.querySelectorAll('#files th'),
ncols = ths.length;
for (var a = 0; a < ncols; a++) {
var span = ths[a].getElementsByTagName('span');
if (span.length <= 0)
continue;
var name = span[0].textContent,
cls = '';
if (has(hidden, name)) {
ohidden.push(a);
cls = ' min';
}
ths[a].className = ths[a].className.replace(/ *min */, " ") + cls;
}
for (var a = 0; a < ncols; a++) {
var cls = has(ohidden, a) ? 'min' : '';
var tds = document.querySelectorAll('#files>tbody>tr>td:nth-child(' + (a + 1) + ')');
for (var b = 0, bb = tds.length; b < bb; b++) {
tds[b].setAttribute('class', cls);
if (a < 2)
continue;
if (cls) {
if (!tds[b].hasAttribute('html')) {
tds[b].setAttribute('html', tds[b].innerHTML);
tds[b].innerHTML = '...';
}
}
else if (tds[b].hasAttribute('html')) {
tds[b].innerHTML = tds[b].getAttribute('html');
tds[b].removeAttribute('html');
}
}
}
};
set_style();
var toggle = function (name) {
var ofs = hidden.indexOf(name);
if (ofs !== -1)
hidden.splice(ofs, 1);
else
hidden.push(name);
jwrite("filecols", hidden);
set_style();
};
return {
"add_btns": add_btns,
"set_style": set_style,
"toggle": toggle,
};
})();
function ev_row_tgl(e) {
ev(e);
filecols.toggle(this.parentElement.parentElement.getElementsByTagName('span')[0].textContent);
}
function reload_browser(not_mp) { function reload_browser(not_mp) {
filecols.set_style();
makeSortable(ebi('files')); makeSortable(ebi('files'));
var parts = get_vpath().split('/'); var parts = get_vpath().split('/');
@@ -1036,5 +1208,8 @@ function reload_browser(not_mp) {
widget.close(); widget.close();
mp = init_mp(); mp = init_mp();
} }
if (window['up2k'])
up2k.set_fsearch();
} }
reload_browser(true); reload_browser(true);

View File

@@ -524,11 +524,9 @@ dom_navtgl.onclick = function () {
dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav'; dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav';
dom_nav.style.display = hidden ? 'none' : 'block'; dom_nav.style.display = hidden ? 'none' : 'block';
if (window.localStorage) swrite('hidenav', hidden ? 1 : 0);
localStorage.setItem('hidenav', hidden ? 1 : 0);
redraw(); redraw();
}; };
if (window.localStorage && localStorage.getItem('hidenav') == 1) if (sread('hidenav') == 1)
dom_navtgl.onclick(); dom_navtgl.onclick();

View File

@@ -209,42 +209,7 @@ function up2k_init(have_crypto) {
}; };
} }
function cfg_get(name) { var parallel_uploads = icfg_get('nthread');
var val = localStorage.getItem(name);
if (val === null)
return parseInt(ebi(name).value);
ebi(name).value = val;
return val;
}
function bcfg_get(name, defval) {
var o = ebi(name);
if (!o)
return defval;
var val = localStorage.getItem(name);
if (val === null)
val = defval;
else
val = (val == '1');
o.checked = val;
return val;
}
function bcfg_set(name, val) {
localStorage.setItem(
name, val ? '1' : '0');
var o = ebi(name);
if (o)
o.checked = val;
return val;
}
var parallel_uploads = cfg_get('nthread');
var multitask = bcfg_get('multitask', true); var multitask = bcfg_get('multitask', true);
var ask_up = bcfg_get('ask_up', true); var ask_up = bcfg_get('ask_up', true);
var flag_en = bcfg_get('flag_en', false); var flag_en = bcfg_get('flag_en', false);
@@ -282,7 +247,7 @@ function up2k_init(have_crypto) {
var flag = false; var flag = false;
apply_flag_cfg(); apply_flag_cfg();
apply_fsearch_cfg(); set_fsearch();
function nav() { function nav() {
ebi('file' + fdom_ctr).click(); ebi('file' + fdom_ctr).click();
@@ -772,13 +737,13 @@ function up2k_init(have_crypto) {
if (!response.name) { if (!response.name) {
var msg = ''; var msg = '';
var smsg = ''; var smsg = '';
if (!response || !response.length) { if (!response || !response.hits || !response.hits.length) {
msg = 'not found on server'; msg = 'not found on server';
smsg = '404'; smsg = '404';
} }
else { else {
smsg = 'found'; smsg = 'found';
var hit = response[0], var hit = response.hits[0],
msg = linksplit(hit.rp).join(''), msg = linksplit(hit.rp).join(''),
tr = unix2iso(hit.ts), tr = unix2iso(hit.ts),
tu = unix2iso(t.lmod), tu = unix2iso(t.lmod),
@@ -1033,7 +998,7 @@ function up2k_init(have_crypto) {
return; return;
parallel_uploads = v; parallel_uploads = v;
localStorage.setItem('nthread', v); swrite('nthread', v);
obj.style.background = '#444'; obj.style.background = '#444';
return; return;
} }
@@ -1061,12 +1026,31 @@ function up2k_init(have_crypto) {
} }
function tgl_fsearch() { function tgl_fsearch() {
fsearch = !fsearch; set_fsearch(!fsearch);
bcfg_set('fsearch', fsearch);
apply_fsearch_cfg();
} }
function apply_fsearch_cfg() { function set_fsearch(new_state) {
var perms = document.body.getAttribute('perms');
var read_only = false;
if (!ebi('fsearch')) {
new_state = false;
}
else if (perms && perms.indexOf('write') === -1) {
new_state = true;
read_only = true;
}
if (new_state !== undefined) {
fsearch = new_state;
bcfg_set('fsearch', fsearch);
}
try {
document.querySelector('label[for="fsearch"]').style.opacity = read_only ? '0' : '1';
}
catch (ex) { }
try { try {
var fun = fsearch ? 'add' : 'remove'; var fun = fsearch ? 'add' : 'remove';
ebi('op_up2k').classList[fun]('srch'); ebi('op_up2k').classList[fun]('srch');
@@ -1078,11 +1062,6 @@ function up2k_init(have_crypto) {
catch (ex) { } catch (ex) { }
} }
function set_fsearch() {
if (!fsearch)
tgl_fsearch();
}
function tgl_flag_en() { function tgl_flag_en() {
flag_en = !flag_en; flag_en = !flag_en;
bcfg_set('flag_en', flag_en); bcfg_set('flag_en', flag_en);
@@ -1131,12 +1110,8 @@ function up2k_init(have_crypto) {
for (var a = nodes.length - 1; a >= 0; a--) for (var a = nodes.length - 1; a >= 0; a--)
nodes[a].addEventListener('touchend', nop, false); nodes[a].addEventListener('touchend', nop, false);
var perms = document.body.getAttribute('perms'); set_fsearch();
if (perms && perms.indexOf('write') === -1)
set_fsearch();
bumpthread({ "target": 1 }) bumpthread({ "target": 1 })
return { "init_deps": init_deps, "set_fsearch": set_fsearch } return { "init_deps": init_deps, "set_fsearch": set_fsearch }
} }

View File

@@ -43,6 +43,21 @@ function ebi(id) {
return document.getElementById(id); return document.getElementById(id);
} }
function ev(e) {
e = e || window.event;
if (!e)
return;
if (e.preventDefault)
e.preventDefault()
if (e.stopPropagation)
e.stopPropagation();
e.returnValue = false;
return e;
}
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
if (!String.prototype.endsWith) { if (!String.prototype.endsWith) {
@@ -76,30 +91,41 @@ function import_js(url, cb) {
function sortTable(table, col) { function sortTable(table, col) {
var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows var tb = table.tBodies[0],
th = table.tHead.rows[0].cells, th = table.tHead.rows[0].cells,
tr = Array.prototype.slice.call(tb.rows, 0), tr = Array.prototype.slice.call(tb.rows, 0),
i, reverse = th[col].className == 'sort1' ? -1 : 1; i, reverse = th[col].className.indexOf('sort1') !== -1 ? -1 : 1;
for (var a = 0, thl = th.length; a < thl; a++) for (var a = 0, thl = th.length; a < thl; a++)
th[a].className = ''; th[a].className = th[a].className.replace(/ *sort-?1 */, " ");
th[col].className = 'sort' + reverse; th[col].className += ' sort' + reverse;
var stype = th[col].getAttribute('sort'); var stype = th[col].getAttribute('sort');
tr = tr.sort(function (a, b) { var vl = [];
if (!a.cells[col]) for (var a = 0; a < tr.length; a++) {
var cell = tr[a].cells[col];
if (!cell) {
vl.push([null, a]);
continue;
}
var v = cell.getAttribute('sortv') || cell.textContent.trim();
if (stype == 'int') {
v = parseInt(v.replace(/[, ]/g, '')) || 0;
}
vl.push([v, a]);
}
vl.sort(function (a, b) {
a = a[0];
b = b[0];
if (a === null)
return -1; return -1;
if (!b.cells[col]) if (b === null)
return 1; return 1;
var v1 = a.cells[col].textContent.trim();
var v2 = b.cells[col].textContent.trim();
if (stype == 'int') { if (stype == 'int') {
v1 = parseInt(v1.replace(/,/g, '')); return reverse * (a - b);
v2 = parseInt(v2.replace(/,/g, ''));
return reverse * (v1 - v2);
} }
return reverse * (v1.localeCompare(v2)); return reverse * (a.localeCompare(b));
}); });
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]); for (i = 0; i < tr.length; ++i) tb.appendChild(tr[vl[i][1]]);
} }
function makeSortable(table) { function makeSortable(table) {
var th = table.tHead, i; var th = table.tHead, i;
@@ -107,7 +133,8 @@ function makeSortable(table) {
if (th) i = th.length; if (th) i = th.length;
else return; // if no `<thead>` then do nothing else return; // if no `<thead>` then do nothing
while (--i >= 0) (function (i) { while (--i >= 0) (function (i) {
th[i].onclick = function () { th[i].onclick = function (e) {
ev(e);
sortTable(table, i); sortTable(table, i);
}; };
}(i)); }(i));
@@ -123,16 +150,13 @@ function makeSortable(table) {
})(); })();
function opclick(ev) { function opclick(e) {
if (ev) //ie ev(e);
ev.preventDefault();
var dest = this.getAttribute('data-dest'); var dest = this.getAttribute('data-dest');
goto(dest); goto(dest);
// writing a blank value makes ie8 segfault w swrite('opmode', dest || undefined);
if (window.localStorage)
localStorage.setItem('opmode', dest || '.');
var input = document.querySelector('.opview.act input:not([type="hidden"])') var input = document.querySelector('.opview.act input:not([type="hidden"])')
if (input) if (input)
@@ -167,11 +191,9 @@ function goto(dest) {
(function () { (function () {
goto(); goto();
if (window.localStorage) { var op = sread('opmode');
var op = localStorage.getItem('opmode'); if (op !== null && op !== '.')
if (op !== null && op !== '.') goto(op);
goto(op);
}
})(); })();
@@ -225,6 +247,12 @@ function unix2iso(ts) {
} }
function s2ms(s) {
var m = Math.floor(s / 60);
return m + ":" + ("0" + (s - m * 60)).slice(-2);
}
function has(haystack, needle) { function has(haystack, needle) {
for (var a = 0; a < haystack.length; a++) for (var a = 0; a < haystack.length; a++)
if (haystack[a] == needle) if (haystack[a] == needle)
@@ -232,3 +260,93 @@ function has(haystack, needle) {
return false; return false;
} }
function sread(key) {
if (window.localStorage)
return localStorage.getItem(key);
return '';
}
function swrite(key, val) {
if (window.localStorage) {
if (val === undefined)
localStorage.removeItem(key);
else
localStorage.setItem(key, val);
}
}
function jread(key, fb) {
var str = sread(key);
if (!str)
return fb;
return JSON.parse(str);
}
function jwrite(key, val) {
if (!val)
swrite(key);
else
swrite(key, JSON.stringify(val));
}
function icfg_get(name, defval) {
    // read an integer setting, falling back to the matching
    // dom element's value (if any) and then to defval;
    // a found value is also written back into the dom element
    var o = ebi(name);
    var val = parseInt(sread(name));
    // parseInt never yields null -- an unset or garbage stored
    // value comes back as NaN, so that is the "missing" check
    if (isNaN(val))
        return parseInt(o ? o.value : defval);

    if (o)
        o.value = val;

    return val;
}
function bcfg_get(name, defval) {
    // read a boolean setting and sync the matching dom element;
    // defval applies when nothing is stored for this key
    var o = ebi(name);
    if (!o)
        return defval;

    var stored = sread(name);
    var val = (stored === null) ? defval : (stored == '1');

    bcfg_upd_ui(name, val);
    return val;
}
function bcfg_set(name, val) {
    // persist a boolean setting as '1'/'0' and sync the ui;
    // returns val so callers can chain the assignment
    bcfg_upd_ui(name, val);
    swrite(name, val ? '1' : '0');
    return val;
}
function bcfg_upd_ui(name, val) {
    // reflect a boolean setting in its dom element:
    // checkboxes get .checked, anything else an 'on' css class
    var o = ebi(name);
    if (!o)
        return;

    if (o.getAttribute('type') == 'checkbox')
        o.checked = val;
    else
        o.setAttribute('class', val ? 'on' : '');
}
function hist_push(html, url) {
    // push a new history entry; the page html is stashed in
    // sessionStorage under a timestamp key, and only the key
    // goes into the history state (keeps the state tiny)
    var ts = Date.now();
    sessionStorage.setItem(ts, html);
    history.pushState(ts, url, url);
}
function hist_replace(html, url) {
    // replace the current history entry; like hist_push, the
    // html lives in sessionStorage and the state is just the key
    var ts = Date.now();
    sessionStorage.setItem(ts, html);
    history.replaceState(ts, url, url);
}

View File

@@ -122,7 +122,7 @@ git describe --tags >/dev/null 2>/dev/null && {
exit 1 exit 1
} }
dt="$(git log -1 --format=%cd --date=format:'%Y,%m,%d' | sed -E 's/,0?/, /g')" dt="$(git log -1 --format=%cd --date=short | sed -E 's/-0?/, /g')"
printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt" printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt"
sed -ri ' sed -ri '
s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/; s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/;

View File

@@ -16,6 +16,12 @@ from copyparty.authsrv import AuthSrv
from copyparty import util from copyparty import util
class Cfg(Namespace):
    """Minimal argparse.Namespace stand-in for tests.

    Defaults every indexing/tagging switch read by AuthSrv
    (e2d, e2ds, e2dsa, e2t, e2ts, e2tsr, mte) to False so a
    config can be built without running the full cli parser.
    """

    def __init__(self, a=None, v=None, c=None):
        # None sentinels instead of mutable [] defaults: a shared
        # default list would leak state between test instances
        ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr mte".split()}
        super(Cfg, self).__init__(a=a or [], v=v or [], c=c, **ex)
class TestVFS(unittest.TestCase): class TestVFS(unittest.TestCase):
def dump(self, vfs): def dump(self, vfs):
print(json.dumps(vfs, indent=4, sort_keys=True, default=lambda o: o.__dict__)) print(json.dumps(vfs, indent=4, sort_keys=True, default=lambda o: o.__dict__))
@@ -35,7 +41,13 @@ class TestVFS(unittest.TestCase):
def ls(self, vfs, vpath, uname): def ls(self, vfs, vpath, uname):
"""helper for resolving and listing a folder""" """helper for resolving and listing a folder"""
vn, rem = vfs.get(vpath, uname, True, False) vn, rem = vfs.get(vpath, uname, True, False)
return vn.ls(rem, uname) r1 = vn.ls(rem, uname, False)
r2 = vn.ls(rem, uname, False)
self.assertEqual(r1, r2)
fsdir, real, virt = r1
real = [x[0] for x in real]
return fsdir, real, virt
def runcmd(self, *argv): def runcmd(self, *argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE) p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
@@ -102,7 +114,7 @@ class TestVFS(unittest.TestCase):
f.write(fn) f.write(fn)
# defaults # defaults
vfs = AuthSrv(Namespace(c=None, a=[], v=[]), self.log).vfs vfs = AuthSrv(Cfg(), self.log).vfs
self.assertEqual(vfs.nodes, {}) self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "") self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td) self.assertEqual(vfs.realpath, td)
@@ -110,7 +122,7 @@ class TestVFS(unittest.TestCase):
self.assertEqual(vfs.uwrite, ["*"]) self.assertEqual(vfs.uwrite, ["*"])
# single read-only rootfs (relative path) # single read-only rootfs (relative path)
vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), self.log).vfs vfs = AuthSrv(Cfg(v=["a/ab/::r"]), self.log).vfs
self.assertEqual(vfs.nodes, {}) self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "") self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab")) self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab"))
@@ -118,9 +130,7 @@ class TestVFS(unittest.TestCase):
self.assertEqual(vfs.uwrite, []) self.assertEqual(vfs.uwrite, [])
# single read-only rootfs (absolute path) # single read-only rootfs (absolute path)
vfs = AuthSrv( vfs = AuthSrv(Cfg(v=[td + "//a/ac/../aa//::r"]), self.log).vfs
Namespace(c=None, a=[], v=[td + "//a/ac/../aa//::r"]), self.log
).vfs
self.assertEqual(vfs.nodes, {}) self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "") self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa")) self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa"))
@@ -129,7 +139,7 @@ class TestVFS(unittest.TestCase):
# read-only rootfs with write-only subdirectory (read-write for k) # read-only rootfs with write-only subdirectory (read-write for k)
vfs = AuthSrv( vfs = AuthSrv(
Namespace(c=None, a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]), Cfg(a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]),
self.log, self.log,
).vfs ).vfs
self.assertEqual(len(vfs.nodes), 1) self.assertEqual(len(vfs.nodes), 1)
@@ -192,7 +202,10 @@ class TestVFS(unittest.TestCase):
self.assertEqual(list(virt), []) self.assertEqual(list(virt), [])
# admin-only rootfs with all-read-only subfolder # admin-only rootfs with all-read-only subfolder
vfs = AuthSrv(Namespace(c=None, a=["k:k"], v=[".::ak", "a:a:r"]), self.log,).vfs vfs = AuthSrv(
Cfg(a=["k:k"], v=[".::ak", "a:a:r"]),
self.log,
).vfs
self.assertEqual(len(vfs.nodes), 1) self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(vfs.vpath, "") self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td) self.assertEqual(vfs.realpath, td)
@@ -211,9 +224,7 @@ class TestVFS(unittest.TestCase):
# breadth-first construction # breadth-first construction
vfs = AuthSrv( vfs = AuthSrv(
Namespace( Cfg(
c=None,
a=[],
v=[ v=[
"a/ac/acb:a/ac/acb:w", "a/ac/acb:a/ac/acb:w",
"a:a:w", "a:a:w",
@@ -234,7 +245,7 @@ class TestVFS(unittest.TestCase):
self.undot(vfs, "./.././foo/..", "") self.undot(vfs, "./.././foo/..", "")
# shadowing # shadowing
vfs = AuthSrv(Namespace(c=None, a=[], v=[".::r", "b:a/ac:r"]), self.log).vfs vfs = AuthSrv(Cfg(v=[".::r", "b:a/ac:r"]), self.log).vfs
fsp, r1, v1 = self.ls(vfs, "", "*") fsp, r1, v1 = self.ls(vfs, "", "*")
self.assertEqual(fsp, td) self.assertEqual(fsp, td)
@@ -271,7 +282,7 @@ class TestVFS(unittest.TestCase):
).encode("utf-8") ).encode("utf-8")
) )
au = AuthSrv(Namespace(c=[cfg_path], a=[], v=[]), self.log) au = AuthSrv(Cfg(c=[cfg_path]), self.log)
self.assertEqual(au.user["a"], "123") self.assertEqual(au.user["a"], "123")
self.assertEqual(au.user["asd"], "fgh:jkl") self.assertEqual(au.user["asd"], "fgh:jkl")
n = au.vfs n = au.vfs