mirror of
https://github.com/9001/copyparty.git
synced 2025-10-24 08:33:58 +00:00
Compare commits
23 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
482dd7a938 | ||
|
|
bddcc69438 | ||
|
|
19d4540630 | ||
|
|
4f5f6c81f5 | ||
|
|
7e4c1238ba | ||
|
|
f7196ac773 | ||
|
|
7a7c832000 | ||
|
|
2b4ccdbebb | ||
|
|
0d16b49489 | ||
|
|
768405b691 | ||
|
|
da01413b7b | ||
|
|
914e22c53e | ||
|
|
43a23bf733 | ||
|
|
92bb00c6d2 | ||
|
|
b0b97a2648 | ||
|
|
2c452fe323 | ||
|
|
ad73d0c77d | ||
|
|
7f9bf1c78c | ||
|
|
61a6bc3a65 | ||
|
|
46e10b0e9f | ||
|
|
8441206e26 | ||
|
|
9fdc5ee748 | ||
|
|
00ff133387 |
5
.vscode/launch.json
vendored
5
.vscode/launch.json
vendored
@@ -13,10 +13,13 @@
|
||||
"-ed",
|
||||
"-emp",
|
||||
"-e2dsa",
|
||||
"-e2ts",
|
||||
"-a",
|
||||
"ed:wark",
|
||||
"-v",
|
||||
"srv::r:aed:cnodupe"
|
||||
"srv::r:aed:cnodupe",
|
||||
"-v",
|
||||
"dist:dist:r"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
2
.vscode/tasks.json
vendored
2
.vscode/tasks.json
vendored
@@ -8,7 +8,7 @@
|
||||
},
|
||||
{
|
||||
"label": "no_dbg",
|
||||
"command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -a ed:wark -v srv::r:aed:cnodupe ;exit 1",
|
||||
"command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -e2ts -a ed:wark -v srv::r:aed:cnodupe -v dist:dist:r ;exit 1",
|
||||
"type": "shell"
|
||||
}
|
||||
]
|
||||
|
||||
60
README.md
60
README.md
@@ -59,7 +59,7 @@ you may also want these, especially on servers:
|
||||
* server indexing
|
||||
* ☑ locate files by contents
|
||||
* ☑ search by name/path/date/size
|
||||
* ✖ search by ID3-tags etc.
|
||||
* ☑ search by ID3-tags etc.
|
||||
* markdown
|
||||
* ☑ viewer
|
||||
* ☑ editor (sure why not)
|
||||
@@ -82,7 +82,42 @@ path/name queries are space-separated, AND'ed together, and words are negated wi
|
||||
* path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
|
||||
* name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9)
|
||||
|
||||
other metadata (like song tags etc) are not yet indexed for searching
|
||||
add `-e2ts` to also scan/index tags from music files:
|
||||
|
||||
|
||||
## search configuration
|
||||
|
||||
searching relies on two databases, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`). Configuration can be done through arguments, volume flags, or a mix of both.
|
||||
|
||||
through arguments:
|
||||
* `-e2d` enables file indexing on upload
|
||||
* `-e2ds` scans writable folders on startup
|
||||
* `-e2dsa` scans all mounted volumes (including readonly ones)
|
||||
* `-e2t` enables metadata indexing on upload
|
||||
* `-e2ts` scans for tags in all files that don't have tags yet
|
||||
* `-e2tsr` deletes all existing tags, so a full reindex
|
||||
|
||||
the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
|
||||
* `-v ~/music::ce2dsa:ce2tsr` does a full reindex of everything on startup
|
||||
* `-v ~/music::cd2d` disables **all** indexing, even if any `-e2*` are on
|
||||
* `-v ~/music::cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`
|
||||
|
||||
`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those
|
||||
|
||||
`-mte` decides which tags to index and display in the browser (and also the display order), this can be changed per-volume:
|
||||
* `-v ~/music::cmte=title,artist` indexes and displays *title* followed by *artist*
|
||||
|
||||
if you add/remove a tag from `mte` you will need to run with `-e2tsr` once to rebuild the database, otherwise only new files will be affected
|
||||
|
||||
`-mtm` can be used to add or redefine a metadata mapping, say you have media files with `foo` and `bar` tags and you want them to display as `qux` in the browser (preferring `foo` if both are present), then do `-mtm qux=foo,bar` and now you can `-mte artist,title,qux`
|
||||
|
||||
see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif)
|
||||
|
||||
`--no-mutagen` disables mutagen and uses ffprobe instead, which...
|
||||
* is about 20x slower than mutagen
|
||||
* catches a few tags that mutagen doesn't
|
||||
* avoids pulling any GPL code into copyparty
|
||||
* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve
|
||||
|
||||
|
||||
# client examples
|
||||
@@ -91,16 +126,33 @@ other metadata (like song tags etc) are not yet indexed for searching
|
||||
* `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
|
||||
* `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
|
||||
|
||||
* curl/wget: upload some files (post=file, chunk=stdin)
|
||||
* `post(){ curl -b cppwd=wark http://127.0.0.1:3923/ -F act=bput -F f=@"$1";}`
|
||||
`post movie.mkv`
|
||||
* `post(){ wget --header='Cookie: cppwd=wark' http://127.0.0.1:3923/?raw --post-file="$1" -O-;}`
|
||||
`post movie.mkv`
|
||||
* `chunk(){ curl -b cppwd=wark http://127.0.0.1:3923/ -T-;}`
|
||||
`chunk <movie.mkv`
|
||||
|
||||
* FUSE: mount a copyparty server as a local filesystem
|
||||
* cross-platform python client available in [./bin/](bin/)
|
||||
* [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
|
||||
|
||||
copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uplaods:
|
||||
|
||||
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
|
||||
b512 <movie.mkv
|
||||
|
||||
|
||||
# dependencies
|
||||
|
||||
* `jinja2`
|
||||
* `jinja2` (is built into the SFX)
|
||||
|
||||
optional, will eventually enable thumbnails:
|
||||
**optional,** enables music tags:
|
||||
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
|
||||
* or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
|
||||
|
||||
**optional,** will eventually enable thumbnails:
|
||||
* `Pillow` (requires py2.7 or py3.5+)
|
||||
|
||||
|
||||
|
||||
@@ -198,7 +198,7 @@ def main():
|
||||
and "cflag" is config flags to set on this volume
|
||||
|
||||
list of cflags:
|
||||
cnodupe rejects existing files (instead of symlinking them)
|
||||
"cnodupe" rejects existing files (instead of symlinking them)
|
||||
|
||||
example:\033[35m
|
||||
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
|
||||
@@ -239,17 +239,28 @@ def main():
|
||||
ap.add_argument("-q", action="store_true", help="quiet")
|
||||
ap.add_argument("-ed", action="store_true", help="enable ?dots")
|
||||
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
|
||||
ap.add_argument("-e2d", action="store_true", help="enable up2k database")
|
||||
ap.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
|
||||
ap.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
|
||||
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
|
||||
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
|
||||
ap.add_argument("-nih", action="store_true", help="no info hostname")
|
||||
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
|
||||
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
|
||||
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile (for debugging)")
|
||||
ap.add_argument("--no-scandir", action="store_true", help="disable scandir (for debugging)")
|
||||
ap.add_argument("--urlform", type=str, default="print,get", help="how to handle url-forms")
|
||||
ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
|
||||
|
||||
ap2 = ap.add_argument_group('database options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
|
||||
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
|
||||
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
|
||||
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
|
||||
ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead")
|
||||
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
|
||||
ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q")
|
||||
|
||||
ap2 = ap.add_argument_group('SSL/TLS options')
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
|
||||
@@ -257,14 +268,20 @@ def main():
|
||||
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
|
||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
|
||||
|
||||
al = ap.parse_args()
|
||||
# fmt: on
|
||||
|
||||
if al.e2dsa:
|
||||
al.e2ds = True
|
||||
|
||||
if al.e2ds:
|
||||
al.e2d = True
|
||||
# propagate implications
|
||||
for k1, k2 in [
|
||||
["e2dsa", "e2ds"],
|
||||
["e2ds", "e2d"],
|
||||
["e2tsr", "e2ts"],
|
||||
["e2ts", "e2t"],
|
||||
["e2t", "e2d"],
|
||||
]:
|
||||
if getattr(al, k1):
|
||||
setattr(al, k2, True)
|
||||
|
||||
al.i = al.i.split(",")
|
||||
try:
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# coding: utf-8
|
||||
|
||||
VERSION = (0, 8, 3)
|
||||
CODENAME = "discovery"
|
||||
BUILD_DT = (2021, 2, 22)
|
||||
VERSION = (0, 9, 3)
|
||||
CODENAME = "the strongest music server"
|
||||
BUILD_DT = (2021, 3, 4)
|
||||
|
||||
S_VERSION = ".".join(map(str, VERSION))
|
||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
|
||||
@@ -6,7 +6,7 @@ import re
|
||||
import threading
|
||||
|
||||
from .__init__ import PY2, WINDOWS
|
||||
from .util import undot, Pebkac, fsdec, fsenc
|
||||
from .util import undot, Pebkac, fsdec, fsenc, statdir
|
||||
|
||||
|
||||
class VFS(object):
|
||||
@@ -102,12 +102,11 @@ class VFS(object):
|
||||
|
||||
return fsdec(os.path.realpath(fsenc(rp)))
|
||||
|
||||
def ls(self, rem, uname):
|
||||
def ls(self, rem, uname, scandir, lstat=False):
|
||||
"""return user-readable [fsdir,real,virt] items at vpath"""
|
||||
virt_vis = {} # nodes readable by user
|
||||
abspath = self.canonical(rem)
|
||||
items = os.listdir(fsenc(abspath))
|
||||
real = [fsdec(x) for x in items]
|
||||
real = list(statdir(print, scandir, lstat, abspath))
|
||||
real.sort()
|
||||
if not rem:
|
||||
for name, vn2 in sorted(self.nodes.items()):
|
||||
@@ -115,7 +114,7 @@ class VFS(object):
|
||||
virt_vis[name] = vn2
|
||||
|
||||
# no vfs nodes in the list of real inodes
|
||||
real = [x for x in real if x not in self.nodes]
|
||||
real = [x for x in real if x[0] not in self.nodes]
|
||||
|
||||
return [abspath, real, virt_vis]
|
||||
|
||||
@@ -206,8 +205,11 @@ class AuthSrv(object):
|
||||
if lvl in "wa":
|
||||
mwrite[vol_dst].append(uname)
|
||||
if lvl == "c":
|
||||
# config option, currently switches only
|
||||
mflags[vol_dst][uname] = True
|
||||
cval = True
|
||||
if "=" in uname:
|
||||
uname, cval = uname.split("=", 1)
|
||||
|
||||
mflags[vol_dst][uname] = cval
|
||||
|
||||
def reload(self):
|
||||
"""
|
||||
@@ -248,12 +250,19 @@ class AuthSrv(object):
|
||||
perms = perms.split(":")
|
||||
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
|
||||
if lvl == "c":
|
||||
# config option, currently switches only
|
||||
mflags[dst][uname] = True
|
||||
cval = True
|
||||
if "=" in uname:
|
||||
uname, cval = uname.split("=", 1)
|
||||
|
||||
mflags[dst][uname] = cval
|
||||
continue
|
||||
|
||||
if uname == "":
|
||||
uname = "*"
|
||||
|
||||
if lvl in "ra":
|
||||
mread[dst].append(uname)
|
||||
|
||||
if lvl in "wa":
|
||||
mwrite[dst].append(uname)
|
||||
|
||||
@@ -268,6 +277,7 @@ class AuthSrv(object):
|
||||
elif "" not in mount:
|
||||
# there's volumes but no root; make root inaccessible
|
||||
vfs = VFS(os.path.abspath("."), "")
|
||||
vfs.flags["d2d"] = True
|
||||
|
||||
maxdepth = 0
|
||||
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
|
||||
@@ -300,15 +310,27 @@ class AuthSrv(object):
|
||||
)
|
||||
raise Exception("invalid config")
|
||||
|
||||
for vol in vfs.all_vols.values():
|
||||
if (self.args.e2ds and vol.uwrite) or self.args.e2dsa:
|
||||
vol.flags["e2ds"] = True
|
||||
|
||||
if self.args.e2d or "e2ds" in vol.flags:
|
||||
vol.flags["e2d"] = True
|
||||
|
||||
for k in ["e2t", "e2ts", "e2tsr"]:
|
||||
if getattr(self.args, k):
|
||||
vol.flags[k] = True
|
||||
|
||||
# default tag-list if unset
|
||||
if "mte" not in vol.flags:
|
||||
vol.flags["mte"] = self.args.mte
|
||||
|
||||
try:
|
||||
v, _ = vfs.get("/", "*", False, True)
|
||||
if self.warn_anonwrite and os.getcwd() == v.realpath:
|
||||
self.warn_anonwrite = False
|
||||
self.log(
|
||||
"\033[31manyone can read/write the current directory: {}\033[0m".format(
|
||||
v.realpath
|
||||
)
|
||||
)
|
||||
msg = "\033[31manyone can read/write the current directory: {}\033[0m"
|
||||
self.log(msg.format(v.realpath))
|
||||
except Pebkac:
|
||||
self.warn_anonwrite = True
|
||||
|
||||
|
||||
@@ -222,6 +222,9 @@ class HttpCli(object):
|
||||
static_path = os.path.join(E.mod, "web/", self.vpath[5:])
|
||||
return self.tx_file(static_path)
|
||||
|
||||
if "tree" in self.uparam:
|
||||
return self.tx_tree()
|
||||
|
||||
# conditional redirect to single volumes
|
||||
if self.vpath == "" and not self.uparam:
|
||||
nread = len(self.rvol)
|
||||
@@ -246,9 +249,6 @@ class HttpCli(object):
|
||||
self.vpath = None
|
||||
return self.tx_mounts()
|
||||
|
||||
if "tree" in self.uparam:
|
||||
return self.tx_tree()
|
||||
|
||||
return self.tx_browser()
|
||||
|
||||
def handle_options(self):
|
||||
@@ -323,8 +323,11 @@ class HttpCli(object):
|
||||
raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
|
||||
|
||||
def get_body_reader(self):
|
||||
remains = int(self.headers.get("content-length", None))
|
||||
if remains is None:
|
||||
chunked = "chunked" in self.headers.get("transfer-encoding", "").lower()
|
||||
remains = int(self.headers.get("content-length", -1))
|
||||
if chunked:
|
||||
return read_socket_chunked(self.sr), remains
|
||||
elif remains == -1:
|
||||
self.keepalive = False
|
||||
return read_socket_unbounded(self.sr), remains
|
||||
else:
|
||||
@@ -342,6 +345,10 @@ class HttpCli(object):
|
||||
with open(path, "wb", 512 * 1024) as f:
|
||||
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
|
||||
|
||||
self.conn.hsrv.broker.put(
|
||||
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fn
|
||||
)
|
||||
|
||||
return post_sz, sha_b64, remains, path
|
||||
|
||||
def handle_stash(self):
|
||||
@@ -425,7 +432,7 @@ class HttpCli(object):
|
||||
body["ptop"] = vfs.realpath
|
||||
body["prel"] = rem
|
||||
body["addr"] = self.ip
|
||||
body["flag"] = vfs.flags
|
||||
body["vcfg"] = vfs.flags
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
|
||||
response = x.get()
|
||||
@@ -442,20 +449,31 @@ class HttpCli(object):
|
||||
vols.append([vfs.vpath, vfs.realpath, vfs.flags])
|
||||
|
||||
idx = self.conn.get_u2idx()
|
||||
t0 = time.time()
|
||||
if "srch" in body:
|
||||
# search by up2k hashlist
|
||||
vbody = copy.deepcopy(body)
|
||||
vbody["hash"] = len(vbody["hash"])
|
||||
self.log("qj: " + repr(vbody))
|
||||
hits = idx.fsearch(vols, body)
|
||||
self.log("q#: " + repr(hits))
|
||||
self.log("q#: {} ({:.2f}s)".format(repr(hits), time.time() - t0))
|
||||
taglist = []
|
||||
else:
|
||||
# search by query params
|
||||
self.log("qj: " + repr(body))
|
||||
hits = idx.search(vols, body)
|
||||
self.log("q#: " + str(len(hits)))
|
||||
hits, taglist = idx.search(vols, body)
|
||||
self.log("q#: {} ({:.2f}s)".format(len(hits), time.time() - t0))
|
||||
|
||||
r = json.dumps(hits).encode("utf-8")
|
||||
order = []
|
||||
cfg = self.args.mte.split(",")
|
||||
for t in cfg:
|
||||
if t in taglist:
|
||||
order.append(t)
|
||||
for t in taglist:
|
||||
if t not in order:
|
||||
order.append(t)
|
||||
|
||||
r = json.dumps({"hits": hits, "tag_order": order}).encode("utf-8")
|
||||
self.reply(r, mime="application/json")
|
||||
return True
|
||||
|
||||
@@ -661,6 +679,9 @@ class HttpCli(object):
|
||||
raise Pebkac(400, "empty files in post")
|
||||
|
||||
files.append([sz, sha512_hex])
|
||||
self.conn.hsrv.broker.put(
|
||||
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fname
|
||||
)
|
||||
self.conn.nbyte += sz
|
||||
|
||||
except Pebkac:
|
||||
@@ -1098,7 +1119,7 @@ class HttpCli(object):
|
||||
|
||||
try:
|
||||
vn, rem = self.auth.vfs.get(top, self.uname, True, False)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
|
||||
except:
|
||||
vfs_ls = []
|
||||
vfs_virt = {}
|
||||
@@ -1109,13 +1130,13 @@ class HttpCli(object):
|
||||
|
||||
dirs = []
|
||||
|
||||
vfs_ls = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
||||
|
||||
if not self.args.ed or "dots" not in self.uparam:
|
||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||
|
||||
for fn in [x for x in vfs_ls if x != excl]:
|
||||
abspath = os.path.join(fsroot, fn)
|
||||
if os.path.isdir(abspath):
|
||||
dirs.append(fn)
|
||||
dirs.append(fn)
|
||||
|
||||
for x in vfs_virt.keys():
|
||||
if x != excl:
|
||||
@@ -1154,7 +1175,9 @@ class HttpCli(object):
|
||||
|
||||
return self.tx_file(abspath)
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
|
||||
stats = {k: v for k, v in vfs_ls}
|
||||
vfs_ls = [x[0] for x in vfs_ls]
|
||||
vfs_ls.extend(vfs_virt.keys())
|
||||
|
||||
# check for old versions of files,
|
||||
@@ -1183,6 +1206,11 @@ class HttpCli(object):
|
||||
|
||||
is_ls = "ls" in self.uparam
|
||||
|
||||
icur = None
|
||||
if "e2t" in vn.flags:
|
||||
idx = self.conn.get_u2idx()
|
||||
icur = idx.get_cur(vn.realpath)
|
||||
|
||||
dirs = []
|
||||
files = []
|
||||
for fn in vfs_ls:
|
||||
@@ -1200,7 +1228,7 @@ class HttpCli(object):
|
||||
fspath = fsroot + "/" + fn
|
||||
|
||||
try:
|
||||
inf = os.stat(fsenc(fspath))
|
||||
inf = stats.get(fn) or os.stat(fsenc(fspath))
|
||||
except:
|
||||
self.log("broken symlink: {}".format(repr(fspath)))
|
||||
continue
|
||||
@@ -1232,12 +1260,38 @@ class HttpCli(object):
|
||||
"sz": sz,
|
||||
"ext": ext,
|
||||
"dt": dt,
|
||||
"ts": inf.st_mtime,
|
||||
"ts": int(inf.st_mtime),
|
||||
}
|
||||
if is_dir:
|
||||
dirs.append(item)
|
||||
else:
|
||||
files.append(item)
|
||||
item["rd"] = rem
|
||||
|
||||
taglist = {}
|
||||
for f in files:
|
||||
fn = f["name"]
|
||||
rd = f["rd"]
|
||||
del f["rd"]
|
||||
if icur:
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
r = icur.execute(q, (rd, fn)).fetchone()
|
||||
if not r:
|
||||
continue
|
||||
|
||||
w = r[0][:16]
|
||||
tags = {}
|
||||
q = "select k, v from mt where w = ? and k != 'x'"
|
||||
for k, v in icur.execute(q, (w,)):
|
||||
taglist[k] = True
|
||||
tags[k] = v
|
||||
|
||||
f["tags"] = tags
|
||||
|
||||
if icur:
|
||||
taglist = [k for k in self.args.mte.split(",") if k in taglist]
|
||||
for f in dirs:
|
||||
f["tags"] = {}
|
||||
|
||||
srv_info = []
|
||||
|
||||
@@ -1290,6 +1344,7 @@ class HttpCli(object):
|
||||
"srvinf": srv_info,
|
||||
"perms": perms,
|
||||
"logues": logues,
|
||||
"taglist": taglist,
|
||||
}
|
||||
ret = json.dumps(ret)
|
||||
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
|
||||
@@ -1306,7 +1361,10 @@ class HttpCli(object):
|
||||
files=dirs,
|
||||
ts=ts,
|
||||
perms=json.dumps(perms),
|
||||
have_up2k_idx=self.args.e2d,
|
||||
taglist=taglist,
|
||||
tag_order=json.dumps(self.args.mte.split(",")),
|
||||
have_up2k_idx=("e2d" in vn.flags),
|
||||
have_tags_idx=("e2t" in vn.flags),
|
||||
logues=logues,
|
||||
title=html_escape(self.vpath),
|
||||
srv_info=srv_info,
|
||||
|
||||
@@ -20,10 +20,12 @@ except ImportError:
|
||||
you do not have jinja2 installed,\033[33m
|
||||
choose one of these:\033[0m
|
||||
* apt install python-jinja2
|
||||
* python3 -m pip install --user jinja2
|
||||
* {} -m pip install --user jinja2
|
||||
* (try another python version, if you have one)
|
||||
* (try copyparty.sfx instead)
|
||||
"""
|
||||
""".format(
|
||||
os.path.basename(sys.executable)
|
||||
)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
306
copyparty/mtag.py
Normal file
306
copyparty/mtag.py
Normal file
@@ -0,0 +1,306 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
|
||||
from .__init__ import PY2, WINDOWS
|
||||
from .util import fsenc, fsdec
|
||||
|
||||
|
||||
class MTag(object):
|
||||
def __init__(self, log_func, args):
|
||||
self.log_func = log_func
|
||||
self.usable = True
|
||||
self.prefer_mt = False
|
||||
mappings = args.mtm
|
||||
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
|
||||
|
||||
if self.backend == "mutagen":
|
||||
self.get = self.get_mutagen
|
||||
try:
|
||||
import mutagen
|
||||
except:
|
||||
self.log("\033[33mcould not load mutagen, trying ffprobe instead")
|
||||
self.backend = "ffprobe"
|
||||
|
||||
if self.backend == "ffprobe":
|
||||
self.get = self.get_ffprobe
|
||||
self.prefer_mt = True
|
||||
# about 20x slower
|
||||
if PY2:
|
||||
cmd = ["ffprobe", "-version"]
|
||||
try:
|
||||
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
except:
|
||||
self.usable = False
|
||||
else:
|
||||
if not shutil.which("ffprobe"):
|
||||
self.usable = False
|
||||
|
||||
if not self.usable:
|
||||
msg = "\033[31mneed mutagen or ffprobe to read media tags so please run this:\n {} -m pip install --user mutagen \033[0m"
|
||||
self.log(msg.format(os.path.basename(sys.executable)))
|
||||
return
|
||||
|
||||
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||
tagmap = {
|
||||
"album": ["album", "talb", "\u00a9alb", "original-album", "toal"],
|
||||
"artist": [
|
||||
"artist",
|
||||
"tpe1",
|
||||
"\u00a9art",
|
||||
"composer",
|
||||
"performer",
|
||||
"arranger",
|
||||
"\u00a9wrt",
|
||||
"tcom",
|
||||
"tpe3",
|
||||
"original-artist",
|
||||
"tope",
|
||||
],
|
||||
"title": ["title", "tit2", "\u00a9nam"],
|
||||
"circle": [
|
||||
"album-artist",
|
||||
"tpe2",
|
||||
"aart",
|
||||
"conductor",
|
||||
"organization",
|
||||
"band",
|
||||
],
|
||||
".tn": ["tracknumber", "trck", "trkn", "track"],
|
||||
"genre": ["genre", "tcon", "\u00a9gen"],
|
||||
"date": [
|
||||
"original-release-date",
|
||||
"release-date",
|
||||
"date",
|
||||
"tdrc",
|
||||
"\u00a9day",
|
||||
"original-date",
|
||||
"original-year",
|
||||
"tyer",
|
||||
"tdor",
|
||||
"tory",
|
||||
"year",
|
||||
"creation-time",
|
||||
],
|
||||
".bpm": ["bpm", "tbpm", "tmpo", "tbp"],
|
||||
"key": ["initial-key", "tkey", "key"],
|
||||
"comment": ["comment", "comm", "\u00a9cmt", "comments", "description"],
|
||||
}
|
||||
|
||||
if mappings:
|
||||
for k, v in [x.split("=") for x in mappings]:
|
||||
tagmap[k] = v.split(",")
|
||||
|
||||
self.tagmap = {}
|
||||
for k, vs in tagmap.items():
|
||||
vs2 = []
|
||||
for v in vs:
|
||||
if "-" not in v:
|
||||
vs2.append(v)
|
||||
continue
|
||||
|
||||
vs2.append(v.replace("-", " "))
|
||||
vs2.append(v.replace("-", "_"))
|
||||
vs2.append(v.replace("-", ""))
|
||||
|
||||
self.tagmap[k] = vs2
|
||||
|
||||
self.rmap = {
|
||||
v: [n, k] for k, vs in self.tagmap.items() for n, v in enumerate(vs)
|
||||
}
|
||||
# self.get = self.compare
|
||||
|
||||
def log(self, msg):
|
||||
self.log_func("mtag", msg)
|
||||
|
||||
def normalize_tags(self, ret, md):
|
||||
for k, v in dict(md).items():
|
||||
if not v:
|
||||
continue
|
||||
|
||||
k = k.lower().split("::")[0].strip()
|
||||
mk = self.rmap.get(k)
|
||||
if not mk:
|
||||
continue
|
||||
|
||||
pref, mk = mk
|
||||
if mk not in ret or ret[mk][0] > pref:
|
||||
ret[mk] = [pref, v[0]]
|
||||
|
||||
# take first value
|
||||
ret = {k: str(v[1]).strip() for k, v in ret.items()}
|
||||
|
||||
# track 3/7 => track 3
|
||||
for k, v in ret.items():
|
||||
if k[0] == ".":
|
||||
v = v.split("/")[0].strip().lstrip("0")
|
||||
ret[k] = v or 0
|
||||
|
||||
return ret
|
||||
|
||||
def compare(self, abspath):
|
||||
if abspath.endswith(".au"):
|
||||
return {}
|
||||
|
||||
print("\n" + abspath)
|
||||
r1 = self.get_mutagen(abspath)
|
||||
r2 = self.get_ffprobe(abspath)
|
||||
|
||||
keys = {}
|
||||
for d in [r1, r2]:
|
||||
for k in d.keys():
|
||||
keys[k] = True
|
||||
|
||||
diffs = []
|
||||
l1 = []
|
||||
l2 = []
|
||||
for k in sorted(keys.keys()):
|
||||
if k in [".q", ".dur"]:
|
||||
continue # lenient
|
||||
|
||||
v1 = r1.get(k)
|
||||
v2 = r2.get(k)
|
||||
if v1 == v2:
|
||||
print(" ", k, v1)
|
||||
elif v1 != "0000": # ffprobe date=0
|
||||
diffs.append(k)
|
||||
print(" 1", k, v1)
|
||||
print(" 2", k, v2)
|
||||
if v1:
|
||||
l1.append(k)
|
||||
if v2:
|
||||
l2.append(k)
|
||||
|
||||
if diffs:
|
||||
raise Exception()
|
||||
|
||||
return r1
|
||||
|
||||
def get_mutagen(self, abspath):
|
||||
import mutagen
|
||||
|
||||
try:
|
||||
md = mutagen.File(abspath, easy=True)
|
||||
x = md.info.length
|
||||
except Exception as ex:
|
||||
return {}
|
||||
|
||||
ret = {}
|
||||
try:
|
||||
dur = int(md.info.length)
|
||||
try:
|
||||
q = int(md.info.bitrate / 1024)
|
||||
except:
|
||||
q = int((os.path.getsize(abspath) / dur) / 128)
|
||||
|
||||
ret[".dur"] = [0, dur]
|
||||
ret[".q"] = [0, q]
|
||||
except:
|
||||
pass
|
||||
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_ffprobe(self, abspath):
|
||||
cmd = ["ffprobe", "-hide_banner", "--", fsenc(abspath)]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
r = p.communicate()
|
||||
txt = r[1].decode("utf-8", "replace")
|
||||
txt = [x.rstrip("\r") for x in txt.split("\n")]
|
||||
|
||||
"""
|
||||
note:
|
||||
tags which contain newline will be truncated on first \n,
|
||||
ffmpeg emits \n and spacepads the : to align visually
|
||||
note:
|
||||
the Stream ln always mentions Audio: if audio
|
||||
the Stream ln usually has kb/s, is more accurate
|
||||
the Duration ln always has kb/s
|
||||
the Metadata: after Chapter may contain BPM info,
|
||||
title : Tempo: 126.0
|
||||
|
||||
Input #0, wav,
|
||||
Metadata:
|
||||
date : <OK>
|
||||
Duration:
|
||||
Chapter #
|
||||
Metadata:
|
||||
title : <NG>
|
||||
|
||||
Input #0, mp3,
|
||||
Metadata:
|
||||
album : <OK>
|
||||
Duration:
|
||||
Stream #0:0: Audio:
|
||||
Stream #0:1: Video:
|
||||
Metadata:
|
||||
comment : <NG>
|
||||
"""
|
||||
|
||||
ptn_md_beg = re.compile("^( +)Metadata:$")
|
||||
ptn_md_kv = re.compile("^( +)([^:]+) *: (.*)")
|
||||
ptn_dur = re.compile("^ *Duration: ([^ ]+)(, |$)")
|
||||
ptn_br1 = re.compile("^ *Duration: .*, bitrate: ([0-9]+) kb/s(, |$)")
|
||||
ptn_br2 = re.compile("^ *Stream.*: Audio:.* ([0-9]+) kb/s(, |$)")
|
||||
ptn_audio = re.compile("^ *Stream .*: Audio: ")
|
||||
ptn_au_parent = re.compile("^ *(Input #|Stream .*: Audio: )")
|
||||
|
||||
ret = {}
|
||||
md = {}
|
||||
in_md = False
|
||||
is_audio = False
|
||||
au_parent = False
|
||||
for ln in txt:
|
||||
m = ptn_md_kv.match(ln)
|
||||
if m and in_md and len(m.group(1)) == in_md:
|
||||
_, k, v = [x.strip() for x in m.groups()]
|
||||
if k != "" and v != "":
|
||||
md[k] = [v]
|
||||
continue
|
||||
else:
|
||||
in_md = False
|
||||
|
||||
m = ptn_md_beg.match(ln)
|
||||
if m and au_parent:
|
||||
in_md = len(m.group(1)) + 2
|
||||
continue
|
||||
|
||||
au_parent = bool(ptn_au_parent.search(ln))
|
||||
|
||||
if ptn_audio.search(ln):
|
||||
is_audio = True
|
||||
|
||||
m = ptn_dur.search(ln)
|
||||
if m:
|
||||
sec = 0
|
||||
tstr = m.group(1)
|
||||
if tstr.lower() != "n/a":
|
||||
try:
|
||||
tf = tstr.split(",")[0].split(".")[0].split(":")
|
||||
for f in tf:
|
||||
sec *= 60
|
||||
sec += int(f)
|
||||
except:
|
||||
self.log(
|
||||
"\033[33minvalid timestr from ffmpeg: [{}]".format(tstr)
|
||||
)
|
||||
|
||||
ret[".dur"] = sec
|
||||
m = ptn_br1.search(ln)
|
||||
if m:
|
||||
ret[".q"] = m.group(1)
|
||||
|
||||
m = ptn_br2.search(ln)
|
||||
if m:
|
||||
ret[".q"] = m.group(1)
|
||||
|
||||
if not is_audio:
|
||||
return {}
|
||||
|
||||
ret = {k: [0, v] for k, v in ret.items()}
|
||||
|
||||
return self.normalize_tags(ret, md)
|
||||
@@ -39,14 +39,6 @@ class SvcHub(object):
|
||||
self.tcpsrv = TcpSrv(self)
|
||||
self.up2k = Up2k(self)
|
||||
|
||||
if self.args.e2ds:
|
||||
auth = AuthSrv(self.args, self.log, False)
|
||||
vols = auth.vfs.all_vols.values()
|
||||
if not self.args.e2dsa:
|
||||
vols = [x for x in vols if x.uwrite]
|
||||
|
||||
self.up2k.build_indexes(vols)
|
||||
|
||||
# decide which worker impl to use
|
||||
if self.check_mp_enable():
|
||||
from .broker_mp import BrokerMp as Broker
|
||||
|
||||
@@ -24,7 +24,7 @@ class U2idx(object):
|
||||
self.log("could not load sqlite3; searchign wqill be disabled")
|
||||
return
|
||||
|
||||
self.dbs = {}
|
||||
self.cur = {}
|
||||
|
||||
def log(self, msg):
|
||||
self.log_func("u2idx", msg)
|
||||
@@ -37,7 +37,19 @@ class U2idx(object):
|
||||
fsize = body["size"]
|
||||
fhash = body["hash"]
|
||||
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
|
||||
return self.run_query(vols, "select * from up where w = ?", [wark])
|
||||
return self.run_query(vols, "w = ?", [wark], "", [])[0]
|
||||
|
||||
def get_cur(self, ptop):
|
||||
cur = self.cur.get(ptop)
|
||||
if cur:
|
||||
return cur
|
||||
|
||||
cur = _open(ptop)
|
||||
if not cur:
|
||||
return None
|
||||
|
||||
self.cur[ptop] = cur
|
||||
return cur
|
||||
|
||||
def search(self, vols, body):
|
||||
"""search by query params"""
|
||||
@@ -45,59 +57,80 @@ class U2idx(object):
|
||||
return []
|
||||
|
||||
qobj = {}
|
||||
_conv_sz(qobj, body, "sz_min", "sz >= ?")
|
||||
_conv_sz(qobj, body, "sz_max", "sz <= ?")
|
||||
_conv_dt(qobj, body, "dt_min", "mt >= ?")
|
||||
_conv_dt(qobj, body, "dt_max", "mt <= ?")
|
||||
for seg, dk in [["path", "rd"], ["name", "fn"]]:
|
||||
_conv_sz(qobj, body, "sz_min", "up.sz >= ?")
|
||||
_conv_sz(qobj, body, "sz_max", "up.sz <= ?")
|
||||
_conv_dt(qobj, body, "dt_min", "up.mt >= ?")
|
||||
_conv_dt(qobj, body, "dt_max", "up.mt <= ?")
|
||||
for seg, dk in [["path", "up.rd"], ["name", "up.fn"]]:
|
||||
if seg in body:
|
||||
_conv_txt(qobj, body, seg, dk)
|
||||
|
||||
qstr = "select * from up"
|
||||
qv = []
|
||||
if qobj:
|
||||
qk = []
|
||||
for k, v in sorted(qobj.items()):
|
||||
qk.append(k.split("\n")[0])
|
||||
qv.append(v)
|
||||
uq, uv = _sqlize(qobj)
|
||||
|
||||
qstr = " and ".join(qk)
|
||||
qstr = "select * from up where " + qstr
|
||||
tq = ""
|
||||
tv = []
|
||||
qobj = {}
|
||||
if "tags" in body:
|
||||
_conv_txt(qobj, body, "tags", "mt.v")
|
||||
tq, tv = _sqlize(qobj)
|
||||
|
||||
return self.run_query(vols, qstr, qv)
|
||||
return self.run_query(vols, uq, uv, tq, tv)
|
||||
|
||||
def run_query(self, vols, qstr, qv):
|
||||
qv = tuple(qv)
|
||||
self.log("qs: {} {}".format(qstr, repr(qv)))
|
||||
def run_query(self, vols, uq, uv, tq, tv):
|
||||
self.log("qs: {} {} , {} {}".format(uq, repr(uv), tq, repr(tv)))
|
||||
|
||||
ret = []
|
||||
lim = 100
|
||||
lim = 1000
|
||||
taglist = {}
|
||||
for (vtop, ptop, flags) in vols:
|
||||
db = self.dbs.get(ptop)
|
||||
if not db:
|
||||
db = _open(ptop)
|
||||
if not db:
|
||||
continue
|
||||
cur = self.get_cur(ptop)
|
||||
if not cur:
|
||||
continue
|
||||
|
||||
self.dbs[ptop] = db
|
||||
# self.log("idx /{} @ {} {}".format(vtop, ptop, flags))
|
||||
if not tq:
|
||||
if not uq:
|
||||
q = "select * from up"
|
||||
v = ()
|
||||
else:
|
||||
q = "select * from up where " + uq
|
||||
v = tuple(uv)
|
||||
else:
|
||||
# naive assumption: tags first
|
||||
q = "select up.* from up inner join mt on substr(up.w,1,16) = mt.w where {}"
|
||||
q = q.format(" and ".join([tq, uq]) if uq else tq)
|
||||
v = tuple(tv + uv)
|
||||
|
||||
c = db.execute(qstr, qv)
|
||||
for _, ts, sz, rd, fn in c:
|
||||
sret = []
|
||||
c = cur.execute(q, v)
|
||||
for hit in c:
|
||||
w, ts, sz, rd, fn = hit
|
||||
lim -= 1
|
||||
if lim <= 0:
|
||||
break
|
||||
|
||||
rp = os.path.join(vtop, rd, fn).replace("\\", "/")
|
||||
ret.append({"ts": int(ts), "sz": sz, "rp": rp})
|
||||
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
|
||||
|
||||
return ret
|
||||
for hit in sret:
|
||||
w = hit["w"]
|
||||
del hit["w"]
|
||||
tags = {}
|
||||
q = "select k, v from mt where w = ? and k != 'x'"
|
||||
for k, v in cur.execute(q, (w,)):
|
||||
taglist[k] = True
|
||||
tags[k] = v
|
||||
|
||||
hit["tags"] = tags
|
||||
|
||||
ret.extend(sret)
|
||||
|
||||
return ret, list(taglist.keys())
|
||||
|
||||
|
||||
def _open(ptop):
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
if os.path.exists(db_path):
|
||||
return sqlite3.connect(db_path)
|
||||
return sqlite3.connect(db_path).cursor()
|
||||
|
||||
|
||||
def _conv_sz(q, body, k, sql):
|
||||
@@ -146,3 +179,13 @@ def _conv_txt(q, body, k, sql):
|
||||
|
||||
qk = "{} {} like {}?{}".format(sql, inv, head, tail)
|
||||
q[qk + "\n" + v] = u8safe(v)
|
||||
|
||||
|
||||
def _sqlize(qobj):
|
||||
keys = []
|
||||
values = []
|
||||
for k, v in sorted(qobj.items()):
|
||||
keys.append(k.split("\n")[0])
|
||||
values.append(v)
|
||||
|
||||
return " and ".join(keys), values
|
||||
|
||||
@@ -12,6 +12,7 @@ import shutil
|
||||
import base64
|
||||
import hashlib
|
||||
import threading
|
||||
import traceback
|
||||
from copy import deepcopy
|
||||
|
||||
from .__init__ import WINDOWS
|
||||
@@ -26,7 +27,10 @@ from .util import (
|
||||
atomic_move,
|
||||
w8b64enc,
|
||||
w8b64dec,
|
||||
statdir,
|
||||
)
|
||||
from .mtag import MTag
|
||||
from .authsrv import AuthSrv
|
||||
|
||||
try:
|
||||
HAVE_SQLITE3 = True
|
||||
@@ -47,22 +51,27 @@ class Up2k(object):
|
||||
self.broker = broker
|
||||
self.args = broker.args
|
||||
self.log_func = broker.log
|
||||
self.persist = self.args.e2d
|
||||
|
||||
# config
|
||||
self.salt = broker.args.salt
|
||||
|
||||
# state
|
||||
self.mutex = threading.Lock()
|
||||
self.hashq = Queue()
|
||||
self.tagq = Queue()
|
||||
self.registry = {}
|
||||
self.db = {}
|
||||
self.entags = {}
|
||||
self.flags = {}
|
||||
self.cur = {}
|
||||
self.mtag = None
|
||||
self.n_mtag_thr_alive = 0
|
||||
self.n_mtag_tags_added = 0
|
||||
|
||||
self.mem_db = None
|
||||
self.mem_cur = None
|
||||
if HAVE_SQLITE3:
|
||||
# mojibake detector
|
||||
self.mem_db = sqlite3.connect(":memory:", check_same_thread=False)
|
||||
self.mem_db.execute(r"create table a (b text)")
|
||||
self.mem_db.commit()
|
||||
self.mem_cur = self._orz(":memory:")
|
||||
self.mem_cur.execute(r"create table a (b text)")
|
||||
|
||||
if WINDOWS:
|
||||
# usually fails to set lastmod too quickly
|
||||
@@ -71,25 +80,37 @@ class Up2k(object):
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
if self.persist:
|
||||
# static
|
||||
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
|
||||
|
||||
if not HAVE_SQLITE3:
|
||||
self.log("could not initialize sqlite3, will use in-memory registry only")
|
||||
|
||||
# this is kinda jank
|
||||
auth = AuthSrv(self.args, self.log, False)
|
||||
have_e2d = self.init_indexes(auth)
|
||||
|
||||
if have_e2d:
|
||||
thr = threading.Thread(target=self._snapshot)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
# static
|
||||
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
|
||||
thr = threading.Thread(target=self._tagger)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
if self.persist and not HAVE_SQLITE3:
|
||||
self.log("could not initialize sqlite3, will use in-memory registry only")
|
||||
thr = threading.Thread(target=self._hasher)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def log(self, msg):
|
||||
self.log_func("up2k", msg + "\033[K")
|
||||
|
||||
def w8enc(self, rd, fn):
|
||||
ret = []
|
||||
for k, v in [["d", rd], ["f", fn]]:
|
||||
for v in [rd, fn]:
|
||||
try:
|
||||
self.mem_db.execute("select * from a where b = ?", (v,))
|
||||
self.mem_cur.execute("select * from a where b = ?", (v,))
|
||||
ret.append(v)
|
||||
except:
|
||||
ret.append("//" + w8b64enc(v))
|
||||
@@ -120,14 +141,82 @@ class Up2k(object):
|
||||
|
||||
return ret
|
||||
|
||||
def register_vpath(self, ptop):
|
||||
def init_indexes(self, auth):
|
||||
self.pp = ProgressPrinter()
|
||||
vols = auth.vfs.all_vols.values()
|
||||
t0 = time.time()
|
||||
have_e2d = False
|
||||
|
||||
live_vols = []
|
||||
for vol in vols:
|
||||
try:
|
||||
os.listdir(vol.realpath)
|
||||
live_vols.append(vol)
|
||||
except:
|
||||
self.log("\033[31mcannot access " + vol.realpath)
|
||||
|
||||
vols = live_vols
|
||||
|
||||
need_mtag = False
|
||||
for vol in auth.vfs.all_vols.values():
|
||||
if "e2t" in vol.flags:
|
||||
need_mtag = True
|
||||
|
||||
if need_mtag:
|
||||
self.mtag = MTag(self.log_func, self.args)
|
||||
if not self.mtag.usable:
|
||||
self.mtag = None
|
||||
|
||||
# e2ds(a) volumes first,
|
||||
# also covers tags where e2ts is set
|
||||
for vol in vols:
|
||||
en = {}
|
||||
if "mte" in vol.flags:
|
||||
en = {k: True for k in vol.flags["mte"].split(",")}
|
||||
|
||||
self.entags[vol.realpath] = en
|
||||
|
||||
if "e2d" in vol.flags:
|
||||
have_e2d = True
|
||||
|
||||
if "e2ds" in vol.flags:
|
||||
r = self._build_file_index(vol, vols)
|
||||
if not r:
|
||||
needed_mutagen = True
|
||||
|
||||
# open the rest + do any e2ts(a)
|
||||
needed_mutagen = False
|
||||
for vol in vols:
|
||||
r = self.register_vpath(vol.realpath, vol.flags)
|
||||
if not r or "e2ts" not in vol.flags:
|
||||
continue
|
||||
|
||||
cur, db_path, sz0 = r
|
||||
n_add, n_rm, success = self._build_tags_index(vol.realpath)
|
||||
if not success:
|
||||
needed_mutagen = True
|
||||
|
||||
if n_add or n_rm:
|
||||
self.vac(cur, db_path, n_add, n_rm, sz0)
|
||||
|
||||
self.pp.end = True
|
||||
msg = "{} volumes in {:.2f} sec"
|
||||
self.log(msg.format(len(vols), time.time() - t0))
|
||||
|
||||
if needed_mutagen:
|
||||
msg = "\033[31mcould not read tags because no backends are available (mutagen or ffprobe)\033[0m"
|
||||
self.log(msg)
|
||||
|
||||
return have_e2d
|
||||
|
||||
def register_vpath(self, ptop, flags):
|
||||
with self.mutex:
|
||||
if ptop in self.registry:
|
||||
return None
|
||||
|
||||
reg = {}
|
||||
path = os.path.join(ptop, ".hist", "up2k.snap")
|
||||
if self.persist and os.path.exists(path):
|
||||
if "e2d" in flags and os.path.exists(path):
|
||||
with gzip.GzipFile(path, "rb") as f:
|
||||
j = f.read().decode("utf-8")
|
||||
|
||||
@@ -139,8 +228,9 @@ class Up2k(object):
|
||||
m = [m] + self._vis_reg_progress(reg)
|
||||
self.log("\n".join(m))
|
||||
|
||||
self.flags[ptop] = flags
|
||||
self.registry[ptop] = reg
|
||||
if not self.persist or not HAVE_SQLITE3:
|
||||
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
|
||||
return None
|
||||
|
||||
try:
|
||||
@@ -149,73 +239,66 @@ class Up2k(object):
|
||||
pass
|
||||
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
if ptop in self.db:
|
||||
# self.db[ptop].close()
|
||||
if ptop in self.cur:
|
||||
return None
|
||||
|
||||
try:
|
||||
db = self._open_db(db_path)
|
||||
self.db[ptop] = db
|
||||
return db
|
||||
except Exception as ex:
|
||||
self.log("cannot use database at [{}]: {}".format(ptop, repr(ex)))
|
||||
sz0 = 0
|
||||
if os.path.exists(db_path):
|
||||
sz0 = os.path.getsize(db_path) // 1024
|
||||
|
||||
cur = self._open_db(db_path)
|
||||
self.cur[ptop] = cur
|
||||
return [cur, db_path, sz0]
|
||||
except:
|
||||
msg = "cannot use database at [{}]:\n{}"
|
||||
self.log(msg.format(ptop, traceback.format_exc()))
|
||||
|
||||
return None
|
||||
|
||||
def build_indexes(self, writeables):
|
||||
tops = [d.realpath for d in writeables]
|
||||
self.pp = ProgressPrinter()
|
||||
t0 = time.time()
|
||||
for top in tops:
|
||||
db = self.register_vpath(top)
|
||||
if not db:
|
||||
continue
|
||||
def _build_file_index(self, vol, all_vols):
|
||||
do_vac = False
|
||||
top = vol.realpath
|
||||
reg = self.register_vpath(top, vol.flags)
|
||||
if not reg:
|
||||
return
|
||||
|
||||
self.pp.n = next(db.execute("select count(w) from up"))[0]
|
||||
db_path = os.path.join(top, ".hist", "up2k.db")
|
||||
sz0 = os.path.getsize(db_path) // 1024
|
||||
_, db_path, sz0 = reg
|
||||
dbw = [reg[0], 0, time.time()]
|
||||
self.pp.n = next(dbw[0].execute("select count(w) from up"))[0]
|
||||
|
||||
# can be symlink so don't `and d.startswith(top)``
|
||||
excl = set([d for d in tops if d != top])
|
||||
dbw = [db, 0, time.time()]
|
||||
# can be symlink so don't `and d.startswith(top)``
|
||||
excl = set([d.realpath for d in all_vols if d != vol])
|
||||
n_add = self._build_dir(dbw, top, excl, top)
|
||||
n_rm = self._drop_lost(dbw[0], top)
|
||||
if dbw[1]:
|
||||
self.log("commit {} new files".format(dbw[1]))
|
||||
dbw[0].connection.commit()
|
||||
|
||||
n_add = self._build_dir(dbw, top, excl, top)
|
||||
n_rm = self._drop_lost(db, top)
|
||||
if dbw[1]:
|
||||
self.log("commit {} new files".format(dbw[1]))
|
||||
n_add, n_rm, success = self._build_tags_index(vol.realpath)
|
||||
|
||||
db.commit()
|
||||
if n_add or n_rm:
|
||||
db_path = os.path.join(top, ".hist", "up2k.db")
|
||||
sz1 = os.path.getsize(db_path) // 1024
|
||||
db.execute("vacuum")
|
||||
sz2 = os.path.getsize(db_path) // 1024
|
||||
msg = "{} new, {} del, {} kB vacced, {} kB gain, {} kB now".format(
|
||||
n_add, n_rm, sz1 - sz2, sz2 - sz0, sz2
|
||||
)
|
||||
self.log(msg)
|
||||
dbw[0].connection.commit()
|
||||
if n_add or n_rm or do_vac:
|
||||
self.vac(dbw[0], db_path, n_add, n_rm, sz0)
|
||||
|
||||
self.pp.end = True
|
||||
self.log("{} volumes in {:.2f} sec".format(len(tops), time.time() - t0))
|
||||
return success
|
||||
|
||||
def vac(self, cur, db_path, n_add, n_rm, sz0):
|
||||
sz1 = os.path.getsize(db_path) // 1024
|
||||
cur.execute("vacuum")
|
||||
sz2 = os.path.getsize(db_path) // 1024
|
||||
msg = "{} new, {} del, {} kB vacced, {} kB gain, {} kB now".format(
|
||||
n_add, n_rm, sz1 - sz2, sz2 - sz0, sz2
|
||||
)
|
||||
self.log(msg)
|
||||
|
||||
def _build_dir(self, dbw, top, excl, cdir):
|
||||
try:
|
||||
inodes = [fsdec(x) for x in os.listdir(fsenc(cdir))]
|
||||
except Exception as ex:
|
||||
self.log("listdir: {} @ [{}]".format(repr(ex), cdir))
|
||||
return 0
|
||||
|
||||
self.pp.msg = "a{} {}".format(self.pp.n, cdir)
|
||||
histdir = os.path.join(top, ".hist")
|
||||
ret = 0
|
||||
for inode in inodes:
|
||||
abspath = os.path.join(cdir, inode)
|
||||
try:
|
||||
inf = os.stat(fsenc(abspath))
|
||||
except Exception as ex:
|
||||
self.log("stat: {} @ [{}]".format(repr(ex), abspath))
|
||||
continue
|
||||
|
||||
for iname, inf in statdir(self.log, not self.args.no_scandir, False, cdir):
|
||||
abspath = os.path.join(cdir, iname)
|
||||
lmod = int(inf.st_mtime)
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
if abspath in excl or abspath == histdir:
|
||||
continue
|
||||
@@ -241,11 +324,11 @@ class Up2k(object):
|
||||
self.log(m.format(top, rp, len(in_db), rep_db))
|
||||
dts = -1
|
||||
|
||||
if dts == inf.st_mtime and dsz == inf.st_size:
|
||||
if dts == lmod and dsz == inf.st_size:
|
||||
continue
|
||||
|
||||
m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
|
||||
top, rp, dts, inf.st_mtime, dsz, inf.st_size
|
||||
top, rp, dts, lmod, dsz, inf.st_size
|
||||
)
|
||||
self.log(m)
|
||||
self.db_rm(dbw[0], rd, fn)
|
||||
@@ -264,22 +347,22 @@ class Up2k(object):
|
||||
continue
|
||||
|
||||
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
|
||||
self.db_add(dbw[0], wark, rd, fn, inf.st_mtime, inf.st_size)
|
||||
self.db_add(dbw[0], wark, rd, fn, lmod, inf.st_size)
|
||||
dbw[1] += 1
|
||||
ret += 1
|
||||
td = time.time() - dbw[2]
|
||||
if dbw[1] >= 4096 or td >= 60:
|
||||
self.log("commit {} new files".format(dbw[1]))
|
||||
dbw[0].commit()
|
||||
dbw[0].connection.commit()
|
||||
dbw[1] = 0
|
||||
dbw[2] = time.time()
|
||||
return ret
|
||||
|
||||
def _drop_lost(self, db, top):
|
||||
def _drop_lost(self, cur, top):
|
||||
rm = []
|
||||
nchecked = 0
|
||||
nfiles = next(db.execute("select count(w) from up"))[0]
|
||||
c = db.execute("select * from up")
|
||||
nfiles = next(cur.execute("select count(w) from up"))[0]
|
||||
c = cur.execute("select * from up")
|
||||
for dwark, dts, dsz, drd, dfn in c:
|
||||
nchecked += 1
|
||||
if drd.startswith("//") or dfn.startswith("//"):
|
||||
@@ -298,49 +381,210 @@ class Up2k(object):
|
||||
self.log("forgetting {} deleted files".format(len(rm)))
|
||||
for rd, fn in rm:
|
||||
# self.log("{} / {}".format(rd, fn))
|
||||
self.db_rm(db, rd, fn)
|
||||
self.db_rm(cur, rd, fn)
|
||||
|
||||
return len(rm)
|
||||
|
||||
def _build_tags_index(self, ptop):
|
||||
entags = self.entags[ptop]
|
||||
flags = self.flags[ptop]
|
||||
cur = self.cur[ptop]
|
||||
n_add = 0
|
||||
n_rm = 0
|
||||
n_buf = 0
|
||||
last_write = time.time()
|
||||
|
||||
if "e2tsr" in flags:
|
||||
n_rm = cur.execute("select count(w) from mt").fetchone()[0]
|
||||
if n_rm:
|
||||
self.log("discarding {} media tags for a full rescan".format(n_rm))
|
||||
cur.execute("delete from mt")
|
||||
else:
|
||||
self.log("volume has e2tsr but there are no media tags to discard")
|
||||
|
||||
# integrity: drop tags for tracks that were deleted
|
||||
if "e2t" in flags:
|
||||
drops = []
|
||||
c2 = cur.connection.cursor()
|
||||
up_q = "select w from up where substr(w,1,16) = ?"
|
||||
for (w,) in cur.execute("select w from mt"):
|
||||
if not c2.execute(up_q, (w,)).fetchone():
|
||||
drops.append(w[:16])
|
||||
c2.close()
|
||||
|
||||
if drops:
|
||||
msg = "discarding media tags for {} deleted files"
|
||||
self.log(msg.format(len(drops)))
|
||||
n_rm += len(drops)
|
||||
for w in drops:
|
||||
cur.execute("delete from mt where w = ?", (w,))
|
||||
|
||||
# bail if a volume flag disables indexing
|
||||
if "d2t" in flags or "d2d" in flags:
|
||||
return n_add, n_rm, True
|
||||
|
||||
# add tags for new files
|
||||
if "e2ts" in flags:
|
||||
if not self.mtag:
|
||||
return n_add, n_rm, False
|
||||
|
||||
mpool = False
|
||||
if self.mtag.prefer_mt and not self.args.no_mtag_mt:
|
||||
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
|
||||
# both do crazy runahead so lets reinvent another wheel
|
||||
nw = os.cpu_count()
|
||||
if not self.n_mtag_thr_alive:
|
||||
msg = 'using {} cores for tag reader "{}"'
|
||||
self.log(msg.format(nw, self.mtag.backend))
|
||||
|
||||
self.n_mtag_thr_alive = nw
|
||||
mpool = Queue(nw)
|
||||
for _ in range(nw):
|
||||
thr = threading.Thread(target=self._tag_thr, args=(mpool,))
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
c2 = cur.connection.cursor()
|
||||
c3 = cur.connection.cursor()
|
||||
n_left = cur.execute("select count(w) from up").fetchone()[0]
|
||||
for w, rd, fn in cur.execute("select w, rd, fn from up"):
|
||||
n_left -= 1
|
||||
q = "select w from mt where w = ?"
|
||||
if c2.execute(q, (w[:16],)).fetchone():
|
||||
continue
|
||||
|
||||
abspath = os.path.join(ptop, rd, fn)
|
||||
self.pp.msg = "c{} {}".format(n_left, abspath)
|
||||
args = c3, entags, w, abspath
|
||||
if not mpool:
|
||||
n_tags = self._tag_file(*args)
|
||||
else:
|
||||
mpool.put(args)
|
||||
with self.mutex:
|
||||
n_tags = self.n_mtag_tags_added
|
||||
self.n_mtag_tags_added = 0
|
||||
|
||||
n_add += n_tags
|
||||
n_buf += n_tags
|
||||
|
||||
td = time.time() - last_write
|
||||
if n_buf >= 4096 or td >= 60:
|
||||
self.log("commit {} new tags".format(n_buf))
|
||||
cur.connection.commit()
|
||||
last_write = time.time()
|
||||
n_buf = 0
|
||||
|
||||
if self.n_mtag_thr_alive:
|
||||
mpool.join()
|
||||
for _ in range(self.n_mtag_thr_alive):
|
||||
mpool.put(None)
|
||||
|
||||
c3.close()
|
||||
c2.close()
|
||||
|
||||
return n_add, n_rm, True
|
||||
|
||||
def _tag_thr(self, q):
|
||||
while True:
|
||||
task = q.get()
|
||||
if not task:
|
||||
break
|
||||
|
||||
try:
|
||||
write_cur, entags, wark, abspath = task
|
||||
tags = self.mtag.get(abspath)
|
||||
with self.mutex:
|
||||
n = self._tag_file(write_cur, entags, wark, abspath, tags)
|
||||
self.n_mtag_tags_added += n
|
||||
except:
|
||||
with self.mutex:
|
||||
self.n_mtag_thr_alive -= 1
|
||||
raise
|
||||
finally:
|
||||
q.task_done()
|
||||
|
||||
def _tag_file(self, write_cur, entags, wark, abspath, tags=None):
|
||||
tags = tags or self.mtag.get(abspath)
|
||||
tags = {k: v for k, v in tags.items() if k in entags}
|
||||
if not tags:
|
||||
# indicate scanned without tags
|
||||
tags = {"x": 0}
|
||||
|
||||
ret = 0
|
||||
for k, v in tags.items():
|
||||
q = "insert into mt values (?,?,?)"
|
||||
write_cur.execute(q, (wark[:16], k, v))
|
||||
ret += 1
|
||||
|
||||
return ret
|
||||
|
||||
def _orz(self, db_path):
|
||||
return sqlite3.connect(db_path, check_same_thread=False).cursor()
|
||||
|
||||
def _open_db(self, db_path):
|
||||
existed = os.path.exists(db_path)
|
||||
conn = sqlite3.connect(db_path, check_same_thread=False)
|
||||
try:
|
||||
ver = self._read_ver(conn)
|
||||
cur = self._orz(db_path)
|
||||
ver = self._read_ver(cur)
|
||||
if not existed and ver is None:
|
||||
return self._create_db(db_path, cur)
|
||||
|
||||
if ver == 1:
|
||||
conn = self._upgrade_v1(conn, db_path)
|
||||
ver = self._read_ver(conn)
|
||||
orig_ver = ver
|
||||
if not ver or ver < 3:
|
||||
bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
|
||||
db = cur.connection
|
||||
cur.close()
|
||||
db.close()
|
||||
msg = "creating new DB (old is bad); backup: {}"
|
||||
if ver:
|
||||
msg = "creating backup before upgrade: {}"
|
||||
|
||||
if ver == 2:
|
||||
try:
|
||||
nfiles = next(conn.execute("select count(w) from up"))[0]
|
||||
self.log("found DB at {} |{}|".format(db_path, nfiles))
|
||||
return conn
|
||||
except Exception as ex:
|
||||
self.log("WARN: could not list files, DB corrupt?\n " + repr(ex))
|
||||
self.log(msg.format(bak))
|
||||
shutil.copy2(db_path, bak)
|
||||
cur = self._orz(db_path)
|
||||
|
||||
if ver is not None:
|
||||
self.log("REPLACING unsupported DB (v.{}) at {}".format(ver, db_path))
|
||||
elif not existed:
|
||||
raise Exception("whatever")
|
||||
if ver == 1:
|
||||
cur = self._upgrade_v1(cur, db_path)
|
||||
if cur:
|
||||
ver = 2
|
||||
|
||||
conn.close()
|
||||
os.unlink(db_path)
|
||||
conn = sqlite3.connect(db_path, check_same_thread=False)
|
||||
except:
|
||||
pass
|
||||
if ver == 2:
|
||||
cur = self._create_v3(cur)
|
||||
ver = self._read_ver(cur) if cur else None
|
||||
|
||||
# sqlite is variable-width only, no point in using char/nchar/varchar
|
||||
self._create_v2(conn)
|
||||
conn.commit()
|
||||
if ver == 3:
|
||||
if orig_ver != ver:
|
||||
cur.connection.commit()
|
||||
cur.execute("vacuum")
|
||||
cur.connection.commit()
|
||||
|
||||
try:
|
||||
nfiles = next(cur.execute("select count(w) from up"))[0]
|
||||
self.log("OK: {} |{}|".format(db_path, nfiles))
|
||||
return cur
|
||||
except Exception as ex:
|
||||
self.log("WARN: could not list files, DB corrupt?\n " + repr(ex))
|
||||
|
||||
if cur:
|
||||
db = cur.connection
|
||||
cur.close()
|
||||
db.close()
|
||||
|
||||
return self._create_db(db_path, None)
|
||||
|
||||
def _create_db(self, db_path, cur):
|
||||
if not cur:
|
||||
cur = self._orz(db_path)
|
||||
|
||||
self._create_v2(cur)
|
||||
self._create_v3(cur)
|
||||
cur.connection.commit()
|
||||
self.log("created DB at {}".format(db_path))
|
||||
return conn
|
||||
return cur
|
||||
|
||||
def _read_ver(self, conn):
|
||||
def _read_ver(self, cur):
|
||||
for tab in ["ki", "kv"]:
|
||||
try:
|
||||
c = conn.execute(r"select v from {} where k = 'sver'".format(tab))
|
||||
c = cur.execute(r"select v from {} where k = 'sver'".format(tab))
|
||||
except:
|
||||
continue
|
||||
|
||||
@@ -348,26 +592,47 @@ class Up2k(object):
|
||||
if rows:
|
||||
return int(rows[0][0])
|
||||
|
||||
def _create_v2(self, conn):
|
||||
def _create_v2(self, cur):
|
||||
for cmd in [
|
||||
r"create table ks (k text, v text)",
|
||||
r"create table ki (k text, v int)",
|
||||
r"create table up (w text, mt int, sz int, rd text, fn text)",
|
||||
r"insert into ki values ('sver', 2)",
|
||||
r"create index up_w on up(w)",
|
||||
r"create index up_rd on up(rd)",
|
||||
r"create index up_fn on up(fn)",
|
||||
]:
|
||||
conn.execute(cmd)
|
||||
cur.execute(cmd)
|
||||
return cur
|
||||
|
||||
def _create_v3(self, cur):
|
||||
"""
|
||||
collision in 2^(n/2) files where n = bits (6 bits/ch)
|
||||
10*6/2 = 2^30 = 1'073'741'824, 24.1mb idx
|
||||
12*6/2 = 2^36 = 68'719'476'736, 24.8mb idx
|
||||
16*6/2 = 2^48 = 281'474'976'710'656, 26.1mb idx
|
||||
"""
|
||||
for c, ks in [["drop table k", "isv"], ["drop index up_", "w"]]:
|
||||
for k in ks:
|
||||
try:
|
||||
cur.execute(c + k)
|
||||
except:
|
||||
pass
|
||||
|
||||
for cmd in [
|
||||
r"create index up_w on up(substr(w,1,16))",
|
||||
r"create table mt (w text, k text, v int)",
|
||||
r"create index mt_w on mt(w)",
|
||||
r"create index mt_k on mt(k)",
|
||||
r"create index mt_v on mt(v)",
|
||||
r"create table kv (k text, v int)",
|
||||
r"insert into kv values ('sver', 3)",
|
||||
]:
|
||||
cur.execute(cmd)
|
||||
return cur
|
||||
|
||||
def _upgrade_v1(self, odb, db_path):
|
||||
self.log("\033[33mupgrading v1 to v2:\033[0m {}".format(db_path))
|
||||
|
||||
npath = db_path + ".next"
|
||||
if os.path.exists(npath):
|
||||
os.unlink(npath)
|
||||
|
||||
ndb = sqlite3.connect(npath, check_same_thread=False)
|
||||
ndb = self._orz(npath)
|
||||
self._create_v2(ndb)
|
||||
|
||||
c = odb.execute("select * from up")
|
||||
@@ -376,27 +641,29 @@ class Up2k(object):
|
||||
v = (wark, ts, sz, rd, fn)
|
||||
ndb.execute("insert into up values (?,?,?,?,?)", v)
|
||||
|
||||
ndb.commit()
|
||||
ndb.close()
|
||||
odb.close()
|
||||
bpath = db_path + ".bak.v1"
|
||||
self.log("success; backup at: " + bpath)
|
||||
atomic_move(db_path, bpath)
|
||||
ndb.connection.commit()
|
||||
ndb.connection.close()
|
||||
odb.connection.close()
|
||||
atomic_move(npath, db_path)
|
||||
return sqlite3.connect(db_path, check_same_thread=False)
|
||||
return self._orz(db_path)
|
||||
|
||||
def handle_json(self, cj):
|
||||
self.register_vpath(cj["ptop"])
|
||||
if not self.register_vpath(cj["ptop"], cj["vcfg"]):
|
||||
if cj["ptop"] not in self.registry:
|
||||
raise Pebkac(410, "location unavailable")
|
||||
|
||||
cj["name"] = sanitize_fn(cj["name"])
|
||||
cj["poke"] = time.time()
|
||||
wark = self._get_wark(cj)
|
||||
now = time.time()
|
||||
job = None
|
||||
with self.mutex:
|
||||
db = self.db.get(cj["ptop"], None)
|
||||
cur = self.cur.get(cj["ptop"], None)
|
||||
reg = self.registry[cj["ptop"]]
|
||||
if db:
|
||||
cur = db.execute(r"select * from up where w = ?", (wark,))
|
||||
if cur:
|
||||
q = r"select * from up where substr(w,1,16) = ? and w = ?"
|
||||
argv = (wark[:16], wark)
|
||||
cur = cur.execute(q, argv)
|
||||
for _, dtime, dsize, dp_dir, dp_fn in cur:
|
||||
if dp_dir.startswith("//") or dp_fn.startswith("//"):
|
||||
dp_dir, dp_fn = self.w8dec(dp_dir, dp_fn)
|
||||
@@ -409,7 +676,6 @@ class Up2k(object):
|
||||
"prel": dp_dir,
|
||||
"vtop": cj["vtop"],
|
||||
"ptop": cj["ptop"],
|
||||
"flag": cj["flag"],
|
||||
"size": dsize,
|
||||
"lmod": dtime,
|
||||
"hash": [],
|
||||
@@ -446,7 +712,7 @@ class Up2k(object):
|
||||
err = "partial upload exists at a different location; please resume uploading here instead:\n"
|
||||
err += "/" + vsrc + " "
|
||||
raise Pebkac(400, err)
|
||||
elif "nodupe" in job["flag"]:
|
||||
elif "nodupe" in self.flags[job["ptop"]]:
|
||||
self.log("dupe-reject:\n {0}\n {1}".format(src, dst))
|
||||
err = "upload rejected, file already exists:\n/" + vsrc + " "
|
||||
raise Pebkac(400, err)
|
||||
@@ -476,7 +742,6 @@ class Up2k(object):
|
||||
"vtop",
|
||||
"ptop",
|
||||
"prel",
|
||||
"flag",
|
||||
"name",
|
||||
"size",
|
||||
"lmod",
|
||||
@@ -584,16 +849,33 @@ class Up2k(object):
|
||||
if WINDOWS:
|
||||
self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))])
|
||||
|
||||
db = self.db.get(job["ptop"], None)
|
||||
if db:
|
||||
j = job
|
||||
self.db_rm(db, j["prel"], j["name"])
|
||||
self.db_add(db, j["wark"], j["prel"], j["name"], j["lmod"], j["size"])
|
||||
db.commit()
|
||||
# legit api sware 2 me mum
|
||||
if self.idx_wark(
|
||||
job["ptop"],
|
||||
job["wark"],
|
||||
job["prel"],
|
||||
job["name"],
|
||||
job["lmod"],
|
||||
job["size"],
|
||||
):
|
||||
del self.registry[ptop][wark]
|
||||
# in-memory registry is reserved for unfinished uploads
|
||||
|
||||
return ret, dst
|
||||
return ret, dst
|
||||
|
||||
def idx_wark(self, ptop, wark, rd, fn, lmod, sz):
|
||||
cur = self.cur.get(ptop, None)
|
||||
if not cur:
|
||||
return False
|
||||
|
||||
self.db_rm(cur, rd, fn)
|
||||
self.db_add(cur, wark, rd, fn, int(lmod), sz)
|
||||
cur.connection.commit()
|
||||
|
||||
if "e2t" in self.flags[ptop]:
|
||||
self.tagq.put([ptop, wark, rd, fn])
|
||||
|
||||
return True
|
||||
|
||||
def db_rm(self, db, rd, fn):
|
||||
sql = "delete from up where rd = ? and fn = ?"
|
||||
@@ -604,7 +886,7 @@ class Up2k(object):
|
||||
|
||||
def db_add(self, db, wark, rd, fn, ts, sz):
|
||||
sql = "insert into up values (?,?,?,?,?)"
|
||||
v = (wark, ts, sz, rd, fn)
|
||||
v = (wark, int(ts), sz, rd, fn)
|
||||
try:
|
||||
db.execute(sql, v)
|
||||
except:
|
||||
@@ -635,10 +917,9 @@ class Up2k(object):
|
||||
fsz = os.path.getsize(path)
|
||||
csz = up2k_chunksize(fsz)
|
||||
ret = []
|
||||
last_print = time.time()
|
||||
with open(path, "rb", 512 * 1024) as f:
|
||||
while fsz > 0:
|
||||
self.pp.msg = msg = "{} MB".format(int(fsz / 1024 / 1024))
|
||||
self.pp.msg = "{} MB".format(int(fsz / 1024 / 1024))
|
||||
hashobj = hashlib.sha512()
|
||||
rem = min(csz, fsz)
|
||||
fsz -= rem
|
||||
@@ -745,6 +1026,45 @@ class Up2k(object):
|
||||
self.log("snap: {} |{}|".format(path, len(reg.keys())))
|
||||
prev[k] = etag
|
||||
|
||||
def _tagger(self):
|
||||
while True:
|
||||
ptop, wark, rd, fn = self.tagq.get()
|
||||
abspath = os.path.join(ptop, rd, fn)
|
||||
self.log("tagging " + abspath)
|
||||
with self.mutex:
|
||||
cur = self.cur[ptop]
|
||||
if not cur:
|
||||
self.log("\033[31mno cursor to write tags with??")
|
||||
continue
|
||||
|
||||
entags = self.entags[ptop]
|
||||
if not entags:
|
||||
self.log("\033[33mno entags okay.jpg")
|
||||
continue
|
||||
|
||||
if "e2t" in self.flags[ptop]:
|
||||
self._tag_file(cur, entags, wark, abspath)
|
||||
|
||||
cur.connection.commit()
|
||||
|
||||
def _hasher(self):
|
||||
while True:
|
||||
ptop, rd, fn = self.hashq.get()
|
||||
if "e2d" not in self.flags[ptop]:
|
||||
continue
|
||||
|
||||
abspath = os.path.join(ptop, rd, fn)
|
||||
self.log("hashing " + abspath)
|
||||
inf = os.stat(fsenc(abspath))
|
||||
hashes = self._hashlist_from_file(abspath)
|
||||
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
|
||||
with self.mutex:
|
||||
self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size)
|
||||
|
||||
def hash_file(self, ptop, flags, rd, fn):
|
||||
self.register_vpath(ptop, flags)
|
||||
self.hashq.put([ptop, rd, fn])
|
||||
|
||||
|
||||
def up2k_chunksize(filesize):
|
||||
chunksize = 1024 * 1024
|
||||
|
||||
@@ -521,9 +521,7 @@ def u8safe(txt):
|
||||
|
||||
|
||||
def exclude_dotfiles(filepaths):
|
||||
for fpath in filepaths:
|
||||
if not fpath.split("/")[-1].startswith("."):
|
||||
yield fpath
|
||||
return [x for x in filepaths if not x.split("/")[-1].startswith(".")]
|
||||
|
||||
|
||||
def html_escape(s, quote=False):
|
||||
@@ -633,6 +631,40 @@ def read_socket_unbounded(sr):
|
||||
yield buf
|
||||
|
||||
|
||||
def read_socket_chunked(sr, log=None):
|
||||
err = "expected chunk length, got [{}] |{}| instead"
|
||||
while True:
|
||||
buf = b""
|
||||
while b"\r" not in buf:
|
||||
rbuf = sr.recv(2)
|
||||
if not rbuf or len(buf) > 16:
|
||||
err = err.format(buf.decode("utf-8", "replace"), len(buf))
|
||||
raise Pebkac(400, err)
|
||||
|
||||
buf += rbuf
|
||||
|
||||
if not buf.endswith(b"\n"):
|
||||
sr.recv(1)
|
||||
|
||||
try:
|
||||
chunklen = int(buf.rstrip(b"\r\n"), 16)
|
||||
except:
|
||||
err = err.format(buf.decode("utf-8", "replace"), len(buf))
|
||||
raise Pebkac(400, err)
|
||||
|
||||
if chunklen == 0:
|
||||
sr.recv(2) # \r\n after final chunk
|
||||
return
|
||||
|
||||
if log:
|
||||
log("receiving {} byte chunk".format(chunklen))
|
||||
|
||||
for chunk in read_socket(sr, chunklen):
|
||||
yield chunk
|
||||
|
||||
sr.recv(2) # \r\n after each chunk too
|
||||
|
||||
|
||||
def hashcopy(actor, fin, fout):
|
||||
u32_lim = int((2 ** 31) * 0.9)
|
||||
hashobj = hashlib.sha512()
|
||||
@@ -692,6 +724,30 @@ def sendfile_kern(lower, upper, f, s):
|
||||
return 0
|
||||
|
||||
|
||||
def statdir(logger, scandir, lstat, top):
|
||||
try:
|
||||
btop = fsenc(top)
|
||||
if scandir and hasattr(os, "scandir"):
|
||||
src = "scandir"
|
||||
with os.scandir(btop) as dh:
|
||||
for fh in dh:
|
||||
try:
|
||||
yield [fsdec(fh.name), fh.stat(follow_symlinks=not lstat)]
|
||||
except Exception as ex:
|
||||
logger("scan-stat: {} @ {}".format(repr(ex), fsdec(fh.path)))
|
||||
else:
|
||||
src = "listdir"
|
||||
fun = os.lstat if lstat else os.stat
|
||||
for name in os.listdir(btop):
|
||||
abspath = os.path.join(btop, name)
|
||||
try:
|
||||
yield [fsdec(name), fun(abspath)]
|
||||
except Exception as ex:
|
||||
logger("list-stat: {} @ {}".format(repr(ex), fsdec(abspath)))
|
||||
except Exception as ex:
|
||||
logger("{}: {} @ {}".format(src, repr(ex), top))
|
||||
|
||||
|
||||
def unescape_cookie(orig):
|
||||
# mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn
|
||||
ret = ""
|
||||
|
||||
@@ -46,7 +46,7 @@ body {
|
||||
display: none;
|
||||
}
|
||||
#files {
|
||||
border-collapse: collapse;
|
||||
border-spacing: 0;
|
||||
margin-top: 2em;
|
||||
z-index: 1;
|
||||
position: relative;
|
||||
@@ -67,16 +67,18 @@ a,
|
||||
#files a:hover {
|
||||
color: #fff;
|
||||
background: #161616;
|
||||
text-decoration: underline;
|
||||
}
|
||||
#files thead a {
|
||||
color: #999;
|
||||
font-weight: normal;
|
||||
}
|
||||
#files tr:hover {
|
||||
#files tr+tr:hover {
|
||||
background: #1c1c1c;
|
||||
}
|
||||
#files thead th {
|
||||
padding: .5em 1.3em .3em 1.3em;
|
||||
cursor: pointer;
|
||||
}
|
||||
#files thead th:last-child {
|
||||
background: #444;
|
||||
@@ -94,6 +96,16 @@ a,
|
||||
margin: 0;
|
||||
padding: 0 .5em;
|
||||
}
|
||||
#files td {
|
||||
border-bottom: 1px solid #111;
|
||||
}
|
||||
#files td+td+td {
|
||||
max-width: 30em;
|
||||
overflow: hidden;
|
||||
}
|
||||
#files tr+tr td {
|
||||
border-top: 1px solid #383838;
|
||||
}
|
||||
#files tbody td:nth-child(3) {
|
||||
font-family: monospace;
|
||||
font-size: 1.3em;
|
||||
@@ -112,6 +124,9 @@ a,
|
||||
padding-bottom: 1.3em;
|
||||
border-bottom: .5em solid #444;
|
||||
}
|
||||
#files tbody tr td:last-child {
|
||||
white-space: nowrap;
|
||||
}
|
||||
#files thead th[style] {
|
||||
width: auto !important;
|
||||
}
|
||||
@@ -160,7 +175,8 @@ a,
|
||||
margin: -.2em;
|
||||
}
|
||||
#files tbody a.play.act {
|
||||
color: #af0;
|
||||
color: #840;
|
||||
text-shadow: 0 0 .3em #b80;
|
||||
}
|
||||
#blocked {
|
||||
position: fixed;
|
||||
@@ -291,6 +307,20 @@ a,
|
||||
width: calc(100% - 10.5em);
|
||||
background: rgba(0,0,0,0.2);
|
||||
}
|
||||
@media (min-width: 90em) {
|
||||
#barpos,
|
||||
#barbuf {
|
||||
width: calc(100% - 24em);
|
||||
left: 9.8em;
|
||||
top: .7em;
|
||||
height: 1.6em;
|
||||
bottom: auto;
|
||||
}
|
||||
#widget {
|
||||
bottom: -3.2em;
|
||||
height: 3.2em;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -400,14 +430,13 @@ input[type="checkbox"]:checked+label {
|
||||
color: #fff;
|
||||
}
|
||||
#files td div a {
|
||||
display: table-cell;
|
||||
display: inline-block;
|
||||
white-space: nowrap;
|
||||
}
|
||||
#files td div a:last-child {
|
||||
width: 100%;
|
||||
}
|
||||
#files td div {
|
||||
display: table;
|
||||
border-collapse: collapse;
|
||||
width: 100%;
|
||||
}
|
||||
@@ -421,12 +450,27 @@ input[type="checkbox"]:checked+label {
|
||||
#tree {
|
||||
padding-top: 2em;
|
||||
}
|
||||
#tree>a+a {
|
||||
padding: .2em .4em;
|
||||
font-size: 1.2em;
|
||||
background: #2a2a2a;
|
||||
box-shadow: 0 .1em .2em #222 inset;
|
||||
border-radius: .3em;
|
||||
margin: .2em;
|
||||
position: relative;
|
||||
top: -.2em;
|
||||
}
|
||||
#tree>a+a:hover {
|
||||
background: #805;
|
||||
}
|
||||
#tree>a+a.on {
|
||||
background: #fc4;
|
||||
color: #400;
|
||||
text-shadow: none;
|
||||
}
|
||||
#detree {
|
||||
padding: .3em .5em;
|
||||
font-size: 1.5em;
|
||||
display: inline-block;
|
||||
min-width: 12em;
|
||||
width: 100%;
|
||||
}
|
||||
#treefiles #files tbody {
|
||||
border-radius: 0 .7em 0 .7em;
|
||||
@@ -447,20 +491,20 @@ input[type="checkbox"]:checked+label {
|
||||
list-style: none;
|
||||
white-space: nowrap;
|
||||
}
|
||||
#tree a.hl {
|
||||
#treeul a.hl {
|
||||
color: #400;
|
||||
background: #fc4;
|
||||
border-radius: .3em;
|
||||
text-shadow: none;
|
||||
}
|
||||
#tree a {
|
||||
#treeul a {
|
||||
display: inline-block;
|
||||
}
|
||||
#tree a+a {
|
||||
#treeul a+a {
|
||||
width: calc(100% - 2em);
|
||||
background: #333;
|
||||
}
|
||||
#tree a+a:hover {
|
||||
#treeul a+a:hover {
|
||||
background: #222;
|
||||
color: #fff;
|
||||
}
|
||||
@@ -489,3 +533,49 @@ input[type="checkbox"]:checked+label {
|
||||
position: absolute;
|
||||
z-index: 9;
|
||||
}
|
||||
#files .cfg {
|
||||
display: none;
|
||||
font-size: 2em;
|
||||
white-space: nowrap;
|
||||
}
|
||||
#files th:hover .cfg,
|
||||
#files th.min .cfg {
|
||||
display: block;
|
||||
width: 1em;
|
||||
border-radius: .2em;
|
||||
margin: -1.3em auto 0 auto;
|
||||
background: #444;
|
||||
}
|
||||
#files th.min .cfg {
|
||||
margin: -.6em;
|
||||
}
|
||||
#files>thead>tr>th.min span {
|
||||
position: absolute;
|
||||
transform: rotate(270deg);
|
||||
background: linear-gradient(90deg, rgba(68,68,68,0), rgba(68,68,68,0.5) 70%, #444);
|
||||
margin-left: -4.6em;
|
||||
padding: .4em;
|
||||
top: 5.4em;
|
||||
width: 8em;
|
||||
text-align: right;
|
||||
letter-spacing: .04em;
|
||||
}
|
||||
#files td:nth-child(2n) {
|
||||
color: #f5a;
|
||||
}
|
||||
#files td.min a {
|
||||
display: none;
|
||||
}
|
||||
#files tr.play td {
|
||||
background: #fc4;
|
||||
border-color: transparent;
|
||||
color: #400;
|
||||
text-shadow: none;
|
||||
}
|
||||
#files tr.play a {
|
||||
color: inherit;
|
||||
}
|
||||
#files tr.play a:hover {
|
||||
color: #300;
|
||||
background: #fea;
|
||||
}
|
||||
|
||||
@@ -26,7 +26,11 @@
|
||||
</div>
|
||||
|
||||
<div id="op_search" class="opview">
|
||||
{%- if have_tags_idx %}
|
||||
<table id="srch_form" class="tags"></table>
|
||||
{%- else %}
|
||||
<table id="srch_form"></table>
|
||||
{%- endif %}
|
||||
<div id="srch_q"></div>
|
||||
</div>
|
||||
{%- include 'upload.html' %}
|
||||
@@ -44,6 +48,9 @@
|
||||
<tr>
|
||||
<td id="tree">
|
||||
<a href="#" id="detree">🍞...</a>
|
||||
<a href="#" step="2" id="twobytwo">+</a>
|
||||
<a href="#" step="-2" id="twig">–</a>
|
||||
<a href="#" id="dyntree">a</a>
|
||||
<ul id="treeul"></ul>
|
||||
</td>
|
||||
<td id="treefiles"></td>
|
||||
@@ -54,16 +61,29 @@
|
||||
<thead>
|
||||
<tr>
|
||||
<th></th>
|
||||
<th>File Name</th>
|
||||
<th sort="int">File Size</th>
|
||||
<th>T</th>
|
||||
<th>Date</th>
|
||||
<th><span>File Name</span></th>
|
||||
<th sort="int"><span>Size</span></th>
|
||||
{%- for k in taglist %}
|
||||
{%- if k.startswith('.') %}
|
||||
<th sort="int"><span>{{ k[1:] }}</span></th>
|
||||
{%- else %}
|
||||
<th><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
<th><span>T</span></th>
|
||||
<th><span>Date</span></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
|
||||
{%- for f in files %}
|
||||
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
|
||||
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
|
||||
{%- if f.tags is defined %}
|
||||
{%- for k in taglist %}
|
||||
<td>{{ f.tags[k] }}</td>
|
||||
{%- endfor %}
|
||||
{%- endif %}
|
||||
<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
|
||||
{%- endfor %}
|
||||
|
||||
</tbody>
|
||||
@@ -86,7 +106,10 @@
|
||||
<canvas id="barbuf"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
<script>
|
||||
var tag_order_cfg = {{ tag_order }};
|
||||
</script>
|
||||
<script src="/.cpr/util.js{{ ts }}"></script>
|
||||
<script src="/.cpr/browser.js{{ ts }}"></script>
|
||||
<script src="/.cpr/up2k.js{{ ts }}"></script>
|
||||
|
||||
@@ -6,21 +6,6 @@ function dbg(msg) {
|
||||
ebi('path').innerHTML = msg;
|
||||
}
|
||||
|
||||
function ev(e) {
|
||||
e = e || window.event;
|
||||
if (!e)
|
||||
return;
|
||||
|
||||
if (e.preventDefault)
|
||||
e.preventDefault()
|
||||
|
||||
if (e.stopPropagation)
|
||||
e.stopPropagation();
|
||||
|
||||
e.returnValue = false;
|
||||
return e;
|
||||
}
|
||||
|
||||
makeSortable(ebi('files'));
|
||||
|
||||
|
||||
@@ -55,7 +40,7 @@ function init_mp() {
|
||||
for (var a = 0, aa = tracks.length; a < aa; a++)
|
||||
ebi('trk' + a).onclick = ev_play;
|
||||
|
||||
ret.vol = localStorage.getItem('vol');
|
||||
ret.vol = sread('vol');
|
||||
if (ret.vol !== null)
|
||||
ret.vol = parseFloat(ret.vol);
|
||||
else
|
||||
@@ -67,7 +52,7 @@ function init_mp() {
|
||||
|
||||
ret.setvol = function (vol) {
|
||||
ret.vol = Math.max(Math.min(vol, 1), 0);
|
||||
localStorage.setItem('vol', vol);
|
||||
swrite('vol', vol);
|
||||
|
||||
if (ret.au)
|
||||
ret.au.volume = ret.expvol();
|
||||
@@ -153,6 +138,9 @@ var pbar = (function () {
|
||||
var grad = null;
|
||||
|
||||
r.drawbuf = function () {
|
||||
if (!mp.au)
|
||||
return;
|
||||
|
||||
var cs = getComputedStyle(r.bcan);
|
||||
var sw = parseInt(cs['width']);
|
||||
var sh = parseInt(cs['height']);
|
||||
@@ -179,6 +167,9 @@ var pbar = (function () {
|
||||
}
|
||||
};
|
||||
r.drawpos = function () {
|
||||
if (!mp.au)
|
||||
return;
|
||||
|
||||
var cs = getComputedStyle(r.bcan);
|
||||
var sw = parseInt(cs['width']);
|
||||
var sh = parseInt(cs['height']);
|
||||
@@ -460,6 +451,11 @@ function play(tid, call_depth) {
|
||||
mp.au.volume = mp.expvol();
|
||||
var oid = 'trk' + tid;
|
||||
setclass(oid, 'play act');
|
||||
var trs = ebi('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
|
||||
for (var a = 0, aa = trs.length; a < aa; a++) {
|
||||
trs[a].className = trs[a].className.replace(/ *play */, "");
|
||||
}
|
||||
ebi(oid).parentElement.parentElement.className += ' play';
|
||||
|
||||
try {
|
||||
if (hack_attempt_play)
|
||||
@@ -472,7 +468,7 @@ function play(tid, call_depth) {
|
||||
o.setAttribute('id', 'thx_js');
|
||||
if (window.history && history.replaceState) {
|
||||
var nurl = (document.location + '').split('#')[0] + '#' + oid;
|
||||
history.replaceState(ebi('files').tBodies[0].innerHTML, nurl, nurl);
|
||||
hist_replace(ebi('files').innerHTML, nurl);
|
||||
}
|
||||
else {
|
||||
document.location.hash = oid;
|
||||
@@ -591,6 +587,12 @@ function autoplay_blocked() {
|
||||
["name", "name", "name contains (negate with -nope)", "46"]
|
||||
]
|
||||
];
|
||||
|
||||
if (document.querySelector('#srch_form.tags'))
|
||||
sconf.push(["tags",
|
||||
["tags", "tags", "tags contains", "46"]
|
||||
]);
|
||||
|
||||
var html = [];
|
||||
var orig_html = null;
|
||||
for (var a = 0; a < sconf.length; a++) {
|
||||
@@ -610,7 +612,7 @@ function autoplay_blocked() {
|
||||
}
|
||||
ebi('srch_form').innerHTML = html.join('\n');
|
||||
|
||||
var o = document.querySelectorAll('#op_search input[type="text"]');
|
||||
var o = document.querySelectorAll('#op_search input');
|
||||
for (var a = 0; a < o.length; a++) {
|
||||
o[a].oninput = ev_search_input;
|
||||
}
|
||||
@@ -619,8 +621,11 @@ function autoplay_blocked() {
|
||||
|
||||
function ev_search_input() {
|
||||
var v = this.value;
|
||||
var chk = ebi(this.getAttribute('id').slice(0, -1) + 'c');
|
||||
chk.checked = ((v + '').length > 0);
|
||||
var id = this.getAttribute('id');
|
||||
if (id.slice(-1) == 'v') {
|
||||
var chk = ebi(id.slice(0, -1) + 'c');
|
||||
chk.checked = ((v + '').length > 0);
|
||||
}
|
||||
clearTimeout(search_timeout);
|
||||
search_timeout = setTimeout(do_search, 100);
|
||||
}
|
||||
@@ -653,6 +658,9 @@ function autoplay_blocked() {
|
||||
return;
|
||||
}
|
||||
|
||||
var res = JSON.parse(this.responseText),
|
||||
tagord = res.tag_order;
|
||||
|
||||
var ofiles = ebi('files');
|
||||
if (ofiles.getAttribute('ts') > this.ts)
|
||||
return;
|
||||
@@ -660,10 +668,11 @@ function autoplay_blocked() {
|
||||
ebi('path').style.display = 'none';
|
||||
ebi('tree').style.display = 'none';
|
||||
|
||||
var html = ['<tr><td>-</td><td colspan="4"><a href="#" id="unsearch">close search results</a></td></tr>'];
|
||||
var res = JSON.parse(this.responseText);
|
||||
for (var a = 0; a < res.length; a++) {
|
||||
var r = res[a],
|
||||
var html = mk_files_header(tagord);
|
||||
html.push('<tbody>');
|
||||
html.push('<tr><td>-</td><td colspan="42"><a href="#" id="unsearch">close search results</a></td></tr>');
|
||||
for (var a = 0; a < res.hits.length; a++) {
|
||||
var r = res.hits[a],
|
||||
ts = parseInt(r.ts),
|
||||
sz = esc(r.sz + ''),
|
||||
rp = esc(r.rp + ''),
|
||||
@@ -674,15 +683,31 @@ function autoplay_blocked() {
|
||||
ext = '%';
|
||||
|
||||
links = links.join('');
|
||||
html.push('<tr><td>-</td><td><div>' + links + '</div></td><td>' + sz +
|
||||
'</td><td>' + ext + '</td><td>' + unix2iso(ts) + '</td></tr>');
|
||||
var nodes = ['<tr><td>-</td><td><div>' + links + '</div>', sz];
|
||||
for (var b = 0; b < tagord.length; b++) {
|
||||
var k = tagord[b],
|
||||
v = r.tags[k] || "";
|
||||
|
||||
if (k == "dur") {
|
||||
var sv = s2ms(v);
|
||||
nodes[nodes.length - 1] += '</td><td sortv="' + v + '">' + sv;
|
||||
continue;
|
||||
}
|
||||
|
||||
nodes.push(v);
|
||||
}
|
||||
|
||||
nodes = nodes.concat([ext, unix2iso(ts)]);
|
||||
html.push(nodes.join('</td><td>'));
|
||||
html.push('</td></tr>');
|
||||
}
|
||||
|
||||
if (!orig_html)
|
||||
orig_html = ebi('files').tBodies[0].innerHTML;
|
||||
orig_html = ebi('files').innerHTML;
|
||||
|
||||
ofiles.tBodies[0].innerHTML = html.join('\n');
|
||||
ofiles.innerHTML = html.join('\n');
|
||||
ofiles.setAttribute("ts", this.ts);
|
||||
filecols.set_style();
|
||||
reload_browser();
|
||||
|
||||
ebi('unsearch').onclick = unsearch;
|
||||
@@ -692,7 +717,7 @@ function autoplay_blocked() {
|
||||
ev(e);
|
||||
ebi('path').style.display = 'inline-block';
|
||||
ebi('tree').style.display = 'block';
|
||||
ebi('files').tBodies[0].innerHTML = orig_html;
|
||||
ebi('files').innerHTML = orig_html;
|
||||
orig_html = null;
|
||||
reload_browser();
|
||||
}
|
||||
@@ -702,6 +727,10 @@ function autoplay_blocked() {
|
||||
// tree
|
||||
(function () {
|
||||
var treedata = null;
|
||||
var dyn = bcfg_get('dyntree', true);
|
||||
var treesz = icfg_get('treesz', 16);
|
||||
treesz = isNaN(treesz) ? 16 : Math.min(Math.max(treesz, 4), 50);
|
||||
console.log('treesz [' + treesz + ']');
|
||||
|
||||
function entree(e) {
|
||||
ev(e);
|
||||
@@ -716,7 +745,7 @@ function autoplay_blocked() {
|
||||
treefiles.appendChild(ebi('files'));
|
||||
treefiles.appendChild(ebi('epi'));
|
||||
|
||||
localStorage.setItem('entreed', 'tree');
|
||||
swrite('entreed', 'tree');
|
||||
get_tree("", get_vpath());
|
||||
}
|
||||
|
||||
@@ -760,7 +789,7 @@ function autoplay_blocked() {
|
||||
esc(top) + '">' + esc(name) +
|
||||
"</a>\n<ul>\n" + html + "</ul>";
|
||||
|
||||
var links = document.querySelectorAll('#tree a+a');
|
||||
var links = document.querySelectorAll('#treeul a+a');
|
||||
for (var a = 0, aa = links.length; a < aa; a++) {
|
||||
if (links[a].getAttribute('href') == top) {
|
||||
var o = links[a].parentNode;
|
||||
@@ -774,7 +803,10 @@ function autoplay_blocked() {
|
||||
document.querySelector('#treeul>li>a+a').textContent = '[root]';
|
||||
despin('#tree');
|
||||
reload_tree();
|
||||
rescale_tree();
|
||||
}
|
||||
|
||||
function rescale_tree() {
|
||||
var q = '#tree';
|
||||
var nq = 0;
|
||||
while (true) {
|
||||
@@ -783,18 +815,19 @@ function autoplay_blocked() {
|
||||
if (!document.querySelector(q))
|
||||
break;
|
||||
}
|
||||
ebi('treeul').style.width = (24 + nq) + 'em';
|
||||
var w = treesz + (dyn ? nq : 0);
|
||||
ebi('treeul').style.width = w + 'em';
|
||||
}
|
||||
|
||||
function reload_tree() {
|
||||
var cdir = get_vpath();
|
||||
var links = document.querySelectorAll('#tree a+a');
|
||||
var links = document.querySelectorAll('#treeul a+a');
|
||||
for (var a = 0, aa = links.length; a < aa; a++) {
|
||||
var href = links[a].getAttribute('href');
|
||||
links[a].setAttribute('class', href == cdir ? 'hl' : '');
|
||||
links[a].onclick = treego;
|
||||
}
|
||||
links = document.querySelectorAll('#tree li>a:first-child');
|
||||
links = document.querySelectorAll('#treeul li>a:first-child');
|
||||
for (var a = 0, aa = links.length; a < aa; a++) {
|
||||
links[a].setAttribute('dst', links[a].nextSibling.getAttribute('href'));
|
||||
links[a].onclick = treegrow;
|
||||
@@ -825,6 +858,7 @@ function autoplay_blocked() {
|
||||
rm.parentNode.removeChild(rm);
|
||||
}
|
||||
this.textContent = '+';
|
||||
rescale_tree();
|
||||
return;
|
||||
}
|
||||
var dst = this.getAttribute('dst');
|
||||
@@ -851,24 +885,42 @@ function autoplay_blocked() {
|
||||
ebi('srv_info').innerHTML = '<span>' + res.srvinf + '</span>';
|
||||
var nodes = res.dirs.concat(res.files);
|
||||
var top = this.top;
|
||||
var html = [];
|
||||
var html = mk_files_header(res.taglist);
|
||||
html.push('<tbody>');
|
||||
for (var a = 0; a < nodes.length; a++) {
|
||||
var r = nodes[a],
|
||||
ln = '<tr><td>' + r.lead + '</td><td><a href="' +
|
||||
top + r.href + '">' + esc(decodeURIComponent(r.href)) + '</a>';
|
||||
ln = ['<tr><td>' + r.lead + '</td><td><a href="' +
|
||||
top + r.href + '">' + esc(decodeURIComponent(r.href)) + '</a>', r.sz];
|
||||
|
||||
ln = [ln, r.sz, r.ext, unix2iso(r.ts)].join('</td><td>');
|
||||
for (var b = 0; b < res.taglist.length; b++) {
|
||||
var k = res.taglist[b],
|
||||
v = (r.tags || {})[k] || "";
|
||||
|
||||
if (k[0] == '.')
|
||||
k = k.slice(1);
|
||||
|
||||
if (k == "dur") {
|
||||
var sv = s2ms(v);
|
||||
ln[ln.length - 1] += '</td><td sortv="' + v + '">' + sv;
|
||||
continue;
|
||||
}
|
||||
ln.push(v);
|
||||
}
|
||||
ln = ln.concat([r.ext, unix2iso(r.ts)]).join('</td><td>');
|
||||
html.push(ln + '</td></tr>');
|
||||
}
|
||||
html.push('</tbody>');
|
||||
html = html.join('\n');
|
||||
ebi('files').tBodies[0].innerHTML = html;
|
||||
history.pushState(html, this.top, this.top);
|
||||
ebi('files').innerHTML = html;
|
||||
|
||||
hist_push(html, this.top);
|
||||
apply_perms(res.perms);
|
||||
despin('#files');
|
||||
|
||||
ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : "";
|
||||
ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : "";
|
||||
|
||||
filecols.set_style();
|
||||
reload_tree();
|
||||
reload_browser();
|
||||
}
|
||||
@@ -913,26 +965,48 @@ function autoplay_blocked() {
|
||||
ebi('path').style.display = 'inline-block';
|
||||
treetab.style.display = 'none';
|
||||
|
||||
localStorage.setItem('entreed', 'na');
|
||||
swrite('entreed', 'na');
|
||||
}
|
||||
|
||||
function dyntree(e) {
|
||||
ev(e);
|
||||
dyn = !dyn;
|
||||
bcfg_set('dyntree', dyn);
|
||||
rescale_tree();
|
||||
}
|
||||
|
||||
function scaletree(e) {
|
||||
ev(e);
|
||||
treesz += parseInt(this.getAttribute("step"));
|
||||
if (isNaN(treesz))
|
||||
treesz = 16;
|
||||
|
||||
swrite('treesz', treesz);
|
||||
rescale_tree();
|
||||
}
|
||||
|
||||
ebi('entree').onclick = entree;
|
||||
ebi('detree').onclick = detree;
|
||||
if (window.localStorage && localStorage.getItem('entreed') == 'tree')
|
||||
ebi('dyntree').onclick = dyntree;
|
||||
ebi('twig').onclick = scaletree;
|
||||
ebi('twobytwo').onclick = scaletree;
|
||||
if (sread('entreed') == 'tree')
|
||||
entree();
|
||||
|
||||
window.onpopstate = function (e) {
|
||||
console.log(e.url + ' ,, ' + ((e.state + '').slice(0, 64)));
|
||||
if (e.state) {
|
||||
ebi('files').tBodies[0].innerHTML = e.state;
|
||||
reload_tree();
|
||||
reload_browser();
|
||||
}
|
||||
var html = sessionStorage.getItem(e.state || 1);
|
||||
if (!html)
|
||||
return;
|
||||
|
||||
ebi('files').innerHTML = html;
|
||||
reload_tree();
|
||||
reload_browser();
|
||||
};
|
||||
|
||||
if (window.history && history.pushState) {
|
||||
var u = get_vpath();
|
||||
history.replaceState(ebi('files').tBodies[0].innerHTML, u, u);
|
||||
var u = get_vpath() + window.location.hash;
|
||||
hist_replace(ebi('files').innerHTML, u);
|
||||
}
|
||||
})();
|
||||
|
||||
@@ -989,7 +1063,119 @@ function apply_perms(perms) {
|
||||
}
|
||||
|
||||
|
||||
function mk_files_header(taglist) {
|
||||
var html = [
|
||||
'<thead>',
|
||||
'<th></th>',
|
||||
'<th><span>File Name</span></th>',
|
||||
'<th sort="int"><span>Size</span></th>'
|
||||
];
|
||||
for (var a = 0; a < taglist.length; a++) {
|
||||
var tag = taglist[a];
|
||||
var c1 = tag.slice(0, 1).toUpperCase();
|
||||
tag = c1 + tag.slice(1);
|
||||
if (c1 == '.')
|
||||
tag = '<th sort="int"><span>' + tag.slice(1);
|
||||
else
|
||||
tag = '<th><span>' + tag;
|
||||
|
||||
html.push(tag + '</span></th>');
|
||||
}
|
||||
html = html.concat([
|
||||
'<th><span>T</span></th>',
|
||||
'<th><span>Date</span></th>',
|
||||
'</thead>',
|
||||
]);
|
||||
return html;
|
||||
}
|
||||
|
||||
|
||||
var filecols = (function () {
|
||||
var hidden = jread('filecols', []);
|
||||
|
||||
var add_btns = function () {
|
||||
var ths = document.querySelectorAll('#files th>span');
|
||||
for (var a = 0, aa = ths.length; a < aa; a++) {
|
||||
var th = ths[a].parentElement;
|
||||
var is_hidden = has(hidden, ths[a].textContent);
|
||||
th.innerHTML = '<div class="cfg"><a href="#">' +
|
||||
(is_hidden ? '+' : '-') + '</a></div>' + ths[a].outerHTML;
|
||||
|
||||
th.getElementsByTagName('a')[0].onclick = ev_row_tgl;
|
||||
}
|
||||
};
|
||||
|
||||
var set_style = function () {
|
||||
add_btns();
|
||||
|
||||
var ohidden = [],
|
||||
ths = document.querySelectorAll('#files th'),
|
||||
ncols = ths.length;
|
||||
|
||||
for (var a = 0; a < ncols; a++) {
|
||||
var span = ths[a].getElementsByTagName('span');
|
||||
if (span.length <= 0)
|
||||
continue;
|
||||
|
||||
var name = span[0].textContent,
|
||||
cls = '';
|
||||
|
||||
if (has(hidden, name)) {
|
||||
ohidden.push(a);
|
||||
cls = ' min';
|
||||
}
|
||||
ths[a].className = ths[a].className.replace(/ *min */, " ") + cls;
|
||||
}
|
||||
for (var a = 0; a < ncols; a++) {
|
||||
var cls = has(ohidden, a) ? 'min' : '';
|
||||
var tds = document.querySelectorAll('#files>tbody>tr>td:nth-child(' + (a + 1) + ')');
|
||||
for (var b = 0, bb = tds.length; b < bb; b++) {
|
||||
tds[b].setAttribute('class', cls);
|
||||
if (a < 2)
|
||||
continue;
|
||||
|
||||
if (cls) {
|
||||
if (!tds[b].hasAttribute('html')) {
|
||||
tds[b].setAttribute('html', tds[b].innerHTML);
|
||||
tds[b].innerHTML = '...';
|
||||
}
|
||||
}
|
||||
else if (tds[b].hasAttribute('html')) {
|
||||
tds[b].innerHTML = tds[b].getAttribute('html');
|
||||
tds[b].removeAttribute('html');
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
set_style();
|
||||
|
||||
var toggle = function (name) {
|
||||
var ofs = hidden.indexOf(name);
|
||||
if (ofs !== -1)
|
||||
hidden.splice(ofs, 1);
|
||||
else
|
||||
hidden.push(name);
|
||||
|
||||
jwrite("filecols", hidden);
|
||||
set_style();
|
||||
};
|
||||
|
||||
return {
|
||||
"add_btns": add_btns,
|
||||
"set_style": set_style,
|
||||
"toggle": toggle,
|
||||
};
|
||||
})();
|
||||
|
||||
|
||||
function ev_row_tgl(e) {
|
||||
ev(e);
|
||||
filecols.toggle(this.parentElement.parentElement.getElementsByTagName('span')[0].textContent);
|
||||
}
|
||||
|
||||
|
||||
function reload_browser(not_mp) {
|
||||
filecols.set_style();
|
||||
makeSortable(ebi('files'));
|
||||
|
||||
var parts = get_vpath().split('/');
|
||||
|
||||
@@ -524,11 +524,9 @@ dom_navtgl.onclick = function () {
|
||||
dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav';
|
||||
dom_nav.style.display = hidden ? 'none' : 'block';
|
||||
|
||||
if (window.localStorage)
|
||||
localStorage.setItem('hidenav', hidden ? 1 : 0);
|
||||
|
||||
swrite('hidenav', hidden ? 1 : 0);
|
||||
redraw();
|
||||
};
|
||||
|
||||
if (window.localStorage && localStorage.getItem('hidenav') == 1)
|
||||
if (sread('hidenav') == 1)
|
||||
dom_navtgl.onclick();
|
||||
|
||||
@@ -209,42 +209,7 @@ function up2k_init(have_crypto) {
|
||||
};
|
||||
}
|
||||
|
||||
function cfg_get(name) {
|
||||
var val = localStorage.getItem(name);
|
||||
if (val === null)
|
||||
return parseInt(ebi(name).value);
|
||||
|
||||
ebi(name).value = val;
|
||||
return val;
|
||||
}
|
||||
|
||||
function bcfg_get(name, defval) {
|
||||
var o = ebi(name);
|
||||
if (!o)
|
||||
return defval;
|
||||
|
||||
var val = localStorage.getItem(name);
|
||||
if (val === null)
|
||||
val = defval;
|
||||
else
|
||||
val = (val == '1');
|
||||
|
||||
o.checked = val;
|
||||
return val;
|
||||
}
|
||||
|
||||
function bcfg_set(name, val) {
|
||||
localStorage.setItem(
|
||||
name, val ? '1' : '0');
|
||||
|
||||
var o = ebi(name);
|
||||
if (o)
|
||||
o.checked = val;
|
||||
|
||||
return val;
|
||||
}
|
||||
|
||||
var parallel_uploads = cfg_get('nthread');
|
||||
var parallel_uploads = icfg_get('nthread');
|
||||
var multitask = bcfg_get('multitask', true);
|
||||
var ask_up = bcfg_get('ask_up', true);
|
||||
var flag_en = bcfg_get('flag_en', false);
|
||||
@@ -772,13 +737,13 @@ function up2k_init(have_crypto) {
|
||||
if (!response.name) {
|
||||
var msg = '';
|
||||
var smsg = '';
|
||||
if (!response || !response.length) {
|
||||
if (!response || !response.hits || !response.hits.length) {
|
||||
msg = 'not found on server';
|
||||
smsg = '404';
|
||||
}
|
||||
else {
|
||||
smsg = 'found';
|
||||
var hit = response[0],
|
||||
var hit = response.hits[0],
|
||||
msg = linksplit(hit.rp).join(''),
|
||||
tr = unix2iso(hit.ts),
|
||||
tu = unix2iso(t.lmod),
|
||||
@@ -1033,7 +998,7 @@ function up2k_init(have_crypto) {
|
||||
return;
|
||||
|
||||
parallel_uploads = v;
|
||||
localStorage.setItem('nthread', v);
|
||||
swrite('nthread', v);
|
||||
obj.style.background = '#444';
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -43,6 +43,21 @@ function ebi(id) {
|
||||
return document.getElementById(id);
|
||||
}
|
||||
|
||||
function ev(e) {
|
||||
e = e || window.event;
|
||||
if (!e)
|
||||
return;
|
||||
|
||||
if (e.preventDefault)
|
||||
e.preventDefault()
|
||||
|
||||
if (e.stopPropagation)
|
||||
e.stopPropagation();
|
||||
|
||||
e.returnValue = false;
|
||||
return e;
|
||||
}
|
||||
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
||||
if (!String.prototype.endsWith) {
|
||||
@@ -76,30 +91,41 @@ function import_js(url, cb) {
|
||||
|
||||
|
||||
function sortTable(table, col) {
|
||||
var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows
|
||||
var tb = table.tBodies[0],
|
||||
th = table.tHead.rows[0].cells,
|
||||
tr = Array.prototype.slice.call(tb.rows, 0),
|
||||
i, reverse = th[col].className == 'sort1' ? -1 : 1;
|
||||
i, reverse = th[col].className.indexOf('sort1') !== -1 ? -1 : 1;
|
||||
for (var a = 0, thl = th.length; a < thl; a++)
|
||||
th[a].className = '';
|
||||
th[col].className = 'sort' + reverse;
|
||||
th[a].className = th[a].className.replace(/ *sort-?1 */, " ");
|
||||
th[col].className += ' sort' + reverse;
|
||||
var stype = th[col].getAttribute('sort');
|
||||
tr = tr.sort(function (a, b) {
|
||||
if (!a.cells[col])
|
||||
var vl = [];
|
||||
for (var a = 0; a < tr.length; a++) {
|
||||
var cell = tr[a].cells[col];
|
||||
if (!cell) {
|
||||
vl.push([null, a]);
|
||||
continue;
|
||||
}
|
||||
var v = cell.getAttribute('sortv') || cell.textContent.trim();
|
||||
if (stype == 'int') {
|
||||
v = parseInt(v.replace(/[, ]/g, '')) || 0;
|
||||
}
|
||||
vl.push([v, a]);
|
||||
}
|
||||
vl.sort(function (a, b) {
|
||||
a = a[0];
|
||||
b = b[0];
|
||||
if (a === null)
|
||||
return -1;
|
||||
if (!b.cells[col])
|
||||
if (b === null)
|
||||
return 1;
|
||||
|
||||
var v1 = a.cells[col].textContent.trim();
|
||||
var v2 = b.cells[col].textContent.trim();
|
||||
if (stype == 'int') {
|
||||
v1 = parseInt(v1.replace(/,/g, ''));
|
||||
v2 = parseInt(v2.replace(/,/g, ''));
|
||||
return reverse * (v1 - v2);
|
||||
return reverse * (a - b);
|
||||
}
|
||||
return reverse * (v1.localeCompare(v2));
|
||||
return reverse * (a.localeCompare(b));
|
||||
});
|
||||
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]);
|
||||
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[vl[i][1]]);
|
||||
}
|
||||
function makeSortable(table) {
|
||||
var th = table.tHead, i;
|
||||
@@ -107,7 +133,8 @@ function makeSortable(table) {
|
||||
if (th) i = th.length;
|
||||
else return; // if no `<thead>` then do nothing
|
||||
while (--i >= 0) (function (i) {
|
||||
th[i].onclick = function () {
|
||||
th[i].onclick = function (e) {
|
||||
ev(e);
|
||||
sortTable(table, i);
|
||||
};
|
||||
}(i));
|
||||
@@ -123,16 +150,13 @@ function makeSortable(table) {
|
||||
})();
|
||||
|
||||
|
||||
function opclick(ev) {
|
||||
if (ev) //ie
|
||||
ev.preventDefault();
|
||||
function opclick(e) {
|
||||
ev(e);
|
||||
|
||||
var dest = this.getAttribute('data-dest');
|
||||
goto(dest);
|
||||
|
||||
// writing a blank value makes ie8 segfault w
|
||||
if (window.localStorage)
|
||||
localStorage.setItem('opmode', dest || '.');
|
||||
swrite('opmode', dest || undefined);
|
||||
|
||||
var input = document.querySelector('.opview.act input:not([type="hidden"])')
|
||||
if (input)
|
||||
@@ -167,11 +191,9 @@ function goto(dest) {
|
||||
|
||||
(function () {
|
||||
goto();
|
||||
if (window.localStorage) {
|
||||
var op = localStorage.getItem('opmode');
|
||||
if (op !== null && op !== '.')
|
||||
goto(op);
|
||||
}
|
||||
var op = sread('opmode');
|
||||
if (op !== null && op !== '.')
|
||||
goto(op);
|
||||
})();
|
||||
|
||||
|
||||
@@ -225,6 +247,12 @@ function unix2iso(ts) {
|
||||
}
|
||||
|
||||
|
||||
function s2ms(s) {
|
||||
var m = Math.floor(s / 60);
|
||||
return m + ":" + ("0" + (s - m * 60)).slice(-2);
|
||||
}
|
||||
|
||||
|
||||
function has(haystack, needle) {
|
||||
for (var a = 0; a < haystack.length; a++)
|
||||
if (haystack[a] == needle)
|
||||
@@ -232,3 +260,93 @@ function has(haystack, needle) {
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
function sread(key) {
|
||||
if (window.localStorage)
|
||||
return localStorage.getItem(key);
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
function swrite(key, val) {
|
||||
if (window.localStorage) {
|
||||
if (val === undefined)
|
||||
localStorage.removeItem(key);
|
||||
else
|
||||
localStorage.setItem(key, val);
|
||||
}
|
||||
}
|
||||
|
||||
function jread(key, fb) {
|
||||
var str = sread(key);
|
||||
if (!str)
|
||||
return fb;
|
||||
|
||||
return JSON.parse(str);
|
||||
}
|
||||
|
||||
function jwrite(key, val) {
|
||||
if (!val)
|
||||
swrite(key);
|
||||
else
|
||||
swrite(key, JSON.stringify(val));
|
||||
}
|
||||
|
||||
function icfg_get(name, defval) {
|
||||
var o = ebi(name);
|
||||
|
||||
var val = parseInt(sread(name));
|
||||
if (val === null)
|
||||
return parseInt(o ? o.value : defval);
|
||||
|
||||
if (o)
|
||||
o.value = val;
|
||||
|
||||
return val;
|
||||
}
|
||||
|
||||
function bcfg_get(name, defval) {
|
||||
var o = ebi(name);
|
||||
if (!o)
|
||||
return defval;
|
||||
|
||||
var val = sread(name);
|
||||
if (val === null)
|
||||
val = defval;
|
||||
else
|
||||
val = (val == '1');
|
||||
|
||||
bcfg_upd_ui(name, val);
|
||||
return val;
|
||||
}
|
||||
|
||||
function bcfg_set(name, val) {
|
||||
swrite(name, val ? '1' : '0');
|
||||
bcfg_upd_ui(name, val);
|
||||
return val;
|
||||
}
|
||||
|
||||
function bcfg_upd_ui(name, val) {
|
||||
var o = ebi(name);
|
||||
if (!o)
|
||||
return;
|
||||
|
||||
if (o.getAttribute('type') == 'checkbox')
|
||||
o.checked = val;
|
||||
else if (o)
|
||||
o.setAttribute('class', val ? 'on' : '');
|
||||
}
|
||||
|
||||
|
||||
function hist_push(html, url) {
|
||||
var key = new Date().getTime();
|
||||
sessionStorage.setItem(key, html);
|
||||
history.pushState(key, url, url);
|
||||
}
|
||||
|
||||
function hist_replace(html, url) {
|
||||
var key = new Date().getTime();
|
||||
sessionStorage.setItem(key, html);
|
||||
history.replaceState(key, url, url);
|
||||
}
|
||||
|
||||
@@ -122,7 +122,7 @@ git describe --tags >/dev/null 2>/dev/null && {
|
||||
exit 1
|
||||
}
|
||||
|
||||
dt="$(git log -1 --format=%cd --date=format:'%Y,%m,%d' | sed -E 's/,0?/, /g')"
|
||||
dt="$(git log -1 --format=%cd --date=short | sed -E 's/-0?/, /g')"
|
||||
printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt"
|
||||
sed -ri '
|
||||
s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/;
|
||||
|
||||
@@ -16,6 +16,12 @@ from copyparty.authsrv import AuthSrv
|
||||
from copyparty import util
|
||||
|
||||
|
||||
class Cfg(Namespace):
|
||||
def __init__(self, a=[], v=[], c=None):
|
||||
ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr mte".split()}
|
||||
super(Cfg, self).__init__(a=a, v=v, c=c, **ex)
|
||||
|
||||
|
||||
class TestVFS(unittest.TestCase):
|
||||
def dump(self, vfs):
|
||||
print(json.dumps(vfs, indent=4, sort_keys=True, default=lambda o: o.__dict__))
|
||||
@@ -35,7 +41,13 @@ class TestVFS(unittest.TestCase):
|
||||
def ls(self, vfs, vpath, uname):
|
||||
"""helper for resolving and listing a folder"""
|
||||
vn, rem = vfs.get(vpath, uname, True, False)
|
||||
return vn.ls(rem, uname)
|
||||
r1 = vn.ls(rem, uname, False)
|
||||
r2 = vn.ls(rem, uname, False)
|
||||
self.assertEqual(r1, r2)
|
||||
|
||||
fsdir, real, virt = r1
|
||||
real = [x[0] for x in real]
|
||||
return fsdir, real, virt
|
||||
|
||||
def runcmd(self, *argv):
|
||||
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
@@ -102,7 +114,7 @@ class TestVFS(unittest.TestCase):
|
||||
f.write(fn)
|
||||
|
||||
# defaults
|
||||
vfs = AuthSrv(Namespace(c=None, a=[], v=[]), self.log).vfs
|
||||
vfs = AuthSrv(Cfg(), self.log).vfs
|
||||
self.assertEqual(vfs.nodes, {})
|
||||
self.assertEqual(vfs.vpath, "")
|
||||
self.assertEqual(vfs.realpath, td)
|
||||
@@ -110,7 +122,7 @@ class TestVFS(unittest.TestCase):
|
||||
self.assertEqual(vfs.uwrite, ["*"])
|
||||
|
||||
# single read-only rootfs (relative path)
|
||||
vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), self.log).vfs
|
||||
vfs = AuthSrv(Cfg(v=["a/ab/::r"]), self.log).vfs
|
||||
self.assertEqual(vfs.nodes, {})
|
||||
self.assertEqual(vfs.vpath, "")
|
||||
self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab"))
|
||||
@@ -118,9 +130,7 @@ class TestVFS(unittest.TestCase):
|
||||
self.assertEqual(vfs.uwrite, [])
|
||||
|
||||
# single read-only rootfs (absolute path)
|
||||
vfs = AuthSrv(
|
||||
Namespace(c=None, a=[], v=[td + "//a/ac/../aa//::r"]), self.log
|
||||
).vfs
|
||||
vfs = AuthSrv(Cfg(v=[td + "//a/ac/../aa//::r"]), self.log).vfs
|
||||
self.assertEqual(vfs.nodes, {})
|
||||
self.assertEqual(vfs.vpath, "")
|
||||
self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa"))
|
||||
@@ -129,7 +139,7 @@ class TestVFS(unittest.TestCase):
|
||||
|
||||
# read-only rootfs with write-only subdirectory (read-write for k)
|
||||
vfs = AuthSrv(
|
||||
Namespace(c=None, a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]),
|
||||
Cfg(a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]),
|
||||
self.log,
|
||||
).vfs
|
||||
self.assertEqual(len(vfs.nodes), 1)
|
||||
@@ -192,7 +202,10 @@ class TestVFS(unittest.TestCase):
|
||||
self.assertEqual(list(virt), [])
|
||||
|
||||
# admin-only rootfs with all-read-only subfolder
|
||||
vfs = AuthSrv(Namespace(c=None, a=["k:k"], v=[".::ak", "a:a:r"]), self.log,).vfs
|
||||
vfs = AuthSrv(
|
||||
Cfg(a=["k:k"], v=[".::ak", "a:a:r"]),
|
||||
self.log,
|
||||
).vfs
|
||||
self.assertEqual(len(vfs.nodes), 1)
|
||||
self.assertEqual(vfs.vpath, "")
|
||||
self.assertEqual(vfs.realpath, td)
|
||||
@@ -211,9 +224,7 @@ class TestVFS(unittest.TestCase):
|
||||
|
||||
# breadth-first construction
|
||||
vfs = AuthSrv(
|
||||
Namespace(
|
||||
c=None,
|
||||
a=[],
|
||||
Cfg(
|
||||
v=[
|
||||
"a/ac/acb:a/ac/acb:w",
|
||||
"a:a:w",
|
||||
@@ -234,7 +245,7 @@ class TestVFS(unittest.TestCase):
|
||||
self.undot(vfs, "./.././foo/..", "")
|
||||
|
||||
# shadowing
|
||||
vfs = AuthSrv(Namespace(c=None, a=[], v=[".::r", "b:a/ac:r"]), self.log).vfs
|
||||
vfs = AuthSrv(Cfg(v=[".::r", "b:a/ac:r"]), self.log).vfs
|
||||
|
||||
fsp, r1, v1 = self.ls(vfs, "", "*")
|
||||
self.assertEqual(fsp, td)
|
||||
@@ -271,7 +282,7 @@ class TestVFS(unittest.TestCase):
|
||||
).encode("utf-8")
|
||||
)
|
||||
|
||||
au = AuthSrv(Namespace(c=[cfg_path], a=[], v=[]), self.log)
|
||||
au = AuthSrv(Cfg(c=[cfg_path]), self.log)
|
||||
self.assertEqual(au.user["a"], "123")
|
||||
self.assertEqual(au.user["asd"], "fgh:jkl")
|
||||
n = au.vfs
|
||||
|
||||
Reference in New Issue
Block a user