Compare commits

...

39 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| ed | 96495a9bf1 | v0.9.6 | 2021-03-07 21:44:25 +01:00 |
| ed | b2fafec5fc | handle key-normalization errors | 2021-03-07 21:41:36 +01:00 |
| ed | 0850b8ae2b | v0.9.5 | 2021-03-07 19:25:24 +01:00 |
| ed | 8a68a96c57 | css tweaks | 2021-03-07 19:15:19 +01:00 |
| ed | d3aae8ed6a | more mojibake fixes | 2021-03-07 18:58:26 +01:00 |
| ed | c62ebadda8 | separate tree scrollbar | 2021-03-07 18:26:57 +01:00 |
| ed | ffcee6d390 | add tooltips and more mojibake compat | 2021-03-07 04:14:55 +01:00 |
| ed | de32838346 | key notation normalization (why tho) | 2021-03-07 02:46:17 +01:00 |
| ed | b9a4e47ea2 | mojibake support for the spa stuff | 2021-03-06 22:48:49 +01:00 |
| ed | 57d994422d | logging cleanup | 2021-03-06 17:38:56 +01:00 |
| ed | 6ecd745323 | so much for sessionStorage | 2021-03-06 16:34:55 +01:00 |
| ed | bd769f5bdb | fix py2 + encourage py3 | 2021-03-06 02:42:17 +01:00 |
| ed | 2381692aba | js cfg | 2021-03-06 02:30:36 +01:00 |
| ed | 24fdada0a0 | did you know rhel 7 has an sqlite3 from 2015 | 2021-03-06 02:28:49 +01:00 |
| ed | bb5169710a | warn people when they're gonna have a bad time | 2021-03-06 00:30:05 +01:00 |
| ed | 9cde2352f3 | v0.9.4 | 2021-03-05 02:06:18 +01:00 |
| ed | 482dd7a938 | v0.9.3 | 2021-03-05 00:00:22 +01:00 |
| ed | bddcc69438 | v0.9.2 | 2021-03-04 22:58:22 +01:00 |
| ed | 19d4540630 | good | 2021-03-04 22:38:12 +01:00 |
| ed | 4f5f6c81f5 | add buttons to adjust tree width | 2021-03-04 22:34:09 +01:00 |
| ed | 7e4c1238ba | oh | 2021-03-04 21:12:54 +01:00 |
| ed | f7196ac773 | dodge pushstate size limit | 2021-03-04 21:06:59 +01:00 |
| ed | 7a7c832000 | sfx-builder: support ancient git versions | 2021-03-04 20:30:28 +01:00 |
| ed | 2b4ccdbebb | multithread the slow mtag backends | 2021-03-04 20:28:03 +01:00 |
| ed | 0d16b49489 | broke this too | 2021-03-04 01:35:09 +01:00 |
| ed | 768405b691 | tree broke | 2021-03-04 01:32:44 +01:00 |
| ed | da01413b7b | remove speedbumps | 2021-03-04 01:21:04 +01:00 |
| ed | 914e22c53e | async tagging of incoming files | 2021-03-03 18:36:05 +01:00 |
| ed | 43a23bf733 | v0.9.1 | 2021-03-03 01:28:32 +01:00 |
| ed | 92bb00c6d2 | faster sorting | 2021-03-03 01:27:41 +01:00 |
| ed | b0b97a2648 | fix bugs | 2021-03-03 00:46:15 +01:00 |
| ed | 2c452fe323 | readme nitpicks | 2021-03-02 01:02:13 +01:00 |
| ed | ad73d0c77d | update feature list in readme | 2021-03-02 00:31:08 +01:00 |
| ed | 7f9bf1c78c | v0.9.0 | 2021-03-02 00:12:15 +01:00 |
| ed | 61a6bc3a65 | make browser columns compactable | 2021-03-02 00:07:04 +01:00 |
| ed | 46e10b0e9f | yab | 2021-03-01 03:15:41 +01:00 |
| ed | 8441206e26 | read media-tags from files (for display/searching) | 2021-03-01 02:50:10 +01:00 |
| ed | 9fdc5ee748 | use one sqlite3 cursor, closes #1 | 2021-02-25 22:30:40 +01:00 |
| ed | 00ff133387 | support receiving chunked PUT | 2021-02-25 22:26:03 +01:00 |
29 changed files with 2246 additions and 615 deletions

.vscode/launch.json (vendored, 5 changed lines)

@@ -13,10 +13,13 @@
         "-ed",
         "-emp",
         "-e2dsa",
+        "-e2ts",
         "-a",
         "ed:wark",
         "-v",
-        "srv::r:aed:cnodupe"
+        "srv::r:aed:cnodupe",
+        "-v",
+        "dist:dist:r"
     ]
 },
 {

.vscode/tasks.json (vendored, 2 changed lines)

@@ -8,7 +8,7 @@
     },
     {
         "label": "no_dbg",
-        "command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -a ed:wark -v srv::r:aed:cnodupe ;exit 1",
+        "command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -e2ts -a ed:wark -v srv::r:aed:cnodupe -v dist:dist:r ;exit 1",
         "type": "shell"
     }
 ]


@@ -59,7 +59,7 @@ you may also want these, especially on servers:
 * server indexing
   * ☑ locate files by contents
   * ☑ search by name/path/date/size
   * search by ID3-tags etc.
 * markdown
   * ☑ viewer
   * ☑ editor (sure why not)
@@ -69,7 +69,9 @@ summary: it works! you can use it! (but technically not even close to beta)

 # bugs
-* probably, pls let me know
+* Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
+* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
+* probably more, pls let me know

 # searching
@@ -82,7 +84,42 @@ path/name queries are space-separated, AND'ed together, and words are negated wi
 * path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
 * name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9)

-other metadata (like song tags etc) are not yet indexed for searching
+add `-e2ts` to also scan/index tags from music files:
+
+## search configuration
+
+searching relies on two databases, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`). Configuration can be done through arguments, volume flags, or a mix of both.
+
+through arguments:
+* `-e2d` enables file indexing on upload
+* `-e2ds` scans writable folders on startup
+* `-e2dsa` scans all mounted volumes (including readonly ones)
+* `-e2t` enables metadata indexing on upload
+* `-e2ts` scans for tags in all files that don't have tags yet
+* `-e2tsr` deletes all existing tags, so a full reindex
+
+the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
+* `-v ~/music::ce2dsa:ce2tsr` does a full reindex of everything on startup
+* `-v ~/music::cd2d` disables **all** indexing, even if any `-e2*` are on
+* `-v ~/music::cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`
+
+`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those
+
+`-mte` decides which tags to index and display in the browser (and also the display order), this can be changed per-volume:
+* `-v ~/music::cmte=title,artist` indexes and displays *title* followed by *artist*
+
+if you add/remove a tag from `mte` you will need to run with `-e2tsr` once to rebuild the database, otherwise only new files will be affected
+
+`-mtm` can be used to add or redefine a metadata mapping, say you have media files with `foo` and `bar` tags and you want them to display as `qux` in the browser (preferring `foo` if both are present), then do `-mtm qux=foo,bar` and now you can `-mte artist,title,qux`
+
+see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)
+
+`--no-mutagen` disables mutagen and uses ffprobe instead, which...
+* is about 20x slower than mutagen
+* catches a few tags that mutagen doesn't
+* avoids pulling any GPL code into copyparty
+* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve
+
 # client examples
@@ -91,16 +128,33 @@ other metadata (like song tags etc) are not yet indexed for searching
 * `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
 * `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
+* curl/wget: upload some files (post=file, chunk=stdin)
+  * `post(){ curl -b cppwd=wark http://127.0.0.1:3923/ -F act=bput -F f=@"$1";}`
+    `post movie.mkv`
+  * `post(){ wget --header='Cookie: cppwd=wark' http://127.0.0.1:3923/?raw --post-file="$1" -O-;}`
+    `post movie.mkv`
+  * `chunk(){ curl -b cppwd=wark http://127.0.0.1:3923/ -T-;}`
+    `chunk <movie.mkv`
 * FUSE: mount a copyparty server as a local filesystem
   * cross-platform python client available in [./bin/](bin/)
   * [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
+
+copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads:
+
+    b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
+    b512 <movie.mkv

 # dependencies
-* `jinja2`
+* `jinja2` (is built into the SFX)

-optional, will eventually enable thumbnails:
+**optional,** enables music tags:
+  * either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
+  * or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
+
+**optional,** will eventually enable thumbnails:
 * `Pillow` (requires py2.7 or py3.5+)
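The curl/wget helpers and the `b512()` one-liner above map cleanly onto python. The sketch below is a hypothetical client, not part of this compare: it uploads via chunked PUT (the transfer mode this changeset adds server-side support for) and verifies the returned checksum; the URL, the `cppwd=wark` cookie, and the filename are placeholders taken from the examples above, and the `requests` library is assumed to be available.

```python
# minimal client sketch (not part of this diff): chunked PUT upload + checksum verification
import base64
import hashlib
import requests  # assumption: requests is installed; stdlib http.client would also work


def b512(path):
    # python twin of the b512() shell helper: raw sha512 digest -> base64 -> first 43 chars
    h = hashlib.sha512()
    with open(path, "rb") as f:
        for buf in iter(lambda: f.read(512 * 1024), b""):
            h.update(buf)
    return base64.b64encode(h.digest())[:43].decode("ascii")


def chunks(path, bufsz=512 * 1024):
    # a generator body makes requests send Transfer-Encoding: chunked
    with open(path, "rb") as f:
        for buf in iter(lambda: f.read(bufsz), b""):
            yield buf


def upload(path, url="http://127.0.0.1:3923/", pw="wark"):
    # mirrors the chunk() example above, which PUTs the stream to the volume root
    r = requests.put(url, data=chunks(path), cookies={"cppwd": pw})
    r.raise_for_status()
    # per the README text above, the reply contains a truncated base64 sha512 of the upload
    assert b512(path) in r.text, "checksum mismatch, upload may be corrupted"


upload("movie.mkv")
```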


@@ -12,7 +12,7 @@
 Description=copyparty file server

 [Service]
-ExecStart=/usr/bin/python /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
+ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
 ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'

 [Install]


@@ -18,7 +18,7 @@ import locale
 import argparse
 from textwrap import dedent

-from .__init__ import E, WINDOWS, VT100
+from .__init__ import E, WINDOWS, VT100, PY2
 from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
 from .svchub import SvcHub
 from .util import py_desc, align_tab
@@ -53,6 +53,10 @@ class RiceFormatter(argparse.HelpFormatter):
         return "".join(indent + line + "\n" for line in text.splitlines())


+def warn(msg):
+    print("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
+
+
 def ensure_locale():
     for x in [
         "en_US.UTF-8",
@@ -198,7 +202,7 @@ def main():
           and "cflag" is config flags to set on this volume

           list of cflags:
-            cnodupe rejects existing files (instead of symlinking them)
+            "cnodupe" rejects existing files (instead of symlinking them)

           example:\033[35m
             -a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
@@ -239,17 +243,28 @@ def main():
     ap.add_argument("-q", action="store_true", help="quiet")
     ap.add_argument("-ed", action="store_true", help="enable ?dots")
     ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
-    ap.add_argument("-e2d", action="store_true", help="enable up2k database")
-    ap.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
-    ap.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
     ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
     ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
     ap.add_argument("-nih", action="store_true", help="no info hostname")
     ap.add_argument("-nid", action="store_true", help="no info disk-usage")
-    ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
+    ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile (for debugging)")
+    ap.add_argument("--no-scandir", action="store_true", help="disable scandir (for debugging)")
     ap.add_argument("--urlform", type=str, default="print,get", help="how to handle url-forms")
     ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")

+    ap2 = ap.add_argument_group('database options')
+    ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
+    ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
+    ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
+    ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
+    ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
+    ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
+    ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead")
+    ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
+    ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
+    ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
+        default="circle,album,.tn,artist,title,.bpm,key,.dur,.q")
+
     ap2 = ap.add_argument_group('SSL/TLS options')
     ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
     ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
@@ -257,14 +272,20 @@ def main():
     ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
     ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
     ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")

     al = ap.parse_args()
     # fmt: on

-    if al.e2dsa:
-        al.e2ds = True
-
-    if al.e2ds:
-        al.e2d = True
+    # propagate implications
+    for k1, k2 in [
+        ["e2dsa", "e2ds"],
+        ["e2ds", "e2d"],
+        ["e2tsr", "e2ts"],
+        ["e2ts", "e2t"],
+        ["e2t", "e2d"],
+    ]:
+        if getattr(al, k1):
+            setattr(al, k2, True)

     al.i = al.i.split(",")
     try:
@@ -283,7 +304,13 @@ def main():
         if al.ciphers:
             configure_ssl_ciphers(al)
     else:
-        print("\033[33m ssl module does not exist; cannot enable https\033[0m\n")
+        warn("ssl module does not exist; cannot enable https")
+
+    if PY2 and WINDOWS and al.e2d:
+        warn(
+            "windows py2 cannot do unicode filenames with -e2d\n"
+            + "  (if you crash with codec errors then that is why)"
+        )

     SvcHub(al).run()
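The flag-implication loop in the `@@ -257,14 +272,20 @@` hunk above replaces two chained if-blocks and also wires the new tag flags into `-e2d`. As a standalone illustration of the same pattern (using a bare `Namespace` instead of the real argparse setup shown in the diff):

```python
# standalone sketch of the implication chain from the diff above:
# e2dsa -> e2ds -> e2d, and e2tsr -> e2ts -> e2t -> e2d
from argparse import Namespace

al = Namespace(e2d=False, e2ds=False, e2dsa=False, e2t=False, e2ts=False, e2tsr=True)

# order matters: each implied flag is itself checked by a later pair, so the chain cascades
for k1, k2 in [
    ["e2dsa", "e2ds"],
    ["e2ds", "e2d"],
    ["e2tsr", "e2ts"],
    ["e2ts", "e2t"],
    ["e2t", "e2d"],
]:
    if getattr(al, k1):
        setattr(al, k2, True)

print(al)  # e2tsr has switched on e2ts, e2t and e2d; e2ds/e2dsa stay off
```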


@@ -1,8 +1,8 @@
 # coding: utf-8

-VERSION = (0, 8, 3)
-CODENAME = "discovery"
-BUILD_DT = (2021, 2, 22)
+VERSION = (0, 9, 6)
+CODENAME = "the strongest music server"
+BUILD_DT = (2021, 3, 7)

 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)


@@ -6,7 +6,7 @@ import re
import threading import threading
from .__init__ import PY2, WINDOWS from .__init__ import PY2, WINDOWS
from .util import undot, Pebkac, fsdec, fsenc from .util import undot, Pebkac, fsdec, fsenc, statdir, nuprint
class VFS(object): class VFS(object):
@@ -102,12 +102,11 @@ class VFS(object):
return fsdec(os.path.realpath(fsenc(rp))) return fsdec(os.path.realpath(fsenc(rp)))
def ls(self, rem, uname): def ls(self, rem, uname, scandir, lstat=False):
"""return user-readable [fsdir,real,virt] items at vpath""" """return user-readable [fsdir,real,virt] items at vpath"""
virt_vis = {} # nodes readable by user virt_vis = {} # nodes readable by user
abspath = self.canonical(rem) abspath = self.canonical(rem)
items = os.listdir(fsenc(abspath)) real = list(statdir(nuprint, scandir, lstat, abspath))
real = [fsdec(x) for x in items]
real.sort() real.sort()
if not rem: if not rem:
for name, vn2 in sorted(self.nodes.items()): for name, vn2 in sorted(self.nodes.items()):
@@ -115,7 +114,7 @@ class VFS(object):
virt_vis[name] = vn2 virt_vis[name] = vn2
# no vfs nodes in the list of real inodes # no vfs nodes in the list of real inodes
real = [x for x in real if x not in self.nodes] real = [x for x in real if x[0] not in self.nodes]
return [abspath, real, virt_vis] return [abspath, real, virt_vis]
@@ -148,8 +147,8 @@ class AuthSrv(object):
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.reload() self.reload()
def log(self, msg): def log(self, msg, c=0):
self.log_func("auth", msg) self.log_func("auth", msg, c)
def invert(self, orig): def invert(self, orig):
if PY2: if PY2:
@@ -206,8 +205,11 @@ class AuthSrv(object):
if lvl in "wa": if lvl in "wa":
mwrite[vol_dst].append(uname) mwrite[vol_dst].append(uname)
if lvl == "c": if lvl == "c":
# config option, currently switches only cval = True
mflags[vol_dst][uname] = True if "=" in uname:
uname, cval = uname.split("=", 1)
mflags[vol_dst][uname] = cval
def reload(self): def reload(self):
""" """
@@ -248,12 +250,19 @@ class AuthSrv(object):
perms = perms.split(":") perms = perms.split(":")
for (lvl, uname) in [[x[0], x[1:]] for x in perms]: for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
if lvl == "c": if lvl == "c":
# config option, currently switches only cval = True
mflags[dst][uname] = True if "=" in uname:
uname, cval = uname.split("=", 1)
mflags[dst][uname] = cval
continue
if uname == "": if uname == "":
uname = "*" uname = "*"
if lvl in "ra": if lvl in "ra":
mread[dst].append(uname) mread[dst].append(uname)
if lvl in "wa": if lvl in "wa":
mwrite[dst].append(uname) mwrite[dst].append(uname)
@@ -268,6 +277,7 @@ class AuthSrv(object):
elif "" not in mount: elif "" not in mount:
# there's volumes but no root; make root inaccessible # there's volumes but no root; make root inaccessible
vfs = VFS(os.path.abspath("."), "") vfs = VFS(os.path.abspath("."), "")
vfs.flags["d2d"] = True
maxdepth = 0 maxdepth = 0
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))): for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
@@ -294,21 +304,33 @@ class AuthSrv(object):
if missing_users: if missing_users:
self.log( self.log(
"\033[31myou must -a the following users: " "you must -a the following users: "
+ ", ".join(k for k in sorted(missing_users)) + ", ".join(k for k in sorted(missing_users)),
+ "\033[0m" c=1,
) )
raise Exception("invalid config") raise Exception("invalid config")
for vol in vfs.all_vols.values():
if (self.args.e2ds and vol.uwrite) or self.args.e2dsa:
vol.flags["e2ds"] = True
if self.args.e2d or "e2ds" in vol.flags:
vol.flags["e2d"] = True
for k in ["e2t", "e2ts", "e2tsr"]:
if getattr(self.args, k):
vol.flags[k] = True
# default tag-list if unset
if "mte" not in vol.flags:
vol.flags["mte"] = self.args.mte
try: try:
v, _ = vfs.get("/", "*", False, True) v, _ = vfs.get("/", "*", False, True)
if self.warn_anonwrite and os.getcwd() == v.realpath: if self.warn_anonwrite and os.getcwd() == v.realpath:
self.warn_anonwrite = False self.warn_anonwrite = False
self.log( msg = "anyone can read/write the current directory: {}"
"\033[31manyone can read/write the current directory: {}\033[0m".format( self.log(msg.format(v.realpath), c=1)
v.realpath
)
)
except Pebkac: except Pebkac:
self.warn_anonwrite = True self.warn_anonwrite = True


@@ -49,11 +49,11 @@ class MpWorker(object):
             # print('k')
             pass

-    def log(self, src, msg):
-        self.q_yield.put([0, "log", [src, msg]])
+    def log(self, src, msg, c=0):
+        self.q_yield.put([0, "log", [src, msg, c]])

-    def logw(self, msg):
-        self.log("mp{}".format(self.n), msg)
+    def logw(self, msg, c=0):
+        self.log("mp{}".format(self.n), msg, c)

     def httpdrop(self, addr):
         self.q_yield.put([0, "httpdrop", [addr]])
@@ -73,7 +73,7 @@ class MpWorker(object):
             if PY2:
                 sck = pickle.loads(sck)  # nosec

-            self.log("%s %s" % addr, "\033[1;30m|%sC-qpop\033[0m" % ("-" * 4,))
+            self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
             self.httpsrv.accept(sck, addr)

             with self.mutex:


@@ -28,7 +28,7 @@ class BrokerThr(object):
     def put(self, want_retval, dest, *args):
         if dest == "httpconn":
             sck, addr = args
-            self.log("%s %s" % addr, "\033[1;30m|%sC-qpop\033[0m" % ("-" * 4,))
+            self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
             self.httpsrv.accept(sck, addr)

         else:


@@ -41,8 +41,8 @@ class HttpCli(object):
self.absolute_urls = False self.absolute_urls = False
self.out_headers = {"Access-Control-Allow-Origin": "*"} self.out_headers = {"Access-Control-Allow-Origin": "*"}
def log(self, msg): def log(self, msg, c=0):
self.log_func(self.log_src, msg) self.log_func(self.log_src, msg, c)
def _check_nonfatal(self, ex): def _check_nonfatal(self, ex):
return ex.code < 400 or ex.code == 404 return ex.code < 400 or ex.code == 404
@@ -63,7 +63,7 @@ class HttpCli(object):
if not headerlines[0]: if not headerlines[0]:
# seen after login with IE6.0.2900.5512.xpsp.080413-2111 (xp-sp3) # seen after login with IE6.0.2900.5512.xpsp.080413-2111 (xp-sp3)
self.log("\033[1;31mBUG: trailing newline from previous request\033[0m") self.log("BUG: trailing newline from previous request", c="1;31")
headerlines.pop(0) headerlines.pop(0)
try: try:
@@ -74,7 +74,7 @@ class HttpCli(object):
except Pebkac as ex: except Pebkac as ex:
# self.log("pebkac at httpcli.run #1: " + repr(ex)) # self.log("pebkac at httpcli.run #1: " + repr(ex))
self.keepalive = self._check_nonfatal(ex) self.keepalive = self._check_nonfatal(ex)
self.loud_reply(str(ex), status=ex.code) self.loud_reply(unicode(ex), status=ex.code)
return self.keepalive return self.keepalive
# time.sleep(0.4) # time.sleep(0.4)
@@ -163,7 +163,7 @@ class HttpCli(object):
response = ["HTTP/1.1 {} {}".format(status, HTTPCODE[status])] response = ["HTTP/1.1 {} {}".format(status, HTTPCODE[status])]
if length is not None: if length is not None:
response.append("Content-Length: " + str(length)) response.append("Content-Length: " + unicode(length))
# close if unknown length, otherwise take client's preference # close if unknown length, otherwise take client's preference
response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close")) response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close"))
@@ -222,6 +222,9 @@ class HttpCli(object):
static_path = os.path.join(E.mod, "web/", self.vpath[5:]) static_path = os.path.join(E.mod, "web/", self.vpath[5:])
return self.tx_file(static_path) return self.tx_file(static_path)
if "tree" in self.uparam:
return self.tx_tree()
# conditional redirect to single volumes # conditional redirect to single volumes
if self.vpath == "" and not self.uparam: if self.vpath == "" and not self.uparam:
nread = len(self.rvol) nread = len(self.rvol)
@@ -246,9 +249,6 @@ class HttpCli(object):
self.vpath = None self.vpath = None
return self.tx_mounts() return self.tx_mounts()
if "tree" in self.uparam:
return self.tx_tree()
return self.tx_browser() return self.tx_browser()
def handle_options(self): def handle_options(self):
@@ -323,8 +323,11 @@ class HttpCli(object):
raise Pebkac(405, "don't know how to handle POST({})".format(ctype)) raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
def get_body_reader(self): def get_body_reader(self):
remains = int(self.headers.get("content-length", None)) chunked = "chunked" in self.headers.get("transfer-encoding", "").lower()
if remains is None: remains = int(self.headers.get("content-length", -1))
if chunked:
return read_socket_chunked(self.sr), remains
elif remains == -1:
self.keepalive = False self.keepalive = False
return read_socket_unbounded(self.sr), remains return read_socket_unbounded(self.sr), remains
else: else:
@@ -342,6 +345,10 @@ class HttpCli(object):
with open(path, "wb", 512 * 1024) as f: with open(path, "wb", 512 * 1024) as f:
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f) post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
self.conn.hsrv.broker.put(
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fn
)
return post_sz, sha_b64, remains, path return post_sz, sha_b64, remains, path
def handle_stash(self): def handle_stash(self):
@@ -425,7 +432,7 @@ class HttpCli(object):
body["ptop"] = vfs.realpath body["ptop"] = vfs.realpath
body["prel"] = rem body["prel"] = rem
body["addr"] = self.ip body["addr"] = self.ip
body["flag"] = vfs.flags body["vcfg"] = vfs.flags
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body) x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
response = x.get() response = x.get()
@@ -442,20 +449,31 @@ class HttpCli(object):
vols.append([vfs.vpath, vfs.realpath, vfs.flags]) vols.append([vfs.vpath, vfs.realpath, vfs.flags])
idx = self.conn.get_u2idx() idx = self.conn.get_u2idx()
t0 = time.time()
if "srch" in body: if "srch" in body:
# search by up2k hashlist # search by up2k hashlist
vbody = copy.deepcopy(body) vbody = copy.deepcopy(body)
vbody["hash"] = len(vbody["hash"]) vbody["hash"] = len(vbody["hash"])
self.log("qj: " + repr(vbody)) self.log("qj: " + repr(vbody))
hits = idx.fsearch(vols, body) hits = idx.fsearch(vols, body)
self.log("q#: " + repr(hits)) self.log("q#: {} ({:.2f}s)".format(repr(hits), time.time() - t0))
taglist = []
else: else:
# search by query params # search by query params
self.log("qj: " + repr(body)) self.log("qj: " + repr(body))
hits = idx.search(vols, body) hits, taglist = idx.search(vols, body)
self.log("q#: " + str(len(hits))) self.log("q#: {} ({:.2f}s)".format(len(hits), time.time() - t0))
r = json.dumps(hits).encode("utf-8") order = []
cfg = self.args.mte.split(",")
for t in cfg:
if t in taglist:
order.append(t)
for t in taglist:
if t not in order:
order.append(t)
r = json.dumps({"hits": hits, "tag_order": order}).encode("utf-8")
self.reply(r, mime="application/json") self.reply(r, mime="application/json")
return True return True
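The `tag_order` construction in the hunk above is worth restating in plain python: tags named in `-mte` keep their configured position, and any other tags that showed up in the search results are appended afterwards. A tiny standalone sketch with made-up sample data:

```python
# standalone sketch of the tag_order merge shown above (sample data is made up)
mte = "circle,album,.tn,artist,title,.bpm,key,.dur,.q"  # the -mte default from this diff
taglist = ["artist", "title", "comment", ".dur"]        # tags actually found in the hits

order = [t for t in mte.split(",") if t in taglist]     # configured tags, in -mte order
order += [t for t in taglist if t not in order]         # then anything -mte doesn't mention

print(order)  # ['artist', 'title', '.dur', 'comment']
```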
@@ -503,7 +521,7 @@ class HttpCli(object):
if len(cstart) > 1 and path != os.devnull: if len(cstart) > 1 and path != os.devnull:
self.log( self.log(
"clone {} to {}".format( "clone {} to {}".format(
cstart[0], " & ".join(str(x) for x in cstart[1:]) cstart[0], " & ".join(unicode(x) for x in cstart[1:])
) )
) )
ofs = 0 ofs = 0
@@ -661,6 +679,9 @@ class HttpCli(object):
raise Pebkac(400, "empty files in post") raise Pebkac(400, "empty files in post")
files.append([sz, sha512_hex]) files.append([sz, sha512_hex])
self.conn.hsrv.broker.put(
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fname
)
self.conn.nbyte += sz self.conn.nbyte += sz
except Pebkac: except Pebkac:
@@ -676,7 +697,7 @@ class HttpCli(object):
raise raise
except Pebkac as ex: except Pebkac as ex:
errmsg = str(ex) errmsg = unicode(ex)
td = max(0.1, time.time() - t0) td = max(0.1, time.time() - t0)
sz_total = sum(x[0] for x in files) sz_total = sum(x[0] for x in files)
@@ -985,7 +1006,7 @@ class HttpCli(object):
mime=guess_mime(req_path)[0] or "application/octet-stream", mime=guess_mime(req_path)[0] or "application/octet-stream",
) )
logmsg += str(status) + logtail logmsg += unicode(status) + logtail
if self.mode == "HEAD" or not do_send: if self.mode == "HEAD" or not do_send:
self.log(logmsg) self.log(logmsg)
@@ -999,7 +1020,7 @@ class HttpCli(object):
remains = sendfile_py(lower, upper, f, self.s) remains = sendfile_py(lower, upper, f, self.s)
if remains > 0: if remains > 0:
logmsg += " \033[31m" + str(upper - remains) + "\033[0m" logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
spd = self._spd((upper - lower) - remains) spd = self._spd((upper - lower) - remains)
self.log("{}, {}".format(logmsg, spd)) self.log("{}, {}".format(logmsg, spd))
@@ -1046,7 +1067,7 @@ class HttpCli(object):
sz_html = len(template.render(**targs).encode("utf-8")) sz_html = len(template.render(**targs).encode("utf-8"))
self.send_headers(sz_html + sz_md, status) self.send_headers(sz_html + sz_md, status)
logmsg += str(status) logmsg += unicode(status)
if self.mode == "HEAD" or not do_send: if self.mode == "HEAD" or not do_send:
self.log(logmsg) self.log(logmsg)
return True return True
@@ -1060,7 +1081,7 @@ class HttpCli(object):
self.log(logmsg + " \033[31md/c\033[0m") self.log(logmsg + " \033[31md/c\033[0m")
return False return False
self.log(logmsg + " " + str(len(html))) self.log(logmsg + " " + unicode(len(html)))
return True return True
def tx_mounts(self): def tx_mounts(self):
@@ -1094,11 +1115,12 @@ class HttpCli(object):
excl = None excl = None
if target: if target:
excl, target = (target.split("/", 1) + [""])[:2] excl, target = (target.split("/", 1) + [""])[:2]
ret["k" + excl] = self.gen_tree("/".join([top, excl]).strip("/"), target) sub = self.gen_tree("/".join([top, excl]).strip("/"), target)
ret["k" + quotep(excl)] = sub
try: try:
vn, rem = self.auth.vfs.get(top, self.uname, True, False) vn, rem = self.auth.vfs.get(top, self.uname, True, False)
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname) fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
except: except:
vfs_ls = [] vfs_ls = []
vfs_virt = {} vfs_virt = {}
@@ -1109,13 +1131,13 @@ class HttpCli(object):
dirs = [] dirs = []
vfs_ls = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
if not self.args.ed or "dots" not in self.uparam: if not self.args.ed or "dots" not in self.uparam:
vfs_ls = exclude_dotfiles(vfs_ls) vfs_ls = exclude_dotfiles(vfs_ls)
for fn in [x for x in vfs_ls if x != excl]: for fn in [x for x in vfs_ls if x != excl]:
abspath = os.path.join(fsroot, fn) dirs.append(quotep(fn))
if os.path.isdir(abspath):
dirs.append(fn)
for x in vfs_virt.keys(): for x in vfs_virt.keys():
if x != excl: if x != excl:
@@ -1154,7 +1176,9 @@ class HttpCli(object):
return self.tx_file(abspath) return self.tx_file(abspath)
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname) fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
stats = {k: v for k, v in vfs_ls}
vfs_ls = [x[0] for x in vfs_ls]
vfs_ls.extend(vfs_virt.keys()) vfs_ls.extend(vfs_virt.keys())
# check for old versions of files, # check for old versions of files,
@@ -1183,6 +1207,11 @@ class HttpCli(object):
is_ls = "ls" in self.uparam is_ls = "ls" in self.uparam
icur = None
if "e2t" in vn.flags:
idx = self.conn.get_u2idx()
icur = idx.get_cur(vn.realpath)
dirs = [] dirs = []
files = [] files = []
for fn in vfs_ls: for fn in vfs_ls:
@@ -1200,7 +1229,7 @@ class HttpCli(object):
fspath = fsroot + "/" + fn fspath = fsroot + "/" + fn
try: try:
inf = os.stat(fsenc(fspath)) inf = stats.get(fn) or os.stat(fsenc(fspath))
except: except:
self.log("broken symlink: {}".format(repr(fspath))) self.log("broken symlink: {}".format(repr(fspath)))
continue continue
@@ -1232,18 +1261,49 @@ class HttpCli(object):
"sz": sz, "sz": sz,
"ext": ext, "ext": ext,
"dt": dt, "dt": dt,
"ts": inf.st_mtime, "ts": int(inf.st_mtime),
} }
if is_dir: if is_dir:
dirs.append(item) dirs.append(item)
else: else:
files.append(item) files.append(item)
item["rd"] = rem
taglist = {}
for f in files:
fn = f["name"]
rd = f["rd"]
del f["rd"]
if icur:
q = "select w from up where rd = ? and fn = ?"
try:
r = icur.execute(q, (rd, fn)).fetchone()
except:
args = s3enc(idx.mem_cur, rd, fn)
r = icur.execute(q, args).fetchone()
if not r:
continue
w = r[0][:16]
tags = {}
q = "select k, v from mt where w = ? and k != 'x'"
for k, v in icur.execute(q, (w,)):
taglist[k] = True
tags[k] = v
f["tags"] = tags
if icur:
taglist = [k for k in self.args.mte.split(",") if k in taglist]
for f in dirs:
f["tags"] = {}
srv_info = [] srv_info = []
try: try:
if not self.args.nih: if not self.args.nih:
srv_info.append(str(socket.gethostname()).split(".")[0]) srv_info.append(unicode(socket.gethostname()).split(".")[0])
except: except:
self.log("#wow #whoa") self.log("#wow #whoa")
pass pass
@@ -1290,6 +1350,7 @@ class HttpCli(object):
"srvinf": srv_info, "srvinf": srv_info,
"perms": perms, "perms": perms,
"logues": logues, "logues": logues,
"taglist": taglist,
} }
ret = json.dumps(ret) ret = json.dumps(ret)
self.reply(ret.encode("utf-8", "replace"), mime="application/json") self.reply(ret.encode("utf-8", "replace"), mime="application/json")
@@ -1306,7 +1367,10 @@ class HttpCli(object):
files=dirs, files=dirs,
ts=ts, ts=ts,
perms=json.dumps(perms), perms=json.dumps(perms),
have_up2k_idx=self.args.e2d, taglist=taglist,
tag_order=json.dumps(self.args.mte.split(",")),
have_up2k_idx=("e2d" in vn.flags),
have_tags_idx=("e2t" in vn.flags),
logues=logues, logues=logues,
title=html_escape(self.vpath), title=html_escape(self.vpath),
srv_info=srv_info, srv_info=srv_info,


@@ -20,10 +20,12 @@ except ImportError:
you do not have jinja2 installed,\033[33m you do not have jinja2 installed,\033[33m
choose one of these:\033[0m choose one of these:\033[0m
* apt install python-jinja2 * apt install python-jinja2
* python3 -m pip install --user jinja2 * {} -m pip install --user jinja2
* (try another python version, if you have one) * (try another python version, if you have one)
* (try copyparty.sfx instead) * (try copyparty.sfx instead)
""" """.format(
os.path.basename(sys.executable)
)
) )
sys.exit(1) sys.exit(1)
@@ -79,8 +81,8 @@ class HttpConn(object):
def respath(self, res_name): def respath(self, res_name):
return os.path.join(E.mod, "web", res_name) return os.path.join(E.mod, "web", res_name)
def log(self, msg): def log(self, msg, c=0):
self.log_func(self.log_src, msg) self.log_func(self.log_src, msg, c)
def get_u2idx(self): def get_u2idx(self):
if not self.u2idx: if not self.u2idx:
@@ -127,7 +129,7 @@ class HttpConn(object):
if is_https: if is_https:
if self.sr: if self.sr:
self.log("\033[1;31mTODO: cannot do https in jython\033[0m") self.log("TODO: cannot do https in jython", c="1;31")
return return
self.log_src = self.log_src.replace("[36m", "[35m") self.log_src = self.log_src.replace("[36m", "[35m")
@@ -178,7 +180,7 @@ class HttpConn(object):
pass pass
else: else:
self.log("\033[35mhandshake\033[0m " + em) self.log("handshake\033[0m " + em, c=5)
return return


@@ -38,7 +38,7 @@ class HttpSrv(object):
def accept(self, sck, addr): def accept(self, sck, addr):
"""takes an incoming tcp connection and creates a thread to handle it""" """takes an incoming tcp connection and creates a thread to handle it"""
self.log("%s %s" % addr, "\033[1;30m|%sC-cthr\033[0m" % ("-" * 5,)) self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30")
thr = threading.Thread(target=self.thr_client, args=(sck, addr)) thr = threading.Thread(target=self.thr_client, args=(sck, addr))
thr.daemon = True thr.daemon = True
thr.start() thr.start()
@@ -66,11 +66,11 @@ class HttpSrv(object):
thr.start() thr.start()
try: try:
self.log("%s %s" % addr, "\033[1;30m|%sC-crun\033[0m" % ("-" * 6,)) self.log("%s %s" % addr, "|%sC-crun" % ("-" * 6,), c="1;30")
cli.run() cli.run()
finally: finally:
self.log("%s %s" % addr, "\033[1;30m|%sC-cdone\033[0m" % ("-" * 7,)) self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30")
try: try:
sck.shutdown(socket.SHUT_RDWR) sck.shutdown(socket.SHUT_RDWR)
sck.close() sck.close()
@@ -78,7 +78,8 @@ class HttpSrv(object):
if not MACOS: if not MACOS:
self.log( self.log(
"%s %s" % addr, "%s %s" % addr,
"\033[1;30mshut({}): {}\033[0m".format(sck.fileno(), ex), "shut({}): {}".format(sck.fileno(), ex),
c="1;30",
) )
if ex.errno not in [10038, 10054, 107, 57, 9]: if ex.errno not in [10038, 10054, 107, 57, 9]:
# 10038 No longer considered a socket # 10038 No longer considered a socket

copyparty/mtag.py (new file, 314 lines)

@@ -0,0 +1,314 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import re
import os
import sys
import shutil
import subprocess as sp
from .__init__ import PY2, WINDOWS
from .util import fsenc, fsdec
if not PY2:
unicode = str
class MTag(object):
def __init__(self, log_func, args):
self.log_func = log_func
self.usable = True
self.prefer_mt = False
mappings = args.mtm
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
or_ffprobe = " or ffprobe"
if self.backend == "mutagen":
self.get = self.get_mutagen
try:
import mutagen
except:
self.log("could not load mutagen, trying ffprobe instead", c=3)
self.backend = "ffprobe"
if self.backend == "ffprobe":
self.get = self.get_ffprobe
self.prefer_mt = True
# about 20x slower
if PY2:
cmd = [b"ffprobe", b"-version"]
try:
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
except:
self.usable = False
else:
if not shutil.which("ffprobe"):
self.usable = False
if self.usable and WINDOWS and sys.version_info < (3, 8):
self.usable = False
or_ffprobe = " or python >= 3.8"
msg = "found ffprobe but your python is too old; need 3.8 or newer"
self.log(msg, c=1)
if not self.usable:
msg = "need mutagen{} to read media tags so please run this:\n {} -m pip install --user mutagen"
self.log(msg.format(or_ffprobe, os.path.basename(sys.executable)), c=1)
return
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
tagmap = {
"album": ["album", "talb", "\u00a9alb", "original-album", "toal"],
"artist": [
"artist",
"tpe1",
"\u00a9art",
"composer",
"performer",
"arranger",
"\u00a9wrt",
"tcom",
"tpe3",
"original-artist",
"tope",
],
"title": ["title", "tit2", "\u00a9nam"],
"circle": [
"album-artist",
"tpe2",
"aart",
"conductor",
"organization",
"band",
],
".tn": ["tracknumber", "trck", "trkn", "track"],
"genre": ["genre", "tcon", "\u00a9gen"],
"date": [
"original-release-date",
"release-date",
"date",
"tdrc",
"\u00a9day",
"original-date",
"original-year",
"tyer",
"tdor",
"tory",
"year",
"creation-time",
],
".bpm": ["bpm", "tbpm", "tmpo", "tbp"],
"key": ["initial-key", "tkey", "key"],
"comment": ["comment", "comm", "\u00a9cmt", "comments", "description"],
}
if mappings:
for k, v in [x.split("=") for x in mappings]:
tagmap[k] = v.split(",")
self.tagmap = {}
for k, vs in tagmap.items():
vs2 = []
for v in vs:
if "-" not in v:
vs2.append(v)
continue
vs2.append(v.replace("-", " "))
vs2.append(v.replace("-", "_"))
vs2.append(v.replace("-", ""))
self.tagmap[k] = vs2
self.rmap = {
v: [n, k] for k, vs in self.tagmap.items() for n, v in enumerate(vs)
}
# self.get = self.compare
def log(self, msg, c=0):
self.log_func("mtag", msg, c)
def normalize_tags(self, ret, md):
for k, v in dict(md).items():
if not v:
continue
k = k.lower().split("::")[0].strip()
mk = self.rmap.get(k)
if not mk:
continue
pref, mk = mk
if mk not in ret or ret[mk][0] > pref:
ret[mk] = [pref, v[0]]
# take first value
ret = {k: unicode(v[1]).strip() for k, v in ret.items()}
# track 3/7 => track 3
for k, v in ret.items():
if k[0] == ".":
v = v.split("/")[0].strip().lstrip("0")
ret[k] = v or 0
return ret
def compare(self, abspath):
if abspath.endswith(".au"):
return {}
print("\n" + abspath)
r1 = self.get_mutagen(abspath)
r2 = self.get_ffprobe(abspath)
keys = {}
for d in [r1, r2]:
for k in d.keys():
keys[k] = True
diffs = []
l1 = []
l2 = []
for k in sorted(keys.keys()):
if k in [".q", ".dur"]:
continue # lenient
v1 = r1.get(k)
v2 = r2.get(k)
if v1 == v2:
print(" ", k, v1)
elif v1 != "0000": # ffprobe date=0
diffs.append(k)
print(" 1", k, v1)
print(" 2", k, v2)
if v1:
l1.append(k)
if v2:
l2.append(k)
if diffs:
raise Exception()
return r1
def get_mutagen(self, abspath):
import mutagen
try:
md = mutagen.File(abspath, easy=True)
x = md.info.length
except Exception as ex:
return {}
ret = {}
try:
dur = int(md.info.length)
try:
q = int(md.info.bitrate / 1024)
except:
q = int((os.path.getsize(abspath) / dur) / 128)
ret[".dur"] = [0, dur]
ret[".q"] = [0, q]
except:
pass
return self.normalize_tags(ret, md)
def get_ffprobe(self, abspath):
cmd = [b"ffprobe", b"-hide_banner", b"--", fsenc(abspath)]
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
r = p.communicate()
txt = r[1].decode("utf-8", "replace")
txt = [x.rstrip("\r") for x in txt.split("\n")]
"""
note:
tags which contain newline will be truncated on first \n,
ffmpeg emits \n and spacepads the : to align visually
note:
the Stream ln always mentions Audio: if audio
the Stream ln usually has kb/s, is more accurate
the Duration ln always has kb/s
the Metadata: after Chapter may contain BPM info,
title : Tempo: 126.0
Input #0, wav,
Metadata:
date : <OK>
Duration:
Chapter #
Metadata:
title : <NG>
Input #0, mp3,
Metadata:
album : <OK>
Duration:
Stream #0:0: Audio:
Stream #0:1: Video:
Metadata:
comment : <NG>
"""
ptn_md_beg = re.compile("^( +)Metadata:$")
ptn_md_kv = re.compile("^( +)([^:]+) *: (.*)")
ptn_dur = re.compile("^ *Duration: ([^ ]+)(, |$)")
ptn_br1 = re.compile("^ *Duration: .*, bitrate: ([0-9]+) kb/s(, |$)")
ptn_br2 = re.compile("^ *Stream.*: Audio:.* ([0-9]+) kb/s(, |$)")
ptn_audio = re.compile("^ *Stream .*: Audio: ")
ptn_au_parent = re.compile("^ *(Input #|Stream .*: Audio: )")
ret = {}
md = {}
in_md = False
is_audio = False
au_parent = False
for ln in txt:
m = ptn_md_kv.match(ln)
if m and in_md and len(m.group(1)) == in_md:
_, k, v = [x.strip() for x in m.groups()]
if k != "" and v != "":
md[k] = [v]
continue
else:
in_md = False
m = ptn_md_beg.match(ln)
if m and au_parent:
in_md = len(m.group(1)) + 2
continue
au_parent = bool(ptn_au_parent.search(ln))
if ptn_audio.search(ln):
is_audio = True
m = ptn_dur.search(ln)
if m:
sec = 0
tstr = m.group(1)
if tstr.lower() != "n/a":
try:
tf = tstr.split(",")[0].split(".")[0].split(":")
for f in tf:
sec *= 60
sec += int(f)
except:
self.log("invalid timestr from ffmpeg: [{}]".format(tstr), c=3)
ret[".dur"] = sec
m = ptn_br1.search(ln)
if m:
ret[".q"] = m.group(1)
m = ptn_br2.search(ln)
if m:
ret[".q"] = m.group(1)
if not is_audio:
return {}
ret = {k: [0, v] for k, v in ret.items()}
return self.normalize_tags(ret, md)


@@ -9,7 +9,6 @@ from datetime import datetime, timedelta
import calendar import calendar
from .__init__ import PY2, WINDOWS, MACOS, VT100 from .__init__ import PY2, WINDOWS, MACOS, VT100
from .authsrv import AuthSrv
from .tcpsrv import TcpSrv from .tcpsrv import TcpSrv
from .up2k import Up2k from .up2k import Up2k
from .util import mp from .util import mp
@@ -39,14 +38,6 @@ class SvcHub(object):
self.tcpsrv = TcpSrv(self) self.tcpsrv = TcpSrv(self)
self.up2k = Up2k(self) self.up2k = Up2k(self)
if self.args.e2ds:
auth = AuthSrv(self.args, self.log, False)
vols = auth.vfs.all_vols.values()
if not self.args.e2dsa:
vols = [x for x in vols if x.uwrite]
self.up2k.build_indexes(vols)
# decide which worker impl to use # decide which worker impl to use
if self.check_mp_enable(): if self.check_mp_enable():
from .broker_mp import BrokerMp as Broker from .broker_mp import BrokerMp as Broker
@@ -74,10 +65,10 @@ class SvcHub(object):
self.broker.shutdown() self.broker.shutdown()
print("nailed it") print("nailed it")
def _log_disabled(self, src, msg): def _log_disabled(self, src, msg, c=0):
pass pass
def _log_enabled(self, src, msg): def _log_enabled(self, src, msg, c=0):
"""handles logging from all components""" """handles logging from all components"""
with self.log_mutex: with self.log_mutex:
now = time.time() now = time.time()
@@ -100,6 +91,13 @@ class SvcHub(object):
msg = self.ansi_re.sub("", msg) msg = self.ansi_re.sub("", msg)
if "\033" in src: if "\033" in src:
src = self.ansi_re.sub("", src) src = self.ansi_re.sub("", src)
elif c:
if isinstance(c, int):
msg = "\033[3{}m{}".format(c, msg)
elif "\033" not in c:
msg = "\033[{}m{}\033[0m".format(c, msg)
else:
msg = "{}{}\033[0m".format(c, msg)
ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3] ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3]
msg = fmt.format(ts, src, msg) msg = fmt.format(ts, src, msg)


@@ -68,21 +68,22 @@
             self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port))

         while True:
-            self.log("tcpsrv", "\033[1;30m|%sC-ncli\033[0m" % ("-" * 1,))
+            self.log("tcpsrv", "|%sC-ncli" % ("-" * 1,), c="1;30")
             if self.num_clients.v >= self.args.nc:
                 time.sleep(0.1)
                 continue

-            self.log("tcpsrv", "\033[1;30m|%sC-acc1\033[0m" % ("-" * 2,))
+            self.log("tcpsrv", "|%sC-acc1" % ("-" * 2,), c="1;30")
             ready, _, _ = select.select(self.srv, [], [])
             for srv in ready:
                 sck, addr = srv.accept()
                 sip, sport = srv.getsockname()
                 self.log(
                     "%s %s" % addr,
-                    "\033[1;30m|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
+                    "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
                         "-" * 3, sip, sport % 8, sport
                     ),
+                    c="1;30",
                 )
                 self.num_clients.add()
                 self.hub.broker.put(False, "httpconn", sck, addr)


@@ -24,10 +24,12 @@ class U2idx(object):
self.log("could not load sqlite3; searchign wqill be disabled") self.log("could not load sqlite3; searchign wqill be disabled")
return return
self.dbs = {} self.cur = {}
self.mem_cur = sqlite3.connect(":memory:")
self.mem_cur.execute(r"create table a (b text)")
def log(self, msg): def log(self, msg, c=0):
self.log_func("u2idx", msg) self.log_func("u2idx", msg, c)
def fsearch(self, vols, body): def fsearch(self, vols, body):
"""search by up2k hashlist""" """search by up2k hashlist"""
@@ -37,7 +39,23 @@ class U2idx(object):
fsize = body["size"] fsize = body["size"]
fhash = body["hash"] fhash = body["hash"]
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash) wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
return self.run_query(vols, "select * from up where w = ?", [wark])
uq = "substr(w,1,16) = ? and w = ?"
uv = [wark[:16], wark]
return self.run_query(vols, uq, uv, "", [])[0]
def get_cur(self, ptop):
cur = self.cur.get(ptop)
if cur:
return cur
cur = _open(ptop)
if not cur:
return None
self.cur[ptop] = cur
return cur
def search(self, vols, body): def search(self, vols, body):
"""search by query params""" """search by query params"""
@@ -45,59 +63,83 @@ class U2idx(object):
return [] return []
qobj = {} qobj = {}
_conv_sz(qobj, body, "sz_min", "sz >= ?") _conv_sz(qobj, body, "sz_min", "up.sz >= ?")
_conv_sz(qobj, body, "sz_max", "sz <= ?") _conv_sz(qobj, body, "sz_max", "up.sz <= ?")
_conv_dt(qobj, body, "dt_min", "mt >= ?") _conv_dt(qobj, body, "dt_min", "up.mt >= ?")
_conv_dt(qobj, body, "dt_max", "mt <= ?") _conv_dt(qobj, body, "dt_max", "up.mt <= ?")
for seg, dk in [["path", "rd"], ["name", "fn"]]: for seg, dk in [["path", "up.rd"], ["name", "up.fn"]]:
if seg in body: if seg in body:
_conv_txt(qobj, body, seg, dk) _conv_txt(qobj, body, seg, dk)
qstr = "select * from up" uq, uv = _sqlize(qobj)
qv = []
if qobj:
qk = []
for k, v in sorted(qobj.items()):
qk.append(k.split("\n")[0])
qv.append(v)
qstr = " and ".join(qk) tq = ""
qstr = "select * from up where " + qstr tv = []
qobj = {}
if "tags" in body:
_conv_txt(qobj, body, "tags", "mt.v")
tq, tv = _sqlize(qobj)
return self.run_query(vols, qstr, qv) return self.run_query(vols, uq, uv, tq, tv)
def run_query(self, vols, qstr, qv): def run_query(self, vols, uq, uv, tq, tv):
qv = tuple(qv) self.log("qs: {} {} , {} {}".format(uq, repr(uv), tq, repr(tv)))
self.log("qs: {} {}".format(qstr, repr(qv)))
ret = [] ret = []
lim = 100 lim = 1000
taglist = {}
for (vtop, ptop, flags) in vols: for (vtop, ptop, flags) in vols:
db = self.dbs.get(ptop) cur = self.get_cur(ptop)
if not db: if not cur:
db = _open(ptop) continue
if not db:
continue
self.dbs[ptop] = db if not tq:
# self.log("idx /{} @ {} {}".format(vtop, ptop, flags)) if not uq:
q = "select * from up"
v = ()
else:
q = "select * from up where " + uq
v = tuple(uv)
else:
# naive assumption: tags first
q = "select up.* from up inner join mt on substr(up.w,1,16) = mt.w where {}"
q = q.format(" and ".join([tq, uq]) if uq else tq)
v = tuple(tv + uv)
c = db.execute(qstr, qv) sret = []
for _, ts, sz, rd, fn in c: c = cur.execute(q, v)
for hit in c:
w, ts, sz, rd, fn = hit
lim -= 1 lim -= 1
if lim <= 0: if lim <= 0:
break break
rp = os.path.join(vtop, rd, fn).replace("\\", "/") if rd.startswith("//") or fn.startswith("//"):
ret.append({"ts": int(ts), "sz": sz, "rp": rp}) rd, fn = s3dec(rd, fn)
return ret rp = os.path.join(vtop, rd, fn).replace("\\", "/")
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
for hit in sret:
w = hit["w"]
del hit["w"]
tags = {}
q = "select k, v from mt where w = ? and k != 'x'"
for k, v in cur.execute(q, (w,)):
taglist[k] = True
tags[k] = v
hit["tags"] = tags
ret.extend(sret)
return ret, list(taglist.keys())
def _open(ptop): def _open(ptop):
db_path = os.path.join(ptop, ".hist", "up2k.db") db_path = os.path.join(ptop, ".hist", "up2k.db")
if os.path.exists(db_path): if os.path.exists(db_path):
return sqlite3.connect(db_path) return sqlite3.connect(db_path).cursor()
def _conv_sz(q, body, k, sql): def _conv_sz(q, body, k, sql):
@@ -146,3 +188,13 @@ def _conv_txt(q, body, k, sql):
qk = "{} {} like {}?{}".format(sql, inv, head, tail) qk = "{} {} like {}?{}".format(sql, inv, head, tail)
q[qk + "\n" + v] = u8safe(v) q[qk + "\n" + v] = u8safe(v)
def _sqlize(qobj):
keys = []
values = []
for k, v in sorted(qobj.items()):
keys.append(k.split("\n")[0])
values.append(v)
return " and ".join(keys), values


@@ -12,6 +12,7 @@ import shutil
import base64 import base64
import hashlib import hashlib
import threading import threading
import traceback
from copy import deepcopy from copy import deepcopy
from .__init__ import WINDOWS from .__init__ import WINDOWS
@@ -24,9 +25,12 @@ from .util import (
sanitize_fn, sanitize_fn,
ren_open, ren_open,
atomic_move, atomic_move,
w8b64enc, s3enc,
w8b64dec, s3dec,
statdir,
) )
from .mtag import MTag
from .authsrv import AuthSrv
try: try:
HAVE_SQLITE3 = True HAVE_SQLITE3 = True
@@ -47,22 +51,31 @@ class Up2k(object):
self.broker = broker self.broker = broker
self.args = broker.args self.args = broker.args
self.log_func = broker.log self.log_func = broker.log
self.persist = self.args.e2d
# config # config
self.salt = broker.args.salt self.salt = broker.args.salt
# state # state
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.hashq = Queue()
self.tagq = Queue()
self.registry = {} self.registry = {}
self.db = {} self.entags = {}
self.flags = {}
self.cur = {}
self.mtag = None
self.n_mtag_tags_added = -1
self.mem_db = None self.mem_cur = None
self.sqlite_ver = None
self.no_expr_idx = False
if HAVE_SQLITE3: if HAVE_SQLITE3:
# mojibake detector # mojibake detector
self.mem_db = sqlite3.connect(":memory:", check_same_thread=False) self.mem_cur = self._orz(":memory:")
self.mem_db.execute(r"create table a (b text)") self.mem_cur.execute(r"create table a (b text)")
self.mem_db.commit() self.sqlite_ver = tuple([int(x) for x in sqlite3.sqlite_version.split(".")])
if self.sqlite_ver < (3, 9):
self.no_expr_idx = True
if WINDOWS: if WINDOWS:
# usually fails to set lastmod too quickly # usually fails to set lastmod too quickly
@@ -71,42 +84,31 @@ class Up2k(object):
thr.daemon = True thr.daemon = True
thr.start() thr.start()
if self.persist: # static
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
if not HAVE_SQLITE3:
self.log("could not initialize sqlite3, will use in-memory registry only")
# this is kinda jank
auth = AuthSrv(self.args, self.log_func, False)
have_e2d = self.init_indexes(auth)
if have_e2d:
thr = threading.Thread(target=self._snapshot) thr = threading.Thread(target=self._snapshot)
thr.daemon = True thr.daemon = True
thr.start() thr.start()
# static thr = threading.Thread(target=self._tagger)
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$") thr.daemon = True
thr.start()
if self.persist and not HAVE_SQLITE3: thr = threading.Thread(target=self._hasher)
self.log("could not initialize sqlite3, will use in-memory registry only") thr.daemon = True
thr.start()
def log(self, msg): def log(self, msg, c=0):
self.log_func("up2k", msg + "\033[K") self.log_func("up2k", msg + "\033[K", c)
def w8enc(self, rd, fn):
ret = []
for k, v in [["d", rd], ["f", fn]]:
try:
self.mem_db.execute("select * from a where b = ?", (v,))
ret.append(v)
except:
ret.append("//" + w8b64enc(v))
# self.log("mojien/{} [{}] {}".format(k, v, ret[-1][2:]))
return tuple(ret)
def w8dec(self, rd, fn):
ret = []
for k, v in [["d", rd], ["f", fn]]:
if v.startswith("//"):
ret.append(w8b64dec(v[2:]))
# self.log("mojide/{} [{}] {}".format(k, ret[-1], v[2:]))
else:
ret.append(v)
return tuple(ret)
def _vis_job_progress(self, job): def _vis_job_progress(self, job):
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"])) perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
@@ -120,14 +122,106 @@ class Up2k(object):
return ret return ret
def register_vpath(self, ptop): def _expr_idx_filter(self, flags):
if not self.no_expr_idx:
return False, flags
ret = {k: v for k, v in flags.items() if not k.startswith("e2t")}
if ret.keys() == flags.keys():
return False, flags
return True, ret
def init_indexes(self, auth):
self.pp = ProgressPrinter()
vols = auth.vfs.all_vols.values()
t0 = time.time()
have_e2d = False
if self.no_expr_idx:
modified = False
for vol in vols:
m, f = self._expr_idx_filter(vol.flags)
if m:
vol.flags = f
modified = True
if modified:
msg = "disabling -e2t because your sqlite belongs in a museum"
self.log(msg, c=3)
live_vols = []
for vol in vols:
try:
os.listdir(vol.realpath)
live_vols.append(vol)
except:
self.log("cannot access " + vol.realpath, c=1)
vols = live_vols
need_mtag = False
for vol in vols:
if "e2t" in vol.flags:
need_mtag = True
if need_mtag:
self.mtag = MTag(self.log_func, self.args)
if not self.mtag.usable:
self.mtag = None
# e2ds(a) volumes first,
# also covers tags where e2ts is set
for vol in vols:
en = {}
if "mte" in vol.flags:
en = {k: True for k in vol.flags["mte"].split(",")}
self.entags[vol.realpath] = en
if "e2d" in vol.flags:
have_e2d = True
if "e2ds" in vol.flags:
r = self._build_file_index(vol, vols)
if not r:
needed_mutagen = True
# open the rest + do any e2ts(a)
needed_mutagen = False
for vol in vols:
r = self.register_vpath(vol.realpath, vol.flags)
if not r or "e2ts" not in vol.flags:
continue
cur, db_path, sz0 = r
n_add, n_rm, success = self._build_tags_index(vol.realpath)
if not success:
needed_mutagen = True
if n_add or n_rm:
self.vac(cur, db_path, n_add, n_rm, sz0)
self.pp.end = True
msg = "{} volumes in {:.2f} sec"
self.log(msg.format(len(vols), time.time() - t0))
if needed_mutagen:
msg = "could not read tags because no backends are available (mutagen or ffprobe)"
self.log(msg, c=1)
return have_e2d
def register_vpath(self, ptop, flags):
with self.mutex: with self.mutex:
if ptop in self.registry: if ptop in self.registry:
return None return None
_, flags = self._expr_idx_filter(flags)
reg = {} reg = {}
path = os.path.join(ptop, ".hist", "up2k.snap") path = os.path.join(ptop, ".hist", "up2k.snap")
if self.persist and os.path.exists(path): if "e2d" in flags and os.path.exists(path):
with gzip.GzipFile(path, "rb") as f: with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8") j = f.read().decode("utf-8")
@@ -139,8 +233,9 @@ class Up2k(object):
m = [m] + self._vis_reg_progress(reg) m = [m] + self._vis_reg_progress(reg)
self.log("\n".join(m)) self.log("\n".join(m))
self.flags[ptop] = flags
self.registry[ptop] = reg self.registry[ptop] = reg
if not self.persist or not HAVE_SQLITE3: if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
return None return None
try: try:
@@ -149,73 +244,66 @@ class Up2k(object):
pass pass
db_path = os.path.join(ptop, ".hist", "up2k.db") db_path = os.path.join(ptop, ".hist", "up2k.db")
if ptop in self.db: if ptop in self.cur:
# self.db[ptop].close()
return None return None
try: try:
db = self._open_db(db_path) sz0 = 0
self.db[ptop] = db if os.path.exists(db_path):
return db sz0 = os.path.getsize(db_path) // 1024
except Exception as ex:
self.log("cannot use database at [{}]: {}".format(ptop, repr(ex))) cur = self._open_db(db_path)
self.cur[ptop] = cur
return [cur, db_path, sz0]
except:
msg = "cannot use database at [{}]:\n{}"
self.log(msg.format(ptop, traceback.format_exc()))
return None return None
def build_indexes(self, writeables): def _build_file_index(self, vol, all_vols):
tops = [d.realpath for d in writeables] do_vac = False
self.pp = ProgressPrinter() top = vol.realpath
t0 = time.time() reg = self.register_vpath(top, vol.flags)
for top in tops: if not reg:
db = self.register_vpath(top) return
if not db:
continue
self.pp.n = next(db.execute("select count(w) from up"))[0] _, db_path, sz0 = reg
db_path = os.path.join(top, ".hist", "up2k.db") dbw = [reg[0], 0, time.time()]
sz0 = os.path.getsize(db_path) // 1024 self.pp.n = next(dbw[0].execute("select count(w) from up"))[0]
# can be symlink so don't `and d.startswith(top)`` # can be symlink so don't `and d.startswith(top)``
excl = set([d for d in tops if d != top]) excl = set([d.realpath for d in all_vols if d != vol])
dbw = [db, 0, time.time()] n_add = self._build_dir(dbw, top, excl, top)
n_rm = self._drop_lost(dbw[0], top)
if dbw[1]:
self.log("commit {} new files".format(dbw[1]))
dbw[0].connection.commit()
n_add = self._build_dir(dbw, top, excl, top) n_add, n_rm, success = self._build_tags_index(vol.realpath)
n_rm = self._drop_lost(db, top)
if dbw[1]:
self.log("commit {} new files".format(dbw[1]))
db.commit() dbw[0].connection.commit()
if n_add or n_rm: if n_add or n_rm or do_vac:
db_path = os.path.join(top, ".hist", "up2k.db") self.vac(dbw[0], db_path, n_add, n_rm, sz0)
sz1 = os.path.getsize(db_path) // 1024
db.execute("vacuum")
sz2 = os.path.getsize(db_path) // 1024
msg = "{} new, {} del, {} kB vacced, {} kB gain, {} kB now".format(
n_add, n_rm, sz1 - sz2, sz2 - sz0, sz2
)
self.log(msg)
self.pp.end = True return success
self.log("{} volumes in {:.2f} sec".format(len(tops), time.time() - t0))
def vac(self, cur, db_path, n_add, n_rm, sz0):
sz1 = os.path.getsize(db_path) // 1024
cur.execute("vacuum")
sz2 = os.path.getsize(db_path) // 1024
msg = "{} new, {} del, {} kB vacced, {} kB gain, {} kB now".format(
n_add, n_rm, sz1 - sz2, sz2 - sz0, sz2
)
self.log(msg)
def _build_dir(self, dbw, top, excl, cdir): def _build_dir(self, dbw, top, excl, cdir):
try:
inodes = [fsdec(x) for x in os.listdir(fsenc(cdir))]
except Exception as ex:
self.log("listdir: {} @ [{}]".format(repr(ex), cdir))
return 0
self.pp.msg = "a{} {}".format(self.pp.n, cdir) self.pp.msg = "a{} {}".format(self.pp.n, cdir)
histdir = os.path.join(top, ".hist") histdir = os.path.join(top, ".hist")
ret = 0 ret = 0
for inode in inodes: for iname, inf in statdir(self.log, not self.args.no_scandir, False, cdir):
abspath = os.path.join(cdir, inode) abspath = os.path.join(cdir, iname)
try: lmod = int(inf.st_mtime)
inf = os.stat(fsenc(abspath))
except Exception as ex:
self.log("stat: {} @ [{}]".format(repr(ex), abspath))
continue
if stat.S_ISDIR(inf.st_mode): if stat.S_ISDIR(inf.st_mode):
if abspath in excl or abspath == histdir: if abspath in excl or abspath == histdir:
continue continue
@@ -229,7 +317,7 @@ class Up2k(object):
try: try:
c = dbw[0].execute(sql, (rd, fn)) c = dbw[0].execute(sql, (rd, fn))
except: except:
c = dbw[0].execute(sql, self.w8enc(rd, fn)) c = dbw[0].execute(sql, s3enc(self.mem_cur, rd, fn))
in_db = list(c.fetchall()) in_db = list(c.fetchall())
if in_db: if in_db:
@@ -241,11 +329,11 @@ class Up2k(object):
self.log(m.format(top, rp, len(in_db), rep_db)) self.log(m.format(top, rp, len(in_db), rep_db))
dts = -1 dts = -1
if dts == inf.st_mtime and dsz == inf.st_size: if dts == lmod and dsz == inf.st_size:
continue continue
m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format( m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
top, rp, dts, inf.st_mtime, dsz, inf.st_size top, rp, dts, lmod, dsz, inf.st_size
) )
self.log(m) self.log(m)
self.db_rm(dbw[0], rd, fn) self.db_rm(dbw[0], rd, fn)
@@ -264,26 +352,26 @@ class Up2k(object):
continue continue
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes) wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
self.db_add(dbw[0], wark, rd, fn, inf.st_mtime, inf.st_size) self.db_add(dbw[0], wark, rd, fn, lmod, inf.st_size)
dbw[1] += 1 dbw[1] += 1
ret += 1 ret += 1
td = time.time() - dbw[2] td = time.time() - dbw[2]
if dbw[1] >= 4096 or td >= 60: if dbw[1] >= 4096 or td >= 60:
self.log("commit {} new files".format(dbw[1])) self.log("commit {} new files".format(dbw[1]))
dbw[0].commit() dbw[0].connection.commit()
dbw[1] = 0 dbw[1] = 0
dbw[2] = time.time() dbw[2] = time.time()
return ret return ret
def _drop_lost(self, db, top): def _drop_lost(self, cur, top):
rm = [] rm = []
nchecked = 0 nchecked = 0
nfiles = next(db.execute("select count(w) from up"))[0] nfiles = next(cur.execute("select count(w) from up"))[0]
c = db.execute("select * from up") c = cur.execute("select * from up")
for dwark, dts, dsz, drd, dfn in c: for dwark, dts, dsz, drd, dfn in c:
nchecked += 1 nchecked += 1
if drd.startswith("//") or dfn.startswith("//"): if drd.startswith("//") or dfn.startswith("//"):
drd, dfn = self.w8dec(drd, dfn) drd, dfn = s3dec(drd, dfn)
abspath = os.path.join(top, drd, dfn) abspath = os.path.join(top, drd, dfn)
# almost zero overhead dw # almost zero overhead dw
@@ -298,49 +386,214 @@ class Up2k(object):
self.log("forgetting {} deleted files".format(len(rm))) self.log("forgetting {} deleted files".format(len(rm)))
for rd, fn in rm: for rd, fn in rm:
# self.log("{} / {}".format(rd, fn)) # self.log("{} / {}".format(rd, fn))
self.db_rm(db, rd, fn) self.db_rm(cur, rd, fn)
return len(rm) return len(rm)
def _build_tags_index(self, ptop):
entags = self.entags[ptop]
flags = self.flags[ptop]
cur = self.cur[ptop]
n_add = 0
n_rm = 0
n_buf = 0
last_write = time.time()
if "e2tsr" in flags:
n_rm = cur.execute("select count(w) from mt").fetchone()[0]
if n_rm:
self.log("discarding {} media tags for a full rescan".format(n_rm))
cur.execute("delete from mt")
else:
self.log("volume has e2tsr but there are no media tags to discard")
# integrity: drop tags for tracks that were deleted
if "e2t" in flags:
drops = []
c2 = cur.connection.cursor()
up_q = "select w from up where substr(w,1,16) = ?"
for (w,) in cur.execute("select w from mt"):
if not c2.execute(up_q, (w,)).fetchone():
drops.append(w[:16])
c2.close()
if drops:
msg = "discarding media tags for {} deleted files"
self.log(msg.format(len(drops)))
n_rm += len(drops)
for w in drops:
cur.execute("delete from mt where w = ?", (w,))
# bail if a volume flag disables indexing
if "d2t" in flags or "d2d" in flags:
return n_add, n_rm, True
# add tags for new files
if "e2ts" in flags:
if not self.mtag:
return n_add, n_rm, False
mpool = False
if self.mtag.prefer_mt and not self.args.no_mtag_mt:
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
# both do crazy runahead so lets reinvent another wheel
nw = os.cpu_count() if hasattr(os, "cpu_count") else 4
if self.n_mtag_tags_added == -1:
self.log("using {}x {}".format(nw, self.mtag.backend))
self.n_mtag_tags_added = 0
mpool = Queue(nw)
for _ in range(nw):
thr = threading.Thread(target=self._tag_thr, args=(mpool,))
thr.daemon = True
thr.start()
c2 = cur.connection.cursor()
c3 = cur.connection.cursor()
n_left = cur.execute("select count(w) from up").fetchone()[0]
for w, rd, fn in cur.execute("select w, rd, fn from up"):
n_left -= 1
q = "select w from mt where w = ?"
if c2.execute(q, (w[:16],)).fetchone():
continue
if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn)
abspath = os.path.join(ptop, rd, fn)
self.pp.msg = "c{} {}".format(n_left, abspath)
args = c3, entags, w, abspath
if not mpool:
n_tags = self._tag_file(*args)
else:
mpool.put(args)
with self.mutex:
n_tags = self.n_mtag_tags_added
self.n_mtag_tags_added = 0
n_add += n_tags
n_buf += n_tags
td = time.time() - last_write
if n_buf >= 4096 or td >= 60:
self.log("commit {} new tags".format(n_buf))
cur.connection.commit()
last_write = time.time()
n_buf = 0
if mpool:
for _ in range(mpool.maxsize):
mpool.put(None)
mpool.join()
c3.close()
c2.close()
return n_add, n_rm, True
def _tag_thr(self, q):
while True:
task = q.get()
if not task:
q.task_done()
return
try:
write_cur, entags, wark, abspath = task
tags = self.mtag.get(abspath)
with self.mutex:
n = self._tag_file(write_cur, entags, wark, abspath, tags)
self.n_mtag_tags_added += n
except:
ex = traceback.format_exc()
msg = "{} failed to read tags from {}:\n{}"
self.log(msg.format(self.mtag.backend, abspath, ex), c=3)
q.task_done()
def _tag_file(self, write_cur, entags, wark, abspath, tags=None):
tags = tags or self.mtag.get(abspath)
tags = {k: v for k, v in tags.items() if k in entags}
if not tags:
# indicate scanned without tags
tags = {"x": 0}
ret = 0
for k, v in tags.items():
q = "insert into mt values (?,?,?)"
write_cur.execute(q, (wark[:16], k, v))
ret += 1
return ret
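
To make the write path above concrete, roughly what a single file turns into, assuming
entags = {"key": True, "artist": True} and a tag reader that also returns a bitrate
(all values made up):

    tags = {"artist": "ed", "key": "8A", "bitrate": 320}   # from self.mtag.get(abspath)
    # after filtering on entags, two rows land in mt, keyed on the 16-char wark prefix:
    #   (wark[:16], "artist", "ed")
    #   (wark[:16], "key",    "8A")
    # a file with none of the wanted tags gets the single marker row (wark[:16], "x", 0)
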
def _orz(self, db_path):
return sqlite3.connect(db_path, check_same_thread=False).cursor()
def _open_db(self, db_path): def _open_db(self, db_path):
existed = os.path.exists(db_path) existed = os.path.exists(db_path)
conn = sqlite3.connect(db_path, check_same_thread=False) cur = self._orz(db_path)
try: ver = self._read_ver(cur)
ver = self._read_ver(conn) if not existed and ver is None:
return self._create_db(db_path, cur)
if ver == 1: orig_ver = ver
conn = self._upgrade_v1(conn, db_path) if not ver or ver < 3:
ver = self._read_ver(conn) bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
db = cur.connection
cur.close()
db.close()
msg = "creating new DB (old is bad); backup: {}"
if ver:
msg = "creating backup before upgrade: {}"
if ver == 2: self.log(msg.format(bak))
try: shutil.copy2(db_path, bak)
nfiles = next(conn.execute("select count(w) from up"))[0] cur = self._orz(db_path)
self.log("found DB at {} |{}|".format(db_path, nfiles))
return conn
except Exception as ex:
self.log("WARN: could not list files, DB corrupt?\n " + repr(ex))
if ver is not None: if ver == 1:
self.log("REPLACING unsupported DB (v.{}) at {}".format(ver, db_path)) cur = self._upgrade_v1(cur, db_path)
elif not existed: if cur:
raise Exception("whatever") ver = 2
conn.close() if ver == 2:
os.unlink(db_path) cur = self._create_v3(cur)
conn = sqlite3.connect(db_path, check_same_thread=False) ver = self._read_ver(cur) if cur else None
except:
pass
# sqlite is variable-width only, no point in using char/nchar/varchar if ver == 3:
self._create_v2(conn) if orig_ver != ver:
conn.commit() cur.connection.commit()
cur.execute("vacuum")
cur.connection.commit()
try:
nfiles = next(cur.execute("select count(w) from up"))[0]
self.log("OK: {} |{}|".format(db_path, nfiles))
return cur
except Exception as ex:
self.log("WARN: could not list files, DB corrupt?\n " + repr(ex))
if cur:
db = cur.connection
cur.close()
db.close()
return self._create_db(db_path, None)
def _create_db(self, db_path, cur):
if not cur:
cur = self._orz(db_path)
self._create_v2(cur)
self._create_v3(cur)
cur.connection.commit()
self.log("created DB at {}".format(db_path)) self.log("created DB at {}".format(db_path))
return conn return cur
def _read_ver(self, conn): def _read_ver(self, cur):
for tab in ["ki", "kv"]: for tab in ["ki", "kv"]:
try: try:
c = conn.execute(r"select v from {} where k = 'sver'".format(tab)) c = cur.execute(r"select v from {} where k = 'sver'".format(tab))
except: except:
continue continue
@@ -348,26 +601,51 @@ class Up2k(object):
if rows: if rows:
return int(rows[0][0]) return int(rows[0][0])
def _create_v2(self, conn): def _create_v2(self, cur):
for cmd in [ for cmd in [
r"create table ks (k text, v text)",
r"create table ki (k text, v int)",
r"create table up (w text, mt int, sz int, rd text, fn text)", r"create table up (w text, mt int, sz int, rd text, fn text)",
r"insert into ki values ('sver', 2)",
r"create index up_w on up(w)",
r"create index up_rd on up(rd)", r"create index up_rd on up(rd)",
r"create index up_fn on up(fn)", r"create index up_fn on up(fn)",
]: ]:
conn.execute(cmd) cur.execute(cmd)
return cur
def _create_v3(self, cur):
"""
collision in 2^(n/2) files where n = bits (6 bits/ch)
10*6/2 = 2^30 = 1'073'741'824, 24.1mb idx
12*6/2 = 2^36 = 68'719'476'736, 24.8mb idx
16*6/2 = 2^48 = 281'474'976'710'656, 26.1mb idx
"""
for c, ks in [["drop table k", "isv"], ["drop index up_", "w"]]:
for k in ks:
try:
cur.execute(c + k)
except:
pass
idx = r"create index up_w on up(substr(w,1,16))"
if self.no_expr_idx:
idx = r"create index up_w on up(w)"
for cmd in [
idx,
r"create table mt (w text, k text, v int)",
r"create index mt_w on mt(w)",
r"create index mt_k on mt(k)",
r"create index mt_v on mt(v)",
r"create table kv (k text, v int)",
r"insert into kv values ('sver', 3)",
]:
cur.execute(cmd)
return cur
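
Since mt.w stores only the 16-character wark prefix while up.w keeps the full hash,
lookups join the two tables through substr(), which is what the expression index above
exists for. A minimal query sketch against this schema (the db path and tag name are
placeholders):

    import sqlite3

    cur = sqlite3.connect(".hist/up2k.db").cursor()
    q = """
        select up.rd, up.fn, mt.v
          from mt join up on substr(up.w,1,16) = mt.w
         where mt.k = ?
    """
    for rd, fn, v in cur.execute(q, ("key",)):
        print(rd, fn, v)
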
def _upgrade_v1(self, odb, db_path): def _upgrade_v1(self, odb, db_path):
self.log("\033[33mupgrading v1 to v2:\033[0m {}".format(db_path))
npath = db_path + ".next" npath = db_path + ".next"
if os.path.exists(npath): if os.path.exists(npath):
os.unlink(npath) os.unlink(npath)
ndb = sqlite3.connect(npath, check_same_thread=False) ndb = self._orz(npath)
self._create_v2(ndb) self._create_v2(ndb)
c = odb.execute("select * from up") c = odb.execute("select * from up")
@@ -376,30 +654,37 @@ class Up2k(object):
v = (wark, ts, sz, rd, fn) v = (wark, ts, sz, rd, fn)
ndb.execute("insert into up values (?,?,?,?,?)", v) ndb.execute("insert into up values (?,?,?,?,?)", v)
ndb.commit() ndb.connection.commit()
ndb.close() ndb.connection.close()
odb.close() odb.connection.close()
bpath = db_path + ".bak.v1"
self.log("success; backup at: " + bpath)
atomic_move(db_path, bpath)
atomic_move(npath, db_path) atomic_move(npath, db_path)
return sqlite3.connect(db_path, check_same_thread=False) return self._orz(db_path)
def handle_json(self, cj): def handle_json(self, cj):
self.register_vpath(cj["ptop"]) if not self.register_vpath(cj["ptop"], cj["vcfg"]):
if cj["ptop"] not in self.registry:
raise Pebkac(410, "location unavailable")
cj["name"] = sanitize_fn(cj["name"]) cj["name"] = sanitize_fn(cj["name"])
cj["poke"] = time.time() cj["poke"] = time.time()
wark = self._get_wark(cj) wark = self._get_wark(cj)
now = time.time() now = time.time()
job = None job = None
with self.mutex: with self.mutex:
db = self.db.get(cj["ptop"], None) cur = self.cur.get(cj["ptop"], None)
reg = self.registry[cj["ptop"]] reg = self.registry[cj["ptop"]]
if db: if cur:
cur = db.execute(r"select * from up where w = ?", (wark,)) if self.no_expr_idx:
q = r"select * from up where w = ?"
argv = (wark,)
else:
q = r"select * from up where substr(w,1,16) = ? and w = ?"
argv = (wark[:16], wark)
cur = cur.execute(q, argv)
for _, dtime, dsize, dp_dir, dp_fn in cur: for _, dtime, dsize, dp_dir, dp_fn in cur:
if dp_dir.startswith("//") or dp_fn.startswith("//"): if dp_dir.startswith("//") or dp_fn.startswith("//"):
dp_dir, dp_fn = self.w8dec(dp_dir, dp_fn) dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
dp_abs = os.path.join(cj["ptop"], dp_dir, dp_fn).replace("\\", "/") dp_abs = os.path.join(cj["ptop"], dp_dir, dp_fn).replace("\\", "/")
# relying on path.exists to return false on broken symlinks # relying on path.exists to return false on broken symlinks
@@ -409,7 +694,6 @@ class Up2k(object):
"prel": dp_dir, "prel": dp_dir,
"vtop": cj["vtop"], "vtop": cj["vtop"],
"ptop": cj["ptop"], "ptop": cj["ptop"],
"flag": cj["flag"],
"size": dsize, "size": dsize,
"lmod": dtime, "lmod": dtime,
"hash": [], "hash": [],
@@ -446,7 +730,7 @@ class Up2k(object):
err = "partial upload exists at a different location; please resume uploading here instead:\n" err = "partial upload exists at a different location; please resume uploading here instead:\n"
err += "/" + vsrc + " " err += "/" + vsrc + " "
raise Pebkac(400, err) raise Pebkac(400, err)
elif "nodupe" in job["flag"]: elif "nodupe" in self.flags[job["ptop"]]:
self.log("dupe-reject:\n {0}\n {1}".format(src, dst)) self.log("dupe-reject:\n {0}\n {1}".format(src, dst))
err = "upload rejected, file already exists:\n/" + vsrc + " " err = "upload rejected, file already exists:\n/" + vsrc + " "
raise Pebkac(400, err) raise Pebkac(400, err)
@@ -476,7 +760,6 @@ class Up2k(object):
"vtop", "vtop",
"ptop", "ptop",
"prel", "prel",
"flag",
"name", "name",
"size", "size",
"lmod", "lmod",
@@ -523,8 +806,13 @@ class Up2k(object):
raise OSError() raise OSError()
elif fs1 == fs2: elif fs1 == fs2:
# same fs; make symlink as relative as possible # same fs; make symlink as relative as possible
nsrc = src.replace("\\", "/").split("/") v = []
ndst = dst.replace("\\", "/").split("/") for p in [src, dst]:
if WINDOWS:
p = p.replace("\\", "/")
v.append(p.split("/"))
nsrc, ndst = v
nc = 0 nc = 0
for a, b in zip(nsrc, ndst): for a, b in zip(nsrc, ndst):
if a != b: if a != b:
@@ -532,7 +820,8 @@ class Up2k(object):
nc += 1 nc += 1
if nc > 1: if nc > 1:
lsrc = nsrc[nc:] lsrc = nsrc[nc:]
lsrc = "../" * (len(lsrc) - 1) + "/".join(lsrc) hops = len(ndst[nc:]) - 1
lsrc = "../" * hops + "/".join(lsrc)
os.symlink(fsenc(lsrc), fsenc(ldst)) os.symlink(fsenc(lsrc), fsenc(ldst))
except (AttributeError, OSError) as ex: except (AttributeError, OSError) as ex:
self.log("cannot symlink; creating copy: " + repr(ex)) self.log("cannot symlink; creating copy: " + repr(ex))
@@ -584,31 +873,48 @@ class Up2k(object):
if WINDOWS: if WINDOWS:
self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))]) self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))])
db = self.db.get(job["ptop"], None) # legit api sware 2 me mum
if db: if self.idx_wark(
j = job job["ptop"],
self.db_rm(db, j["prel"], j["name"]) job["wark"],
self.db_add(db, j["wark"], j["prel"], j["name"], j["lmod"], j["size"]) job["prel"],
db.commit() job["name"],
job["lmod"],
job["size"],
):
del self.registry[ptop][wark] del self.registry[ptop][wark]
# in-memory registry is reserved for unfinished uploads # in-memory registry is reserved for unfinished uploads
return ret, dst return ret, dst
def idx_wark(self, ptop, wark, rd, fn, lmod, sz):
cur = self.cur.get(ptop, None)
if not cur:
return False
self.db_rm(cur, rd, fn)
self.db_add(cur, wark, rd, fn, int(lmod), sz)
cur.connection.commit()
if "e2t" in self.flags[ptop]:
self.tagq.put([ptop, wark, rd, fn])
return True
def db_rm(self, db, rd, fn): def db_rm(self, db, rd, fn):
sql = "delete from up where rd = ? and fn = ?" sql = "delete from up where rd = ? and fn = ?"
try: try:
db.execute(sql, (rd, fn)) db.execute(sql, (rd, fn))
except: except:
db.execute(sql, self.w8enc(rd, fn)) db.execute(sql, s3enc(self.mem_cur, rd, fn))
def db_add(self, db, wark, rd, fn, ts, sz): def db_add(self, db, wark, rd, fn, ts, sz):
sql = "insert into up values (?,?,?,?,?)" sql = "insert into up values (?,?,?,?,?)"
v = (wark, ts, sz, rd, fn) v = (wark, int(ts), sz, rd, fn)
try: try:
db.execute(sql, v) db.execute(sql, v)
except: except:
rd, fn = self.w8enc(rd, fn) rd, fn = s3enc(self.mem_cur, rd, fn)
v = (wark, ts, sz, rd, fn) v = (wark, ts, sz, rd, fn)
db.execute(sql, v) db.execute(sql, v)
@@ -635,10 +941,9 @@ class Up2k(object):
fsz = os.path.getsize(path) fsz = os.path.getsize(path)
csz = up2k_chunksize(fsz) csz = up2k_chunksize(fsz)
ret = [] ret = []
last_print = time.time()
with open(path, "rb", 512 * 1024) as f: with open(path, "rb", 512 * 1024) as f:
while fsz > 0: while fsz > 0:
self.pp.msg = msg = "{} MB".format(int(fsz / 1024 / 1024)) self.pp.msg = "{} MB, {}".format(int(fsz / 1024 / 1024), path)
hashobj = hashlib.sha512() hashobj = hashlib.sha512()
rem = min(csz, fsz) rem = min(csz, fsz)
fsz -= rem fsz -= rem
@@ -745,6 +1050,45 @@ class Up2k(object):
self.log("snap: {} |{}|".format(path, len(reg.keys()))) self.log("snap: {} |{}|".format(path, len(reg.keys())))
prev[k] = etag prev[k] = etag
def _tagger(self):
while True:
ptop, wark, rd, fn = self.tagq.get()
abspath = os.path.join(ptop, rd, fn)
self.log("tagging " + abspath)
with self.mutex:
cur = self.cur[ptop]
if not cur:
self.log("no cursor to write tags with??", c=1)
continue
entags = self.entags[ptop]
if not entags:
self.log("no entags okay.jpg", c=3)
continue
if "e2t" in self.flags[ptop]:
self._tag_file(cur, entags, wark, abspath)
cur.connection.commit()
def _hasher(self):
while True:
ptop, rd, fn = self.hashq.get()
if "e2d" not in self.flags[ptop]:
continue
abspath = os.path.join(ptop, rd, fn)
self.log("hashing " + abspath)
inf = os.stat(fsenc(abspath))
hashes = self._hashlist_from_file(abspath)
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
with self.mutex:
self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size)
def hash_file(self, ptop, flags, rd, fn):
self.register_vpath(ptop, flags)
self.hashq.put([ptop, rd, fn])
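
The three methods above are the hand-off for files that arrive outside the regular
up2k upload flow: hash_file() registers the volume and queues the path, _hasher()
stats and hashes it and calls idx_wark(), which in turn queues it on tagq for
_tagger() when the volume has "e2t". A minimal sketch of the producer side, assuming
an Up2k instance u whose worker threads are already running (path and flags made up):

    u.hash_file("/srv/music", {"e2d": True, "e2t": True, "mte": "key,artist"}, "incoming", "track.opus")
    # returns right away; hashing, the db insert and tagging all happen on the queue workers
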
def up2k_chunksize(filesize): def up2k_chunksize(filesize):
chunksize = 1024 * 1024 chunksize = 1024 * 1024

View File

@@ -119,19 +119,26 @@ class ProgressPrinter(threading.Thread):
continue continue
msg = self.msg msg = self.msg
m = " {}\033[K\r".format(msg) uprint(" {}\033[K\r".format(msg))
try:
print(m, end="")
except UnicodeEncodeError:
try:
print(m.encode("utf-8", "replace").decode(), end="")
except:
print(m.encode("ascii", "replace").decode(), end="")
print("\033[K", end="") print("\033[K", end="")
sys.stdout.flush() # necessary on win10 even w/ stderr btw sys.stdout.flush() # necessary on win10 even w/ stderr btw
def uprint(msg):
try:
print(msg, end="")
except UnicodeEncodeError:
try:
print(msg.encode("utf-8", "replace").decode(), end="")
except:
print(msg.encode("ascii", "replace").decode(), end="")
def nuprint(msg):
uprint("{}\n".format(msg))
@contextlib.contextmanager @contextlib.contextmanager
def ren_open(fname, *args, **kwargs): def ren_open(fname, *args, **kwargs):
fdir = kwargs.pop("fdir", None) fdir = kwargs.pop("fdir", None)
@@ -521,9 +528,7 @@ def u8safe(txt):
def exclude_dotfiles(filepaths): def exclude_dotfiles(filepaths):
for fpath in filepaths: return [x for x in filepaths if not x.split("/")[-1].startswith(".")]
if not fpath.split("/")[-1].startswith("."):
yield fpath
def html_escape(s, quote=False): def html_escape(s, quote=False):
@@ -599,6 +604,31 @@ else:
fsdec = w8dec fsdec = w8dec
def s3enc(mem_cur, rd, fn):
ret = []
for v in [rd, fn]:
try:
mem_cur.execute("select * from a where b = ?", (v,))
ret.append(v)
except:
ret.append("//" + w8b64enc(v))
# self.log("mojien/{} [{}] {}".format(k, v, ret[-1][2:]))
return tuple(ret)
def s3dec(rd, fn):
ret = []
for k, v in [["d", rd], ["f", fn]]:
if v.startswith("//"):
ret.append(w8b64dec(v[2:]))
# self.log("mojide/{} [{}] {}".format(k, ret[-1], v[2:]))
else:
ret.append(v)
return tuple(ret)
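
s3enc() probes sqlite with a throwaway in-memory cursor and only falls back to the
"//" + base64 form for names the codec rejects, so clean utf-8 paths are stored as-is.
A minimal round-trip sketch (the scratch-table layout for mem_cur is an assumption;
the real one is created elsewhere):

    import sqlite3

    mem_cur = sqlite3.connect(":memory:").cursor()
    mem_cur.execute("create table a (b text)")

    rd, fn = s3enc(mem_cur, "music/2021", "ok.flac")
    assert (rd, fn) == ("music/2021", "ok.flac")        # clean names pass through untouched
    assert s3dec(rd, fn) == ("music/2021", "ok.flac")   # and decode back to the same pair
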
def atomic_move(src, dst): def atomic_move(src, dst):
if not PY2: if not PY2:
os.replace(src, dst) os.replace(src, dst)
@@ -633,6 +663,40 @@ def read_socket_unbounded(sr):
yield buf yield buf
def read_socket_chunked(sr, log=None):
err = "expected chunk length, got [{}] |{}| instead"
while True:
buf = b""
while b"\r" not in buf:
rbuf = sr.recv(2)
if not rbuf or len(buf) > 16:
err = err.format(buf.decode("utf-8", "replace"), len(buf))
raise Pebkac(400, err)
buf += rbuf
if not buf.endswith(b"\n"):
sr.recv(1)
try:
chunklen = int(buf.rstrip(b"\r\n"), 16)
except:
err = err.format(buf.decode("utf-8", "replace"), len(buf))
raise Pebkac(400, err)
if chunklen == 0:
sr.recv(2) # \r\n after final chunk
return
if log:
log("receiving {} byte chunk".format(chunklen))
for chunk in read_socket(sr, chunklen):
yield chunk
sr.recv(2) # \r\n after each chunk too
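
read_socket_chunked() is the receiving half of HTTP/1.1 chunked transfer encoding:
each chunk is a hex length, CRLF, the payload, CRLF, and a zero-length chunk ends the
body. As a data-only illustration of what it expects on the wire:

    # a chunked PUT body carrying "hello world" in two pieces (chunk sizes are hex)
    body = b"5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n"
    # the generator above would yield b"hello" and b" world", then return after the 0-chunk
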
def hashcopy(actor, fin, fout): def hashcopy(actor, fin, fout):
u32_lim = int((2 ** 31) * 0.9) u32_lim = int((2 ** 31) * 0.9)
hashobj = hashlib.sha512() hashobj = hashlib.sha512()
@@ -692,6 +756,33 @@ def sendfile_kern(lower, upper, f, s):
return 0 return 0
def statdir(logger, scandir, lstat, top):
try:
btop = fsenc(top)
if scandir and hasattr(os, "scandir"):
src = "scandir"
with os.scandir(btop) as dh:
for fh in dh:
try:
yield [fsdec(fh.name), fh.stat(follow_symlinks=not lstat)]
except Exception as ex:
msg = "scan-stat: \033[36m{} @ {}"
logger(msg.format(repr(ex), fsdec(fh.path)))
else:
src = "listdir"
fun = os.lstat if lstat else os.stat
for name in os.listdir(btop):
abspath = os.path.join(btop, name)
try:
yield [fsdec(name), fun(abspath)]
except Exception as ex:
msg = "list-stat: \033[36m{} @ {}"
logger(msg.format(repr(ex), fsdec(abspath)))
except Exception as ex:
logger("{}: \033[31m{} @ {}".format(src, repr(ex), top))
def unescape_cookie(orig): def unescape_cookie(orig):
# mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn # mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn
ret = "" ret = ""

View File

@@ -42,12 +42,8 @@ body {
#path #entree { #path #entree {
margin-left: -.7em; margin-left: -.7em;
} }
#treetab {
display: none;
}
#files { #files {
border-collapse: collapse; border-spacing: 0;
margin-top: 2em;
z-index: 1; z-index: 1;
position: relative; position: relative;
} }
@@ -55,11 +51,10 @@ body {
display: block; display: block;
padding: .3em 0; padding: .3em 0;
} }
#files[ts] tbody div a { #files tbody div a {
color: #f5a; color: #f5a;
} }
a, a, #files tbody div a:last-child {
#files[ts] tbody div a:last-child {
color: #fc5; color: #fc5;
padding: .2em; padding: .2em;
text-decoration: none; text-decoration: none;
@@ -67,16 +62,18 @@ a,
#files a:hover { #files a:hover {
color: #fff; color: #fff;
background: #161616; background: #161616;
text-decoration: underline;
} }
#files thead a { #files thead a {
color: #999; color: #999;
font-weight: normal; font-weight: normal;
} }
#files tr:hover { #files tr+tr:hover {
background: #1c1c1c; background: #1c1c1c;
} }
#files thead th { #files thead th {
padding: .5em 1.3em .3em 1.3em; padding: .5em 1.3em .3em 1.3em;
cursor: pointer;
} }
#files thead th:last-child { #files thead th:last-child {
background: #444; background: #444;
@@ -94,6 +91,16 @@ a,
margin: 0; margin: 0;
padding: 0 .5em; padding: 0 .5em;
} }
#files td {
border-bottom: 1px solid #111;
}
#files td+td+td {
max-width: 30em;
overflow: hidden;
}
#files tr+tr td {
border-top: 1px solid #383838;
}
#files tbody td:nth-child(3) { #files tbody td:nth-child(3) {
font-family: monospace; font-family: monospace;
font-size: 1.3em; font-size: 1.3em;
@@ -112,6 +119,9 @@ a,
padding-bottom: 1.3em; padding-bottom: 1.3em;
border-bottom: .5em solid #444; border-bottom: .5em solid #444;
} }
#files tbody tr td:last-child {
white-space: nowrap;
}
#files thead th[style] { #files thead th[style] {
width: auto !important; width: auto !important;
} }
@@ -143,6 +153,15 @@ a,
.logue { .logue {
padding: .2em 1.5em; padding: .2em 1.5em;
} }
.logue:empty {
display: none;
}
#pro.logue {
margin-bottom: .8em;
}
#epi.logue {
margin: .8em 0;
}
#srv_info { #srv_info {
opacity: .5; opacity: .5;
font-size: .8em; font-size: .8em;
@@ -160,7 +179,8 @@ a,
margin: -.2em; margin: -.2em;
} }
#files tbody a.play.act { #files tbody a.play.act {
color: #af0; color: #840;
text-shadow: 0 0 .3em #b80;
} }
#blocked { #blocked {
position: fixed; position: fixed;
@@ -291,6 +311,20 @@ a,
width: calc(100% - 10.5em); width: calc(100% - 10.5em);
background: rgba(0,0,0,0.2); background: rgba(0,0,0,0.2);
} }
@media (min-width: 90em) {
#barpos,
#barbuf {
width: calc(100% - 24em);
left: 9.8em;
top: .7em;
height: 1.6em;
bottom: auto;
}
#widget {
bottom: -3.2em;
height: 3.2em;
}
}
@@ -400,39 +434,75 @@ input[type="checkbox"]:checked+label {
color: #fff; color: #fff;
} }
#files td div a { #files td div a {
display: table-cell; display: inline-block;
white-space: nowrap; white-space: nowrap;
} }
#files td div a:last-child { #files td div a:last-child {
width: 100%; width: 100%;
} }
#files td div { #files td div {
display: table;
border-collapse: collapse; border-collapse: collapse;
width: 100%; width: 100%;
} }
#files td div a:last-child { #files td div a:last-child {
width: 100%; width: 100%;
} }
#tree, #wrap {
#treefiles { margin-top: 2em;
vertical-align: top;
} }
#tree { #tree {
padding-top: 2em; display: none;
position: fixed;
left: 0;
bottom: 0;
top: 7em;
padding-top: .2em;
overflow-y: auto;
-ms-scroll-chaining: none;
overscroll-behavior-y: none;
scrollbar-color: #eb0 #333;
}
#thx_ff {
padding: 5em 0;
}
#tree::-webkit-scrollbar-track {
background: #333;
}
#tree::-webkit-scrollbar {
background: #333;
}
#tree::-webkit-scrollbar-thumb {
background: #eb0;
}
#tree:hover {
z-index: 2;
}
#treeul {
position: relative;
left: -1.7em;
width: calc(100% + 1.3em);
}
#tree>a+a {
padding: .2em .4em;
font-size: 1.2em;
background: #2a2a2a;
box-shadow: 0 .1em .2em #222 inset;
border-radius: .3em;
margin: .2em;
position: relative;
top: -.2em;
}
#tree>a+a:hover {
background: #805;
}
#tree>a+a.on {
background: #fc4;
color: #400;
text-shadow: none;
} }
#detree { #detree {
padding: .3em .5em; padding: .3em .5em;
font-size: 1.5em; font-size: 1.5em;
display: inline-block;
min-width: 12em;
width: 100%;
}
#treefiles #files tbody {
border-radius: 0 .7em 0 .7em;
}
#treefiles #files thead th:nth-child(1) {
border-radius: .7em 0 0 0;
} }
#tree ul, #tree ul,
#tree li { #tree li {
@@ -440,43 +510,35 @@ input[type="checkbox"]:checked+label {
margin: 0; margin: 0;
} }
#tree ul { #tree ul {
border-left: .2em solid #444; border-left: .2em solid #555;
} }
#tree li { #tree li {
margin-left: 1em; margin-left: 1em;
list-style: none; list-style: none;
white-space: nowrap; border-top: 1px solid #4c4c4c;
border-bottom: 1px solid #222;
} }
#tree a.hl { #tree li:last-child {
border-bottom: none;
}
#treeul a.hl {
color: #400; color: #400;
background: #fc4; background: #fc4;
border-radius: .3em; border-radius: .3em;
text-shadow: none; text-shadow: none;
} }
#tree a { #treeul a {
display: inline-block; display: inline-block;
} }
#tree a+a { #treeul a+a {
width: calc(100% - 2em); width: calc(100% - 2em);
background: #333; background: #333;
line-height: 1em;
} }
#tree a+a:hover { #treeul a+a:hover {
background: #222; background: #222;
color: #fff; color: #fff;
} }
#treeul {
position: relative;
overflow: hidden;
left: -1.7em;
}
#treeul:hover {
z-index: 2;
overflow: visible;
}
#treeul:hover a+a {
width: auto;
min-width: calc(100% - 2em);
}
#treeul a:first-child { #treeul a:first-child {
font-family: monospace, monospace; font-family: monospace, monospace;
} }
@@ -489,3 +551,84 @@ input[type="checkbox"]:checked+label {
position: absolute; position: absolute;
z-index: 9; z-index: 9;
} }
#files .cfg {
display: none;
font-size: 2em;
white-space: nowrap;
}
#files th:hover .cfg,
#files th.min .cfg {
display: block;
width: 1em;
border-radius: .2em;
margin: -1.3em auto 0 auto;
background: #444;
}
#files th.min .cfg {
margin: -.6em;
}
#files>thead>tr>th.min span {
position: absolute;
transform: rotate(270deg);
background: linear-gradient(90deg, rgba(68,68,68,0), rgba(68,68,68,0.5) 70%, #444);
margin-left: -4.6em;
padding: .4em;
top: 5.4em;
width: 8em;
text-align: right;
letter-spacing: .04em;
}
#files td:nth-child(2n) {
color: #f5a;
}
#files td.min a {
display: none;
}
#files tr.play td {
background: #fc4;
border-color: transparent;
color: #400;
text-shadow: none;
}
#files tr.play a {
color: inherit;
}
#files tr.play a:hover {
color: #300;
background: #fea;
}
#op_cfg {
max-width: none;
margin-right: 1.5em;
}
#key_notation>span {
display: inline-block;
padding: .2em .4em;
}
#op_cfg h3 {
margin: .8em 0 0 .6em;
padding: 0;
border-bottom: 1px solid #555;
}
#opdesc {
display: none;
}
#ops:hover #opdesc {
display: block;
background: linear-gradient(0deg,#555, #4c4c4c 80%, #444);
box-shadow: 0 .3em 1em #222;
padding: 1em;
border-radius: .3em;
position: absolute;
z-index: 3;
top: 6em;
right: 1.5em;
}
#opdesc code {
background: #3c3c3c;
padding: .2em .3em;
border-top: 1px solid #777;
border-radius: .3em;
font-family: monospace, monospace;
line-height: 2em;
}

View File

@@ -12,25 +12,37 @@
<body> <body>
<div id="ops"> <div id="ops">
<a href="#" data-dest="">---</a> <a href="#" data-dest="" data-desc="close submenu">---</a>
<a href="#" data-perm="read" data-dest="search">🔎</a> <a href="#" data-perm="read" data-dest="search" data-desc="search for files by attributes, path/name, music tags, or any combination of those.&lt;br /&gt;&lt;br /&gt;&lt;code&gt;foo bar&lt;/code&gt; = must contain both foo and bar,&lt;br /&gt;&lt;code&gt;foo -bar&lt;/code&gt; = must contain foo but not bar,&lt;br /&gt;&lt;code&gt;^yana .opus$&lt;/code&gt; = must start with yana and have the opus extension">🔎</a>
{%- if have_up2k_idx %} {%- if have_up2k_idx %}
<a href="#" data-dest="up2k">🚀</a> <a href="#" data-dest="up2k" data-desc="up2k: upload files (if you have write-access) or toggle into the search-mode and drag files onto the search button to see if they exist somewhere on the server">🚀</a>
{%- else %} {%- else %}
<a href="#" data-perm="write" data-dest="up2k">🚀</a> <a href="#" data-perm="write" data-dest="up2k" data-desc="up2k: upload files with resume support (close your browser and drop the same files in later)">🚀</a>
{%- endif %} {%- endif %}
<a href="#" data-perm="write" data-dest="bup">🎈</a> <a href="#" data-perm="write" data-dest="bup" data-desc="bup: basic uploader, even supports netscape 4.0">🎈</a>
<a href="#" data-perm="write" data-dest="mkdir">📂</a> <a href="#" data-perm="write" data-dest="mkdir" data-desc="mkdir: create a new directory">📂</a>
<a href="#" data-perm="write" data-dest="new_md">📝</a> <a href="#" data-perm="write" data-dest="new_md" data-desc="new-md: create a new markdown document">📝</a>
<a href="#" data-perm="write" data-dest="msg">📟</a> <a href="#" data-perm="write" data-dest="msg" data-desc="msg: send a message to the server log">📟</a>
<a href="#" data-dest="cfg" data-desc="configuration options">⚙️</a>
<div id="opdesc"></div>
</div> </div>
<div id="op_search" class="opview"> <div id="op_search" class="opview">
{%- if have_tags_idx %}
<table id="srch_form" class="tags"></table>
{%- else %}
<table id="srch_form"></table> <table id="srch_form"></table>
{%- endif %}
<div id="srch_q"></div> <div id="srch_q"></div>
</div> </div>
{%- include 'upload.html' %} {%- include 'upload.html' %}
<div id="op_cfg" class="opview opbox">
<h3>key notation</h3>
<div id="key_notation"></div>
</div>
<h1 id="path"> <h1 id="path">
<a href="#" id="entree">🌲</a> <a href="#" id="entree">🌲</a>
{%- for n in vpnodes %} {%- for n in vpnodes %}
@@ -38,32 +50,46 @@
{%- endfor %} {%- endfor %}
</h1> </h1>
<div id="pro" class="logue">{{ logues[0] }}</div> <div id="tree">
<a href="#" id="detree">🍞...</a>
<a href="#" step="2" id="twobytwo">+</a>
<a href="#" step="-2" id="twig">&ndash;</a>
<a href="#" id="dyntree">a</a>
<ul id="treeul"></ul>
<div id="thx_ff">&nbsp;</div>
</div>
<table id="treetab"> <div id="wrap">
<tr>
<td id="tree"> <div id="pro" class="logue">{{ logues[0] }}</div>
<a href="#" id="detree">🍞...</a>
<ul id="treeul"></ul>
</td>
<td id="treefiles"></td>
</tr>
</table>
<table id="files"> <table id="files">
<thead> <thead>
<tr> <tr>
<th></th> <th></th>
<th>File Name</th> <th><span>File Name</span></th>
<th sort="int">File Size</th> <th sort="int"><span>Size</span></th>
<th>T</th> {%- for k in taglist %}
<th>Date</th> {%- if k.startswith('.') %}
<th sort="int"><span>{{ k[1:] }}</span></th>
{%- else %}
<th><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
{%- endif %}
{%- endfor %}
<th><span>T</span></th>
<th><span>Date</span></th>
</tr> </tr>
</thead> </thead>
<tbody> <tbody>
{%- for f in files %} {%- for f in files %}
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr> <tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
{%- if f.tags is defined %}
{%- for k in taglist %}
<td>{{ f.tags[k] }}</td>
{%- endfor %}
{%- endif %}
<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
{%- endfor %} {%- endfor %}
</tbody> </tbody>
@@ -73,6 +99,8 @@
<h2><a href="?h">control-panel</a></h2> <h2><a href="?h">control-panel</a></h2>
</div>
{%- if srv_info %} {%- if srv_info %}
<div id="srv_info"><span>{{ srv_info }}</span></div> <div id="srv_info"><span>{{ srv_info }}</span></div>
{%- endif %} {%- endif %}
@@ -87,6 +115,9 @@
</div> </div>
</div> </div>
<script>
var tag_order_cfg = {{ tag_order }};
</script>
<script src="/.cpr/util.js{{ ts }}"></script> <script src="/.cpr/util.js{{ ts }}"></script>
<script src="/.cpr/browser.js{{ ts }}"></script> <script src="/.cpr/browser.js{{ ts }}"></script>
<script src="/.cpr/up2k.js{{ ts }}"></script> <script src="/.cpr/up2k.js{{ ts }}"></script>

View File

@@ -6,21 +6,6 @@ function dbg(msg) {
ebi('path').innerHTML = msg; ebi('path').innerHTML = msg;
} }
function ev(e) {
e = e || window.event;
if (!e)
return;
if (e.preventDefault)
e.preventDefault()
if (e.stopPropagation)
e.stopPropagation();
e.returnValue = false;
return e;
}
makeSortable(ebi('files')); makeSortable(ebi('files'));
@@ -55,7 +40,7 @@ function init_mp() {
for (var a = 0, aa = tracks.length; a < aa; a++) for (var a = 0, aa = tracks.length; a < aa; a++)
ebi('trk' + a).onclick = ev_play; ebi('trk' + a).onclick = ev_play;
ret.vol = localStorage.getItem('vol'); ret.vol = sread('vol');
if (ret.vol !== null) if (ret.vol !== null)
ret.vol = parseFloat(ret.vol); ret.vol = parseFloat(ret.vol);
else else
@@ -67,7 +52,7 @@ function init_mp() {
ret.setvol = function (vol) { ret.setvol = function (vol) {
ret.vol = Math.max(Math.min(vol, 1), 0); ret.vol = Math.max(Math.min(vol, 1), 0);
localStorage.setItem('vol', vol); swrite('vol', vol);
if (ret.au) if (ret.au)
ret.au.volume = ret.expvol(); ret.au.volume = ret.expvol();
@@ -153,6 +138,9 @@ var pbar = (function () {
var grad = null; var grad = null;
r.drawbuf = function () { r.drawbuf = function () {
if (!mp.au)
return;
var cs = getComputedStyle(r.bcan); var cs = getComputedStyle(r.bcan);
var sw = parseInt(cs['width']); var sw = parseInt(cs['width']);
var sh = parseInt(cs['height']); var sh = parseInt(cs['height']);
@@ -179,6 +167,9 @@ var pbar = (function () {
} }
}; };
r.drawpos = function () { r.drawpos = function () {
if (!mp.au)
return;
var cs = getComputedStyle(r.bcan); var cs = getComputedStyle(r.bcan);
var sw = parseInt(cs['width']); var sw = parseInt(cs['width']);
var sh = parseInt(cs['height']); var sh = parseInt(cs['height']);
@@ -460,6 +451,11 @@ function play(tid, call_depth) {
mp.au.volume = mp.expvol(); mp.au.volume = mp.expvol();
var oid = 'trk' + tid; var oid = 'trk' + tid;
setclass(oid, 'play act'); setclass(oid, 'play act');
var trs = ebi('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
for (var a = 0, aa = trs.length; a < aa; a++) {
trs[a].className = trs[a].className.replace(/ *play */, "");
}
ebi(oid).parentElement.parentElement.className += ' play';
try { try {
if (hack_attempt_play) if (hack_attempt_play)
@@ -471,8 +467,7 @@ function play(tid, call_depth) {
var o = ebi(oid); var o = ebi(oid);
o.setAttribute('id', 'thx_js'); o.setAttribute('id', 'thx_js');
if (window.history && history.replaceState) { if (window.history && history.replaceState) {
var nurl = (document.location + '').split('#')[0] + '#' + oid; hist_replace(document.location.pathname + '#' + oid);
history.replaceState(ebi('files').tBodies[0].innerHTML, nurl, nurl);
} }
else { else {
document.location.hash = oid; document.location.hash = oid;
@@ -515,7 +510,7 @@ function evau_error(e) {
if (eplaya.error.message) if (eplaya.error.message)
err += '\n\n' + eplaya.error.message; err += '\n\n' + eplaya.error.message;
err += '\n\nFile: «' + decodeURIComponent(eplaya.src.split('/').slice(-1)[0]) + '»'; err += '\n\nFile: «' + uricom_dec(eplaya.src.split('/').slice(-1)[0])[0] + '»';
alert(err); alert(err);
} }
@@ -550,7 +545,7 @@ function autoplay_blocked() {
var na = ebi('blk_na'); var na = ebi('blk_na');
var fn = mp.tracks[mp.au.tid].split(/\//).pop(); var fn = mp.tracks[mp.au.tid].split(/\//).pop();
fn = decodeURIComponent(fn.replace(/\+/g, ' ')); fn = uricom_dec(fn.replace(/\+/g, ' '))[0];
go.textContent = 'Play "' + fn + '"'; go.textContent = 'Play "' + fn + '"';
go.onclick = function (e) { go.onclick = function (e) {
@@ -591,6 +586,13 @@ function autoplay_blocked() {
["name", "name", "name contains &nbsp; (negate with -nope)", "46"] ["name", "name", "name contains &nbsp; (negate with -nope)", "46"]
] ]
]; ];
var oldcfg = [];
if (document.querySelector('#srch_form.tags'))
sconf.push(["tags",
["tags", "tags", "tags contains &nbsp; (^=start, end=$)", "46"]
]);
var html = []; var html = [];
var orig_html = null; var orig_html = null;
for (var a = 0; a < sconf.length; a++) { for (var a = 0; a < sconf.length; a++) {
@@ -610,7 +612,7 @@ function autoplay_blocked() {
} }
ebi('srch_form').innerHTML = html.join('\n'); ebi('srch_form').innerHTML = html.join('\n');
var o = document.querySelectorAll('#op_search input[type="text"]'); var o = document.querySelectorAll('#op_search input');
for (var a = 0; a < o.length; a++) { for (var a = 0; a < o.length; a++) {
o[a].oninput = ev_search_input; o[a].oninput = ev_search_input;
} }
@@ -619,8 +621,11 @@ function autoplay_blocked() {
function ev_search_input() { function ev_search_input() {
var v = this.value; var v = this.value;
var chk = ebi(this.getAttribute('id').slice(0, -1) + 'c'); var id = this.getAttribute('id');
chk.checked = ((v + '').length > 0); if (id.slice(-1) == 'v') {
var chk = ebi(id.slice(0, -1) + 'c');
chk.checked = ((v + '').length > 0);
}
clearTimeout(search_timeout); clearTimeout(search_timeout);
search_timeout = setTimeout(do_search, 100); search_timeout = setTimeout(do_search, 100);
} }
@@ -649,21 +654,33 @@ function autoplay_blocked() {
return; return;
if (this.status !== 200) { if (this.status !== 200) {
alert('ah fug\n' + this.status + ": " + this.responseText); alert("http " + this.status + ": " + this.responseText);
return; return;
} }
var res = JSON.parse(this.responseText),
tagord = res.tag_order;
var ofiles = ebi('files'); var ofiles = ebi('files');
if (ofiles.getAttribute('ts') > this.ts) if (ofiles.getAttribute('ts') > this.ts)
return; return;
ebi('path').style.display = 'none'; if (!oldcfg.length) {
ebi('tree').style.display = 'none'; oldcfg = [
ebi('path').style.display,
ebi('tree').style.display,
ebi('wrap').style.marginLeft
];
ebi('path').style.display = 'none';
ebi('tree').style.display = 'none';
ebi('wrap').style.marginLeft = '0';
}
var html = ['<tr><td>-</td><td colspan="4"><a href="#" id="unsearch">close search results</a></td></tr>']; var html = mk_files_header(tagord);
var res = JSON.parse(this.responseText); html.push('<tbody>');
for (var a = 0; a < res.length; a++) { html.push('<tr><td>-</td><td colspan="42"><a href="#" id="unsearch">close search results</a></td></tr>');
var r = res[a], for (var a = 0; a < res.hits.length; a++) {
var r = res.hits[a],
ts = parseInt(r.ts), ts = parseInt(r.ts),
sz = esc(r.sz + ''), sz = esc(r.sz + ''),
rp = esc(r.rp + ''), rp = esc(r.rp + ''),
@@ -674,15 +691,31 @@ function autoplay_blocked() {
ext = '%'; ext = '%';
links = links.join(''); links = links.join('');
html.push('<tr><td>-</td><td><div>' + links + '</div></td><td>' + sz + var nodes = ['<tr><td>-</td><td><div>' + links + '</div>', sz];
'</td><td>' + ext + '</td><td>' + unix2iso(ts) + '</td></tr>'); for (var b = 0; b < tagord.length; b++) {
var k = tagord[b],
v = r.tags[k] || "";
if (k == "dur") {
var sv = s2ms(v);
nodes[nodes.length - 1] += '</td><td sortv="' + v + '">' + sv;
continue;
}
nodes.push(v);
}
nodes = nodes.concat([ext, unix2iso(ts)]);
html.push(nodes.join('</td><td>'));
html.push('</td></tr>');
} }
if (!orig_html) if (!orig_html)
orig_html = ebi('files').tBodies[0].innerHTML; orig_html = ebi('files').innerHTML;
ofiles.tBodies[0].innerHTML = html.join('\n'); ofiles.innerHTML = html.join('\n');
ofiles.setAttribute("ts", this.ts); ofiles.setAttribute("ts", this.ts);
filecols.set_style();
reload_browser(); reload_browser();
ebi('unsearch').onclick = unsearch; ebi('unsearch').onclick = unsearch;
@@ -690,40 +723,77 @@ function autoplay_blocked() {
function unsearch(e) { function unsearch(e) {
ev(e); ev(e);
ebi('path').style.display = 'inline-block'; ebi('path').style.display = oldcfg[0];
ebi('tree').style.display = 'block'; ebi('tree').style.display = oldcfg[1];
ebi('files').tBodies[0].innerHTML = orig_html; ebi('wrap').style.marginLeft = oldcfg[2];
oldcfg = [];
ebi('files').innerHTML = orig_html;
orig_html = null; orig_html = null;
reload_browser(); reload_browser();
} }
})(); })();
// tree var treectl = (function () {
(function () { var dyn = bcfg_get('dyntree', true);
var treedata = null; var treesz = icfg_get('treesz', 16);
treesz = Math.min(Math.max(treesz, 4), 50);
console.log('treesz [' + treesz + ']');
function entree(e) { function entree(e) {
ev(e); ev(e);
ebi('path').style.display = 'none'; ebi('path').style.display = 'none';
var treetab = ebi('treetab'); var tree = ebi('tree');
var treefiles = ebi('treefiles'); tree.style.display = 'block';
treetab.style.display = 'table'; swrite('entreed', 'tree');
get_tree("", get_evpath(), true);
treefiles.appendChild(ebi('pro')); onresize();
treefiles.appendChild(ebi('files'));
treefiles.appendChild(ebi('epi'));
localStorage.setItem('entreed', 'tree');
get_tree("", get_vpath());
} }
function get_tree(top, dst) { function detree(e) {
ev(e);
ebi('tree').style.display = 'none';
ebi('path').style.display = 'inline-block';
ebi('wrap').style.marginLeft = '0';
swrite('entreed', 'na');
}
function onscroll() {
var top = ebi('wrap').getBoundingClientRect().top;
ebi('tree').style.top = Math.max(0, parseInt(top)) + 'px';
}
window.addEventListener('scroll', onscroll);
function periodic() {
onscroll();
setTimeout(periodic, document.visibilityState ? 200 : 5000);
}
periodic();
function onresize(e) {
var q = '#tree';
var nq = 0;
while (dyn) {
nq++;
q += '>ul>li';
if (!document.querySelector(q))
break;
}
var w = treesz + nq;
ebi('tree').style.width = w + 'em';
ebi('wrap').style.marginLeft = w + 'em';
onscroll();
}
window.addEventListener('resize', onresize);
function get_tree(top, dst, rst) {
var xhr = new XMLHttpRequest(); var xhr = new XMLHttpRequest();
xhr.top = top; xhr.top = top;
xhr.dst = dst; xhr.dst = dst;
xhr.rst = rst;
xhr.ts = new Date().getTime();
xhr.open('GET', dst + '?tree=' + top, true); xhr.open('GET', dst + '?tree=' + top, true);
xhr.onreadystatechange = recvtree; xhr.onreadystatechange = recvtree;
xhr.send(); xhr.send();
@@ -735,12 +805,19 @@ function autoplay_blocked() {
return; return;
if (this.status !== 200) { if (this.status !== 200) {
alert('ah fug\n' + this.status + ": " + this.responseText); alert("http " + this.status + ": " + this.responseText);
return; return;
} }
var cur = ebi('treeul').getAttribute('ts');
if (cur && parseInt(cur) > this.ts) {
console.log("reject tree");
return;
}
ebi('treeul').setAttribute('ts', this.ts);
var top = this.top == '.' ? this.dst : this.top, var top = this.top == '.' ? this.dst : this.top,
name = top.split('/').slice(-2)[0], name = uricom_dec(top.split('/').slice(-2)[0])[0],
rtop = top.replace(/^\/+/, ""); rtop = top.replace(/^\/+/, "");
try { try {
@@ -752,7 +829,7 @@ function autoplay_blocked() {
var html = parsetree(res, rtop); var html = parsetree(res, rtop);
if (!this.top) { if (!this.top) {
html = '<li><a href="#">-</a><a href="/">[root]</a>\n<ul>' + html; html = '<li><a href="#">-</a><a href="/">[root]</a>\n<ul>' + html;
if (!ebi('treeul').getElementsByTagName('li').length) if (this.rst || !ebi('treeul').getElementsByTagName('li').length)
ebi('treeul').innerHTML = html + '</ul></li>'; ebi('treeul').innerHTML = html + '</ul></li>';
} }
else { else {
@@ -760,7 +837,7 @@ function autoplay_blocked() {
esc(top) + '">' + esc(name) + esc(top) + '">' + esc(name) +
"</a>\n<ul>\n" + html + "</ul>"; "</a>\n<ul>\n" + html + "</ul>";
var links = document.querySelectorAll('#tree a+a'); var links = document.querySelectorAll('#treeul a+a');
for (var a = 0, aa = links.length; a < aa; a++) { for (var a = 0, aa = links.length; a < aa; a++) {
if (links[a].getAttribute('href') == top) { if (links[a].getAttribute('href') == top) {
var o = links[a].parentNode; var o = links[a].parentNode;
@@ -774,27 +851,18 @@ function autoplay_blocked() {
document.querySelector('#treeul>li>a+a').textContent = '[root]'; document.querySelector('#treeul>li>a+a').textContent = '[root]';
despin('#tree'); despin('#tree');
reload_tree(); reload_tree();
onresize();
var q = '#tree';
var nq = 0;
while (true) {
nq++;
q += '>ul>li';
if (!document.querySelector(q))
break;
}
ebi('treeul').style.width = (24 + nq) + 'em';
} }
function reload_tree() { function reload_tree() {
var cdir = get_vpath(); var cdir = get_evpath();
var links = document.querySelectorAll('#tree a+a'); var links = document.querySelectorAll('#treeul a+a');
for (var a = 0, aa = links.length; a < aa; a++) { for (var a = 0, aa = links.length; a < aa; a++) {
var href = links[a].getAttribute('href'); var href = links[a].getAttribute('href');
links[a].setAttribute('class', href == cdir ? 'hl' : ''); links[a].setAttribute('class', href == cdir ? 'hl' : '');
links[a].onclick = treego; links[a].onclick = treego;
} }
links = document.querySelectorAll('#tree li>a:first-child'); links = document.querySelectorAll('#treeul li>a:first-child');
for (var a = 0, aa = links.length; a < aa; a++) { for (var a = 0, aa = links.length; a < aa; a++) {
links[a].setAttribute('dst', links[a].nextSibling.getAttribute('href')); links[a].setAttribute('dst', links[a].nextSibling.getAttribute('href'));
links[a].onclick = treegrow; links[a].onclick = treegrow;
@@ -808,12 +876,20 @@ function autoplay_blocked() {
treegrow.call(this.previousSibling, e); treegrow.call(this.previousSibling, e);
return; return;
} }
reqls(this.getAttribute('href'), true);
}
function reqls(url, hpush) {
var xhr = new XMLHttpRequest(); var xhr = new XMLHttpRequest();
xhr.top = this.getAttribute('href'); xhr.top = url;
xhr.hpush = hpush;
xhr.ts = new Date().getTime();
xhr.open('GET', xhr.top + '?ls', true); xhr.open('GET', xhr.top + '?ls', true);
xhr.onreadystatechange = recvls; xhr.onreadystatechange = recvls;
xhr.send(); xhr.send();
get_tree('.', xhr.top); if (hpush)
get_tree('.', xhr.top);
enspin('#files'); enspin('#files');
} }
@@ -825,6 +901,7 @@ function autoplay_blocked() {
rm.parentNode.removeChild(rm); rm.parentNode.removeChild(rm);
} }
this.textContent = '+'; this.textContent = '+';
onresize();
return; return;
} }
var dst = this.getAttribute('dst'); var dst = this.getAttribute('dst');
@@ -836,10 +913,17 @@ function autoplay_blocked() {
return; return;
if (this.status !== 200) { if (this.status !== 200) {
alert('ah fug\n' + this.status + ": " + this.responseText); alert("http " + this.status + ": " + this.responseText);
return; return;
} }
var cur = ebi('files').getAttribute('ts');
if (cur && parseInt(cur) > this.ts) {
console.log("reject ls");
return;
}
ebi('files').setAttribute('ts', this.ts);
try { try {
var res = JSON.parse(this.responseText); var res = JSON.parse(this.responseText);
} }
@@ -851,24 +935,45 @@ function autoplay_blocked() {
ebi('srv_info').innerHTML = '<span>' + res.srvinf + '</span>'; ebi('srv_info').innerHTML = '<span>' + res.srvinf + '</span>';
var nodes = res.dirs.concat(res.files); var nodes = res.dirs.concat(res.files);
var top = this.top; var top = this.top;
var html = []; var html = mk_files_header(res.taglist);
html.push('<tbody>');
for (var a = 0; a < nodes.length; a++) { for (var a = 0; a < nodes.length; a++) {
var r = nodes[a], var r = nodes[a],
ln = '<tr><td>' + r.lead + '</td><td><a href="' + ln = ['<tr><td>' + r.lead + '</td><td><a href="' +
top + r.href + '">' + esc(decodeURIComponent(r.href)) + '</a>'; top + r.href + '">' + esc(uricom_dec(r.href)[0]) + '</a>', r.sz];
ln = [ln, r.sz, r.ext, unix2iso(r.ts)].join('</td><td>'); for (var b = 0; b < res.taglist.length; b++) {
var k = res.taglist[b],
v = (r.tags || {})[k] || "";
if (k[0] == '.')
k = k.slice(1);
if (k == "dur") {
var sv = s2ms(v);
ln[ln.length - 1] += '</td><td sortv="' + v + '">' + sv;
continue;
}
ln.push(v);
}
ln = ln.concat([r.ext, unix2iso(r.ts)]).join('</td><td>');
html.push(ln + '</td></tr>'); html.push(ln + '</td></tr>');
} }
html.push('</tbody>');
html = html.join('\n'); html = html.join('\n');
ebi('files').tBodies[0].innerHTML = html; ebi('files').innerHTML = html;
history.pushState(html, this.top, this.top);
if (this.hpush)
hist_push(this.top);
apply_perms(res.perms); apply_perms(res.perms);
despin('#files'); despin('#files');
ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : ""; ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : "";
ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : ""; ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : "";
filecols.set_style();
mukey.render();
reload_tree(); reload_tree();
reload_browser(); reload_browser();
} }
@@ -884,12 +989,14 @@ function autoplay_blocked() {
keys.sort(); keys.sort();
for (var a = 0; a < keys.length; a++) { for (var a = 0; a < keys.length; a++) {
var kk = keys[a], var kk = keys[a],
k = kk.slice(1), ks = kk.slice(1),
url = '/' + (top ? top + k : k) + '/', k = uricom_dec(ks),
ek = esc(k), hek = esc(k[0]),
uek = k[1] ? uricom_enc(k[0], true) : k[0],
url = '/' + (top ? top + uek : uek) + '/',
sym = res[kk] ? '-' : '+', sym = res[kk] ? '-' : '+',
link = '<a href="#">' + sym + '</a><a href="' + link = '<a href="#">' + sym + '</a><a href="' +
esc(url) + '">' + ek + '</a>'; url + '">' + hek + '</a>';
if (res[kk]) { if (res[kk]) {
var subtree = parsetree(res[kk], url.slice(1)); var subtree = parsetree(res[kk], url.slice(1));
@@ -902,37 +1009,48 @@ function autoplay_blocked() {
return ret; return ret;
} }
function detree(e) { function dyntree(e) {
ev(e); ev(e);
var treetab = ebi('treetab'); dyn = !dyn;
bcfg_set('dyntree', dyn);
onresize();
}
treetab.parentNode.insertBefore(ebi('pro'), treetab); function scaletree(e) {
treetab.parentNode.insertBefore(ebi('files'), treetab.nextSibling); ev(e);
treetab.parentNode.insertBefore(ebi('epi'), ebi('files').nextSibling); treesz += parseInt(this.getAttribute("step"));
if (isNaN(treesz))
treesz = 16;
ebi('path').style.display = 'inline-block'; swrite('treesz', treesz);
treetab.style.display = 'none'; onresize();
localStorage.setItem('entreed', 'na');
} }
ebi('entree').onclick = entree; ebi('entree').onclick = entree;
ebi('detree').onclick = detree; ebi('detree').onclick = detree;
if (window.localStorage && localStorage.getItem('entreed') == 'tree') ebi('dyntree').onclick = dyntree;
ebi('twig').onclick = scaletree;
ebi('twobytwo').onclick = scaletree;
if (sread('entreed') == 'tree')
entree(); entree();
window.onpopstate = function (e) { window.onpopstate = function (e) {
console.log(e.url + ' ,, ' + ((e.state + '').slice(0, 64))); console.log("h-pop " + e.state);
if (e.state) { if (!e.state)
ebi('files').tBodies[0].innerHTML = e.state; return;
reload_tree();
reload_browser(); var url = new URL(e.state, "https://" + document.location.host);
} url = url.pathname;
get_tree("", url, true);
reqls(url);
}; };
if (window.history && history.pushState) { if (window.history && history.pushState) {
var u = get_vpath(); hist_replace(get_evpath() + window.location.hash);
history.replaceState(ebi('files').tBodies[0].innerHTML, u, u); }
return {
"onscroll": onscroll
} }
})(); })();
@@ -989,10 +1107,253 @@ function apply_perms(perms) {
} }
function mk_files_header(taglist) {
var html = [
'<thead>',
'<th></th>',
'<th><span>File Name</span></th>',
'<th sort="int"><span>Size</span></th>'
];
for (var a = 0; a < taglist.length; a++) {
var tag = taglist[a];
var c1 = tag.slice(0, 1).toUpperCase();
tag = c1 + tag.slice(1);
if (c1 == '.')
tag = '<th sort="int"><span>' + tag.slice(1);
else
tag = '<th><span>' + tag;
html.push(tag + '</span></th>');
}
html = html.concat([
'<th><span>T</span></th>',
'<th><span>Date</span></th>',
'</thead>',
]);
return html;
}
var filecols = (function () {
var hidden = jread('filecols', []);
var add_btns = function () {
var ths = document.querySelectorAll('#files th>span');
for (var a = 0, aa = ths.length; a < aa; a++) {
var th = ths[a].parentElement;
var is_hidden = has(hidden, ths[a].textContent);
th.innerHTML = '<div class="cfg"><a href="#">' +
(is_hidden ? '+' : '-') + '</a></div>' + ths[a].outerHTML;
th.getElementsByTagName('a')[0].onclick = ev_row_tgl;
}
};
var set_style = function () {
add_btns();
var ohidden = [],
ths = document.querySelectorAll('#files th'),
ncols = ths.length;
for (var a = 0; a < ncols; a++) {
var span = ths[a].getElementsByTagName('span');
if (span.length <= 0)
continue;
var name = span[0].textContent,
cls = '';
if (has(hidden, name)) {
ohidden.push(a);
cls = ' min';
}
ths[a].className = ths[a].className.replace(/ *min */, " ") + cls;
}
for (var a = 0; a < ncols; a++) {
var cls = has(ohidden, a) ? 'min' : '';
var tds = document.querySelectorAll('#files>tbody>tr>td:nth-child(' + (a + 1) + ')');
for (var b = 0, bb = tds.length; b < bb; b++) {
tds[b].setAttribute('class', cls);
if (a < 2)
continue;
if (cls) {
if (!tds[b].hasAttribute('html')) {
tds[b].setAttribute('html', tds[b].innerHTML);
tds[b].innerHTML = '...';
}
}
else if (tds[b].hasAttribute('html')) {
tds[b].innerHTML = tds[b].getAttribute('html');
tds[b].removeAttribute('html');
}
}
}
};
set_style();
var toggle = function (name) {
var ofs = hidden.indexOf(name);
if (ofs !== -1)
hidden.splice(ofs, 1);
else
hidden.push(name);
jwrite("filecols", hidden);
set_style();
};
return {
"add_btns": add_btns,
"set_style": set_style,
"toggle": toggle,
};
})();
var mukey = (function () {
var maps = {
"rekobo_alnum": [
"1B ", "2B ", "3B ", "4B ", "5B ", "6B ", "7B ", "8B ", "9B ", "10B", "11B", "12B",
"1A ", "2A ", "3A ", "4A ", "5A ", "6A ", "7A ", "8A ", "9A ", "10A", "11A", "12A"
],
"rekobo_classic": [
"B ", "F# ", "Db ", "Ab ", "Eb ", "Bb ", "F ", "C ", "G ", "D ", "A ", "E ",
"Abm", "Ebm", "Bbm", "Fm ", "Cm ", "Gm ", "Dm ", "Am ", "Em ", "Bm ", "F#m", "Dbm"
],
"traktor_musical": [
"B ", "Gb ", "Db ", "Ab ", "Eb ", "Bb ", "F ", "C ", "G ", "D ", "A ", "E ",
"Abm", "Ebm", "Bbm", "Fm ", "Cm ", "Gm ", "Dm ", "Am ", "Em ", "Bm ", "Gbm", "Dbm"
],
"traktor_sharps": [
"B ", "F# ", "C# ", "G# ", "D# ", "A# ", "F ", "C ", "G ", "D ", "A ", "E ",
"G#m", "D#m", "A#m", "Fm ", "Cm ", "Gm ", "Dm ", "Am ", "Em ", "Bm ", "F#m", "C#m"
],
"traktor_open": [
"6d ", "7d ", "8d ", "9d ", "10d", "11d", "12d", "1d ", "2d ", "3d ", "4d ", "5d ",
"6m ", "7m ", "8m ", "9m ", "10m", "11m", "12m", "1m ", "2m ", "3m ", "4m ", "5m "
]
};
var map = {};
var html = [];
for (var k in maps) {
if (!maps.hasOwnProperty(k))
continue;
html.push(
'<span><input type="radio" name="keytype" value="' + k + '" id="key_' + k + '">' +
'<label for="key_' + k + '">' + k + '</label></span>');
for (var a = 0; a < 24; a++)
maps[k][a] = maps[k][a].trim();
}
ebi('key_notation').innerHTML = html.join('\n');
function set_key_notation(e) {
ev(e);
var notation = this.getAttribute('value');
load_notation(notation);
render();
}
function load_notation(notation) {
swrite("key_notation", notation);
map = {};
var dst = maps[notation];
for (var k in maps)
if (k != notation && maps.hasOwnProperty(k))
for (var a = 0; a < 24; a++)
if (maps[k][a] != dst[a])
map[maps[k][a]] = dst[a];
}
function render() {
var tds = ebi('files').tHead.getElementsByTagName('th');
var i = -1;
var min = false;
for (var a = 0; a < tds.length; a++) {
var spans = tds[a].getElementsByTagName('span');
if (spans.length && spans[0].textContent == 'Key') {
min = tds[a].getAttribute('class').indexOf('min') !== -1;
i = a;
break;
}
}
if (i == -1)
return;
var rows = ebi('files').tBodies[0].rows;
if (min)
for (var a = 0, aa = rows.length; a < aa; a++) {
var c = rows[a].cells[i];
if (!c)
continue;
var v = c.getAttribute('html');
c.setAttribute('html', map[v] || v);
}
else
for (var a = 0, aa = rows.length; a < aa; a++) {
var c = rows[a].cells[i];
if (!c)
continue;
var v = c.textContent;
c.textContent = map[v] || v;
}
}
function try_render() {
try {
render();
}
catch (ex) {
console.log("key notation failed: " + ex);
}
}
var notation = sread("key_notation") || "rekobo_alnum";
ebi('key_' + notation).checked = true;
load_notation(notation);
var o = document.querySelectorAll('#key_notation input');
for (var a = 0; a < o.length; a++) {
o[a].onchange = set_key_notation;
}
return {
"render": try_render
};
})();
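The notation tables above are index-aligned: position N in every map names the same musical key, so converting between notations is just a positional lookup. A minimal standalone sketch of that idea, assuming only the two rekobo tables and no DOM (values copied from the maps, padding trimmed):

// positional remap between two key notations, mirroring load_notation()/render()
var REKOBO_ALNUM = [
    "1B", "2B", "3B", "4B", "5B", "6B", "7B", "8B", "9B", "10B", "11B", "12B",
    "1A", "2A", "3A", "4A", "5A", "6A", "7A", "8A", "9A", "10A", "11A", "12A"];

var REKOBO_CLASSIC = [
    "B", "F#", "Db", "Ab", "Eb", "Bb", "F", "C", "G", "D", "A", "E",
    "Abm", "Ebm", "Bbm", "Fm", "Cm", "Gm", "Dm", "Am", "Em", "Bm", "F#m", "Dbm"];

function to_alnum(tag) {
    // same index in both tables means the same key, so remapping is a lookup
    var i = REKOBO_CLASSIC.indexOf(tag);
    return i === -1 ? tag : REKOBO_ALNUM[i];
}

console.log(to_alnum("Abm"));  // "1A"
console.log(to_alnum("C"));    // "8B"
console.log(to_alnum("zzz"));  // "zzz" (unknown values pass through, as in render())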
(function () {
function set_tooltip(e) {
ev(e);
ebi('opdesc').innerHTML = this.getAttribute('data-desc');
}
var btns = document.querySelectorAll('#ops, #ops>a');
for (var a = 0; a < btns.length; a++) {
btns[a].onmouseenter = set_tooltip;
}
})();
function ev_row_tgl(e) {
ev(e);
filecols.toggle(this.parentElement.parentElement.getElementsByTagName('span')[0].textContent);
}
function reload_browser(not_mp) { function reload_browser(not_mp) {
filecols.set_style();
makeSortable(ebi('files')); makeSortable(ebi('files'));
var parts = get_vpath().split('/'); var parts = get_evpath().split('/');
var rm = document.querySelectorAll('#path>a+a+a'); var rm = document.querySelectorAll('#path>a+a+a');
for (a = rm.length - 1; a >= 0; a--) for (a = rm.length - 1; a >= 0; a--)
rm[a].parentNode.removeChild(rm[a]); rm[a].parentNode.removeChild(rm[a]);
@@ -1027,3 +1388,4 @@ function reload_browser(not_mp) {
up2k.set_fsearch(); up2k.set_fsearch();
} }
reload_browser(true); reload_browser(true);
mukey.render();


@@ -65,7 +65,7 @@ function statify(obj) {
if (a > 0) if (a > 0)
loc.push(n[a]); loc.push(n[a]);
var dec = hesc(decodeURIComponent(n[a])); var dec = hesc(uricom_dec(n[a])[0]);
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>'); nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
} }
@@ -524,11 +524,9 @@ dom_navtgl.onclick = function () {
dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav'; dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav';
dom_nav.style.display = hidden ? 'none' : 'block'; dom_nav.style.display = hidden ? 'none' : 'block';
if (window.localStorage) swrite('hidenav', hidden ? 1 : 0);
localStorage.setItem('hidenav', hidden ? 1 : 0);
redraw(); redraw();
}; };
if (window.localStorage && localStorage.getItem('hidenav') == 1) if (sread('hidenav') == 1)
dom_navtgl.onclick(); dom_navtgl.onclick();


@@ -15,7 +15,7 @@ var dom_md = ebi('mt');
if (a > 0) if (a > 0)
loc.push(n[a]); loc.push(n[a]);
var dec = decodeURIComponent(n[a]).replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;"); var dec = uricom_dec(n[a])[0].replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>'); nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
} }


@@ -46,9 +46,9 @@ function up2k_flagbus() {
var dbg = function (who, msg) { var dbg = function (who, msg) {
console.log('flagbus(' + flag.id + '): [' + who + '] ' + msg); console.log('flagbus(' + flag.id + '): [' + who + '] ' + msg);
}; };
flag.ch.onmessage = function (ev) { flag.ch.onmessage = function (e) {
var who = ev.data[0], var who = e.data[0],
what = ev.data[1]; what = e.data[1];
if (who == flag.id) { if (who == flag.id) {
dbg(who, 'hi me (??)'); dbg(who, 'hi me (??)');
@@ -83,7 +83,7 @@ function up2k_flagbus() {
flag.ch.postMessage([flag.id, "hey"]); flag.ch.postMessage([flag.id, "hey"]);
} }
else { else {
dbg('?', ev.data); dbg('?', e.data);
} }
}; };
var tx = function (now, msg) { var tx = function (now, msg) {
@@ -194,7 +194,7 @@ function up2k_init(have_crypto) {
// handle user intent to use the basic uploader instead // handle user intent to use the basic uploader instead
ebi('u2nope').onclick = function (e) { ebi('u2nope').onclick = function (e) {
e.preventDefault(); ev(e);
setmsg(); setmsg();
goto('bup'); goto('bup');
}; };
@@ -209,42 +209,7 @@ function up2k_init(have_crypto) {
}; };
} }
function cfg_get(name) { var parallel_uploads = icfg_get('nthread');
var val = localStorage.getItem(name);
if (val === null)
return parseInt(ebi(name).value);
ebi(name).value = val;
return val;
}
function bcfg_get(name, defval) {
var o = ebi(name);
if (!o)
return defval;
var val = localStorage.getItem(name);
if (val === null)
val = defval;
else
val = (val == '1');
o.checked = val;
return val;
}
function bcfg_set(name, val) {
localStorage.setItem(
name, val ? '1' : '0');
var o = ebi(name);
if (o)
o.checked = val;
return val;
}
var parallel_uploads = cfg_get('nthread');
var multitask = bcfg_get('multitask', true); var multitask = bcfg_get('multitask', true);
var ask_up = bcfg_get('ask_up', true); var ask_up = bcfg_get('ask_up', true);
var flag_en = bcfg_get('flag_en', false); var flag_en = bcfg_get('flag_en', false);
@@ -289,29 +254,29 @@ function up2k_init(have_crypto) {
} }
ebi('u2btn').addEventListener('click', nav, false); ebi('u2btn').addEventListener('click', nav, false);
function ondrag(ev) { function ondrag(e) {
ev.stopPropagation(); e.stopPropagation();
ev.preventDefault(); e.preventDefault();
ev.dataTransfer.dropEffect = 'copy'; e.dataTransfer.dropEffect = 'copy';
ev.dataTransfer.effectAllowed = 'copy'; e.dataTransfer.effectAllowed = 'copy';
} }
ebi('u2btn').addEventListener('dragover', ondrag, false); ebi('u2btn').addEventListener('dragover', ondrag, false);
ebi('u2btn').addEventListener('dragenter', ondrag, false); ebi('u2btn').addEventListener('dragenter', ondrag, false);
function gotfile(ev) { function gotfile(e) {
ev.stopPropagation(); e.stopPropagation();
ev.preventDefault(); e.preventDefault();
var files; var files;
var is_itemlist = false; var is_itemlist = false;
if (ev.dataTransfer) { if (e.dataTransfer) {
if (ev.dataTransfer.items) { if (e.dataTransfer.items) {
files = ev.dataTransfer.items; // DataTransferItemList files = e.dataTransfer.items; // DataTransferItemList
is_itemlist = true; is_itemlist = true;
} }
else files = ev.dataTransfer.files; // FileList else files = e.dataTransfer.files; // FileList
} }
else files = ev.target.files; else files = e.target.files;
if (files.length == 0) if (files.length == 0)
return alert('no files selected??'); return alert('no files selected??');
@@ -367,7 +332,7 @@ function up2k_init(have_crypto) {
"name": fobj.name, "name": fobj.name,
"size": fobj.size, "size": fobj.size,
"lmod": lmod / 1000, "lmod": lmod / 1000,
"purl": get_vpath(), "purl": get_evpath(),
"done": false, "done": false,
"hash": [] "hash": []
}; };
@@ -690,8 +655,8 @@ function up2k_init(have_crypto) {
prog(t.n, nchunk, col_hashing); prog(t.n, nchunk, col_hashing);
}; };
var segm_load = function (ev) { var segm_load = function (e) {
cache_buf = ev.target.result; cache_buf = e.target.result;
cache_ofs = 0; cache_ofs = 0;
hash_calc(); hash_calc();
}; };
@@ -765,20 +730,20 @@ function up2k_init(have_crypto) {
st.busy.handshake.push(t); st.busy.handshake.push(t);
var xhr = new XMLHttpRequest(); var xhr = new XMLHttpRequest();
xhr.onload = function (ev) { xhr.onload = function (e) {
if (xhr.status == 200) { if (xhr.status == 200) {
var response = JSON.parse(xhr.responseText); var response = JSON.parse(xhr.responseText);
if (!response.name) { if (!response.name) {
var msg = ''; var msg = '';
var smsg = ''; var smsg = '';
if (!response || !response.length) { if (!response || !response.hits || !response.hits.length) {
msg = 'not found on server'; msg = 'not found on server';
smsg = '404'; smsg = '404';
} }
else { else {
smsg = 'found'; smsg = 'found';
var hit = response[0], var hit = response.hits[0],
msg = linksplit(hit.rp).join(''), msg = linksplit(hit.rp).join(''),
tr = unix2iso(hit.ts), tr = unix2iso(hit.ts),
tu = unix2iso(t.lmod), tu = unix2iso(t.lmod),
@@ -916,7 +881,7 @@ function up2k_init(have_crypto) {
alert('y o u b r o k e i t\n\n(was that a folder? just files please)'); alert('y o u b r o k e i t\n\n(was that a folder? just files please)');
}; };
reader.onload = function (ev) { reader.onload = function (e) {
var xhr = new XMLHttpRequest(); var xhr = new XMLHttpRequest();
xhr.upload.onprogress = function (xev) { xhr.upload.onprogress = function (xev) {
var perc = xev.loaded / (cdr - car) * 100; var perc = xev.loaded / (cdr - car) * 100;
@@ -950,7 +915,7 @@ function up2k_init(have_crypto) {
xhr.setRequestHeader('Content-Type', 'application/octet-stream'); xhr.setRequestHeader('Content-Type', 'application/octet-stream');
xhr.overrideMimeType('Content-Type', 'application/octet-stream'); xhr.overrideMimeType('Content-Type', 'application/octet-stream');
xhr.responseType = 'text'; xhr.responseType = 'text';
xhr.send(ev.target.result); xhr.send(e.target.result);
}; };
reader.readAsArrayBuffer(bobslice.call(t.fobj, car, cdr)); reader.readAsArrayBuffer(bobslice.call(t.fobj, car, cdr));
@@ -979,7 +944,7 @@ function up2k_init(have_crypto) {
/// config ui /// config ui
// //
function onresize(ev) { function onresize(e) {
var bar = ebi('ops'), var bar = ebi('ops'),
wpx = innerWidth, wpx = innerWidth,
fpx = parseInt(getComputedStyle(bar)['font-size']), fpx = parseInt(getComputedStyle(bar)['font-size']),
@@ -994,17 +959,17 @@ function up2k_init(have_crypto) {
ebi('u2conf').setAttribute('class', wide ? 'has_btn' : ''); ebi('u2conf').setAttribute('class', wide ? 'has_btn' : '');
} }
} }
window.onresize = onresize; window.addEventListener('resize', onresize);
onresize(); onresize();
function desc_show(ev) { function desc_show(e) {
var msg = this.getAttribute('alt'); var msg = this.getAttribute('alt');
msg = msg.replace(/\$N/g, "<br />"); msg = msg.replace(/\$N/g, "<br />");
var cdesc = ebi('u2cdesc'); var cdesc = ebi('u2cdesc');
cdesc.innerHTML = msg; cdesc.innerHTML = msg;
cdesc.setAttribute('class', 'show'); cdesc.setAttribute('class', 'show');
} }
function desc_hide(ev) { function desc_hide(e) {
ebi('u2cdesc').setAttribute('class', ''); ebi('u2cdesc').setAttribute('class', '');
} }
var o = document.querySelectorAll('#u2conf *[alt]'); var o = document.querySelectorAll('#u2conf *[alt]');
@@ -1033,7 +998,7 @@ function up2k_init(have_crypto) {
return; return;
parallel_uploads = v; parallel_uploads = v;
localStorage.setItem('nthread', v); swrite('nthread', v);
obj.style.background = '#444'; obj.style.background = '#444';
return; return;
} }
@@ -1119,17 +1084,17 @@ function up2k_init(have_crypto) {
} }
} }
function nop(ev) { function nop(e) {
ev.preventDefault(); ev(e);
this.click(); this.click();
} }
ebi('nthread_add').onclick = function (ev) { ebi('nthread_add').onclick = function (e) {
ev.preventDefault(); ev(e);
bumpthread(1); bumpthread(1);
}; };
ebi('nthread_sub').onclick = function (ev) { ebi('nthread_sub').onclick = function (e) {
ev.preventDefault(); ev(e);
bumpthread(-1); bumpthread(-1);
}; };


@@ -62,7 +62,7 @@
width: calc(100% - 2em); width: calc(100% - 2em);
max-width: 100em; max-width: 100em;
} }
#u2form.srch #u2tab { #op_up2k.srch #u2tab {
max-width: none; max-width: none;
} }
#u2tab td { #u2tab td {
@@ -76,7 +76,7 @@
#u2tab td:nth-child(3) { #u2tab td:nth-child(3) {
width: 40%; width: 40%;
} }
#u2form.srch #u2tab td:nth-child(3) { #op_up2k.srch #u2tab td:nth-child(3) {
font-family: sans-serif; font-family: sans-serif;
width: auto; width: auto;
} }


@@ -23,6 +23,7 @@ function esc(txt) {
} }
function vis_exh(msg, url, lineNo, columnNo, error) { function vis_exh(msg, url, lineNo, columnNo, error) {
window.onerror = undefined; window.onerror = undefined;
window['vis_exh'] = null;
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>', var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>']; esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
@@ -43,6 +44,21 @@ function ebi(id) {
return document.getElementById(id); return document.getElementById(id);
} }
function ev(e) {
e = e || window.event;
if (!e)
return;
if (e.preventDefault)
e.preventDefault()
if (e.stopPropagation)
e.stopPropagation();
e.returnValue = false;
return e;
}
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
if (!String.prototype.endsWith) { if (!String.prototype.endsWith) {
@@ -76,30 +92,41 @@ function import_js(url, cb) {
function sortTable(table, col) { function sortTable(table, col) {
var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows var tb = table.tBodies[0],
th = table.tHead.rows[0].cells, th = table.tHead.rows[0].cells,
tr = Array.prototype.slice.call(tb.rows, 0), tr = Array.prototype.slice.call(tb.rows, 0),
i, reverse = th[col].className == 'sort1' ? -1 : 1; i, reverse = th[col].className.indexOf('sort1') !== -1 ? -1 : 1;
for (var a = 0, thl = th.length; a < thl; a++) for (var a = 0, thl = th.length; a < thl; a++)
th[a].className = ''; th[a].className = th[a].className.replace(/ *sort-?1 */, " ");
th[col].className = 'sort' + reverse; th[col].className += ' sort' + reverse;
var stype = th[col].getAttribute('sort'); var stype = th[col].getAttribute('sort');
tr = tr.sort(function (a, b) { var vl = [];
if (!a.cells[col]) for (var a = 0; a < tr.length; a++) {
var cell = tr[a].cells[col];
if (!cell) {
vl.push([null, a]);
continue;
}
var v = cell.getAttribute('sortv') || cell.textContent.trim();
if (stype == 'int') {
v = parseInt(v.replace(/[, ]/g, '')) || 0;
}
vl.push([v, a]);
}
vl.sort(function (a, b) {
a = a[0];
b = b[0];
if (a === null)
return -1; return -1;
if (!b.cells[col]) if (b === null)
return 1; return 1;
var v1 = a.cells[col].textContent.trim();
var v2 = b.cells[col].textContent.trim();
if (stype == 'int') { if (stype == 'int') {
v1 = parseInt(v1.replace(/,/g, '')); return reverse * (a - b);
v2 = parseInt(v2.replace(/,/g, ''));
return reverse * (v1 - v2);
} }
return reverse * (v1.localeCompare(v2)); return reverse * (a.localeCompare(b));
}); });
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]); for (i = 0; i < tr.length; ++i) tb.appendChild(tr[vl[i][1]]);
} }
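The rewritten sorter also reads an optional sortv attribute before falling back to the cell text, so a cell can display a friendly string while sorting by a raw value. A small sketch of a caller using that hook (the sample cell and values are invented for illustration):

// render "1.4 GiB" but sort by the exact byte count;
// the column header must carry sort="int" for the numeric branch to apply
var td = document.createElement('td');
td.textContent = '1.4 GiB';
td.setAttribute('sortv', '1528823808');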
function makeSortable(table) { function makeSortable(table) {
var th = table.tHead, i; var th = table.tHead, i;
@@ -107,7 +134,8 @@ function makeSortable(table) {
if (th) i = th.length; if (th) i = th.length;
else return; // if no `<thead>` then do nothing else return; // if no `<thead>` then do nothing
while (--i >= 0) (function (i) { while (--i >= 0) (function (i) {
th[i].onclick = function () { th[i].onclick = function (e) {
ev(e);
sortTable(table, i); sortTable(table, i);
}; };
}(i)); }(i));
@@ -123,16 +151,13 @@ function makeSortable(table) {
})(); })();
function opclick(ev) { function opclick(e) {
if (ev) //ie ev(e);
ev.preventDefault();
var dest = this.getAttribute('data-dest'); var dest = this.getAttribute('data-dest');
goto(dest); goto(dest);
// writing a blank value makes ie8 segfault w swrite('opmode', dest || null);
if (window.localStorage)
localStorage.setItem('opmode', dest || '.');
var input = document.querySelector('.opview.act input:not([type="hidden"])') var input = document.querySelector('.opview.act input:not([type="hidden"])')
if (input) if (input)
@@ -149,10 +174,6 @@ function goto(dest) {
for (var a = obj.length - 1; a >= 0; a--) for (var a = obj.length - 1; a >= 0; a--)
obj[a].classList.remove('act'); obj[a].classList.remove('act');
var others = ['path', 'files', 'widget'];
for (var a = 0; a < others.length; a++)
ebi(others[a]).classList.remove('hidden');
if (dest) { if (dest) {
var ui = ebi('op_' + dest); var ui = ebi('op_' + dest);
ui.classList.add('act'); ui.classList.add('act');
@@ -162,16 +183,17 @@ function goto(dest) {
if (fn) if (fn)
fn(); fn();
} }
if (window['treectl'])
treectl.onscroll();
} }
(function () { (function () {
goto(); goto();
if (window.localStorage) { var op = sread('opmode');
var op = localStorage.getItem('opmode'); if (op !== null && op !== '.')
if (op !== null && op !== '.') goto(op);
goto(op);
}
})(); })();
@@ -202,6 +224,31 @@ function linksplit(rp) {
} }
function uricom_enc(txt, do_fb_enc) {
try {
return encodeURIComponent(txt);
}
catch (ex) {
console.log("uce-err [" + txt + "]");
if (do_fb_enc)
return esc(txt);
return txt;
}
}
function uricom_dec(txt) {
try {
return [decodeURIComponent(txt), true];
}
catch (ex) {
console.log("ucd-err [" + txt + "]");
return [txt, false];
}
}
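uricom_enc() and uricom_dec() exist because encodeURIComponent()/decodeURIComponent() throw URIError on input that is not valid UTF-8 percent-encoding (mojibake filenames, lone surrogates). A small illustration of the fallback behaviour, assuming the two helpers above are in scope (the sample filenames are made up):

// valid percent-encoding decodes normally and reports success
console.log(uricom_dec("caf%C3%A9.flac"));  // ["café.flac", true]

// "%E9" is latin-1, not valid UTF-8, so decodeURIComponent() would throw;
// the wrapper logs the failure and hands back the raw text instead
console.log(uricom_dec("%E9tude.flac"));    // ["%E9tude.flac", false]

// encoding side: a lone UTF-16 surrogate cannot be percent-encoded, so the
// wrapper falls back to the html-escaped (or raw) input rather than throwing
console.log(uricom_enc("\ud800", true));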
function get_evpath() { function get_evpath() {
var ret = document.location.pathname; var ret = document.location.pathname;
@@ -216,7 +263,7 @@ function get_evpath() {
function get_vpath() { function get_vpath() {
return decodeURIComponent(get_evpath()); return uricom_dec(get_evpath())[0];
} }
@@ -225,6 +272,12 @@ function unix2iso(ts) {
} }
function s2ms(s) {
var m = Math.floor(s / 60);
return m + ":" + ("0" + (s - m * 60)).slice(-2);
}
function has(haystack, needle) { function has(haystack, needle) {
for (var a = 0; a < haystack.length; a++) for (var a = 0; a < haystack.length; a++)
if (haystack[a] == needle) if (haystack[a] == needle)
@@ -232,3 +285,91 @@ function has(haystack, needle) {
return false; return false;
} }
function sread(key) {
if (window.localStorage)
return localStorage.getItem(key);
return null;
}
function swrite(key, val) {
if (window.localStorage) {
if (val === undefined || val === null)
localStorage.removeItem(key);
else
localStorage.setItem(key, val);
}
}
function jread(key, fb) {
var str = sread(key);
if (!str)
return fb;
return JSON.parse(str);
}
function jwrite(key, val) {
if (!val)
swrite(key);
else
swrite(key, JSON.stringify(val));
}
function icfg_get(name, defval) {
var o = ebi(name);
var val = parseInt(sread(name));
if (isNaN(val))
return parseInt(o ? o.value : defval);
if (o)
o.value = val;
return val;
}
function bcfg_get(name, defval) {
var o = ebi(name);
if (!o)
return defval;
var val = sread(name);
if (val === null)
val = defval;
else
val = (val == '1');
bcfg_upd_ui(name, val);
return val;
}
function bcfg_set(name, val) {
swrite(name, val ? '1' : '0');
bcfg_upd_ui(name, val);
return val;
}
function bcfg_upd_ui(name, val) {
var o = ebi(name);
if (!o)
return;
if (o.getAttribute('type') == 'checkbox')
o.checked = val;
else if (o)
o.setAttribute('class', val ? 'on' : '');
}
function hist_push(url) {
console.log("h-push " + url);
history.pushState(url, url, url);
}
function hist_replace(url) {
console.log("h-repl " + url);
history.replaceState(url, url, url);
}
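Taken together, these helpers put all localStorage access behind a single feature check, so callers never touch window.localStorage directly. A short usage sketch, assuming a page that has an input with id="nthread" and a checkbox with id="multitask" (both appear in the up2k panel, but that is an assumption here):

// plain string setting; writing null/undefined removes the key entirely
swrite('nthread', 4);
console.log(sread('nthread'));        // "4" (localStorage stores strings)
console.log(icfg_get('nthread', 2));  // 4, parsed back to an int and written into the input

// JSON-backed setting, e.g. the hidden-columns list used by filecols
var cols = jread('filecols', []);
cols.push('Key');
jwrite('filecols', cols);             // serialized with JSON.stringify

// boolean setting bound to a checkbox (or an on/off styled element)
var on = bcfg_get('multitask', true); // reads storage and syncs the UI state
bcfg_set('multitask', !on);           // writes storage and syncs the UI state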


@@ -11,6 +11,13 @@ gzip -d < .hist/up2k.snap | jq -r '.[].tnam' | while IFS= read -r f; do rm -f --
gzip -d < .hist/up2k.snap | jq -r '.[].name' | while IFS= read -r f; do wc -c -- "$f" | grep -qiE '^[^0-9a-z]*0' && rm -f -- "$f"; done gzip -d < .hist/up2k.snap | jq -r '.[].name' | while IFS= read -r f; do wc -c -- "$f" | grep -qiE '^[^0-9a-z]*0' && rm -f -- "$f"; done
##
## detect partial uploads based on file contents
## (in case of context loss or old copyparties)
echo; find -type f | while IFS= read -r x; do printf '\033[A\033[36m%s\033[K\033[0m\n' "$x"; tail -c$((1024*1024)) <"$x" | xxd -a | awk 'NR==1&&/^[0: ]+.{16}$/{next} NR==2&&/^\*$/{next} NR==3&&/^[0f]+: [0 ]+65 +.{16}$/{next} {e=1} END {exit e}' || continue; printf '\033[A\033[31msus:\033[33m %s \033[0m\n\n' "$x"; done
## ##
## create a test payload ## create a test payload


@@ -122,7 +122,7 @@ git describe --tags >/dev/null 2>/dev/null && {
exit 1 exit 1
} }
dt="$(git log -1 --format=%cd --date=format:'%Y,%m,%d' | sed -E 's/,0?/, /g')" dt="$(git log -1 --format=%cd --date=short | sed -E 's/-0?/, /g')"
printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt" printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt"
sed -ri ' sed -ri '
s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/; s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/;


@@ -16,6 +16,12 @@ from copyparty.authsrv import AuthSrv
from copyparty import util from copyparty import util
class Cfg(Namespace):
def __init__(self, a=[], v=[], c=None):
ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr mte".split()}
super(Cfg, self).__init__(a=a, v=v, c=c, **ex)
class TestVFS(unittest.TestCase): class TestVFS(unittest.TestCase):
def dump(self, vfs): def dump(self, vfs):
print(json.dumps(vfs, indent=4, sort_keys=True, default=lambda o: o.__dict__)) print(json.dumps(vfs, indent=4, sort_keys=True, default=lambda o: o.__dict__))
@@ -35,7 +41,13 @@ class TestVFS(unittest.TestCase):
def ls(self, vfs, vpath, uname): def ls(self, vfs, vpath, uname):
"""helper for resolving and listing a folder""" """helper for resolving and listing a folder"""
vn, rem = vfs.get(vpath, uname, True, False) vn, rem = vfs.get(vpath, uname, True, False)
return vn.ls(rem, uname) r1 = vn.ls(rem, uname, False)
r2 = vn.ls(rem, uname, False)
self.assertEqual(r1, r2)
fsdir, real, virt = r1
real = [x[0] for x in real]
return fsdir, real, virt
def runcmd(self, *argv): def runcmd(self, *argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE) p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
@@ -78,7 +90,7 @@ class TestVFS(unittest.TestCase):
finally: finally:
return ret return ret
def log(self, src, msg): def log(self, src, msg, c=0):
pass pass
def test(self): def test(self):
@@ -102,7 +114,7 @@ class TestVFS(unittest.TestCase):
f.write(fn) f.write(fn)
# defaults # defaults
vfs = AuthSrv(Namespace(c=None, a=[], v=[]), self.log).vfs vfs = AuthSrv(Cfg(), self.log).vfs
self.assertEqual(vfs.nodes, {}) self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "") self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td) self.assertEqual(vfs.realpath, td)
@@ -110,7 +122,7 @@ class TestVFS(unittest.TestCase):
self.assertEqual(vfs.uwrite, ["*"]) self.assertEqual(vfs.uwrite, ["*"])
# single read-only rootfs (relative path) # single read-only rootfs (relative path)
vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), self.log).vfs vfs = AuthSrv(Cfg(v=["a/ab/::r"]), self.log).vfs
self.assertEqual(vfs.nodes, {}) self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "") self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab")) self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab"))
@@ -118,9 +130,7 @@ class TestVFS(unittest.TestCase):
self.assertEqual(vfs.uwrite, []) self.assertEqual(vfs.uwrite, [])
# single read-only rootfs (absolute path) # single read-only rootfs (absolute path)
vfs = AuthSrv( vfs = AuthSrv(Cfg(v=[td + "//a/ac/../aa//::r"]), self.log).vfs
Namespace(c=None, a=[], v=[td + "//a/ac/../aa//::r"]), self.log
).vfs
self.assertEqual(vfs.nodes, {}) self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "") self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa")) self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa"))
@@ -129,7 +139,7 @@ class TestVFS(unittest.TestCase):
# read-only rootfs with write-only subdirectory (read-write for k) # read-only rootfs with write-only subdirectory (read-write for k)
vfs = AuthSrv( vfs = AuthSrv(
Namespace(c=None, a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]), Cfg(a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]),
self.log, self.log,
).vfs ).vfs
self.assertEqual(len(vfs.nodes), 1) self.assertEqual(len(vfs.nodes), 1)
@@ -192,7 +202,10 @@ class TestVFS(unittest.TestCase):
self.assertEqual(list(virt), []) self.assertEqual(list(virt), [])
# admin-only rootfs with all-read-only subfolder # admin-only rootfs with all-read-only subfolder
vfs = AuthSrv(Namespace(c=None, a=["k:k"], v=[".::ak", "a:a:r"]), self.log,).vfs vfs = AuthSrv(
Cfg(a=["k:k"], v=[".::ak", "a:a:r"]),
self.log,
).vfs
self.assertEqual(len(vfs.nodes), 1) self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(vfs.vpath, "") self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td) self.assertEqual(vfs.realpath, td)
@@ -211,9 +224,7 @@ class TestVFS(unittest.TestCase):
# breadth-first construction # breadth-first construction
vfs = AuthSrv( vfs = AuthSrv(
Namespace( Cfg(
c=None,
a=[],
v=[ v=[
"a/ac/acb:a/ac/acb:w", "a/ac/acb:a/ac/acb:w",
"a:a:w", "a:a:w",
@@ -234,7 +245,7 @@ class TestVFS(unittest.TestCase):
self.undot(vfs, "./.././foo/..", "") self.undot(vfs, "./.././foo/..", "")
# shadowing # shadowing
vfs = AuthSrv(Namespace(c=None, a=[], v=[".::r", "b:a/ac:r"]), self.log).vfs vfs = AuthSrv(Cfg(v=[".::r", "b:a/ac:r"]), self.log).vfs
fsp, r1, v1 = self.ls(vfs, "", "*") fsp, r1, v1 = self.ls(vfs, "", "*")
self.assertEqual(fsp, td) self.assertEqual(fsp, td)
@@ -271,7 +282,7 @@ class TestVFS(unittest.TestCase):
).encode("utf-8") ).encode("utf-8")
) )
au = AuthSrv(Namespace(c=[cfg_path], a=[], v=[]), self.log) au = AuthSrv(Cfg(c=[cfg_path]), self.log)
self.assertEqual(au.user["a"], "123") self.assertEqual(au.user["a"], "123")
self.assertEqual(au.user["asd"], "fgh:jkl") self.assertEqual(au.user["asd"], "fgh:jkl")
n = au.vfs n = au.vfs