Compare commits

...

43 Commits

Author SHA1 Message Date
ed
7f9bf1c78c v0.9.0 2021-03-02 00:12:15 +01:00
ed
61a6bc3a65 make browser columns compactable 2021-03-02 00:07:04 +01:00
ed
46e10b0e9f yab 2021-03-01 03:15:41 +01:00
ed
8441206e26 read media-tags from files (for display/searching) 2021-03-01 02:50:10 +01:00
ed
9fdc5ee748 use one sqlite3 cursor, closes #1 2021-02-25 22:30:40 +01:00
ed
00ff133387 support receiving chunked PUT 2021-02-25 22:26:03 +01:00
ed
96164cb934 v0.8.3 2021-02-22 21:58:37 +01:00
ed
82fb21ae69 v0.8.2 2021-02-22 21:40:55 +01:00
ed
89d4a2b4c4 hide up2k mode-toggle in read-only folders 2021-02-22 21:27:44 +01:00
ed
fc0c7ff374 correct up2k mode in mixed-r/w 2021-02-22 21:11:30 +01:00
ed
5148c4f2e9 include pro/epilogues in ?ls 2021-02-22 21:09:57 +01:00
ed
c3b59f7bcf restore win8/7/xp support 2021-02-22 20:59:44 +01:00
ed
61e148202b too much 2021-02-22 20:56:19 +01:00
ed
8a4e0739bc v0.8.1 2021-02-22 03:54:34 +01:00
ed
f75c5f2fe5 v0.8.0 2021-02-22 03:46:02 +01:00
ed
81d5859588 h 2021-02-22 03:33:24 +01:00
ed
721886bb7a this isnt really helping is it 2021-02-22 03:01:32 +01:00
ed
b23c272820 mention the search syntax 2021-02-22 02:33:30 +01:00
ed
cd02bfea7a better path/name search syntax 2021-02-22 02:16:47 +01:00
ed
6774bd88f9 make search/upload toggling more visible 2021-02-22 01:25:13 +01:00
ed
1046a4f376 update web deps 2021-02-22 00:47:53 +01:00
ed
8081f9ddfd add up2k cleanup button 2021-02-22 00:47:21 +01:00
ed
fa656577d1 prevent non-spa navigation while uploading 2021-02-21 21:08:53 +01:00
ed
b14b86990f toggle upload widgets in spa 2021-02-21 20:50:12 +01:00
ed
2a6dd7b512 add close button to search results 2021-02-21 05:33:57 +00:00
ed
feebdee88b correctness 2021-02-21 05:15:08 +00:00
ed
99d9277f5d look at him go 2021-02-21 05:36:26 +01:00
ed
9af64d6156 debug pypy3/7.3.3/gcc9.2.0/gentoo 2021-02-21 02:48:25 +00:00
ed
5e3775c1af fuse.py prefers ?ls if available 2021-02-21 02:07:34 +00:00
ed
2d2e8a3da7 less jank ?ls 2021-02-21 01:31:49 +00:00
ed
b2a560b76f update readme with new features 2021-02-21 00:29:10 +00:00
ed
39397a489d rearrange readme status list 2021-02-21 00:26:29 +00:00
ed
ff593a0904 fix folder tree presentation in mixed-r/w volumes 2021-02-20 19:10:16 +00:00
ed
f12789cf44 reversible mojibake marshaling for sqlite 2021-02-20 18:12:36 +00:00
ed
4f8cf2fc87 qol 2021-02-20 17:39:08 +01:00
ed
fda98730ac 77.6KiB changeset nice 2021-02-20 04:59:43 +00:00
ed
06c6ddffb6 v0.7.7 2021-02-14 02:13:52 +01:00
ed
d29f0c066c logging 2021-02-14 01:32:16 +01:00
ed
c9e4de3346 up2k: fix rejected files not counting as progress 2021-02-13 04:30:46 +01:00
ed
ca0b97f72d oh cool 2021-02-13 03:59:38 +01:00
ed
b38f20b408 up2k: make tabsync optional 2021-02-13 03:45:40 +01:00
ed
05b1dbaf56 up2k: upload semaphore across tabs/windows 2021-02-13 02:57:51 +01:00
ed
b8481e32ba lovely priority inversions 2021-02-12 23:53:13 +01:00
29 changed files with 3289 additions and 633 deletions

8
.vscode/launch.json vendored
View File

@@ -12,12 +12,14 @@
//"-nw",
"-ed",
"-emp",
"-e2d",
"-e2s",
"-e2dsa",
"-e2ts",
"-a",
"ed:wark",
"-v",
"srv::r:aed:cnodupe"
"srv::r:aed:cnodupe",
"-v",
"dist:dist:r"
]
},
{

2
.vscode/tasks.json vendored
View File

@@ -8,7 +8,7 @@
},
{
"label": "no_dbg",
"command": "${config:python.pythonPath} -m copyparty -ed -emp -e2d -e2s -a ed:wark -v srv::r:aed:cnodupe ;exit 1",
"command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -e2ts -a ed:wark -v srv::r:aed:cnodupe -v dist:dist:r ;exit 1",
"type": "shell"
}
]

105
README.md
View File

@@ -36,34 +36,111 @@ you may also want these, especially on servers:
## status
* [x] sanic multipart parser
* [x] load balancer (multiprocessing)
* [x] upload (plain multipart, ie6 support)
* [x] upload (js, resumable, multithreaded)
* [x] download
* [x] browser
* [x] media player
* [ ] thumbnails
* [ ] download as zip
* [x] volumes
* [x] accounts
* [x] markdown viewer
* [x] markdown editor
* [x] FUSE client (read-only)
* backend stuff
* ☑ sanic multipart parser
* ☑ load balancer (multiprocessing)
* ☑ volumes (mountpoints)
* ☑ accounts
* upload
* ☑ basic: plain multipart, ie6 support
* ☑ up2k: js, resumable, multithreaded
* ☑ stash: simple PUT filedropper
* ☑ symlink/discard existing files (content-matching)
* download
* ☑ single files in browser
* ✖ folders as zip files
* FUSE client (read-only)
* browser
* ☑ tree-view
* ☑ media player
* ✖ thumbnails
* ✖ SPA (browse while uploading)
* currently safe using the file-tree on the left only, not folders in the file list
* server indexing
* ☑ locate files by contents
* ☑ search by name/path/date/size
* ✖ search by ID3-tags etc.
* markdown
* ☑ viewer
* ☑ editor (sure why not)
summary: it works! you can use it! (but technically not even close to beta)
# bugs
* probably, pls let me know
# searching
when started with `-e2dsa` copyparty will scan/index all your files. This avoids duplicates on upload, and also makes the volumes searchable through the web-ui:
* make search queries by `size`/`date`/`directory-path`/`filename`, or...
* drag/drop a local file to see if the same contents exist somewhere on the server (you get the URL if it does)
path/name queries are space-separated, AND'ed together, and words are negated with a `-` prefix, so for example:
* path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
* name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9)
## search configuration
searching relies on two databases, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`). Configuration can be done through arguments, volume flags, or a mix of both.
through arguments:
* `-e2d` enables file indexing on upload
* `-e2ds` scans writable folders on startup
* `-e2dsa` scans all mounted volumes (including readonly ones)
* `-e2t` enables metadata indexing on upload
* `-e2ts` scans for tags in all files that don't have tags yet
* `-e2tsr` deletes all existing tags, so a full reindex
the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
* `-v ~/music::ce2dsa:ce2tsr` does a full reindex of everything on startup
* `-v ~/music::cd2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`
`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those
`-mte` decides which tags to index and display in the browser (and also the display order), this can be changed per-volume:
* `-v ~/music::cmte=title,artist` indexes and displays *title* followed by *artist*
if you add/remove a tag from `mte` you will need to run with `-e2tsr` once to rebuild the database, otherwise only new files will be affected
`-mtm` can be used to add or redefine a metadata mapping, say you have media files with `foo` and `bar` tags and you want them to display as `qux` in the browser (preferring `foo` if both are present), then do `-mtm qux=foo,bar` and now you can `-mte artist,title,qux`
see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)
`--no-mutagen` disables mutagen and uses ffprobe instead, which...
* is about 20x slower than mutagen
* catches a few tags that mutagen doesn't
* avoids pulling any GPL code into copyparty
* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve
# client examples
* javascript: dump some state into a file (two separate examples)
* `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
* `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
* curl/wget: upload some files (post=file, chunk=stdin)
* `post(){ curl -b cppwd=wark http://127.0.0.1:3923/ -F act=bput -F f=@"$1";}`
`post movie.mkv`
* `post(){ wget --header='Cookie: cppwd=wark' http://127.0.0.1:3923/?raw --post-file="$1" -O-;}`
`post movie.mkv`
* `chunk(){ curl -b cppwd=wark http://127.0.0.1:3923/ -T-;}`
`chunk <movie.mkv`
* FUSE: mount a copyparty server as a local filesystem
* cross-platform python client available in [./bin/](bin/)
* [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uplaods:
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
b512 <movie.mkv
# dependencies

View File

@@ -33,6 +33,7 @@ import re
import os
import sys
import time
import json
import stat
import errno
import struct
@@ -323,7 +324,7 @@ class Gateway(object):
if bad_good:
path = dewin(path)
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots&ls"
r = self.sendreq("GET", web_path)
if r.status != 200:
self.closeconn()
@@ -334,12 +335,17 @@ class Gateway(object):
)
raise FuseOSError(errno.ENOENT)
if not r.getheader("Content-Type", "").startswith("text/html"):
ctype = r.getheader("Content-Type", "")
if ctype == "application/json":
parser = self.parse_jls
elif ctype.startswith("text/html"):
parser = self.parse_html
else:
log("listdir on file: {}".format(path))
raise FuseOSError(errno.ENOENT)
try:
return self.parse_html(r)
return parser(r)
except:
info(repr(path) + "\n" + traceback.format_exc())
raise
@@ -367,6 +373,29 @@ class Gateway(object):
return r.read()
def parse_jls(self, datasrc):
rsp = b""
while True:
buf = datasrc.read(1024 * 32)
if not buf:
break
rsp += buf
rsp = json.loads(rsp.decode("utf-8"))
ret = []
for is_dir, nodes in [[True, rsp["dirs"]], [False, rsp["files"]]]:
for n in nodes:
fname = unquote(n["href"]).rstrip(b"/")
fname = fname.decode("wtf-8")
if bad_good:
fname = enwin(fname)
fun = self.stat_dir if is_dir else self.stat_file
ret.append([fname, fun(n["ts"], n["sz"]), 0])
return ret
def parse_html(self, datasrc):
ret = []
remainder = b""
@@ -818,9 +847,9 @@ class CPPF(Operations):
return cache_stat
fun = info
if MACOS and path.split('/')[-1].startswith('._'):
if MACOS and path.split("/")[-1].startswith("._"):
fun = dbg
fun("=ENOENT ({})".format(hexler(path)))
raise FuseOSError(errno.ENOENT)

View File

@@ -174,6 +174,18 @@ def main():
if HAVE_SSL:
ensure_cert()
deprecated = [["-e2s", "-e2ds"]]
for dk, nk in deprecated:
try:
idx = sys.argv.index(dk)
except:
continue
msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
print(msg.format(dk, nk))
sys.argv[idx] = nk
time.sleep(2)
ap = argparse.ArgumentParser(
formatter_class=RiceFormatter,
prog="copyparty",
@@ -186,7 +198,7 @@ def main():
and "cflag" is config flags to set on this volume
list of cflags:
cnodupe rejects existing files (instead of symlinking them)
"cnodupe" rejects existing files (instead of symlinking them)
example:\033[35m
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
@@ -227,14 +239,25 @@ def main():
ap.add_argument("-q", action="store_true", help="quiet")
ap.add_argument("-ed", action="store_true", help="enable ?dots")
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap.add_argument("-e2d", action="store_true", help="enable up2k database")
ap.add_argument("-e2s", action="store_true", help="enable up2k db-scanner")
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap.add_argument("-nih", action="store_true", help="no info hostname")
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
ap.add_argument("--urlform", type=str, default="print,get", help="how to handle url-forms")
ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
ap2 = ap.add_argument_group('database options')
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead")
ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q")
ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
@@ -243,9 +266,21 @@ def main():
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
al = ap.parse_args()
# fmt: on
# propagate implications
for k1, k2 in [
["e2dsa", "e2ds"],
["e2ds", "e2d"],
["e2tsr", "e2ts"],
["e2ts", "e2t"],
["e2t", "e2d"],
]:
if getattr(al, k1):
setattr(al, k2, True)
al.i = al.i.split(",")
try:
if "-" in al.p:

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (0, 7, 6)
CODENAME = "keeping track"
BUILD_DT = (2021, 2, 12)
VERSION = (0, 9, 0)
CODENAME = "the strongest music server"
BUILD_DT = (2021, 3, 2)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -19,6 +19,11 @@ class VFS(object):
self.uwrite = uwrite # users who can write this
self.flags = flags # config switches
self.nodes = {} # child nodes
self.all_vols = {vpath: self} # flattened recursive
def _trk(self, vol):
self.all_vols[vol.vpath] = vol
return vol
def add(self, src, dst):
"""get existing, or add new path to the vfs"""
@@ -30,7 +35,7 @@ class VFS(object):
name, dst = dst.split("/", 1)
if name in self.nodes:
# exists; do not manipulate permissions
return self.nodes[name].add(src, dst)
return self._trk(self.nodes[name].add(src, dst))
vn = VFS(
"{}/{}".format(self.realpath, name),
@@ -40,7 +45,7 @@ class VFS(object):
self.flags,
)
self.nodes[name] = vn
return vn.add(src, dst)
return self._trk(vn.add(src, dst))
if dst in self.nodes:
# leaf exists; return as-is
@@ -50,7 +55,7 @@ class VFS(object):
vp = "{}/{}".format(self.vpath, dst).lstrip("/")
vn = VFS(src, vp)
self.nodes[dst] = vn
return vn
return self._trk(vn)
def _find(self, vpath):
"""return [vfs,remainder]"""
@@ -201,8 +206,11 @@ class AuthSrv(object):
if lvl in "wa":
mwrite[vol_dst].append(uname)
if lvl == "c":
# config option, currently switches only
mflags[vol_dst][uname] = True
cval = True
if "=" in uname:
uname, cval = uname.split("=", 1)
mflags[vol_dst][uname] = cval
def reload(self):
"""
@@ -243,12 +251,19 @@ class AuthSrv(object):
perms = perms.split(":")
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
if lvl == "c":
# config option, currently switches only
mflags[dst][uname] = True
cval = True
if "=" in uname:
uname, cval = uname.split("=", 1)
mflags[dst][uname] = cval
continue
if uname == "":
uname = "*"
if lvl in "ra":
mread[dst].append(uname)
if lvl in "wa":
mwrite[dst].append(uname)
@@ -257,13 +272,13 @@ class AuthSrv(object):
with open(cfg_fn, "rb") as f:
self._parse_config_file(f, user, mread, mwrite, mflags, mount)
self.all_writable = []
if not mount:
# -h says our defaults are CWD at root and read/write for everyone
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
elif "" not in mount:
# there's volumes but no root; make root inaccessible
vfs = VFS(os.path.abspath("."), "")
vfs.flags["d2d"] = True
maxdepth = 0
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
@@ -280,11 +295,6 @@ class AuthSrv(object):
v.uread = mread[dst]
v.uwrite = mwrite[dst]
v.flags = mflags[dst]
if v.uwrite:
self.all_writable.append(v)
if vfs.uwrite and vfs not in self.all_writable:
self.all_writable.append(vfs)
missing_users = {}
for d in [mread, mwrite]:
@@ -301,15 +311,27 @@ class AuthSrv(object):
)
raise Exception("invalid config")
for vol in vfs.all_vols.values():
if (self.args.e2ds and vol.uwrite) or self.args.e2dsa:
vol.flags["e2ds"] = True
if self.args.e2d:
vol.flags["e2d"] = True
for k in ["e2t", "e2ts", "e2tsr"]:
if getattr(self.args, k):
vol.flags[k] = True
# default tag-list if unset
if "mte" not in vol.flags:
vol.flags["mte"] = self.args.mte
try:
v, _ = vfs.get("/", "*", False, True)
if self.warn_anonwrite and os.getcwd() == v.realpath:
self.warn_anonwrite = False
self.log(
"\033[31manyone can read/write the current directory: {}\033[0m".format(
v.realpath
)
)
msg = "\033[31manyone can read/write the current directory: {}\033[0m"
self.log(msg.format(v.realpath))
except Pebkac:
self.warn_anonwrite = True

View File

@@ -5,6 +5,7 @@ import os
import stat
import gzip
import time
import copy
import json
import socket
import ctypes
@@ -34,6 +35,7 @@ class HttpCli(object):
self.auth = conn.auth
self.log_func = conn.log_func
self.log_src = conn.log_src
self.tls = hasattr(self.s, "cipher")
self.bufsz = 1024 * 32
self.absolute_urls = False
@@ -75,6 +77,8 @@ class HttpCli(object):
self.loud_reply(str(ex), status=ex.code)
return self.keepalive
# time.sleep(0.4)
# normalize incoming headers to lowercase;
# outgoing headers however are Correct-Case
for header_line in headerlines[1:]:
@@ -124,25 +128,15 @@ class HttpCli(object):
k, v = k.split("=", 1)
uparam[k.lower()] = v.strip()
else:
uparam[k.lower()] = True
uparam[k.lower()] = False
self.uparam = uparam
self.vpath = unquotep(vpath)
ua = self.headers.get("user-agent", "")
if ua.startswith("rclone/"):
uparam["raw"] = True
uparam["dots"] = True
if hasattr(self.s, "cipher"):
self.ssl_suf = "".join(
[
" \033[3{}m{}".format(c, s)
for c, s in zip([6, 3, 6], self.s.cipher())
]
)
else:
self.ssl_suf = ""
uparam["raw"] = False
uparam["dots"] = False
try:
if self.mode in ["GET", "HEAD"]:
@@ -221,13 +215,16 @@ class HttpCli(object):
logmsg += " [\033[36m" + rval + "\033[0m]"
self.log(logmsg + self.ssl_suf)
self.log(logmsg)
# "embedded" resources
if self.vpath.startswith(".cpr"):
static_path = os.path.join(E.mod, "web/", self.vpath[5:])
return self.tx_file(static_path)
if "tree" in self.uparam:
return self.tx_tree()
# conditional redirect to single volumes
if self.vpath == "" and not self.uparam:
nread = len(self.rvol)
@@ -246,7 +243,7 @@ class HttpCli(object):
)
if not self.readable and not self.writable:
self.log("inaccessible: [{}]".format(self.vpath))
self.uparam = {"h": True}
self.uparam = {"h": False}
if "h" in self.uparam:
self.vpath = None
@@ -255,7 +252,7 @@ class HttpCli(object):
return self.tx_browser()
def handle_options(self):
self.log("OPTIONS " + self.req + self.ssl_suf)
self.log("OPTIONS " + self.req)
self.send_headers(
None,
204,
@@ -268,7 +265,7 @@ class HttpCli(object):
return True
def handle_put(self):
self.log("PUT " + self.req + self.ssl_suf)
self.log("PUT " + self.req)
if self.headers.get("expect", "").lower() == "100-continue":
try:
@@ -279,7 +276,7 @@ class HttpCli(object):
return self.handle_stash()
def handle_post(self):
self.log("POST " + self.req + self.ssl_suf)
self.log("POST " + self.req)
if self.headers.get("expect", "").lower() == "100-continue":
try:
@@ -316,7 +313,7 @@ class HttpCli(object):
reader, _ = self.get_body_reader()
for buf in reader:
buf = buf.decode("utf-8", "replace")
self.log("urlform:\n {}\n".format(buf))
self.log("urlform @ {}\n {}\n".format(self.vpath, buf))
if "get" in opt:
return self.handle_get()
@@ -326,8 +323,11 @@ class HttpCli(object):
raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
def get_body_reader(self):
remains = int(self.headers.get("content-length", None))
if remains is None:
chunked = "chunked" in self.headers.get("transfer-encoding", "").lower()
remains = int(self.headers.get("content-length", -1))
if chunked:
return read_socket_chunked(self.sr), remains
elif remains == -1:
self.keepalive = False
return read_socket_unbounded(self.sr), remains
else:
@@ -410,6 +410,9 @@ class HttpCli(object):
except:
raise Pebkac(422, "you POSTed invalid json")
if "srch" in self.uparam or "srch" in body:
return self.handle_search(body)
# prefer this over undot; no reason to allow traversion
if "/" in body["name"]:
raise Pebkac(400, "folders verboten")
@@ -425,7 +428,6 @@ class HttpCli(object):
body["ptop"] = vfs.realpath
body["prel"] = rem
body["addr"] = self.ip
body["flag"] = vfs.flags
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
response = x.get()
@@ -435,6 +437,41 @@ class HttpCli(object):
self.reply(response.encode("utf-8"), mime="application/json")
return True
def handle_search(self, body):
vols = []
for vtop in self.rvol:
vfs, _ = self.conn.auth.vfs.get(vtop, self.uname, True, False)
vols.append([vfs.vpath, vfs.realpath, vfs.flags])
idx = self.conn.get_u2idx()
t0 = time.time()
if "srch" in body:
# search by up2k hashlist
vbody = copy.deepcopy(body)
vbody["hash"] = len(vbody["hash"])
self.log("qj: " + repr(vbody))
hits = idx.fsearch(vols, body)
self.log("q#: {} ({:.2f}s)".format(repr(hits), time.time() - t0))
taglist = []
else:
# search by query params
self.log("qj: " + repr(body))
hits, taglist = idx.search(vols, body)
self.log("q#: {} ({:.2f}s)".format(len(hits), time.time() - t0))
order = []
cfg = self.args.mte.split(",")
for t in cfg:
if t in taglist:
order.append(t)
for t in taglist:
if t not in order:
order.append(t)
r = json.dumps({"hits": hits, "tag_order": order}).encode("utf-8")
self.reply(r, mime="application/json")
return True
def handle_post_binary(self):
try:
remains = int(self.headers["content-length"])
@@ -496,7 +533,12 @@ class HttpCli(object):
self.log("clone {} done".format(cstart[0]))
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
num_left, path = x.get()
x = x.get()
try:
num_left, path = x
except:
self.loud_reply(x, status=500)
return False
if not WINDOWS and num_left == 0:
times = (int(time.time()), int(lastmod))
@@ -938,7 +980,7 @@ class HttpCli(object):
# 512 kB is optimal for huge files, use 64k
open_args = [fsenc(fs_path), "rb", 64 * 1024]
use_sendfile = (
not self.ssl_suf
not self.tls #
and not self.args.no_sendfile
and hasattr(os, "sendfile")
)
@@ -1041,6 +1083,60 @@ class HttpCli(object):
self.reply(html.encode("utf-8"))
return True
def tx_tree(self):
top = self.uparam["tree"] or ""
dst = self.vpath
if top in [".", ".."]:
top = undot(self.vpath + "/" + top)
if top == dst:
dst = ""
elif top:
if not dst.startswith(top + "/"):
raise Pebkac(400, "arg funk")
dst = dst[len(top) + 1 :]
ret = self.gen_tree(top, dst)
ret = json.dumps(ret)
self.reply(ret.encode("utf-8"), mime="application/json")
return True
def gen_tree(self, top, target):
ret = {}
excl = None
if target:
excl, target = (target.split("/", 1) + [""])[:2]
ret["k" + excl] = self.gen_tree("/".join([top, excl]).strip("/"), target)
try:
vn, rem = self.auth.vfs.get(top, self.uname, True, False)
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname)
except:
vfs_ls = []
vfs_virt = {}
for v in self.rvol:
d1, d2 = v.rsplit("/", 1) if "/" in v else ["", v]
if d1 == top:
vfs_virt[d2] = 0
dirs = []
if not self.args.ed or "dots" not in self.uparam:
vfs_ls = exclude_dotfiles(vfs_ls)
for fn in [x for x in vfs_ls if x != excl]:
abspath = os.path.join(fsroot, fn)
if os.path.isdir(abspath):
dirs.append(fn)
for x in vfs_virt.keys():
if x != excl:
dirs.append(x)
ret["a"] = dirs
return ret
def tx_browser(self):
vpath = ""
vpnodes = [["", "/"]]
@@ -1066,8 +1162,7 @@ class HttpCli(object):
if abspath.endswith(".md") and "raw" not in self.uparam:
return self.tx_md(abspath)
bad = "{0}.hist{0}up2k.".format(os.sep)
if abspath.endswith(bad + "db") or abspath.endswith(bad + "snap"):
if rem.startswith(".hist/up2k."):
raise Pebkac(403)
return self.tx_file(abspath)
@@ -1096,21 +1191,28 @@ class HttpCli(object):
vfs_ls = exclude_dotfiles(vfs_ls)
hidden = []
if fsroot.endswith(str(os.sep) + ".hist"):
hidden = ["up2k.db", "up2k.snap"]
if rem == ".hist":
hidden = ["up2k."]
is_ls = "ls" in self.uparam
icur = None
if "e2t" in vn.flags:
idx = self.conn.get_u2idx()
icur = idx.get_cur(vn.realpath)
dirs = []
files = []
for fn in vfs_ls:
base = ""
href = fn
if self.absolute_urls and vpath:
if not is_ls and self.absolute_urls and vpath:
base = "/" + vpath + "/"
href = base + fn
if fn in vfs_virt:
fspath = vfs_virt[fn].realpath
elif fn in hidden:
elif hidden and any(fn.startswith(x) for x in hidden):
continue
else:
fspath = fsroot + "/" + fn
@@ -1141,29 +1243,44 @@ class HttpCli(object):
except:
ext = "%"
item = [margin, quotep(href), html_escape(fn), sz, ext, dt]
item = {
"lead": margin,
"href": quotep(href),
"name": fn,
"sz": sz,
"ext": ext,
"dt": dt,
"ts": inf.st_mtime,
}
if is_dir:
dirs.append(item)
else:
files.append(item)
item["rd"] = rem
logues = [None, None]
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
fn = os.path.join(abspath, fn)
if os.path.exists(fsenc(fn)):
with open(fsenc(fn), "rb") as f:
logues[n] = f.read().decode("utf-8")
taglist = {}
for f in files:
fn = f["name"]
rd = f["rd"]
del f["rd"]
if icur:
q = "select w from up where rd = ? and fn = ?"
r = icur.execute(q, (rd, fn)).fetchone()
if not r:
continue
if False:
# this is a mistake
md = None
for fn in [x[2] for x in files]:
if fn.lower() == "readme.md":
fn = os.path.join(abspath, fn)
with open(fn, "rb") as f:
md = f.read().decode("utf-8")
w = r[0][:16]
tags = {}
for k, v in icur.execute("select k, v from mt where w = ?", (w,)):
taglist[k] = True
tags[k] = v
break
f["tags"] = tags
if icur:
taglist = [k for k in self.args.mte.split(",") if k in taglist]
for f in dirs:
f["tags"] = {}
srv_info = []
@@ -1193,21 +1310,53 @@ class HttpCli(object):
except:
pass
srv_info = "</span> /// <span>".join(srv_info)
perms = []
if self.readable:
perms.append("read")
if self.writable:
perms.append("write")
logues = ["", ""]
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
fn = os.path.join(abspath, fn)
if os.path.exists(fsenc(fn)):
with open(fsenc(fn), "rb") as f:
logues[n] = f.read().decode("utf-8")
if is_ls:
[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
ret = {
"dirs": dirs,
"files": files,
"srvinf": srv_info,
"perms": perms,
"logues": logues,
"taglist": taglist,
}
ret = json.dumps(ret)
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
return True
ts = ""
# ts = "?{}".format(time.time())
dirs.extend(files)
html = self.conn.tpl_browser.render(
vdir=quotep(self.vpath),
vpnodes=vpnodes,
files=dirs,
can_upload=self.writable,
can_read=self.readable,
ts=ts,
prologue=logues[0],
epilogue=logues[1],
perms=json.dumps(perms),
taglist=taglist,
tag_order=json.dumps(self.args.mte.split(",")),
have_up2k_idx=("e2d" in vn.flags),
have_tags_idx=("e2t" in vn.flags),
logues=logues,
title=html_escape(self.vpath),
srv_info="</span> /// <span>".join(srv_info),
srv_info=srv_info,
)
self.reply(html.encode("utf-8", "replace"))
return True

View File

@@ -20,16 +20,19 @@ except ImportError:
you do not have jinja2 installed,\033[33m
choose one of these:\033[0m
* apt install python-jinja2
* python3 -m pip install --user jinja2
* {} -m pip install --user jinja2
* (try another python version, if you have one)
* (try copyparty.sfx instead)
"""
""".format(
os.path.basename(sys.executable)
)
)
sys.exit(1)
from .__init__ import E
from .util import Unrecv
from .httpcli import HttpCli
from .u2idx import U2idx
class HttpConn(object):
@@ -50,6 +53,7 @@ class HttpConn(object):
self.t0 = time.time()
self.nbyte = 0
self.workload = 0
self.u2idx = None
self.log_func = hsrv.log
self.set_rproxy()
@@ -80,6 +84,12 @@ class HttpConn(object):
def log(self, msg):
self.log_func(self.log_src, msg)
def get_u2idx(self):
if not self.u2idx:
self.u2idx = U2idx(self.args, self.log_func)
return self.u2idx
def _detect_https(self):
method = None
if self.cert_path:
@@ -141,6 +151,12 @@ class HttpConn(object):
ctx.set_ciphers(self.args.ciphers)
self.s = ctx.wrap_socket(self.s, server_side=True)
msg = [
"\033[1;3{:d}m{}".format(c, s)
for c, s in zip([0, 5, 0], self.s.cipher())
]
self.log(" ".join(msg) + "\033[0m")
if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
overlap = [y[::-1] for y in self.s.shared_ciphers()]
lines = [str(x) for x in (["TLS cipher overlap:"] + overlap)]

View File

@@ -78,7 +78,7 @@ class HttpSrv(object):
if not MACOS:
self.log(
"%s %s" % addr,
"shut_rdwr err:\n {}\n {}".format(repr(sck), ex),
"\033[1;30mshut({}): {}\033[0m".format(sck.fileno(), ex),
)
if ex.errno not in [10038, 10054, 107, 57, 9]:
# 10038 No longer considered a socket

305
copyparty/mtag.py Normal file
View File

@@ -0,0 +1,305 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
from math import fabs
import re
import os
import sys
import shutil
import subprocess as sp
from .__init__ import PY2, WINDOWS
from .util import fsenc, fsdec
class MTag(object):
def __init__(self, log_func, args):
self.log_func = log_func
self.usable = True
mappings = args.mtm
backend = "ffprobe" if args.no_mutagen else "mutagen"
if backend == "mutagen":
self.get = self.get_mutagen
try:
import mutagen
except:
self.log("\033[33mcould not load mutagen, trying ffprobe instead")
backend = "ffprobe"
if backend == "ffprobe":
self.get = self.get_ffprobe
# about 20x slower
if PY2:
cmd = ["ffprobe", "-version"]
try:
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
except:
self.usable = False
else:
if not shutil.which("ffprobe"):
self.usable = False
if not self.usable:
msg = "\033[31mneed mutagen or ffprobe to read media tags so please run this:\n {} -m pip install --user mutagen \033[0m"
self.log(msg.format(os.path.basename(sys.executable)))
return
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
tagmap = {
"album": ["album", "talb", "\u00a9alb", "original-album", "toal"],
"artist": [
"artist",
"tpe1",
"\u00a9art",
"composer",
"performer",
"arranger",
"\u00a9wrt",
"tcom",
"tpe3",
"original-artist",
"tope",
],
"title": ["title", "tit2", "\u00a9nam"],
"circle": [
"album-artist",
"tpe2",
"aart",
"conductor",
"organization",
"band",
],
".tn": ["tracknumber", "trck", "trkn", "track"],
"genre": ["genre", "tcon", "\u00a9gen"],
"date": [
"original-release-date",
"release-date",
"date",
"tdrc",
"\u00a9day",
"original-date",
"original-year",
"tyer",
"tdor",
"tory",
"year",
"creation-time",
],
".bpm": ["bpm", "tbpm", "tmpo", "tbp"],
"key": ["initial-key", "tkey", "key"],
"comment": ["comment", "comm", "\u00a9cmt", "comments", "description"],
}
if mappings:
for k, v in [x.split("=") for x in mappings]:
tagmap[k] = v.split(",")
self.tagmap = {}
for k, vs in tagmap.items():
vs2 = []
for v in vs:
if "-" not in v:
vs2.append(v)
continue
vs2.append(v.replace("-", " "))
vs2.append(v.replace("-", "_"))
vs2.append(v.replace("-", ""))
self.tagmap[k] = vs2
self.rmap = {
v: [n, k] for k, vs in self.tagmap.items() for n, v in enumerate(vs)
}
# self.get = self.compare
def log(self, msg):
self.log_func("mtag", msg)
def normalize_tags(self, ret, md):
for k, v in dict(md).items():
if not v:
continue
k = k.lower().split("::")[0].strip()
mk = self.rmap.get(k)
if not mk:
continue
pref, mk = mk
if mk not in ret or ret[mk][0] > pref:
ret[mk] = [pref, v[0]]
# take first value
ret = {k: str(v[1]).strip() for k, v in ret.items()}
# track 3/7 => track 3
for k, v in ret.items():
if k[0] == ".":
v = v.split("/")[0].strip().lstrip("0")
ret[k] = v or 0
return ret
def compare(self, abspath):
    """
    debug helper: read tags with both backends and print a
    field-by-field comparison; raises when they disagree on
    anything other than quality/duration (which are lenient)
    """
    if abspath.endswith(".au"):
        return {}

    print("\n" + abspath)
    r1 = self.get_mutagen(abspath)
    r2 = self.get_ffprobe(abspath)

    # union of the keys seen by either backend
    keys = {}
    for d in [r1, r2]:
        for k in d.keys():
            keys[k] = True

    diffs = []
    for k in sorted(keys.keys()):
        if k in [".q", ".dur"]:
            continue  # lenient

        v1 = r1.get(k)
        v2 = r2.get(k)
        if v1 == v2:
            print(" ", k, v1)
        elif v1 != "0000":  # ffprobe date=0
            diffs.append(k)
            print(" 1", k, v1)
            print(" 2", k, v2)

    if diffs:
        # was: raise Exception() with no context; include the keys
        raise Exception("backends disagree on tags: " + repr(diffs))

    return r1
def get_mutagen(self, abspath):
    """
    read tags with mutagen; returns {} when the file is unreadable.
    .dur (seconds) and .q (kbps) are derived from the stream info,
    estimating from filesize/duration when bitrate is unavailable
    """
    import mutagen

    try:
        md = mutagen.File(abspath, easy=True)
        # mutagen.File returns None for unsupported files;
        # touching .info sends those into the except as well
        md.info.length
    except Exception:
        # was a bare capture of `ex` which was never used
        return {}

    ret = {}
    try:
        dur = int(md.info.length)
        try:
            q = int(md.info.bitrate / 1024)
        except Exception:
            # some formats lack a bitrate field; estimate from filesize
            q = int((os.path.getsize(abspath) / dur) / 128)

        ret[".dur"] = [0, dur]
        ret[".q"] = [0, q]
    except Exception:
        # duration missing (or zero, making the estimate divide by
        # zero); skip the derived fields but still return the tags
        pass

    return self.normalize_tags(ret, md)
def get_ffprobe(self, abspath):
    """
    read tags by parsing the human-readable ffprobe output;
    returns {} for files without an audio stream
    """
    cmd = ["ffprobe", "-hide_banner", "--", fsenc(abspath)]
    p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
    r = p.communicate()
    # ffprobe prints the file info on stderr
    txt = r[1].decode("utf-8", "replace")
    txt = [x.rstrip("\r") for x in txt.split("\n")]

    """
    note:
    tags which contain newline will be truncated on first \n,
    ffmpeg emits \n and spacepads the : to align visually
    note:
    the Stream ln always mentions Audio: if audio
    the Stream ln usually has kb/s, is more accurate
    the Duration ln always has kb/s
    the Metadata: after Chapter may contain BPM info,
    title : Tempo: 126.0

    Input #0, wav,
    Metadata:
    date : <OK>
    Duration:
    Chapter #
    Metadata:
    title : <NG>

    Input #0, mp3,
    Metadata:
    album : <OK>
    Duration:
    Stream #0:0: Audio:
    Stream #0:1: Video:
    Metadata:
    comment : <NG>
    """

    ptn_md_beg = re.compile("^( +)Metadata:$")
    ptn_md_kv = re.compile("^( +)([^:]+) *: (.*)")
    ptn_dur = re.compile("^ *Duration: ([^ ]+)(, |$)")
    ptn_br1 = re.compile("^ *Duration: .*, bitrate: ([0-9]+) kb/s(, |$)")
    ptn_br2 = re.compile("^ *Stream.*: Audio:.* ([0-9]+) kb/s(, |$)")
    ptn_audio = re.compile("^ *Stream .*: Audio: ")
    ptn_au_parent = re.compile("^ *(Input #|Stream .*: Audio: )")

    ret = {}
    md = {}
    # in_md: False, or the exact indent depth of keys in the
    # Metadata block we are currently consuming
    in_md = False
    is_audio = False
    # au_parent: previous header line was an Input/Audio-stream,
    # i.e. the next Metadata block belongs to something we want
    # (and not e.g. a Chapter or a Video stream, see note above)
    au_parent = False
    for ln in txt:
        m = ptn_md_kv.match(ln)
        if m and in_md and len(m.group(1)) == in_md:
            _, k, v = [x.strip() for x in m.groups()]
            if k != "" and v != "":
                md[k] = [v]

            continue
        else:
            in_md = False

        m = ptn_md_beg.match(ln)
        if m and au_parent:
            # keys are indented two spaces deeper than "Metadata:"
            in_md = len(m.group(1)) + 2
            continue

        au_parent = bool(ptn_au_parent.search(ln))

        if ptn_audio.search(ln):
            is_audio = True

        m = ptn_dur.search(ln)
        if m:
            sec = 0
            tstr = m.group(1)
            if tstr.lower() != "n/a":
                try:
                    # hh:mm:ss(.frac) => whole seconds
                    tf = tstr.split(",")[0].split(".")[0].split(":")
                    for f in tf:
                        sec *= 60
                        sec += int(f)
                except:
                    self.log(
                        "\033[33minvalid timestr from ffmpeg: [{}]".format(tstr)
                    )

            ret[".dur"] = sec

        m = ptn_br1.search(ln)
        if m:
            # container bitrate from the Duration line
            ret[".q"] = m.group(1)

        m = ptn_br2.search(ln)
        if m:
            # stream bitrate is more accurate; overrides the above
            ret[".q"] = m.group(1)

    if not is_audio:
        return {}

    # same [prio, value] shape as get_mutagen produces
    ret = {k: [0, v] for k, v in ret.items()}

    return self.normalize_tags(ret, md)

View File

@@ -39,10 +39,6 @@ class SvcHub(object):
self.tcpsrv = TcpSrv(self)
self.up2k = Up2k(self)
if self.args.e2d and self.args.e2s:
auth = AuthSrv(self.args, self.log, False)
self.up2k.build_indexes(auth.all_writable)
# decide which worker impl to use
if self.check_mp_enable():
from .broker_mp import BrokerMp as Broker
@@ -79,7 +75,7 @@ class SvcHub(object):
now = time.time()
if now >= self.next_day:
dt = datetime.utcfromtimestamp(now)
print("\033[36m{}\033[0m".format(dt.strftime("%Y-%m-%d")))
print("\033[36m{}\033[0m\n".format(dt.strftime("%Y-%m-%d")), end="")
# unix timestamp of next 00:00:00 (leap-seconds safe)
day_now = dt.day
@@ -89,9 +85,9 @@ class SvcHub(object):
dt = dt.replace(hour=0, minute=0, second=0)
self.next_day = calendar.timegm(dt.utctimetuple())
fmt = "\033[36m{} \033[33m{:21} \033[0m{}"
fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n"
if not VT100:
fmt = "{} {:21} {}"
fmt = "{} {:21} {}\n"
if "\033" in msg:
msg = self.ansi_re.sub("", msg)
if "\033" in src:
@@ -100,12 +96,12 @@ class SvcHub(object):
ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3]
msg = fmt.format(ts, src, msg)
try:
print(msg)
print(msg, end="")
except UnicodeEncodeError:
try:
print(msg.encode("utf-8", "replace").decode())
print(msg.encode("utf-8", "replace").decode(), end="")
except:
print(msg.encode("ascii", "replace").decode())
print(msg.encode("ascii", "replace").decode(), end="")
def check_mp_support(self):
vmin = sys.version_info[1]

191
copyparty/u2idx.py Normal file
View File

@@ -0,0 +1,191 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
from datetime import datetime
from .util import u8safe
from .up2k import up2k_wark_from_hashlist
try:
HAVE_SQLITE3 = True
import sqlite3
except:
HAVE_SQLITE3 = False
class U2idx(object):
    """
    search frontend for the per-volume up2k sqlite databases;
    cursors are opened lazily, one per volume
    """

    def __init__(self, args, log_func):
        self.args = args
        self.log_func = log_func

        if not HAVE_SQLITE3:
            # was: "searchign wqill be disabled"
            self.log("could not load sqlite3; searching will be disabled")
            return

        self.cur = {}

    def log(self, msg):
        # route through the shared logger with "u2idx" as source tag
        self.log_func("u2idx", msg)

    def fsearch(self, vols, body):
        """search by up2k hashlist; returns (hits, tag-keys-seen)"""
        if not HAVE_SQLITE3:
            # match run_query's return shape so callers can unpack
            return [], []

        fsize = body["size"]
        fhash = body["hash"]
        wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)

        return self.run_query(vols, "w = ?", [wark], "", [])

    def get_cur(self, ptop):
        """lazily open (and cache) the db cursor for one volume"""
        cur = self.cur.get(ptop)
        if cur:
            return cur

        cur = _open(ptop)
        if not cur:
            return None

        self.cur[ptop] = cur
        return cur

    def search(self, vols, body):
        """search by query params; returns (hits, tag-keys-seen)"""
        if not HAVE_SQLITE3:
            # match run_query's return shape so callers can unpack
            return [], []

        qobj = {}
        _conv_sz(qobj, body, "sz_min", "up.sz >= ?")
        _conv_sz(qobj, body, "sz_max", "up.sz <= ?")
        _conv_dt(qobj, body, "dt_min", "up.mt >= ?")
        _conv_dt(qobj, body, "dt_max", "up.mt <= ?")
        for seg, dk in [["path", "up.rd"], ["name", "up.fn"]]:
            if seg in body:
                _conv_txt(qobj, body, seg, dk)

        uq, uv = _sqlize(qobj)

        # tag criteria are built separately since they join against mt
        tq = ""
        tv = []
        qobj = {}
        if "tags" in body:
            _conv_txt(qobj, body, "tags", "mt.v")
            tq, tv = _sqlize(qobj)

        return self.run_query(vols, uq, uv, tq, tv)

    def run_query(self, vols, uq, uv, tq, tv):
        """
        run the file criteria (uq/uv) and tag criteria (tq/tv)
        against each volume; returns (hits, tag-keys-seen),
        capped at 1000 hits total across all volumes
        """
        self.log("qs: {} {} , {} {}".format(uq, repr(uv), tq, repr(tv)))

        ret = []
        lim = 1000
        taglist = {}
        for (vtop, ptop, flags) in vols:
            cur = self.get_cur(ptop)
            if not cur:
                continue

            if not tq:
                if not uq:
                    q = "select * from up"
                    v = ()
                else:
                    q = "select * from up where " + uq
                    v = tuple(uv)
            else:
                # naive assumption: tags first
                q = "select up.* from up inner join mt on substr(up.w,1,16) = mt.w where {}"
                q = q.format(" and ".join([tq, uq]) if uq else tq)
                v = tuple(tv + uv)

            sret = []
            c = cur.execute(q, v)
            for hit in c:
                w, ts, sz, rd, fn = hit
                lim -= 1
                if lim <= 0:
                    break

                rp = os.path.join(vtop, rd, fn).replace("\\", "/")
                sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})

            # second pass: attach tags (cursor is free again by now)
            for hit in sret:
                w = hit["w"]
                del hit["w"]
                tags = {}
                q = "select k, v from mt where w = ?"
                for k, v in cur.execute(q, (w,)):
                    taglist[k] = True
                    tags[k] = v

                hit["tags"] = tags

            ret.extend(sret)

        # list (not dict_keys) so the result is json-serializable
        return ret, list(taglist)
def _open(ptop):
db_path = os.path.join(ptop, ".hist", "up2k.db")
if os.path.exists(db_path):
return sqlite3.connect(db_path).cursor()
def _conv_sz(q, body, k, sql):
if k in body:
q[sql] = int(float(body[k]) * 1024 * 1024)
def _conv_dt(q, body, k, sql):
if k not in body:
return
v = body[k].upper().rstrip("Z").replace(",", " ").replace("T", " ")
while " " in v:
v = v.replace(" ", " ")
for fmt in ["%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M", "%Y-%m-%d %H", "%Y-%m-%d"]:
try:
ts = datetime.strptime(v, fmt).timestamp()
break
except:
ts = None
if ts:
q[sql] = ts
def _conv_txt(q, body, k, sql):
    """
    turn a space-separated list of search terms into LIKE criteria;
    "-term" negates, "^term" anchors the start, "term$" anchors the end
    """
    for term in body[k].split(" "):
        neg = ""
        if term.startswith("-"):
            neg = "not"
            term = term[1:]

        if not term:
            continue

        prefix = "" if term.startswith("^") else "'%'||"
        if not prefix:
            term = term[1:]

        suffix = "" if term.endswith("$") else "||'%'"
        if not suffix:
            term = term[:-1]

        crit = "{} {} like {}?{}".format(sql, neg, prefix, suffix)
        # the term is folded into the dict key so that identical
        # criteria with different terms don't collide
        q[crit + "\n" + term] = u8safe(term)
def _sqlize(qobj):
keys = []
values = []
for k, v in sorted(qobj.items()):
keys.append(k.split("\n")[0])
values.append(v)
return " and ".join(keys), values

View File

@@ -1,9 +1,9 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import re
import os
import sys
import time
import math
import json
@@ -13,18 +13,30 @@ import shutil
import base64
import hashlib
import threading
import traceback
from copy import deepcopy
from .__init__ import WINDOWS
from .util import Pebkac, Queue, fsdec, fsenc, sanitize_fn, ren_open, atomic_move
from .util import (
Pebkac,
Queue,
ProgressPrinter,
fsdec,
fsenc,
sanitize_fn,
ren_open,
atomic_move,
w8b64enc,
w8b64dec,
)
from .mtag import MTag
from .authsrv import AuthSrv
HAVE_SQLITE3 = False
try:
import sqlite3
HAVE_SQLITE3 = True
import sqlite3
except:
pass
HAVE_SQLITE3 = False
class Up2k(object):
@@ -38,16 +50,24 @@ class Up2k(object):
def __init__(self, broker):
self.broker = broker
self.args = broker.args
self.log = broker.log
self.log_func = broker.log
self.persist = self.args.e2d
# config
self.salt = "hunter2" # TODO: config
self.salt = broker.args.salt
# state
self.mutex = threading.Lock()
self.registry = {}
self.db = {}
self.entags = {}
self.flags = {}
self.cur = {}
self.mem_cur = None
if HAVE_SQLITE3:
# mojibake detector
self.mem_cur = self._orz(":memory:")
self.mem_cur.execute(r"create table a (b text)")
if WINDOWS:
# usually fails to set lastmod too quickly
@@ -56,17 +76,50 @@ class Up2k(object):
thr.daemon = True
thr.start()
if self.persist:
thr = threading.Thread(target=self._snapshot)
thr.daemon = True
thr.start()
self.mtag = MTag(self.log_func, self.args)
if not self.mtag.usable:
self.mtag = None
# static
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
if self.persist and not HAVE_SQLITE3:
m = "could not initialize sqlite3, will use in-memory registry only"
self.log("up2k", m)
self.log("could not initialize sqlite3, will use in-memory registry only")
# this is kinda jank
auth = AuthSrv(self.args, self.log, False)
self.init_indexes(auth)
if self.persist:
thr = threading.Thread(target=self._snapshot)
thr.daemon = True
thr.start()
def log(self, msg):
self.log_func("up2k", msg + "\033[K")
def w8enc(self, rd, fn):
ret = []
for v in [rd, fn]:
try:
self.mem_cur.execute("select * from a where b = ?", (v,))
ret.append(v)
except:
ret.append("//" + w8b64enc(v))
# self.log("mojien/{} [{}] {}".format(k, v, ret[-1][2:]))
return tuple(ret)
def w8dec(self, rd, fn):
ret = []
for k, v in [["d", rd], ["f", fn]]:
if v.startswith("//"):
ret.append(w8b64dec(v[2:]))
# self.log("mojide/{} [{}] {}".format(k, ret[-1], v[2:]))
else:
ret.append(v)
return tuple(ret)
def _vis_job_progress(self, job):
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
@@ -80,7 +133,49 @@ class Up2k(object):
return ret
def register_vpath(self, ptop):
def init_indexes(self, auth):
self.pp = ProgressPrinter()
vols = auth.vfs.all_vols.values()
t0 = time.time()
# e2ds(a) volumes first,
# also covers tags where e2ts is set
for vol in vols:
en = {}
if "mte" in vol.flags:
en = {k: True for k in vol.flags["mte"].split(",")}
self.entags[vol.realpath] = en
if "e2ds" in vol.flags:
r = self._build_file_index(vol, vols)
if not r:
needed_mutagen = True
# open the rest + do any e2ts(a)
needed_mutagen = False
for vol in vols:
r = self.register_vpath(vol.realpath, vol.flags)
if not r or "e2ts" not in vol.flags:
continue
cur, db_path, sz0 = r
n_add, n_rm, success = self._build_tags_index(vol.realpath)
if not success:
needed_mutagen = True
if n_add or n_rm:
self.vac(cur, db_path, n_add, n_rm, sz0)
self.pp.end = True
msg = "{} volumes in {:.2f} sec"
self.log(msg.format(len(vols), time.time() - t0))
if needed_mutagen:
msg = "\033[31mcould not read tags because no backends are available (mutagen or ffprobe)\033[0m"
self.log(msg)
def register_vpath(self, ptop, flags):
with self.mutex:
if ptop in self.registry:
return None
@@ -97,10 +192,11 @@ class Up2k(object):
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
m = [m] + self._vis_reg_progress(reg)
self.log("up2k", "\n".join(m))
self.log("\n".join(m))
self.flags[ptop] = flags
self.registry[ptop] = reg
if not self.persist or not HAVE_SQLITE3:
if not self.persist or not HAVE_SQLITE3 or "d2d" in flags:
return None
try:
@@ -109,66 +205,100 @@ class Up2k(object):
pass
db_path = os.path.join(ptop, ".hist", "up2k.db")
if ptop in self.db:
# self.db[ptop].close()
if ptop in self.cur:
return None
try:
db = self._open_db(db_path)
self.db[ptop] = db
return db
except Exception as ex:
m = "failed to open [{}]: {}".format(ptop, repr(ex))
self.log("up2k", m)
sz0 = 0
if os.path.exists(db_path):
sz0 = os.path.getsize(db_path) // 1024
cur = self._open_db(db_path)
self.cur[ptop] = cur
return [cur, db_path, sz0]
except:
msg = "cannot use database at [{}]:\n{}"
self.log(msg.format(ptop, traceback.format_exc()))
return None
def build_indexes(self, writeables):
tops = [d.realpath for d in writeables]
for top in tops:
db = self.register_vpath(top)
if db:
# can be symlink so don't `and d.startswith(top)``
excl = set([d for d in tops if d != top])
dbw = [db, 0, time.time()]
self._build_dir(dbw, top, excl, top)
self._drop_lost(db, top)
if dbw[1]:
self.log("up2k", "commit {} new files".format(dbw[1]))
def _build_file_index(self, vol, all_vols):
do_vac = False
top = vol.realpath
reg = self.register_vpath(top, vol.flags)
if not reg:
return
db.commit()
_, db_path, sz0 = reg
dbw = [reg[0], 0, time.time()]
self.pp.n = next(dbw[0].execute("select count(w) from up"))[0]
# can be symlink so don't `and d.startswith(top)``
excl = set([d.realpath for d in all_vols if d != vol])
n_add = self._build_dir(dbw, top, excl, top)
n_rm = self._drop_lost(dbw[0], top)
if dbw[1]:
self.log("commit {} new files".format(dbw[1]))
dbw[0].connection.commit()
n_add, n_rm, success = self._build_tags_index(vol.realpath)
dbw[0].connection.commit()
if n_add or n_rm or do_vac:
self.vac(dbw[0], db_path, n_add, n_rm, sz0)
return success
def vac(self, cur, db_path, n_add, n_rm, sz0):
sz1 = os.path.getsize(db_path) // 1024
cur.execute("vacuum")
sz2 = os.path.getsize(db_path) // 1024
msg = "{} new, {} del, {} kB vacced, {} kB gain, {} kB now".format(
n_add, n_rm, sz1 - sz2, sz2 - sz0, sz2
)
self.log(msg)
def _build_dir(self, dbw, top, excl, cdir):
try:
inodes = [fsdec(x) for x in os.listdir(fsenc(cdir))]
except Exception as ex:
self.log("up2k", "listdir: {} @ [{}]".format(repr(ex), cdir))
return
self.log("listdir: {} @ [{}]".format(repr(ex), cdir))
return 0
self.pp.msg = "a{} {}".format(self.pp.n, cdir)
histdir = os.path.join(top, ".hist")
ret = 0
for inode in inodes:
abspath = os.path.join(cdir, inode)
try:
inf = os.stat(fsenc(abspath))
except Exception as ex:
self.log("up2k", "stat: {} @ [{}]".format(repr(ex), abspath))
self.log("stat: {} @ [{}]".format(repr(ex), abspath))
continue
if stat.S_ISDIR(inf.st_mode):
if abspath in excl or abspath == histdir:
continue
# self.log("up2k", " dir: {}".format(abspath))
self._build_dir(dbw, top, excl, abspath)
# self.log(" dir: {}".format(abspath))
ret += self._build_dir(dbw, top, excl, abspath)
else:
# self.log("up2k", "file: {}".format(abspath))
# self.log("file: {}".format(abspath))
rp = abspath[len(top) :].replace("\\", "/").strip("/")
c = dbw[0].execute("select * from up where rp = ?", (rp,))
rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp]
sql = "select * from up where rd = ? and fn = ?"
try:
c = dbw[0].execute(sql, (rd, fn))
except:
c = dbw[0].execute(sql, self.w8enc(rd, fn))
in_db = list(c.fetchall())
if in_db:
_, dts, dsz, _ = in_db[0]
self.pp.n -= 1
_, dts, dsz, _, _ = in_db[0]
if len(in_db) > 1:
m = "WARN: multiple entries: [{}] => [{}] ({})"
self.log("up2k", m.format(top, rp, len(in_db)))
m = "WARN: multiple entries: [{}] => [{}] |{}|\n{}"
rep_db = "\n".join([repr(x) for x in in_db])
self.log(m.format(top, rp, len(in_db), rep_db))
dts = -1
if dts == inf.st_mtime and dsz == inf.st_size:
@@ -177,115 +307,295 @@ class Up2k(object):
m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
top, rp, dts, inf.st_mtime, dsz, inf.st_size
)
self.log("up2k", m)
self.db_rm(dbw[0], rp)
self.log(m)
self.db_rm(dbw[0], rd, fn)
ret += 1
dbw[1] += 1
in_db = None
self.log("up2k", "file: {}".format(abspath))
self.pp.msg = "a{} {}".format(self.pp.n, abspath)
if inf.st_size > 1024 * 1024:
self.log("file: {}".format(abspath))
try:
hashes = self._hashlist_from_file(abspath)
except Exception as ex:
self.log("up2k", "hash: {} @ [{}]".format(repr(ex), abspath))
self.log("hash: {} @ [{}]".format(repr(ex), abspath))
continue
wark = self._wark_from_hashlist(inf.st_size, hashes)
self.db_add(dbw[0], wark, rp, inf.st_mtime, inf.st_size)
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
self.db_add(dbw[0], wark, rd, fn, inf.st_mtime, inf.st_size)
dbw[1] += 1
ret += 1
td = time.time() - dbw[2]
if dbw[1] > 1024 or td > 60:
self.log("up2k", "commit {} new files".format(dbw[1]))
dbw[0].commit()
if dbw[1] >= 4096 or td >= 60:
self.log("commit {} new files".format(dbw[1]))
dbw[0].connection.commit()
dbw[1] = 0
dbw[2] = time.time()
return ret
def _drop_lost(self, db, top):
def _drop_lost(self, cur, top):
rm = []
c = db.execute("select * from up")
for dwark, dts, dsz, drp in c:
abspath = os.path.join(top, drp)
nchecked = 0
nfiles = next(cur.execute("select count(w) from up"))[0]
c = cur.execute("select * from up")
for dwark, dts, dsz, drd, dfn in c:
nchecked += 1
if drd.startswith("//") or dfn.startswith("//"):
drd, dfn = self.w8dec(drd, dfn)
abspath = os.path.join(top, drd, dfn)
# almost zero overhead dw
self.pp.msg = "b{} {}".format(nfiles - nchecked, abspath)
try:
if not os.path.exists(fsenc(abspath)):
rm.append(drp)
rm.append([drd, dfn])
except Exception as ex:
self.log("up2k", "stat-rm: {} @ [{}]".format(repr(ex), abspath))
self.log("stat-rm: {} @ [{}]".format(repr(ex), abspath))
if not rm:
return
if rm:
self.log("forgetting {} deleted files".format(len(rm)))
for rd, fn in rm:
# self.log("{} / {}".format(rd, fn))
self.db_rm(cur, rd, fn)
self.log("up2k", "forgetting {} deleted files".format(len(rm)))
for rp in rm:
self.db_rm(db, rp)
return len(rm)
def _build_tags_index(self, ptop):
    """
    scan the volume db for files which have no entry in `mt` yet,
    read their media tags and insert them;
    returns (num-added, num-removed, all-backends-available)
    """
    entags = self.entags[ptop]  # tags enabled for this volume (mte)
    flags = self.flags[ptop]
    cur = self.cur[ptop]
    n_add = 0
    n_rm = 0
    n_buf = 0  # inserts since the last commit
    last_write = time.time()

    if "e2tsr" in flags:
        # full rescan requested; drop all existing tags first
        n_rm = cur.execute("select count(w) from mt").fetchone()[0]
        if n_rm:
            self.log("discarding {} media tags for a full rescan".format(n_rm))
            cur.execute("delete from mt")
        else:
            self.log("volume has e2tsr but there are no media tags to discard")

    # integrity: drop tags for tracks that were deleted
    if "e2t" in flags:
        drops = []
        # second cursor; `cur` is busy iterating mt below
        c2 = cur.connection.cursor()
        up_q = "select w from up where substr(w,1,16) = ?"
        for (w,) in cur.execute("select w from mt"):
            if not c2.execute(up_q, (w,)).fetchone():
                drops.append(w[:16])
        c2.close()

        if drops:
            msg = "discarding media tags for {} deleted files"
            self.log(msg.format(len(drops)))
            n_rm += len(drops)
            for w in drops:
                cur.execute("delete from mt where w = ?", (w,))

    # bail if a volume flag disables indexing
    if "d2t" in flags or "d2d" in flags:
        return n_add, n_rm, True

    # add tags for new files
    if "e2ts" in flags:
        if not self.mtag:
            # no tag-reading backend (mutagen/ffprobe) available
            return n_add, n_rm, False

        c2 = cur.connection.cursor()
        n_left = cur.execute("select count(w) from up").fetchone()[0]
        for w, rd, fn in cur.execute("select w, rd, fn from up"):
            # NOTE(review): rd/fn may be "//"-prefixed mojibake rows
            # (see w8dec); verify tag-reading for such paths — TODO
            n_left -= 1
            q = "select w from mt where w = ?"
            if c2.execute(q, (w[:16],)).fetchone():
                continue  # already tagged

            abspath = os.path.join(ptop, rd, fn)
            self.pp.msg = "c{} {}".format(n_left, abspath)
            tags = self.mtag.get(abspath)
            tags = {k: v for k, v in tags.items() if k in entags}
            if not tags:
                # indicate scanned without tags
                tags = {"x": 0}

            for k, v in tags.items():
                q = "insert into mt values (?,?,?)"
                c2.execute(q, (w[:16], k, v))
                n_add += 1
                n_buf += 1

            # commit periodically so a crash loses little work
            td = time.time() - last_write
            if n_buf >= 4096 or td >= 60:
                self.log("commit {} new tags".format(n_buf))
                cur.connection.commit()
                last_write = time.time()
                n_buf = 0

        c2.close()

    return n_add, n_rm, True
def _orz(self, db_path):
return sqlite3.connect(db_path, check_same_thread=False).cursor()
def _open_db(self, db_path):
conn = sqlite3.connect(db_path, check_same_thread=False)
existed = os.path.exists(db_path)
cur = self._orz(db_path)
try:
c = conn.execute(r"select * from kv where k = 'sver'")
ver = self._read_ver(cur)
except:
ver = None
if not existed:
return self._create_db(db_path, cur)
orig_ver = ver
if not ver or ver < 3:
bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
db = cur.connection
cur.close()
db.close()
msg = "creating new DB (old is bad); backup: {}"
if ver:
msg = "creating backup before upgrade: {}"
self.log(msg.format(bak))
shutil.copy2(db_path, bak)
cur = self._orz(db_path)
if ver == 1:
cur = self._upgrade_v1(cur, db_path)
if cur:
ver = 2
if ver == 2:
cur = self._create_v3(cur)
ver = self._read_ver(cur) if cur else None
if ver == 3:
if orig_ver != ver:
cur.connection.commit()
cur.execute("vacuum")
cur.connection.commit()
try:
nfiles = next(cur.execute("select count(w) from up"))[0]
self.log("OK: {} |{}|".format(db_path, nfiles))
return cur
except Exception as ex:
self.log("WARN: could not list files, DB corrupt?\n " + repr(ex))
if cur:
db = cur.connection
cur.close()
db.close()
return self._create_db(db_path, None)
def _create_db(self, db_path, cur):
if not cur:
cur = self._orz(db_path)
self._create_v2(cur)
self._create_v3(cur)
cur.connection.commit()
self.log("created DB at {}".format(db_path))
return cur
def _read_ver(self, cur):
for tab in ["ki", "kv"]:
try:
c = cur.execute(r"select v from {} where k = 'sver'".format(tab))
except:
continue
rows = c.fetchall()
if rows:
ver = rows[0][1]
else:
self.log("up2k", "WARN: no sver in kv, DB corrupt?")
ver = "unknown"
return int(rows[0][0])
if ver == "1":
try:
nfiles = next(conn.execute("select count(w) from up"))[0]
self.log("up2k", "found DB at {} |{}|".format(db_path, nfiles))
return conn
except Exception as ex:
m = "WARN: could not list files, DB corrupt?\n " + repr(ex)
self.log("up2k", m)
m = "REPLACING unsupported DB (v.{}) at {}".format(ver, db_path)
self.log("up2k", m)
conn.close()
os.unlink(db_path)
conn = sqlite3.connect(db_path, check_same_thread=False)
except:
pass
# sqlite is variable-width only, no point in using char/nchar/varchar
def _create_v2(self, cur):
for cmd in [
r"create table kv (k text, v text)",
r"create table up (w text, mt int, sz int, rp text)",
r"insert into kv values ('sver', '1')",
r"create index up_w on up(w)",
r"create table up (w text, mt int, sz int, rd text, fn text)",
r"create index up_rd on up(rd)",
r"create index up_fn on up(fn)",
]:
conn.execute(cmd)
cur.execute(cmd)
return cur
conn.commit()
self.log("up2k", "created DB at {}".format(db_path))
return conn
def _create_v3(self, cur):
    """
    collision in 2^(n/2) files where n = bits (6 bits/ch)
    10*6/2 = 2^30 = 1'073'741'824, 24.1mb idx
    12*6/2 = 2^36 = 68'719'476'736, 24.8mb idx
    16*6/2 = 2^48 = 281'474'976'710'656, 26.1mb idx
    """
    # best-effort cleanup of older schemas: concatenating each char
    # onto the prefix drops tables "ki"/"ks"/"kv" and index "up_w"
    for c, ks in [["drop table k", "isv"], ["drop index up_", "w"]]:
        for k in ks:
            try:
                cur.execute(c + k)
            except:
                pass

    for cmd in [
        # index on the 16-char wark prefix; see collision math above
        r"create index up_w on up(substr(w,1,16))",
        r"create table mt (w text, k text, v int)",
        r"create index mt_w on mt(w)",
        r"create index mt_k on mt(k)",
        r"create index mt_v on mt(v)",
        r"create table kv (k text, v int)",
        r"insert into kv values ('sver', 3)",
    ]:
        cur.execute(cmd)

    return cur
def _upgrade_v1(self, odb, db_path):
    """
    v1 -> v2 schema upgrade: the single up.rp column (relpath) is
    split into rd (directory) and fn (filename); builds a fresh db
    next to the old one and atomically swaps it into place
    """
    npath = db_path + ".next"
    if os.path.exists(npath):
        # leftover from an interrupted upgrade; start over
        os.unlink(npath)

    ndb = self._orz(npath)
    self._create_v2(ndb)

    c = odb.execute("select * from up")
    for wark, ts, sz, rp in c:
        rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp]
        v = (wark, ts, sz, rd, fn)
        ndb.execute("insert into up values (?,?,?,?,?)", v)

    ndb.connection.commit()
    ndb.connection.close()
    odb.connection.close()
    atomic_move(npath, db_path)
    return self._orz(db_path)
def handle_json(self, cj):
self.register_vpath(cj["ptop"])
cj["name"] = sanitize_fn(cj["name"])
cj["poke"] = time.time()
wark = self._get_wark(cj)
now = time.time()
job = None
with self.mutex:
db = self.db.get(cj["ptop"], None)
cur = self.cur.get(cj["ptop"], None)
reg = self.registry[cj["ptop"]]
if db:
cur = db.execute(r"select * from up where w = ?", (wark,))
for _, dtime, dsize, dp_rel in cur:
dp_abs = os.path.join(cj["ptop"], dp_rel).replace("\\", "/")
if cur:
cur = cur.execute(
r"select * from up where substr(w,1,16) = ? and w = ?",
(wark[:16], wark,),
)
for _, dtime, dsize, dp_dir, dp_fn in cur:
if dp_dir.startswith("//") or dp_fn.startswith("//"):
dp_dir, dp_fn = self.w8dec(dp_dir, dp_fn)
dp_abs = os.path.join(cj["ptop"], dp_dir, dp_fn).replace("\\", "/")
# relying on path.exists to return false on broken symlinks
if os.path.exists(fsenc(dp_abs)):
try:
prel, name = dp_rel.rsplit("/", 1)
except:
prel = ""
name = dp_rel
job = {
"name": name,
"prel": prel,
"name": dp_fn,
"prel": dp_dir,
"vtop": cj["vtop"],
"ptop": cj["ptop"],
"flag": cj["flag"],
"size": dsize,
"lmod": dtime,
"hash": [],
@@ -318,13 +628,13 @@ class Up2k(object):
vsrc = os.path.join(job["vtop"], job["prel"], job["name"])
vsrc = vsrc.replace("\\", "/") # just for prints anyways
if job["need"]:
self.log("up2k", "unfinished:\n {0}\n {1}".format(src, dst))
self.log("unfinished:\n {0}\n {1}".format(src, dst))
err = "partial upload exists at a different location; please resume uploading here instead:\n"
err += vsrc + " "
err += "/" + vsrc + " "
raise Pebkac(400, err)
elif "nodupe" in job["flag"]:
self.log("up2k", "dupe-reject:\n {0}\n {1}".format(src, dst))
err = "upload rejected, file already exists:\n " + vsrc + " "
elif "nodupe" in self.flags[job["ptop"]]:
self.log("dupe-reject:\n {0}\n {1}".format(src, dst))
err = "upload rejected, file already exists:\n/" + vsrc + " "
raise Pebkac(400, err)
else:
# symlink to the client-provided name,
@@ -352,7 +662,6 @@ class Up2k(object):
"vtop",
"ptop",
"prel",
"flag",
"name",
"size",
"lmod",
@@ -388,7 +697,7 @@ class Up2k(object):
def _symlink(self, src, dst):
# TODO store this in linktab so we never delete src if there are links to it
self.log("up2k", "linking dupe:\n {0}\n {1}".format(src, dst))
self.log("linking dupe:\n {0}\n {1}".format(src, dst))
try:
lsrc = src
ldst = dst
@@ -411,7 +720,7 @@ class Up2k(object):
lsrc = "../" * (len(lsrc) - 1) + "/".join(lsrc)
os.symlink(fsenc(lsrc), fsenc(ldst))
except (AttributeError, OSError) as ex:
self.log("up2k", "cannot symlink; creating copy: " + repr(ex))
self.log("cannot symlink; creating copy: " + repr(ex))
shutil.copy2(fsenc(src), fsenc(dst))
def handle_chunk(self, ptop, wark, chash):
@@ -421,7 +730,7 @@ class Up2k(object):
raise Pebkac(400, "unknown wark")
if chash not in job["need"]:
raise Pebkac(200, "already got that but thanks??")
raise Pebkac(400, "already got that but thanks??")
nchunk = [n for n, v in enumerate(job["hash"]) if v == chash]
if not nchunk:
@@ -429,7 +738,7 @@ class Up2k(object):
job["poke"] = time.time()
chunksize = self._get_chunksize(job["size"])
chunksize = up2k_chunksize(job["size"])
ofs = [chunksize * x for x in nchunk]
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
@@ -438,12 +747,19 @@ class Up2k(object):
def confirm_chunk(self, ptop, wark, chash):
with self.mutex:
job = self.registry[ptop][wark]
pdir = os.path.join(job["ptop"], job["prel"])
src = os.path.join(pdir, job["tnam"])
dst = os.path.join(pdir, job["name"])
try:
job = self.registry[ptop][wark]
pdir = os.path.join(job["ptop"], job["prel"])
src = os.path.join(pdir, job["tnam"])
dst = os.path.join(pdir, job["name"])
except Exception as ex:
return "confirm_chunk, wark, " + repr(ex)
try:
job["need"].remove(chash)
except Exception as ex:
return "confirm_chunk, chash, " + repr(ex)
job["need"].remove(chash)
ret = len(job["need"])
if ret > 0:
return ret, src
@@ -453,35 +769,34 @@ class Up2k(object):
if WINDOWS:
self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))])
db = self.db.get(job["ptop"], None)
if db:
rp = os.path.join(job["prel"], job["name"]).replace("\\", "/")
self.db_rm(db, rp)
self.db_add(db, job["wark"], rp, job["lmod"], job["size"])
db.commit()
cur = self.cur.get(job["ptop"], None)
if cur:
j = job
self.db_rm(cur, j["prel"], j["name"])
self.db_add(cur, j["wark"], j["prel"], j["name"], j["lmod"], j["size"])
cur.connection.commit()
del self.registry[ptop][wark]
# in-memory registry is reserved for unfinished uploads
return ret, dst
def _get_chunksize(self, filesize):
chunksize = 1024 * 1024
stepsize = 512 * 1024
while True:
for mul in [1, 2]:
nchunks = math.ceil(filesize * 1.0 / chunksize)
if nchunks <= 256 or chunksize >= 32 * 1024 * 1024:
return chunksize
def db_rm(self, db, rd, fn):
    """
    delete one file from the db; rd/fn are stored either verbatim
    or mojibake-encoded (w8enc), so try both forms
    """
    sql = "delete from up where rd = ? and fn = ?"
    try:
        db.execute(sql, (rd, fn))
    except Exception:
        # was a bare except; sqlite rejects non-utf8able text,
        # such rows were stored base64-encoded instead
        db.execute(sql, self.w8enc(rd, fn))
chunksize += stepsize
stepsize *= mul
def db_rm(self, db, rp):
db.execute("delete from up where rp = ?", (rp,))
def db_add(self, db, wark, rp, ts, sz):
v = (wark, ts, sz, rp)
db.execute("insert into up values (?,?,?,?)", v)
def db_add(self, db, wark, rd, fn, ts, sz):
    """
    insert one file into the db; falls back to mojibake-encoding
    (w8enc) of rd/fn when sqlite rejects them as text
    """
    sql = "insert into up values (?,?,?,?,?)"
    v = (wark, int(ts), sz, rd, fn)
    try:
        db.execute(sql, v)
    except Exception:
        rd, fn = self.w8enc(rd, fn)
        # was storing raw `ts` here; keep it int like the primary path
        v = (wark, int(ts), sz, rd, fn)
        db.execute(sql, v)
def _get_wark(self, cj):
if len(cj["name"]) > 1024 or len(cj["hash"]) > 512 * 1024: # 16TiB
@@ -499,35 +814,16 @@ class Up2k(object):
except:
cj["lmod"] = int(time.time())
wark = self._wark_from_hashlist(cj["size"], cj["hash"])
wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"])
return wark
def _wark_from_hashlist(self, filesize, hashes):
""" server-reproducible file identifier, independent of name or location """
ident = [self.salt, str(filesize)]
ident.extend(hashes)
ident = "\n".join(ident)
hasher = hashlib.sha512()
hasher.update(ident.encode("utf-8"))
digest = hasher.digest()[:32]
wark = base64.urlsafe_b64encode(digest)
return wark.decode("utf-8").rstrip("=")
def _hashlist_from_file(self, path):
fsz = os.path.getsize(path)
csz = self._get_chunksize(fsz)
csz = up2k_chunksize(fsz)
ret = []
last_print = time.time()
with open(path, "rb", 512 * 1024) as f:
while fsz > 0:
now = time.time()
td = now - last_print
if td >= 0.3:
last_print = now
print(" {} \n\033[A".format(fsz), end="")
self.pp.msg = "{} MB".format(int(fsz / 1024 / 1024))
hashobj = hashlib.sha512()
rem = min(csz, fsz)
fsz -= rem
@@ -565,13 +861,14 @@ class Up2k(object):
while not self.lastmod_q.empty():
ready.append(self.lastmod_q.get())
# self.log("lmod", "got {}".format(len(ready)))
# self.log("lmod: got {}".format(len(ready)))
time.sleep(5)
for path, times in ready:
self.log("lmod: setting times {} on {}".format(times, path))
try:
os.utime(fsenc(path), times)
except:
self.log("lmod", "failed to utime ({}, {})".format(path, times))
self.log("lmod: failed to utime ({}, {})".format(path, times))
def _snapshot(self):
persist_interval = 30 # persist unfinished uploads index every 30 sec
@@ -589,7 +886,7 @@ class Up2k(object):
if rm:
m = "dropping {} abandoned uploads in {}".format(len(rm), k)
vis = [self._vis_job_progress(x) for x in rm]
self.log("up2k", "\n".join([m] + vis))
self.log("\n".join([m] + vis))
for job in rm:
del reg[job["wark"]]
try:
@@ -618,6 +915,11 @@ class Up2k(object):
if etag == prev.get(k, None):
return
try:
os.mkdir(os.path.join(k, ".hist"))
except:
pass
path2 = "{}.{}".format(path, os.getpid())
j = json.dumps(reg, indent=2, sort_keys=True).encode("utf-8")
with gzip.GzipFile(path2, "wb") as f:
@@ -625,5 +927,32 @@ class Up2k(object):
atomic_move(path2, path)
self.log("up2k", "snap: {} |{}|".format(path, len(reg.keys())))
self.log("snap: {} |{}|".format(path, len(reg.keys())))
prev[k] = etag
def up2k_chunksize(filesize):
    """
    pick an upload chunksize for the given filesize: grows from
    1 MiB until there are at most 256 chunks, capped at 32 MiB
    """
    chunk = 1024 * 1024
    step = 512 * 1024
    hold = True  # the step size doubles on every other growth
    while math.ceil(filesize * 1.0 / chunk) > 256 and chunk < 32 * 1024 * 1024:
        chunk += step
        if hold:
            hold = False
        else:
            step *= 2
            hold = True

    return chunk
def up2k_wark_from_hashlist(salt, filesize, hashes):
    """ server-reproducible file identifier, independent of name or location """
    blob = "\n".join([salt, str(filesize)] + list(hashes)).encode("utf-8")
    digest = hashlib.sha512(blob).digest()[:32]
    b64 = base64.urlsafe_b64encode(digest).decode("utf-8")
    return b64.rstrip("=")

View File

@@ -99,6 +99,39 @@ class Unrecv(object):
self.buf = buf + self.buf
class ProgressPrinter(threading.Thread):
    """
    periodically print progress info without linefeeds
    """

    def __init__(self):
        threading.Thread.__init__(self)
        # daemon so a stuck printer never blocks interpreter shutdown
        self.daemon = True
        # msg: text to display; the owning thread may update it at any time
        self.msg = None
        # end: set True by the owner to stop the printer
        self.end = False
        self.start()

    def run(self):
        # poll self.msg ~10x/sec and repaint the status line when it changes
        msg = None
        while not self.end:
            time.sleep(0.1)
            if msg == self.msg or self.end:
                continue

            msg = self.msg
            # \033[K erases to end-of-line, \r parks the cursor at column 0
            # so the next update overwrites this one
            m = " {}\033[K\r".format(msg)
            try:
                print(m, end="")
            except UnicodeEncodeError:
                # terminal cannot encode the text; degrade to replacement chars
                try:
                    print(m.encode("utf-8", "replace").decode(), end="")
                except:
                    print(m.encode("ascii", "replace").decode(), end="")

        # NOTE(review): indentation was lost in extraction; this final erase is
        # assumed to run once after the loop, clearing the status line on exit
        print("\033[K", end="")
        sys.stdout.flush()  # necessary on win10 even w/ stderr btw
@contextlib.contextmanager
def ren_open(fname, *args, **kwargs):
fdir = kwargs.pop("fdir", None)
@@ -146,7 +179,7 @@ def ren_open(fname, *args, **kwargs):
except OSError as ex_:
ex = ex_
if ex.errno != 36:
if ex.errno not in [36, 63] and (not WINDOWS or ex.errno != 22):
raise
if not b64:
@@ -480,6 +513,13 @@ def sanitize_fn(fn):
return fn.strip()
def u8safe(txt):
    """best-effort coercion of txt into text that survives a utf-8 round-trip"""
    # prefer xml character references for unencodable codepoints;
    # fall back to replacement characters if that encode step blows up
    try:
        buf = txt.encode("utf-8", "xmlcharrefreplace")
    except:
        buf = txt.encode("utf-8", "replace")

    return buf.decode("utf-8", "replace")
def exclude_dotfiles(filepaths):
    """lazily yields the paths whose basename does not start with a dot"""
    return (x for x in filepaths if not x.split("/")[-1].startswith("."))
@@ -536,6 +576,16 @@ def w8enc(txt):
return txt.encode(FS_ENCODING, "surrogateescape")
def w8b64dec(txt):
    """decodes base64(filesystem-bytes) to wtf8"""
    raw = base64.urlsafe_b64decode(txt.encode("ascii"))
    return w8dec(raw)
def w8b64enc(txt):
    """encodes wtf8 to base64(filesystem-bytes)"""
    raw = w8enc(txt)
    return base64.urlsafe_b64encode(raw).decode("ascii")
if PY2 and WINDOWS:
# moonrunes become \x3f with bytestrings,
# losing mojibake support is worth
@@ -583,6 +633,40 @@ def read_socket_unbounded(sr):
yield buf
def read_socket_chunked(sr, log=None):
    """
    generator: reads an http "Transfer-Encoding: chunked" body from the
    stream-reader sr, yielding the chunk payloads as they arrive;
    returns after the zero-length terminator chunk,
    raises Pebkac(400) if a chunk-size line cannot be parsed
    """
    err = "expected chunk length, got [{}] |{}| instead"
    while True:
        # read the chunk-size line two bytes at a time until a \r shows up
        buf = b""
        while b"\r" not in buf:
            rbuf = sr.recv(2)
            if not rbuf or len(buf) > 16:
                # connection lost, or size-line unreasonably long
                # (note: checked before appending, so buf can reach 18 bytes)
                err = err.format(buf.decode("utf-8", "replace"), len(buf))
                raise Pebkac(400, err)

            buf += rbuf

        if not buf.endswith(b"\n"):
            # the \r was the last byte of a 2-byte recv;
            # consume the \n that must follow it
            sr.recv(1)

        try:
            # chunk sizes are hexadecimal
            chunklen = int(buf.rstrip(b"\r\n"), 16)
        except:
            err = err.format(buf.decode("utf-8", "replace"), len(buf))
            raise Pebkac(400, err)

        if chunklen == 0:
            sr.recv(2)  # \r\n after final chunk
            return

        if log:
            log("receiving {} byte chunk".format(chunklen))

        for chunk in read_socket(sr, chunklen):
            yield chunk

        sr.recv(2)  # \r\n after each chunk too
def hashcopy(actor, fin, fout):
u32_lim = int((2 ** 31) * 0.9)
hashobj = hashlib.sha512()

View File

@@ -39,15 +39,27 @@ body {
margin: 1.3em 0 0 0;
font-size: 1.4em;
}
#path #entree {
margin-left: -.7em;
}
#treetab {
display: none;
}
#files {
border-collapse: collapse;
border-spacing: 0;
margin-top: 2em;
z-index: 1;
position: relative;
}
#files tbody a {
display: block;
padding: .3em 0;
}
a {
#files[ts] tbody div a {
color: #f5a;
}
a,
#files[ts] tbody div a:last-child {
color: #fc5;
padding: .2em;
text-decoration: none;
@@ -82,6 +94,16 @@ a {
margin: 0;
padding: 0 .5em;
}
#files td {
border-bottom: 1px solid #111;
}
#files td+td+td {
max-width: 30em;
overflow: hidden;
}
#files tr+tr td {
border-top: 1px solid #383838;
}
#files tbody td:nth-child(3) {
font-family: monospace;
font-size: 1.3em;
@@ -99,6 +121,7 @@ a {
#files tbody tr:last-child td {
padding-bottom: 1.3em;
border-bottom: .5em solid #444;
white-space: nowrap;
}
#files thead th[style] {
width: auto !important;
@@ -142,11 +165,14 @@ a {
#srv_info span {
color: #fff;
}
a.play {
#files tbody a.play {
color: #e70;
padding: .2em;
margin: -.2em;
}
a.play.act {
color: #af0;
#files tbody a.play.act {
color: #840;
text-shadow: 0 0 .3em #b80;
}
#blocked {
position: fixed;
@@ -156,7 +182,7 @@ a.play.act {
height: 100%;
background: #333;
font-size: 2.5em;
z-index:99;
z-index: 99;
}
#blk_play,
#blk_abrt {
@@ -190,6 +216,7 @@ a.play.act {
bottom: -6em;
height: 6em;
width: 100%;
z-index: 3;
transition: bottom 0.15s;
}
#widget.open {
@@ -214,6 +241,9 @@ a.play.act {
75% {cursor: url(/.cpr/dd/5.png), pointer}
85% {cursor: url(/.cpr/dd/1.png), pointer}
}
@keyframes spin {
100% {transform: rotate(360deg)}
}
#wtoggle {
position: absolute;
top: -1.2em;
@@ -273,3 +303,253 @@ a.play.act {
width: calc(100% - 10.5em);
background: rgba(0,0,0,0.2);
}
@media (min-width: 100em) {
#barpos,
#barbuf {
width: calc(100% - 24em);
left: 10em;
top: .7em;
height: 1.6em;
bottom: auto;
}
#widget {
bottom: -3.2em;
height: 3.2em;
}
}
.opview {
display: none;
}
.opview.act {
display: block;
}
#ops a {
color: #fc5;
font-size: 1.5em;
padding: .25em .3em;
margin: 0;
outline: none;
}
#ops a.act {
background: #281838;
border-radius: 0 0 .2em .2em;
border-bottom: .3em solid #d90;
box-shadow: 0 -.15em .2em #000 inset;
padding-bottom: .3em;
}
#ops i {
font-size: 1.5em;
}
#ops i:before {
content: 'x';
color: #282828;
text-shadow: 0 0 .08em #01a7e1;
position: relative;
}
#ops i:after {
content: 'x';
color: #282828;
text-shadow: 0 0 .08em #ff3f1a;
margin-left: -.35em;
font-size: 1.05em;
}
#ops,
.opbox {
border: 1px solid #3a3a3a;
box-shadow: 0 0 1em #222 inset;
}
#ops {
background: #333;
margin: 1.7em 1.5em 0 1.5em;
padding: .3em .6em;
border-radius: .3em;
border-width: .15em 0;
}
.opbox {
background: #2d2d2d;
margin: 1.5em 0 0 0;
padding: .5em;
border-radius: 0 1em 1em 0;
border-width: .15em .3em .3em 0;
max-width: 40em;
}
.opbox input {
margin: .5em;
}
.opview input[type=text] {
color: #fff;
background: #383838;
border: none;
box-shadow: 0 0 .3em #222;
border-bottom: 1px solid #fc5;
border-radius: .2em;
padding: .2em .3em;
}
input[type="checkbox"]+label {
color: #f5a;
}
input[type="checkbox"]:checked+label {
color: #fc5;
}
#op_search table {
border: 1px solid #3a3a3a;
box-shadow: 0 0 1em #222 inset;
background: #2d2d2d;
border-radius: .4em;
margin: 1.4em;
margin-bottom: 0;
padding: 0 .5em .5em 0;
}
#srch_form td {
padding: .6em .6em;
}
#op_search input {
margin: 0;
}
#srch_q {
white-space: pre;
}
#files td div span {
color: #fff;
padding: 0 .4em;
font-weight: bold;
font-style: italic;
}
#files td div a:hover {
background: #444;
color: #fff;
}
#files td div a {
display: inline-block;
white-space: nowrap;
}
#files td div a:last-child {
width: 100%;
}
#files td div {
border-collapse: collapse;
width: 100%;
}
#files td div a:last-child {
width: 100%;
}
#tree,
#treefiles {
vertical-align: top;
}
#tree {
padding-top: 2em;
}
#detree {
padding: .3em .5em;
font-size: 1.5em;
display: inline-block;
min-width: 12em;
width: 100%;
}
#treefiles #files tbody {
border-radius: 0 .7em 0 .7em;
}
#treefiles #files thead th:nth-child(1) {
border-radius: .7em 0 0 0;
}
#tree ul,
#tree li {
padding: 0;
margin: 0;
}
#tree ul {
border-left: .2em solid #444;
}
#tree li {
margin-left: 1em;
list-style: none;
white-space: nowrap;
}
#tree a.hl {
color: #400;
background: #fc4;
border-radius: .3em;
text-shadow: none;
}
#tree a {
display: inline-block;
}
#tree a+a {
width: calc(100% - 2em);
background: #333;
}
#tree a+a:hover {
background: #222;
color: #fff;
}
#treeul {
position: relative;
overflow: hidden;
left: -1.7em;
}
#treeul:hover {
z-index: 2;
overflow: visible;
}
#treeul:hover a+a {
width: auto;
min-width: calc(100% - 2em);
}
#treeul a:first-child {
font-family: monospace, monospace;
}
.dumb_loader_thing {
display: inline-block;
margin: 1em .3em 1em 1em;
padding: 0 1.2em 0 0;
font-size: 4em;
animation: spin 1s linear infinite;
position: absolute;
z-index: 9;
}
#files .cfg {
display: none;
font-size: 2em;
white-space: nowrap;
}
#files th:hover .cfg,
#files th.min .cfg {
display: block;
width: 1em;
border-radius: .2em;
margin: -1.3em auto 0 auto;
background: #444;
}
#files th.min .cfg {
margin: -.6em;
}
#files>thead>tr>th.min span {
position: absolute;
transform: rotate(270deg);
background: linear-gradient(90deg, #222, #444);
margin-left: -4.6em;
padding: .4em;
top: 5.4em;
width: 8em;
text-align: right;
letter-spacing: .04em;
}
#files td:nth-child(2n) {
color: #f5a;
}
#files td.min a {
display: none;
}
#files tr.play td {
background: #fc4;
border-color: transparent;
color: #400;
text-shadow: none;
}

View File

@@ -7,50 +7,86 @@
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css{{ ts }}">
{%- if can_upload %}
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css{{ ts }}">
{%- endif %}
</head>
<body>
{%- if can_upload %}
<div id="ops">
<a href="#" data-dest="">---</a>
<a href="#" data-perm="read" data-dest="search">🔎</a>
{%- if have_up2k_idx %}
<a href="#" data-dest="up2k">🚀</a>
{%- else %}
<a href="#" data-perm="write" data-dest="up2k">🚀</a>
{%- endif %}
<a href="#" data-perm="write" data-dest="bup">🎈</a>
<a href="#" data-perm="write" data-dest="mkdir">📂</a>
<a href="#" data-perm="write" data-dest="new_md">📝</a>
<a href="#" data-perm="write" data-dest="msg">📟</a>
</div>
<div id="op_search" class="opview">
{%- if have_tags_idx %}
<table id="srch_form" class="tags"></table>
{%- else %}
<table id="srch_form"></table>
{%- endif %}
<div id="srch_q"></div>
</div>
{%- include 'upload.html' %}
{%- endif %}
<h1 id="path">
<a href="#" id="entree">🌲</a>
{%- for n in vpnodes %}
<a href="/{{ n[0] }}">{{ n[1] }}</a>
{%- endfor %}
</h1>
{%- if can_read %}
{%- if prologue %}
<div id="pro" class="logue">{{ prologue }}</div>
{%- endif %}
<div id="pro" class="logue">{{ logues[0] }}</div>
<table id="treetab">
<tr>
<td id="tree">
<a href="#" id="detree">🍞...</a>
<ul id="treeul"></ul>
</td>
<td id="treefiles"></td>
</tr>
</table>
<table id="files">
<thead>
<tr>
<th></th>
<th>File Name</th>
<th sort="int">File Size</th>
<th>T</th>
<th>Date</th>
<th><span>File Name</span></th>
<th sort="int"><span>Size</span></th>
{%- for k in taglist %}
{%- if k.startswith('.') %}
<th sort="int"><span>{{ k[1:] }}</span></th>
{%- else %}
<th><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
{%- endif %}
{%- endfor %}
<th><span>T</span></th>
<th><span>Date</span></th>
</tr>
</thead>
<tbody>
{%- for f in files %}
<tr><td>{{ f[0] }}</td><td><a href="{{ f[1] }}">{{ f[2] }}</a></td><td>{{ f[3] }}</td><td>{{ f[4] }}</td><td>{{ f[5] }}</td></tr>
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
{%- if f.tags is defined %}
{%- for k in taglist %}
<td>{{ f.tags[k] }}</td>
{%- endfor %}
{%- endif %}
<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
{%- endfor %}
</tbody>
</table>
{%- if epilogue %}
<div id="epi" class="logue">{{ epilogue }}</div>
{%- endif %}
{%- endif %}
<div id="epi" class="logue">{{ logues[1] }}</div>
<h2><a href="?h">control-panel</a></h2>
@@ -67,16 +103,16 @@
<canvas id="barbuf"></canvas>
</div>
</div>
<script src="/.cpr/util.js{{ ts }}"></script>
{%- if can_read %}
<script>
var tag_order_cfg = {{ tag_order }};
</script>
<script src="/.cpr/util.js{{ ts }}"></script>
<script src="/.cpr/browser.js{{ ts }}"></script>
{%- endif %}
{%- if can_upload %}
<script src="/.cpr/up2k.js{{ ts }}"></script>
{%- endif %}
<script>
apply_perms({{ perms }});
</script>
</body>
</html>

View File

@@ -6,24 +6,11 @@ function dbg(msg) {
ebi('path').innerHTML = msg;
}
function ev(e) {
e = e || window.event;
if (e.preventDefault)
e.preventDefault()
if (e.stopPropagation)
e.stopPropagation();
e.returnValue = false;
return e;
}
makeSortable(ebi('files'));
// extract songs + add play column
var mp = (function () {
function init_mp() {
var tracks = [];
var ret = {
'au': null,
@@ -37,7 +24,8 @@ var mp = (function () {
var trs = ebi('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
for (var a = 0, aa = trs.length; a < aa; a++) {
var tds = trs[a].getElementsByTagName('td');
var link = tds[1].getElementsByTagName('a')[0];
var link = tds[1].getElementsByTagName('a');
link = link[link.length - 1];
var url = link.getAttribute('href');
var m = re_audio.exec(url);
@@ -52,7 +40,7 @@ var mp = (function () {
for (var a = 0, aa = tracks.length; a < aa; a++)
ebi('trk' + a).onclick = ev_play;
ret.vol = localStorage.getItem('vol');
ret.vol = sread('vol');
if (ret.vol !== null)
ret.vol = parseFloat(ret.vol);
else
@@ -64,14 +52,15 @@ var mp = (function () {
ret.setvol = function (vol) {
ret.vol = Math.max(Math.min(vol, 1), 0);
localStorage.setItem('vol', vol);
swrite('vol', vol);
if (ret.au)
ret.au.volume = ret.expvol();
};
return ret;
})();
}
var mp = init_mp();
// toggle player widget
@@ -456,6 +445,11 @@ function play(tid, call_depth) {
mp.au.volume = mp.expvol();
var oid = 'trk' + tid;
setclass(oid, 'play act');
var trs = ebi('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
for (var a = 0, aa = trs.length; a < aa; a++) {
trs[a].className = trs[a].className.replace(/ *play */, "");
}
ebi(oid).parentElement.parentElement.className += ' play';
try {
if (hack_attempt_play)
@@ -466,7 +460,13 @@ function play(tid, call_depth) {
var o = ebi(oid);
o.setAttribute('id', 'thx_js');
location.hash = oid;
if (window.history && history.replaceState) {
var nurl = (document.location + '').split('#')[0] + '#' + oid;
history.replaceState(ebi('files').innerHTML, nurl, nurl);
}
else {
document.location.hash = oid;
}
o.setAttribute('id', oid);
pbar.drawbuf();
@@ -561,3 +561,616 @@ function autoplay_blocked() {
//widget.open();
// search
// file-search tab: builds the query form and renders hits into #files
(function () {
    // form layout: [group-label, [field-id, query-param, label, input-size], ...]
    var sconf = [
        ["size",
            ["szl", "sz_min", "minimum MiB", ""],
            ["szu", "sz_max", "maximum MiB", ""]
        ],
        ["date",
            ["dtl", "dt_min", "min. iso8601", ""],
            ["dtu", "dt_max", "max. iso8601", ""]
        ],
        ["path",
            ["path", "path", "path contains &nbsp; (space-separated)", "46"]
        ],
        ["name",
            ["name", "name", "name contains &nbsp; (negate with -nope)", "46"]
        ]
    ];

    // the template only renders #srch_form.tags when the server has a tags index
    if (document.querySelector('#srch_form.tags'))
        sconf.push(["tags",
            ["tags", "tags", "tags contains", "46"]
        ]);

    var html = [];
    var orig_html = null;  // snapshot of #files before results replaced it
    for (var a = 0; a < sconf.length; a++) {
        html.push('<tr><td><br />' + sconf[a][0] + '</td>');
        for (var b = 1; b < 3; b++) {
            var hn = "srch_" + sconf[a][b][0];
            // single-field groups span both columns
            var csp = (sconf[a].length == 2) ? 2 : 1;
            html.push(
                '<td colspan="' + csp + '"><input id="' + hn + 'c" type="checkbox">\n' +
                '<label for="' + hn + 'c">' + sconf[a][b][2] + '</label>\n' +
                '<br /><input id="' + hn + 'v" type="text" size="' + sconf[a][b][3] +
                '" name="' + sconf[a][b][1] + '" /></td>');
            if (csp == 2)
                break;
        }
        html.push('</tr>');
    }
    ebi('srch_form').innerHTML = html.join('\n');

    var o = document.querySelectorAll('#op_search input[type="text"]');
    for (var a = 0; a < o.length; a++) {
        o[a].oninput = ev_search_input;
    }

    var search_timeout;

    // typing into a field checks its enable-box and debounces a search
    function ev_search_input() {
        var v = this.value;
        var chk = ebi(this.getAttribute('id').slice(0, -1) + 'c');
        chk.checked = ((v + '').length > 0);

        clearTimeout(search_timeout);
        search_timeout = setTimeout(do_search, 100);
    }

    // collect the enabled fields and POST them to /?srch as json
    function do_search() {
        clearTimeout(search_timeout);

        var params = {};
        var o = document.querySelectorAll('#op_search input[type="text"]');
        for (var a = 0; a < o.length; a++) {
            var chk = ebi(o[a].getAttribute('id').slice(0, -1) + 'c');
            if (!chk.checked)
                continue;

            params[o[a].getAttribute('name')] = o[a].value;
        }
        // ebi('srch_q').textContent = JSON.stringify(params, null, 4);

        var xhr = new XMLHttpRequest();
        xhr.open('POST', '/?srch', true);
        xhr.onreadystatechange = xhr_search_results;
        xhr.ts = new Date().getTime();
        xhr.send(JSON.stringify(params));
    }

    // swap the folder listing out for a table of search hits
    function xhr_search_results() {
        if (this.readyState != XMLHttpRequest.DONE)
            return;
        if (this.status !== 200) {
            alert('ah fug\n' + this.status + ": " + this.responseText);
            return;
        }

        var res = JSON.parse(this.responseText),
            tagord = res.tag_order;

        var ofiles = ebi('files');
        // ignore replies that arrive out of order (older than what is shown)
        if (ofiles.getAttribute('ts') > this.ts)
            return;

        ebi('path').style.display = 'none';
        ebi('tree').style.display = 'none';

        var html = mk_files_header(tagord);
        html.push('<tbody>');
        html.push('<tr><td>-</td><td colspan="42"><a href="#" id="unsearch">close search results</a></td></tr>');
        for (var a = 0; a < res.hits.length; a++) {
            var r = res.hits[a],
                ts = parseInt(r.ts),
                sz = esc(r.sz + ''),
                rp = esc(r.rp + ''),
                ext = rp.lastIndexOf('.') > 0 ? rp.split('.').slice(-1)[0] : '%',
                links = linksplit(rp);

            // overly long "extensions" are probably not extensions
            if (ext.length > 8)
                ext = '%';

            links = links.join('');
            var nodes = ['<tr><td>-</td><td><div>' + links + '</div>', sz];
            for (var b = 0; b < tagord.length; b++) {
                var k = tagord[b],
                    v = r.tags[k] || "";

                if (k == "dur") {
                    // show mm:ss but keep raw seconds as the sort key
                    var sv = s2ms(v);
                    nodes[nodes.length - 1] += '</td><td sortv="' + v + '">' + sv;
                    continue;
                }

                nodes.push(v);
            }

            nodes = nodes.concat([ext, unix2iso(ts)]);
            html.push(nodes.join('</td><td>'));
            html.push('</td></tr>');
        }
        // stash the original listing the first time results are shown
        if (!orig_html)
            orig_html = ebi('files').innerHTML;

        ofiles.innerHTML = html.join('\n');
        ofiles.setAttribute("ts", this.ts);
        filecols.set_style();
        reload_browser();

        ebi('unsearch').onclick = unsearch;
    }

    // close the results and restore the original folder listing
    function unsearch(e) {
        ev(e);
        ebi('path').style.display = 'inline-block';
        ebi('tree').style.display = 'block';
        ebi('files').innerHTML = orig_html;
        orig_html = null;
        reload_browser();
    }
})();
// tree
// folder-tree sidebar: spa navigation between folders without full pageloads
(function () {
    var treedata = null;  // NOTE(review): appears unused in this revision

    // enter tree-view: move the listing into the #treetab layout and load the tree
    function entree(e) {
        ev(e);
        ebi('path').style.display = 'none';

        var treetab = ebi('treetab');
        var treefiles = ebi('treefiles');
        treetab.style.display = 'table';
        treefiles.appendChild(ebi('pro'));
        treefiles.appendChild(ebi('files'));
        treefiles.appendChild(ebi('epi'));
        swrite('entreed', 'tree');
        get_tree("", get_vpath());
    }

    // fetch ?tree=top as seen from folder dst; reply handled by recvtree
    function get_tree(top, dst) {
        var xhr = new XMLHttpRequest();
        xhr.top = top;
        xhr.dst = dst;
        xhr.open('GET', dst + '?tree=' + top, true);
        xhr.onreadystatechange = recvtree;
        xhr.send();
        enspin('#tree');
    }

    // splice the fetched subtree into #treeul
    function recvtree() {
        if (this.readyState != XMLHttpRequest.DONE)
            return;
        if (this.status !== 200) {
            alert('ah fug\n' + this.status + ": " + this.responseText);
            return;
        }

        var top = this.top == '.' ? this.dst : this.top,
            name = top.split('/').slice(-2)[0],
            rtop = top.replace(/^\/+/, "");

        try {
            var res = JSON.parse(this.responseText);
        }
        catch (ex) {
            return;
        }

        var html = parsetree(res, rtop);
        if (!this.top) {
            // initial load: build the [root] node, but only if the tree is empty
            html = '<li><a href="#">-</a><a href="/">[root]</a>\n<ul>' + html;
            if (!ebi('treeul').getElementsByTagName('li').length)
                ebi('treeul').innerHTML = html + '</ul></li>';
        }
        else {
            // find the matching node and replace it, unless already expanded
            html = '<a href="#">-</a><a href="' +
                esc(top) + '">' + esc(name) +
                "</a>\n<ul>\n" + html + "</ul>";

            var links = document.querySelectorAll('#tree a+a');
            for (var a = 0, aa = links.length; a < aa; a++) {
                if (links[a].getAttribute('href') == top) {
                    var o = links[a].parentNode;
                    if (!o.getElementsByTagName('li').length)
                        o.innerHTML = html;
                    //else
                    //	links[a].previousSibling.textContent = '-';
                }
            }
        }
        document.querySelector('#treeul>li>a+a').textContent = '[root]';
        despin('#tree');
        reload_tree();

        // widen the sidebar to fit the deepest visible nesting level
        var q = '#tree';
        var nq = 0;
        while (true) {
            nq++;
            q += '>ul>li';
            if (!document.querySelector(q))
                break;
        }
        ebi('treeul').style.width = (24 + nq) + 'em';
    }

    // re-bind click handlers and highlight the current folder
    function reload_tree() {
        var cdir = get_vpath();
        var links = document.querySelectorAll('#tree a+a');
        for (var a = 0, aa = links.length; a < aa; a++) {
            var href = links[a].getAttribute('href');
            links[a].setAttribute('class', href == cdir ? 'hl' : '');
            links[a].onclick = treego;
        }
        links = document.querySelectorAll('#tree li>a:first-child');
        for (var a = 0, aa = links.length; a < aa; a++) {
            links[a].setAttribute('dst', links[a].nextSibling.getAttribute('href'));
            links[a].onclick = treegrow;
        }
    }

    // folder-name click: navigate there (?ls) and load its subtree
    function treego(e) {
        ev(e);
        // clicking the current folder just collapses it instead
        if (this.getAttribute('class') == 'hl' &&
            this.previousSibling.textContent == '-') {
            treegrow.call(this.previousSibling, e);
            return;
        }

        var xhr = new XMLHttpRequest();
        xhr.top = this.getAttribute('href');
        xhr.open('GET', xhr.top + '?ls', true);
        xhr.onreadystatechange = recvls;
        xhr.send();
        get_tree('.', xhr.top);
        enspin('#files');
    }

    // +/- click: expand or collapse a tree node
    function treegrow(e) {
        ev(e);
        if (this.textContent == '-') {
            // collapse: drop everything after the folder link
            while (this.nextSibling.nextSibling) {
                var rm = this.nextSibling.nextSibling;
                rm.parentNode.removeChild(rm);
            }
            this.textContent = '+';
            return;
        }
        var dst = this.getAttribute('dst');
        get_tree('.', dst);
    }

    // render a ?ls reply as the new #files table and push a history entry
    function recvls() {
        if (this.readyState != XMLHttpRequest.DONE)
            return;
        if (this.status !== 200) {
            alert('ah fug\n' + this.status + ": " + this.responseText);
            return;
        }

        try {
            var res = JSON.parse(this.responseText);
        }
        catch (ex) {
            // not json; server probably lacks ?ls, fall back to a full pageload
            window.location = this.top;
            return;
        }

        ebi('srv_info').innerHTML = '<span>' + res.srvinf + '</span>';

        var nodes = res.dirs.concat(res.files);
        var top = this.top;
        var html = mk_files_header(res.taglist);
        html.push('<tbody>');
        for (var a = 0; a < nodes.length; a++) {
            var r = nodes[a],
                ln = ['<tr><td>' + r.lead + '</td><td><a href="' +
                    top + r.href + '">' + esc(decodeURIComponent(r.href)) + '</a>', r.sz];

            for (var b = 0; b < res.taglist.length; b++) {
                var k = res.taglist[b],
                    v = (r.tags || {})[k] || "";

                // a leading dot marks numeric columns; strip it for display
                if (k[0] == '.')
                    k = k.slice(1);

                if (k == "dur") {
                    // show mm:ss but keep raw seconds as the sort key
                    var sv = s2ms(v);
                    ln[ln.length - 1] += '</td><td sortv="' + v + '">' + sv;
                    continue;
                }

                ln.push(v);
            }
            ln = ln.concat([r.ext, unix2iso(r.ts)]).join('</td><td>');
            html.push(ln + '</td></tr>');
        }
        html.push('</tbody>');
        html = html.join('\n');
        ebi('files').innerHTML = html;
        history.pushState(html, this.top, this.top);
        apply_perms(res.perms);
        despin('#files');

        ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : "";
        ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : "";

        filecols.set_style();
        reload_tree();
        reload_browser();
    }

    // convert a ?tree json reply into nested <li> markup;
    // res.a lists unexpanded children, other keys are "k"-prefixed folder names
    function parsetree(res, top) {
        var ret = '';
        for (var a = 0; a < res.a.length; a++) {
            if (res.a[a] !== '')
                res['k' + res.a[a]] = 0;
        }
        delete res['a'];
        var keys = Object.keys(res);
        keys.sort();
        for (var a = 0; a < keys.length; a++) {
            var kk = keys[a],
                k = kk.slice(1),
                url = '/' + (top ? top + k : k) + '/',
                ek = esc(k),
                sym = res[kk] ? '-' : '+',
                link = '<a href="#">' + sym + '</a><a href="' +
                    esc(url) + '">' + ek + '</a>';

            if (res[kk]) {
                var subtree = parsetree(res[kk], url.slice(1));
                ret += '<li>' + link + '\n<ul>\n' + subtree + '</ul></li>\n';
            }
            else {
                ret += '<li>' + link + '</li>\n';
            }
        }
        return ret;
    }

    // leave tree-view: move the listing back into the main document flow
    function detree(e) {
        ev(e);
        var treetab = ebi('treetab');
        treetab.parentNode.insertBefore(ebi('pro'), treetab);
        treetab.parentNode.insertBefore(ebi('files'), treetab.nextSibling);
        treetab.parentNode.insertBefore(ebi('epi'), ebi('files').nextSibling);
        ebi('path').style.display = 'inline-block';
        treetab.style.display = 'none';
        swrite('entreed', 'na');
    }

    ebi('entree').onclick = entree;
    ebi('detree').onclick = detree;
    // restore the user's tree-view preference from last visit
    if (sread('entreed') == 'tree')
        entree();

    // back/forward: restore the #files html saved in the history entry
    window.onpopstate = function (e) {
        console.log(e.url + ' ,, ' + ((e.state + '').slice(0, 64)));
        if (e.state) {
            ebi('files').innerHTML = e.state;
            reload_tree();
            reload_browser();
        }
    };

    if (window.history && history.pushState) {
        // seed a history entry for the initial pageload
        var u = get_vpath() + window.location.hash;
        history.replaceState(ebi('files').innerHTML, u, u);
    }
})();
function enspin(sel) {
    // show a spinner as the first child of the container at sel (max one)
    despin(sel);
    var spinner = document.createElement('div');
    spinner.setAttribute('class', 'dumb_loader_thing');
    spinner.innerHTML = '🌲';
    var parent = document.querySelector(sel);
    parent.insertBefore(spinner, parent.childNodes[0]);
}
function despin(sel) {
    // remove any spinners that are direct children of sel
    var spinners = document.querySelectorAll(sel + '>.dumb_loader_thing');
    for (var i = spinners.length - 1; i >= 0; i--)
        spinners[i].parentNode.removeChild(spinners[i]);
}
function apply_perms(perms) {
    // show/hide the action tabs and upload widgets to match the
    // read/write permissions of the current folder
    perms = perms || [];

    var links = document.querySelectorAll('#ops>a[data-perm]');
    for (var i = 0; i < links.length; i++)
        links[i].style.display = 'none';

    for (var i = 0; i < perms.length; i++) {
        var match = document.querySelectorAll('#ops>a[data-perm="' + perms[i] + '"]');
        for (var j = 0; j < match.length; j++)
            match[j].style.display = 'inline';
    }

    // if the active tab needs a permission we no longer have, leave it
    var act = document.querySelector('#ops>a.act');
    if (act) {
        var need = act.getAttribute('data-perm');
        if (need && !has(perms, need))
            goto();
    }

    document.body.setAttribute('perms', perms.join(' '));

    // up2k config cells: visible when writable, or when tagged data-perm=read
    var writable = has(perms, "write");
    var cells = document.querySelectorAll('#u2conf td');
    for (var i = 0; i < cells.length; i++) {
        cells[i].style.display =
            (writable || cells[i].getAttribute('data-perm') == 'read') ?
                'table-cell' : 'none';
    }

    if (window['up2k'])
        up2k.set_fsearch();
}
function mk_files_header(taglist) {
    // build the <thead> markup for the #files table;
    // tag names with a leading dot are numeric columns (sort="int")
    var ret = [
        '<thead>',
        '<th></th>',
        '<th><span>File Name</span></th>',
        '<th sort="int"><span>Size</span></th>'
    ];

    for (var i = 0; i < taglist.length; i++) {
        var name = taglist[i];
        if (name.slice(0, 1) == '.')
            ret.push('<th sort="int"><span>' + name.slice(1) + '</span></th>');
        else
            // capitalize the first letter for display
            ret.push('<th><span>' + name.slice(0, 1).toUpperCase() + name.slice(1) + '</span></th>');
    }

    ret.push('<th><span>T</span></th>');
    ret.push('<th><span>Date</span></th>');
    ret.push('</thead>');
    return ret;
}
// column show/hide; the hidden column names persist via jwrite("filecols")
var filecols = (function () {
    var hidden = jread('filecols', []);

    // inject a +/- toggle link into every column header
    var add_btns = function () {
        var ths = document.querySelectorAll('#files th>span');
        for (var a = 0, aa = ths.length; a < aa; a++) {
            var th = ths[a].parentElement;
            var is_hidden = has(hidden, ths[a].textContent);
            th.innerHTML = '<div class="cfg"><a href="#">' +
                (is_hidden ? '+' : '-') + '</a></div>' + ths[a].outerHTML;
            th.getElementsByTagName('a')[0].onclick = ev_row_tgl;
        }
    };

    // apply the hidden-set to the current table: tag headers with .min
    // and collapse the matching body cells
    var set_style = function () {
        add_btns();

        var ohidden = [],
            ths = document.querySelectorAll('#files th'),
            ncols = ths.length;

        for (var a = 0; a < ncols; a++) {
            var span = ths[a].getElementsByTagName('span');
            if (span.length <= 0)
                continue;

            var name = span[0].textContent,
                cls = '';

            if (has(hidden, name)) {
                ohidden.push(a);
                cls = ' min';
            }
            ths[a].className = ths[a].className.replace(/ *min */, " ") + cls;
        }

        for (var a = 0; a < ncols; a++) {
            var cls = has(ohidden, a) ? 'min' : '';
            var tds = document.querySelectorAll('#files>tbody>tr>td:nth-child(' + (a + 1) + ')');
            for (var b = 0, bb = tds.length; b < bb; b++) {
                tds[b].setAttribute('class', cls);
                // the first two columns (icon, filename) keep their contents
                if (a < 2)
                    continue;

                if (cls) {
                    // stash the cell html so it can be restored on un-hide
                    if (!tds[b].hasAttribute('html')) {
                        tds[b].setAttribute('html', tds[b].innerHTML);
                        tds[b].innerHTML = '...';
                    }
                }
                else if (tds[b].hasAttribute('html')) {
                    tds[b].innerHTML = tds[b].getAttribute('html');
                    tds[b].removeAttribute('html');
                }
            }
        }
    };
    set_style();

    // flip one column by name and persist the new hidden-set
    var toggle = function (name) {
        var ofs = hidden.indexOf(name);
        if (ofs !== -1)
            hidden.splice(ofs, 1);
        else
            hidden.push(name);

        jwrite("filecols", hidden);
        set_style();
    };

    return {
        "add_btns": add_btns,
        "set_style": set_style,
        "toggle": toggle,
    };
})();
function ev_row_tgl(e) {
    // header +/- click: toggle visibility of that column by its label
    ev(e);
    var th = this.parentElement.parentElement;
    filecols.toggle(th.getElementsByTagName('span')[0].textContent);
}
function reload_browser(not_mp) {
    // re-initialize the listing view after its DOM was replaced:
    // column styling, sort handlers, breadcrumbs, size formatting,
    // and (unless not_mp) the media player
    filecols.set_style();
    makeSortable(ebi('files'));

    // rebuild the #path breadcrumbs from the current url
    var parts = get_vpath().split('/');
    var rm = document.querySelectorAll('#path>a+a+a');
    // fix: loop index was assigned without `var`, leaking an implicit
    // global `a` (and throwing under strict mode)
    for (var a = rm.length - 1; a >= 0; a--)
        rm[a].parentNode.removeChild(rm[a]);

    var link = '/';
    for (var a = 1; a < parts.length - 1; a++) {
        link += parts[a] + '/';
        var o = document.createElement('a');
        o.setAttribute('href', link);
        o.innerHTML = parts[a];
        ebi('path').appendChild(o);
    }

    // thousands-separate the size column, keeping the raw value as sortv
    var oo = document.querySelectorAll('#files>tbody>tr>td:nth-child(3)');
    for (var a = 0, aa = oo.length; a < aa; a++) {
        var sz = oo[a].textContent.replace(/ /g, ""),
            hsz = sz.replace(/\B(?=(\d{3})+(?!\d))/g, " ");

        oo[a].textContent = hsz;
        oo[a].setAttribute("sortv", sz);
    }

    if (!not_mp) {
        // stop playback and rebuild the playlist from the new table
        if (mp && mp.au) {
            mp.au.pause();
            mp.au = null;
        }
        widget.close();
        mp = init_mp();
    }

    if (window['up2k'])
        up2k.set_fsearch();
}
reload_browser(true);

View File

@@ -524,11 +524,9 @@ dom_navtgl.onclick = function () {
dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav';
dom_nav.style.display = hidden ? 'none' : 'block';
if (window.localStorage)
localStorage.setItem('hidenav', hidden ? 1 : 0);
swrite('hidenav', hidden ? 1 : 0);
redraw();
};
if (window.localStorage && localStorage.getItem('hidenav') == 1)
if (sread('hidenav') == 1)
dom_navtgl.onclick();

View File

@@ -124,5 +124,3 @@ html.dark #toast {
transition: opacity 0.2s ease-in-out;
opacity: 1;
}
# mt {opacity: .5;top:1px}

View File

@@ -3,51 +3,6 @@
window.onerror = vis_exh;
(function () {
var ops = document.querySelectorAll('#ops>a');
for (var a = 0; a < ops.length; a++) {
ops[a].onclick = opclick;
}
})();
function opclick(ev) {
if (ev) //ie
ev.preventDefault();
var dest = this.getAttribute('data-dest');
goto(dest);
// writing a blank value makes ie8 segfault w
if (window.localStorage)
localStorage.setItem('opmode', dest || '.');
var input = document.querySelector('.opview.act input:not([type="hidden"])')
if (input)
input.focus();
}
function goto(dest) {
var obj = document.querySelectorAll('.opview.act');
for (var a = obj.length - 1; a >= 0; a--)
obj[a].classList.remove('act');
obj = document.querySelectorAll('#ops>a');
for (var a = obj.length - 1; a >= 0; a--)
obj[a].classList.remove('act');
if (dest) {
ebi('op_' + dest).classList.add('act');
document.querySelector('#ops>a[data-dest=' + dest + ']').classList.add('act');
var fn = window['goto_' + dest];
if (fn)
fn();
}
}
function goto_up2k() {
if (up2k === false)
return goto('bup');
@@ -59,17 +14,6 @@ function goto_up2k() {
}
(function () {
goto();
if (window.localStorage) {
var op = localStorage.getItem('opmode');
if (op !== null && op !== '.')
goto(op);
}
ebi('ops').style.display = 'block';
})();
// chrome requires https to use crypto.subtle,
// usually it's undefined but some chromes throw on invoke
var up2k = null;
@@ -89,6 +33,104 @@ catch (ex) {
}
// cross-tab mutex over a BroadcastChannel so only one browser tab
// runs cpu-heavy hashing at a time; contention is won by the lowest id
function up2k_flagbus() {
    var flag = {
        "id": Math.floor(Math.random() * 1024 * 1024 * 1023 * 2),  // random tab-id
        "ch": new BroadcastChannel("up2k_flagbus"),
        "ours": false,  // true while this tab holds the flag
        "owner": null,  // [id, ts] of the current holder, as far as we know
        "wants": null,  // [id, ts] of a lower-id tab asking for the flag
        "act": false,  // ts of the last observed bus activity
        "last_tx": ["x", null]  // [msg, ts] of our last broadcast, for dedup
    };
    var dbg = function (who, msg) {
        console.log('flagbus(' + flag.id + '): [' + who + '] ' + msg);
    };
    flag.ch.onmessage = function (ev) {
        var who = ev.data[0],
            what = ev.data[1];

        if (who == flag.id) {
            // BroadcastChannel should never echo our own messages back
            dbg(who, 'hi me (??)');
            return;
        }

        flag.act = new Date().getTime();
        if (what == "want") {
            // lowest id wins, don't care if that's us
            if (who < flag.id) {
                dbg(who, 'wants (ack)');
                flag.wants = [who, flag.act];
            }
            else {
                dbg(who, 'wants (ign)');
            }
        }
        else if (what == "have") {
            dbg(who, 'have');
            flag.owner = [who, flag.act];
        }
        else if (what == "give") {
            // only the recorded owner may release the flag
            if (flag.owner && flag.owner[0] == who) {
                flag.owner = null;
                dbg(who, 'give (ok)');
            }
            else {
                dbg(who, 'give, INVALID, ' + flag.owner);
            }
        }
        else if (what == "hi") {
            dbg(who, 'hi');
            flag.ch.postMessage([flag.id, "hey"]);
        }
        else {
            dbg('?', ev.data);
        }
    };
    // broadcast msg; identical messages are rate-limited to one per 500ms
    var tx = function (now, msg) {
        var td = now - flag.last_tx[1];
        if (td > 500 || flag.last_tx[0] != msg) {
            dbg('*', 'tx ' + msg);
            flag.ch.postMessage([flag.id, msg]);
            flag.last_tx = [msg, now];
        }
    };
    var do_take = function (now) {
        //dbg('*', 'do_take');
        tx(now, "have");
        flag.owner = [flag.id, now];
        flag.ours = true;
    };
    var do_want = function (now) {
        //dbg('*', 'do_want');
        tx(now, "want");
    };
    // try to acquire the flag; caller polls this until flag.ours becomes true
    flag.take = function (now) {
        if (flag.ours) {
            // already holding it; refresh our claim on the bus
            do_take(now);
            return;
        }
        // owners and requests expire after 5sec of silence
        if (flag.owner && now - flag.owner[1] > 5000) {
            flag.owner = null;
        }
        if (flag.wants && now - flag.wants[1] > 5000) {
            flag.wants = null;
        }
        if (!flag.owner && !flag.wants) {
            do_take(now);
            return;
        }
        do_want(now);
    };
    // release the flag so other tabs may take it
    flag.give = function () {
        dbg('#', 'put give');
        flag.ch.postMessage([flag.id, "give"]);
        flag.owner = null;
        flag.ours = false;
    };
    flag.ch.postMessage([flag.id, 'hi']);
    return flag;
}
function up2k_init(have_crypto) {
//have_crypto = false;
var need_filereader_cache = undefined;
@@ -109,10 +151,6 @@ function up2k_init(have_crypto) {
ebi('u2notbtn').innerHTML = '';
}
var post_url = ebi('op_bup').getElementsByTagName('form')[0].getAttribute('action');
if (post_url && post_url.charAt(post_url.length - 1) !== '/')
post_url += '/';
var shame = 'your browser <a href="https://www.chromium.org/blink/webcrypto">disables sha512</a> unless you <a href="' + (window.location + '').replace(':', 's:') + '">use https</a>'
var is_https = (window.location + '').indexOf('https:') === 0;
if (is_https)
@@ -157,7 +195,7 @@ function up2k_init(have_crypto) {
// handle user intent to use the basic uploader instead
ebi('u2nope').onclick = function (e) {
e.preventDefault();
setmsg('');
setmsg();
goto('bup');
};
@@ -172,7 +210,7 @@ function up2k_init(have_crypto) {
}
function cfg_get(name) {
var val = localStorage.getItem(name);
var val = sread(name);
if (val === null)
return parseInt(ebi(name).value);
@@ -181,27 +219,35 @@ function up2k_init(have_crypto) {
}
function bcfg_get(name, defval) {
var val = localStorage.getItem(name);
var o = ebi(name);
if (!o)
return defval;
var val = sread(name);
if (val === null)
val = defval;
else
val = (val == '1');
ebi(name).checked = val;
o.checked = val;
return val;
}
function bcfg_set(name, val) {
localStorage.setItem(
name, val ? '1' : '0');
swrite(name, val ? '1' : '0');
var o = ebi(name);
if (o)
o.checked = val;
ebi(name).checked = val;
return val;
}
var parallel_uploads = cfg_get('nthread');
var multitask = bcfg_get('multitask', true);
var ask_up = bcfg_get('ask_up', true);
var flag_en = bcfg_get('flag_en', false);
var fsearch = bcfg_get('fsearch', false);
var col_hashing = '#00bbff';
var col_hashed = '#004466';
@@ -233,6 +279,10 @@ function up2k_init(have_crypto) {
if (!bobslice || !window.FileReader || !window.FileList)
return un2k("this is the basic uploader; up2k needs at least<br />chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1");
var flag = false;
apply_flag_cfg();
set_fsearch();
function nav() {
ebi('file' + fdom_ctr).click();
}
@@ -302,7 +352,7 @@ function up2k_init(have_crypto) {
for (var a = 0; a < good_files.length; a++)
msg.push(good_files[a].name);
if (ask_up && !confirm(msg.join('\n')))
if (ask_up && !fsearch && !confirm(msg.join('\n')))
return;
for (var a = 0; a < good_files.length; a++) {
@@ -316,6 +366,8 @@ function up2k_init(have_crypto) {
"name": fobj.name,
"size": fobj.size,
"lmod": lmod / 1000,
"purl": get_vpath(),
"done": false,
"hash": []
};
@@ -330,7 +382,7 @@ function up2k_init(have_crypto) {
var tr = document.createElement('tr');
tr.innerHTML = '<td id="f{0}n"></td><td id="f{0}t">hashing</td><td id="f{0}p" class="prog"></td>'.format(st.files.length);
tr.getElementsByTagName('td')[0].textContent = entry.name;
tr.getElementsByTagName('td')[0].innerHTML = fsearch ? entry.name : linksplit(esc(entry.purl + entry.name)).join(' ');
ebi('u2tab').appendChild(tr);
st.files.push(entry);
@@ -348,6 +400,19 @@ function up2k_init(have_crypto) {
}
more_one_file();
function u2cleanup(e) {
ev(e);
for (var a = 0; a < st.files.length; a++) {
var t = st.files[a];
if (t.done && t.name) {
var tr = ebi('f{0}p'.format(t.n)).parentNode;
tr.parentNode.removeChild(tr);
t.name = undefined;
}
}
}
ebi('u2cleanup').onclick = u2cleanup;
/////
////
/// actuator
@@ -372,6 +437,7 @@ function up2k_init(have_crypto) {
var tasker = (function () {
var mutex = false;
var was_busy = false;
function taskerd() {
if (mutex)
@@ -379,8 +445,63 @@ function up2k_init(have_crypto) {
mutex = true;
while (true) {
if (false) {
ebi('srv_info').innerHTML =
new Date().getTime() + ", " +
st.todo.hash.length + ", " +
st.todo.handshake.length + ", " +
st.todo.upload.length + ", " +
st.busy.hash.length + ", " +
st.busy.handshake.length + ", " +
st.busy.upload.length;
}
var is_busy = 0 !=
st.todo.hash.length +
st.todo.handshake.length +
st.todo.upload.length +
st.busy.hash.length +
st.busy.handshake.length +
st.busy.upload.length;
if (was_busy != is_busy) {
was_busy = is_busy;
if (is_busy)
window.addEventListener("beforeunload", warn_uploader_busy);
else
window.removeEventListener("beforeunload", warn_uploader_busy);
}
if (flag) {
if (is_busy) {
var now = new Date().getTime();
flag.take(now);
if (!flag.ours) {
setTimeout(taskerd, 100);
mutex = false;
return;
}
}
else if (flag.ours) {
flag.give();
}
}
var mou_ikkai = false;
if (st.todo.handshake.length > 0 &&
st.busy.handshake.length == 0 && (
st.todo.handshake[0].t3 || (
handshakes_permitted() &&
st.busy.upload.length < parallel_uploads
)
)
) {
exec_handshake();
mou_ikkai = true;
}
if (handshakes_permitted() &&
st.todo.handshake.length > 0 &&
st.busy.handshake.length == 0 &&
@@ -520,6 +641,7 @@ function up2k_init(have_crypto) {
var t = st.todo.hash.shift();
st.busy.hash.push(t);
st.bytes.hashed += t.size;
t.bytes_uploaded = 0;
t.t1 = new Date().getTime();
var nchunk = 0;
@@ -646,10 +768,38 @@ function up2k_init(have_crypto) {
if (xhr.status == 200) {
var response = JSON.parse(xhr.responseText);
if (!response.name) {
var msg = '';
var smsg = '';
if (!response || !response.hits || !response.hits.length) {
msg = 'not found on server';
smsg = '404';
}
else {
smsg = 'found';
var hit = response.hits[0],
msg = linksplit(hit.rp).join(''),
tr = unix2iso(hit.ts),
tu = unix2iso(t.lmod),
diff = parseInt(t.lmod) - parseInt(hit.ts),
cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
msg += '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</span></span>';
}
ebi('f{0}p'.format(t.n)).innerHTML = msg;
ebi('f{0}t'.format(t.n)).innerHTML = smsg;
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
st.bytes.uploaded += t.size;
t.done = true;
tasker();
return;
}
if (response.name !== t.name) {
// file exists; server renamed us
t.name = response.name;
ebi('f{0}n'.format(t.n)).textContent = t.name;
ebi('f{0}n'.format(t.n)).innerHTML = linksplit(esc(t.purl + t.name)).join(' ');
}
t.postlist = [];
@@ -683,11 +833,15 @@ function up2k_init(have_crypto) {
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
if (done) {
t.done = true;
st.bytes.uploaded += t.size - t.bytes_uploaded;
var spd1 = (t.size / ((t.t2 - t.t1) / 1000.)) / (1024 * 1024.);
var spd2 = (t.size / ((t.t3 - t.t2) / 1000.)) / (1024 * 1024.);
ebi('f{0}p'.format(t.n)).innerHTML = 'hash {0}, up {1} MB/s'.format(
spd1.toFixed(2), spd2.toFixed(2));
}
else t.t3 = undefined;
tasker();
}
else {
@@ -699,6 +853,11 @@ function up2k_init(have_crypto) {
var ofs = err.lastIndexOf(' : ');
if (ofs > 0)
err = err.slice(0, ofs);
ofs = err.indexOf('\n/');
if (ofs !== -1) {
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2, -1)).join(' ');
}
}
if (err != "") {
ebi('f{0}t'.format(t.n)).innerHTML = "ERROR";
@@ -715,14 +874,19 @@ function up2k_init(have_crypto) {
"no further information"));
}
};
xhr.open('POST', post_url + 'handshake.php', true);
xhr.responseType = 'text';
xhr.send(JSON.stringify({
var req = {
"name": t.name,
"size": t.size,
"lmod": t.lmod,
"hash": t.hash
}));
};
if (fsearch)
req.srch = 1;
xhr.open('POST', t.purl + 'handshake.php', true);
xhr.responseType = 'text';
xhr.send(JSON.stringify(req));
}
/////
@@ -761,12 +925,13 @@ function up2k_init(have_crypto) {
if (xhr.status == 200) {
prog(t.n, npart, col_uploaded);
st.bytes.uploaded += cdr - car;
t.bytes_uploaded += cdr - car;
st.busy.upload.splice(st.busy.upload.indexOf(upt), 1);
t.postlist.splice(t.postlist.indexOf(npart), 1);
if (t.postlist.length == 0) {
t.t3 = new Date().getTime();
ebi('f{0}t'.format(t.n)).innerHTML = 'verifying';
st.todo.handshake.push(t);
st.todo.handshake.unshift(t);
}
tasker();
}
@@ -777,7 +942,7 @@ function up2k_init(have_crypto) {
(xhr.responseText && xhr.responseText) ||
"no further information"));
};
xhr.open('POST', post_url + 'chunkpit.php', true);
xhr.open('POST', t.purl + 'chunkpit.php', true);
//xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart].substr(1) + "x");
xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
xhr.setRequestHeader("X-Up2k-Wark", t.wark);
@@ -813,6 +978,46 @@ function up2k_init(have_crypto) {
/// config ui
//
function onresize(ev) {
var bar = ebi('ops'),
wpx = innerWidth,
fpx = parseInt(getComputedStyle(bar)['font-size']),
wem = wpx * 1.0 / fpx,
wide = wem > 54,
parent = ebi(wide ? 'u2btn_cw' : 'u2btn_ct'),
btn = ebi('u2btn');
//console.log([wpx, fpx, wem]);
if (btn.parentNode !== parent) {
parent.appendChild(btn);
ebi('u2conf').setAttribute('class', wide ? 'has_btn' : '');
}
}
window.onresize = onresize;
onresize();
function desc_show(ev) {
var msg = this.getAttribute('alt');
msg = msg.replace(/\$N/g, "<br />");
var cdesc = ebi('u2cdesc');
cdesc.innerHTML = msg;
cdesc.setAttribute('class', 'show');
}
function desc_hide(ev) {
ebi('u2cdesc').setAttribute('class', '');
}
var o = document.querySelectorAll('#u2conf *[alt]');
for (var a = o.length - 1; a >= 0; a--) {
o[a].parentNode.getElementsByTagName('input')[0].setAttribute('alt', o[a].getAttribute('alt'));
}
var o = document.querySelectorAll('#u2conf *[alt]');
for (var a = 0; a < o.length; a++) {
o[a].onfocus = desc_show;
o[a].onblur = desc_hide;
o[a].onmouseenter = desc_show;
o[a].onmouseleave = desc_hide;
}
function bumpthread(dir) {
try {
dir.stopPropagation();
@@ -827,7 +1032,7 @@ function up2k_init(have_crypto) {
return;
parallel_uploads = v;
localStorage.setItem('nthread', v);
swrite('nthread', v);
obj.style.background = '#444';
return;
}
@@ -854,6 +1059,65 @@ function up2k_init(have_crypto) {
bcfg_set('ask_up', ask_up);
}
function tgl_fsearch() {
set_fsearch(!fsearch);
}
function set_fsearch(new_state) {
var perms = document.body.getAttribute('perms');
var read_only = false;
if (!ebi('fsearch')) {
new_state = false;
}
else if (perms && perms.indexOf('write') === -1) {
new_state = true;
read_only = true;
}
if (new_state !== undefined) {
fsearch = new_state;
bcfg_set('fsearch', fsearch);
}
try {
document.querySelector('label[for="fsearch"]').style.opacity = read_only ? '0' : '1';
}
catch (ex) { }
try {
var fun = fsearch ? 'add' : 'remove';
ebi('op_up2k').classList[fun]('srch');
var ico = fsearch ? '🔎' : '🚀';
var desc = fsearch ? 'Search' : 'Upload';
ebi('u2bm').innerHTML = ico + ' <sup>' + desc + '</sup>';
}
catch (ex) { }
}
function tgl_flag_en() {
flag_en = !flag_en;
bcfg_set('flag_en', flag_en);
apply_flag_cfg();
}
function apply_flag_cfg() {
if (flag_en && !flag) {
try {
flag = up2k_flagbus();
}
catch (ex) {
console.log("flag error: " + ex.toString());
tgl_flag_en();
}
}
else if (!flag_en && flag) {
flag.ch.close();
flag = false;
}
}
function nop(ev) {
ev.preventDefault();
this.click();
@@ -871,12 +1135,27 @@ function up2k_init(have_crypto) {
ebi('nthread').addEventListener('input', bumpthread, false);
ebi('multitask').addEventListener('click', tgl_multitask, false);
ebi('ask_up').addEventListener('click', tgl_ask_up, false);
ebi('flag_en').addEventListener('click', tgl_flag_en, false);
var o = ebi('fsearch');
if (o)
o.addEventListener('click', tgl_fsearch, false);
var nodes = ebi('u2conf').getElementsByTagName('a');
for (var a = nodes.length - 1; a >= 0; a--)
nodes[a].addEventListener('touchend', nop, false);
set_fsearch();
bumpthread({ "target": 1 })
return { "init_deps": init_deps }
return { "init_deps": init_deps, "set_fsearch": set_fsearch }
}
function warn_uploader_busy(e) {
e.preventDefault();
e.returnValue = '';
return "upload in progress, click abort and use the file-tree to navigate instead";
}
if (document.querySelector('#op_up2k.act'))
goto_up2k();

View File

@@ -1,92 +1,4 @@
.opview {
display: none;
}
.opview.act {
display: block;
}
#ops a {
color: #fc5;
font-size: 1.5em;
padding: 0 .3em;
margin: 0;
outline: none;
}
#ops a.act {
text-decoration: underline;
}
/*
#ops a+a:after,
#ops a:first-child:after {
content: 'x';
color: #282828;
text-shadow: 0 0 .08em #01a7e1;
margin-left: .3em;
position: relative;
}
#ops a+a:before {
content: 'x';
color: #282828;
text-shadow: 0 0 .08em #ff3f1a;
margin-right: .3em;
margin-left: -.3em;
}
#ops a:last-child:after {
content: '';
}
#ops a.act:before,
#ops a.act:after {
text-decoration: none !important;
}
*/
#ops i {
font-size: 1.5em;
}
#ops i:before {
content: 'x';
color: #282828;
text-shadow: 0 0 .08em #01a7e1;
position: relative;
}
#ops i:after {
content: 'x';
color: #282828;
text-shadow: 0 0 .08em #ff3f1a;
margin-left: -.35em;
font-size: 1.05em;
}
#ops,
.opbox {
border: 1px solid #3a3a3a;
box-shadow: 0 0 1em #222 inset;
}
#ops {
display: none;
background: #333;
margin: 1.7em 1.5em 0 1.5em;
padding: .3em .6em;
border-radius: .3em;
border-width: .15em 0;
}
.opbox {
background: #2d2d2d;
margin: 1.5em 0 0 0;
padding: .5em;
border-radius: 0 1em 1em 0;
border-width: .15em .3em .3em 0;
max-width: 40em;
}
.opbox input {
margin: .5em;
}
.opbox input[type=text] {
color: #fff;
background: #383838;
border: none;
box-shadow: 0 0 .3em #222;
border-bottom: 1px solid #fc5;
border-radius: .2em;
padding: .2em .3em;
}
#op_up2k {
padding: 0 1em 1em 1em;
}
@@ -94,6 +6,9 @@
position: absolute;
top: 0;
left: 0;
width: 2px;
height: 2px;
overflow: hidden;
}
#u2form input {
background: #444;
@@ -104,11 +19,6 @@
color: #f87;
padding: .5em;
}
#u2form {
width: 2px;
height: 2px;
overflow: hidden;
}
#u2btn {
color: #eee;
background: #555;
@@ -117,17 +27,27 @@
background: linear-gradient(to bottom, #367 0%, #489 50%, #38788a 51%, #367 100%);
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#489', endColorstr='#38788a', GradientType=0);
text-decoration: none;
line-height: 1.5em;
line-height: 1.3em;
border: 1px solid #222;
border-radius: .4em;
text-align: center;
font-size: 2em;
margin: 1em auto;
padding: 1em 0;
width: 12em;
font-size: 1.5em;
margin: .5em auto;
padding: .8em 0;
width: 16em;
cursor: pointer;
box-shadow: .4em .4em 0 #111;
}
#op_up2k.srch #u2btn {
background: linear-gradient(to bottom, #ca3 0%, #fd8 50%, #fc6 51%, #b92 100%);
text-shadow: 1px 1px 1px #fc6;
color: #333;
}
#u2conf #u2btn {
margin: -1.5em 0;
padding: .8em 0;
width: 100%;
}
#u2notbtn {
display: none;
text-align: center;
@@ -142,6 +62,9 @@
width: calc(100% - 2em);
max-width: 100em;
}
#u2form.srch #u2tab {
max-width: none;
}
#u2tab td {
border: 1px solid #ccc;
border-width: 0 0px 1px 0;
@@ -153,12 +76,19 @@
#u2tab td:nth-child(3) {
width: 40%;
}
#u2form.srch #u2tab td:nth-child(3) {
font-family: sans-serif;
width: auto;
}
#u2tab tr+tr:hover td {
background: #222;
}
#u2conf {
margin: 1em auto;
width: 26em;
width: 30em;
}
#u2conf.has_btn {
width: 46em;
}
#u2conf * {
text-align: center;
@@ -194,16 +124,72 @@
#u2conf input+a {
background: #d80;
}
#u2conf label {
font-size: 1.6em;
width: 2em;
height: 1em;
padding: .4em 0;
display: block;
user-select: none;
border-radius: .25em;
}
#u2conf input[type="checkbox"] {
position: relative;
opacity: .02;
top: 2em;
}
#u2conf input[type="checkbox"]+label {
color: #f5a;
position: relative;
background: #603;
border-bottom: .2em solid #a16;
box-shadow: 0 .1em .3em #a00 inset;
}
#u2conf input[type="checkbox"]:checked+label {
color: #fc5;
background: #6a1;
border-bottom: .2em solid #efa;
box-shadow: 0 .1em .5em #0c0;
}
#u2conf input[type="checkbox"]+label:hover {
box-shadow: 0 .1em .3em #fb0;
border-color: #fb0;
}
#op_up2k.srch #u2conf td:nth-child(1)>*,
#op_up2k.srch #u2conf td:nth-child(2)>*,
#op_up2k.srch #u2conf td:nth-child(3)>* {
background: #777;
border-color: #ccc;
box-shadow: none;
opacity: .2;
}
#u2cdesc {
position: absolute;
width: 34em;
left: calc(50% - 15em);
background: #222;
border: 0 solid #555;
text-align: center;
overflow: hidden;
margin: 0 -2em;
height: 0;
padding: 0 1em;
opacity: .1;
transition: all 0.14s ease-in-out;
border-radius: .4em;
box-shadow: 0 .2em .5em #222;
}
#u2cdesc.show {
padding: 1em;
height: auto;
border-width: .2em 0;
opacity: 1;
}
#u2foot {
color: #fff;
font-style: italic;
}
#u2footfoot {
margin-bottom: -1em;
}
.prog {
font-family: monospace;
}
@@ -225,3 +211,13 @@
bottom: 0;
background: #0a0;
}
#u2tab a>span {
font-weight: bold;
font-style: italic;
color: #fff;
padding-left: .2em;
}
#u2cleanup {
float: right;
margin-bottom: -.3em;
}

View File

@@ -1,14 +1,7 @@
<div id="ops"><a
href="#" data-dest="">---</a><i></i><a
href="#" data-dest="up2k">up2k</a><i></i><a
href="#" data-dest="bup">bup</a><i></i><a
href="#" data-dest="mkdir">mkdir</a><i></i><a
href="#" data-dest="new_md">new.md</a><i></i><a
href="#" data-dest="msg">msg</a></div>
<div id="op_bup" class="opview opbox act">
<div id="u2err"></div>
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8">
<input type="hidden" name="act" value="bput" />
<input type="file" name="f" multiple><br />
<input type="submit" value="start upload">
@@ -16,7 +9,7 @@
</div>
<div id="op_mkdir" class="opview opbox act">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8">
<input type="hidden" name="act" value="mkdir" />
<input type="text" name="name" size="30">
<input type="submit" value="mkdir">
@@ -24,7 +17,7 @@
</div>
<div id="op_new_md" class="opview opbox">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8">
<input type="hidden" name="act" value="new_md" />
<input type="text" name="name" size="30">
<input type="submit" value="create doc">
@@ -32,9 +25,9 @@
</div>
<div id="op_msg" class="opview opbox">
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="/{{ vdir }}">
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8">
<input type="text" name="msg" size="30">
<input type="submit" value="send">
<input type="submit" value="send msg">
</form>
</div>
@@ -44,6 +37,25 @@
<table id="u2conf">
<tr>
<td>parallel uploads</td>
<td rowspan="2">
<input type="checkbox" id="multitask" />
<label for="multitask" alt="continue hashing other files while uploading">🏃</label>
</td>
<td rowspan="2">
<input type="checkbox" id="ask_up" />
<label for="ask_up" alt="ask for confirmation befofre upload starts">💭</label>
</td>
<td rowspan="2">
<input type="checkbox" id="flag_en" />
<label for="flag_en" alt="ensure only one tab is uploading at a time $N (other tabs must have this enabled too)">💤</label>
</td>
{%- if have_up2k_idx %}
<td data-perm="read" rowspan="2">
<input type="checkbox" id="fsearch" />
<label for="fsearch" alt="don't actually upload, instead check if the files already $N exist on the server (will scan all folders you can read)">🔎</label>
</td>
{%- endif %}
<td data-perm="read" rowspan="2" id="u2btn_cw"></td>
</tr>
<tr>
<td>
@@ -51,32 +63,29 @@
<input class="txtbox" id="nthread" value="2" />
<a href="#" id="nthread_add">+</a>
</td>
<td rowspan="2" style="padding-left:1.5em">
<input type="checkbox" id="multitask" />
<label for="multitask">hash while<br />uploading</label>
</td>
<td rowspan="2">
<input type="checkbox" id="ask_up" />
<label for="ask_up">ask for<br />confirmation</label>
</td>
</tr>
</table>
<div id="u2cdesc"></div>
<div id="u2notbtn"></div>
<div id="u2btn">
drop files here<br />
(or click me)
<div id="u2btn_ct">
<div id="u2btn">
<span id="u2bm"></span><br />
drop files here<br />
(or click me)
</div>
</div>
<table id="u2tab">
<tr>
<td>filename</td>
<td>status</td>
<td>progress</td>
<td>progress<a href="#" id="u2cleanup">cleanup</a></td>
</tr>
</table>
<p id="u2foot"></p>
<p>( if you don't need lastmod timestamps, resumable uploads or progress bars just use the <a href="#" id="u2nope">basic uploader</a>)</p>
<p id="u2footfoot">( if you don't need lastmod timestamps, resumable uploads or progress bars just use the <a href="#" id="u2nope">basic uploader</a>)</p>
</div>

View File

@@ -43,6 +43,21 @@ function ebi(id) {
return document.getElementById(id);
}
function ev(e) {
e = e || window.event;
if (!e)
return;
if (e.preventDefault)
e.preventDefault()
if (e.stopPropagation)
e.stopPropagation();
e.returnValue = false;
return e;
}
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
if (!String.prototype.endsWith) {
@@ -76,20 +91,25 @@ function import_js(url, cb) {
function sortTable(table, col) {
var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows
var tb = table.tBodies[0],
th = table.tHead.rows[0].cells,
tr = Array.prototype.slice.call(tb.rows, 0),
i, reverse = th[col].className == 'sort1' ? -1 : 1;
i, reverse = th[col].className.indexOf('sort1') !== -1 ? -1 : 1;
for (var a = 0, thl = th.length; a < thl; a++)
th[a].className = '';
th[col].className = 'sort' + reverse;
th[a].className = th[a].className.replace(/ *sort-?1 */, " ");
th[col].className += ' sort' + reverse;
var stype = th[col].getAttribute('sort');
tr = tr.sort(function (a, b) {
var v1 = a.cells[col].textContent.trim();
var v2 = b.cells[col].textContent.trim();
if (!a.cells[col])
return -1;
if (!b.cells[col])
return 1;
var v1 = a.cells[col].getAttribute('sortv') || a.cells[col].textContent.trim();
var v2 = b.cells[col].getAttribute('sortv') || b.cells[col].textContent.trim();
if (stype == 'int') {
v1 = parseInt(v1.replace(/,/g, ''));
v2 = parseInt(v2.replace(/,/g, ''));
v1 = parseInt(v1.replace(/,/g, '')) || 0;
v2 = parseInt(v2.replace(/,/g, '')) || 0;
return reverse * (v1 - v2);
}
return reverse * (v1.localeCompare(v2));
@@ -102,8 +122,162 @@ function makeSortable(table) {
if (th) i = th.length;
else return; // if no `<thead>` then do nothing
while (--i >= 0) (function (i) {
th[i].onclick = function () {
th[i].onclick = function (e) {
ev(e);
sortTable(table, i);
};
}(i));
}
}
(function () {
var ops = document.querySelectorAll('#ops>a');
for (var a = 0; a < ops.length; a++) {
ops[a].onclick = opclick;
}
})();
function opclick(e) {
ev(e);
var dest = this.getAttribute('data-dest');
goto(dest);
swrite('opmode', dest || undefined);
var input = document.querySelector('.opview.act input:not([type="hidden"])')
if (input)
input.focus();
}
function goto(dest) {
var obj = document.querySelectorAll('.opview.act');
for (var a = obj.length - 1; a >= 0; a--)
obj[a].classList.remove('act');
obj = document.querySelectorAll('#ops>a');
for (var a = obj.length - 1; a >= 0; a--)
obj[a].classList.remove('act');
var others = ['path', 'files', 'widget'];
for (var a = 0; a < others.length; a++)
ebi(others[a]).classList.remove('hidden');
if (dest) {
var ui = ebi('op_' + dest);
ui.classList.add('act');
document.querySelector('#ops>a[data-dest=' + dest + ']').classList.add('act');
var fn = window['goto_' + dest];
if (fn)
fn();
}
}
(function () {
goto();
var op = sread('opmode');
if (op !== null && op !== '.')
goto(op);
})();
function linksplit(rp) {
var ret = [];
var apath = '/';
if (rp && rp.charAt(0) == '/')
rp = rp.slice(1);
while (rp) {
var link = rp;
var ofs = rp.indexOf('/');
if (ofs === -1) {
rp = null;
}
else {
link = rp.slice(0, ofs + 1);
rp = rp.slice(ofs + 1);
}
var vlink = link;
if (link.indexOf('/') !== -1)
vlink = link.slice(0, -1) + '<span>/</span>';
ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
apath += link;
}
return ret;
}
function get_evpath() {
var ret = document.location.pathname;
if (ret.indexOf('/') !== 0)
ret = '/' + ret;
if (ret.lastIndexOf('/') !== ret.length - 1)
ret += '/';
return ret;
}
function get_vpath() {
return decodeURIComponent(get_evpath());
}
function unix2iso(ts) {
return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
}
function s2ms(s) {
var m = Math.floor(s / 60);
return m + ":" + ("0" + (s - m * 60)).slice(-2);
}
function has(haystack, needle) {
for (var a = 0; a < haystack.length; a++)
if (haystack[a] == needle)
return true;
return false;
}
function sread(key) {
if (window.localStorage)
return localStorage.getItem(key);
return '';
}
function swrite(key, val) {
if (window.localStorage) {
if (val === undefined)
localStorage.removeItem(key);
else
localStorage.setItem(key, val);
}
}
function jread(key, fb) {
var str = sread(key);
if (!str)
return fb;
return JSON.parse(str);
}
function jwrite(key, val) {
if (!val)
swrite(key);
else
swrite(key, JSON.stringify(val));
}

View File

@@ -1,12 +1,10 @@
FROM alpine:3.11
FROM alpine:3.13
WORKDIR /z
ENV ver_asmcrypto=2821dd1dedd1196c378f5854037dda5c869313f3 \
ver_markdownit=10.0.0 \
ver_showdown=1.9.1 \
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_marked=1.1.0 \
ver_ogvjs=1.6.1 \
ver_mde=2.10.1 \
ver_codemirror=5.53.2 \
ver_ogvjs=1.8.0 \
ver_mde=2.14.0 \
ver_codemirror=5.59.3 \
ver_fontawesome=5.13.0 \
ver_zopfli=1.0.3
@@ -17,7 +15,7 @@ RUN mkdir -p /z/dist/no-pk \
&& wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \
&& apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \
&& wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \
&& wget https://github.com/asmcrypto/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
&& wget https://github.com/openpgpjs/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
&& wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \
&& wget https://github.com/codemirror/CodeMirror/archive/$ver_codemirror.tar.gz -O codemirror.tgz \
@@ -52,6 +50,7 @@ RUN tar -xf zopfli.tgz \
-S . \
&& make -C build \
&& make -C build install \
&& python3 -m ensurepip \
&& python3 -m pip install fonttools zopfli

View File

@@ -1,6 +1,6 @@
diff -NarU2 CodeMirror-orig/mode/gfm/gfm.js CodeMirror-edit/mode/gfm/gfm.js
--- CodeMirror-orig/mode/gfm/gfm.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/mode/gfm/gfm.js 2020-05-02 02:13:32.142131800 +0200
diff -NarU2 codemirror-5.59.3-orig/mode/gfm/gfm.js codemirror-5.59.3/mode/gfm/gfm.js
--- codemirror-5.59.3-orig/mode/gfm/gfm.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/mode/gfm/gfm.js 2021-02-21 20:42:02.166174775 +0000
@@ -97,5 +97,5 @@
}
}
@@ -15,9 +15,9 @@ diff -NarU2 CodeMirror-orig/mode/gfm/gfm.js CodeMirror-edit/mode/gfm/gfm.js
+ }*/
stream.next();
return null;
diff -NarU2 CodeMirror-orig/mode/meta.js CodeMirror-edit/mode/meta.js
--- CodeMirror-orig/mode/meta.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/mode/meta.js 2020-05-02 03:56:58.852408400 +0200
diff -NarU2 codemirror-5.59.3-orig/mode/meta.js codemirror-5.59.3/mode/meta.js
--- codemirror-5.59.3-orig/mode/meta.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/mode/meta.js 2021-02-21 20:42:54.798742821 +0000
@@ -13,4 +13,5 @@
CodeMirror.modeInfo = [
@@ -28,7 +28,7 @@ diff -NarU2 CodeMirror-orig/mode/meta.js CodeMirror-edit/mode/meta.js
{name: "Gas", mime: "text/x-gas", mode: "gas", ext: ["s"]},
{name: "Gherkin", mime: "text/x-feature", mode: "gherkin", ext: ["feature"]},
+ */
{name: "GitHub Flavored Markdown", mime: "text/x-gfm", mode: "gfm", file: /^(readme|contributing|history).md$/i},
{name: "GitHub Flavored Markdown", mime: "text/x-gfm", mode: "gfm", file: /^(readme|contributing|history)\.md$/i},
+ /*
{name: "Go", mime: "text/x-go", mode: "go", ext: ["go"]},
{name: "Groovy", mime: "text/x-groovy", mode: "groovy", ext: ["groovy", "gradle"], file: /^Jenkinsfile$/},
@@ -56,16 +56,16 @@ diff -NarU2 CodeMirror-orig/mode/meta.js CodeMirror-edit/mode/meta.js
+ /*
{name: "XQuery", mime: "application/xquery", mode: "xquery", ext: ["xy", "xquery"]},
{name: "Yacas", mime: "text/x-yacas", mode: "yacas", ext: ["ys"]},
@@ -171,4 +180,5 @@
{name: "xu", mime: "text/x-xu", mode: "mscgen", ext: ["xu"]},
{name: "msgenny", mime: "text/x-msgenny", mode: "mscgen", ext: ["msgenny"]}
@@ -172,4 +181,5 @@
{name: "msgenny", mime: "text/x-msgenny", mode: "mscgen", ext: ["msgenny"]},
{name: "WebAssembly", mime: "text/webassembly", mode: "wast", ext: ["wat", "wast"]},
+ */
];
// Ensure all modes have a mime property for backwards compatibility
diff -NarU2 CodeMirror-orig/src/display/selection.js CodeMirror-edit/src/display/selection.js
--- CodeMirror-orig/src/display/selection.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/src/display/selection.js 2020-05-02 03:27:30.144662800 +0200
@@ -83,29 +83,21 @@
diff -NarU2 codemirror-5.59.3-orig/src/display/selection.js codemirror-5.59.3/src/display/selection.js
--- codemirror-5.59.3-orig/src/display/selection.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/display/selection.js 2021-02-21 20:44:14.860894328 +0000
@@ -84,29 +84,21 @@
let order = getOrder(lineObj, doc.direction)
iterateBidiSections(order, fromArg || 0, toArg == null ? lineLen : toArg, (from, to, dir, i) => {
- let ltr = dir == "ltr"
@@ -105,24 +105,24 @@ diff -NarU2 CodeMirror-orig/src/display/selection.js CodeMirror-edit/src/display
+ botRight = openEnd && last ? rightSide : toPos.right
add(topLeft, fromPos.top, topRight - topLeft, fromPos.bottom)
if (fromPos.bottom < toPos.top) add(leftSide, fromPos.bottom, null, toPos.top)
diff -NarU2 CodeMirror-orig/src/input/ContentEditableInput.js CodeMirror-edit/src/input/ContentEditableInput.js
--- CodeMirror-orig/src/input/ContentEditableInput.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/src/input/ContentEditableInput.js 2020-05-02 03:33:05.707995500 +0200
@@ -391,4 +391,5 @@
diff -NarU2 codemirror-5.59.3-orig/src/input/ContentEditableInput.js codemirror-5.59.3/src/input/ContentEditableInput.js
--- codemirror-5.59.3-orig/src/input/ContentEditableInput.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/input/ContentEditableInput.js 2021-02-21 20:44:33.273953867 +0000
@@ -399,4 +399,5 @@
let info = mapFromLineView(view, line, pos.line)
+ /*
let order = getOrder(line, cm.doc.direction), side = "left"
if (order) {
@@ -396,4 +397,5 @@
@@ -404,4 +405,5 @@
side = partPos % 2 ? "right" : "left"
}
+ */
let result = nodeAndOffsetInLineMap(info.map, pos.ch, side)
result.offset = result.collapse == "right" ? result.end : result.start
diff -NarU2 CodeMirror-orig/src/input/movement.js CodeMirror-edit/src/input/movement.js
--- CodeMirror-orig/src/input/movement.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/src/input/movement.js 2020-05-02 03:31:19.710773500 +0200
diff -NarU2 codemirror-5.59.3-orig/src/input/movement.js codemirror-5.59.3/src/input/movement.js
--- codemirror-5.59.3-orig/src/input/movement.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/input/movement.js 2021-02-21 20:45:12.763093671 +0000
@@ -15,4 +15,5 @@
export function endOfLine(visually, cm, lineObj, lineNo, dir) {
@@ -146,9 +146,9 @@ diff -NarU2 CodeMirror-orig/src/input/movement.js CodeMirror-edit/src/input/move
return null
+ */
}
diff -NarU2 CodeMirror-orig/src/line/line_data.js CodeMirror-edit/src/line/line_data.js
--- CodeMirror-orig/src/line/line_data.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/src/line/line_data.js 2020-05-02 03:17:02.785065000 +0200
diff -NarU2 codemirror-5.59.3-orig/src/line/line_data.js codemirror-5.59.3/src/line/line_data.js
--- codemirror-5.59.3-orig/src/line/line_data.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/line/line_data.js 2021-02-21 20:45:36.472549599 +0000
@@ -79,6 +79,6 @@
// Optionally wire in some hacks into the token-rendering
// algorithm, to deal with browser quirks.
@@ -158,9 +158,9 @@ diff -NarU2 CodeMirror-orig/src/line/line_data.js CodeMirror-edit/src/line/line_
+ // builder.addToken = buildTokenBadBidi(builder.addToken, order)
builder.map = []
let allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line)
diff -NarU2 CodeMirror-orig/src/measurement/position_measurement.js CodeMirror-edit/src/measurement/position_measurement.js
--- CodeMirror-orig/src/measurement/position_measurement.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/src/measurement/position_measurement.js 2020-05-02 03:35:20.674159600 +0200
diff -NarU2 codemirror-5.59.3-orig/src/measurement/position_measurement.js codemirror-5.59.3/src/measurement/position_measurement.js
--- codemirror-5.59.3-orig/src/measurement/position_measurement.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/measurement/position_measurement.js 2021-02-21 20:50:52.372945293 +0000
@@ -380,5 +380,6 @@
sticky = "after"
}
@@ -199,9 +199,9 @@ diff -NarU2 CodeMirror-orig/src/measurement/position_measurement.js CodeMirror-e
+*/
let measureText
diff -NarU2 CodeMirror-orig/src/util/bidi.js CodeMirror-edit/src/util/bidi.js
--- CodeMirror-orig/src/util/bidi.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/src/util/bidi.js 2020-05-02 03:12:44.418649800 +0200
diff -NarU2 codemirror-5.59.3-orig/src/util/bidi.js codemirror-5.59.3/src/util/bidi.js
--- codemirror-5.59.3-orig/src/util/bidi.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/util/bidi.js 2021-02-21 20:52:18.168092225 +0000
@@ -4,5 +4,5 @@
export function iterateBidiSections(order, from, to, f) {
@@ -239,20 +239,19 @@ diff -NarU2 CodeMirror-orig/src/util/bidi.js CodeMirror-edit/src/util/bidi.js
+ var fun = function(str, direction) {
let outerType = direction == "ltr" ? "L" : "R"
@@ -204,12 +210,16 @@
@@ -204,5 +210,11 @@
return direction == "rtl" ? order.reverse() : order
}
-})()
+ return function(str, direction) {
+ var ret = fun(str, direction);
+ console.log("bidiOrdering inner ([%s], %s) => [%s]", str, direction, ret);
+ return ret;
+ }
+})()
})()
+*/
// Get the bidi ordering for the given line (and cache it). Returns
// false for lines that are fully left-to-right, and an array of
@@ -210,6 +222,4 @@
// BidiSpan objects otherwise.
export function getOrder(line, direction) {
- let order = line.order
@@ -260,9 +259,9 @@ diff -NarU2 CodeMirror-orig/src/util/bidi.js CodeMirror-edit/src/util/bidi.js
- return order
+ return false;
}
diff -NarU2 CodeMirror-orig/src/util/feature_detection.js CodeMirror-edit/src/util/feature_detection.js
--- CodeMirror-orig/src/util/feature_detection.js 2020-04-21 12:47:20.000000000 +0200
+++ CodeMirror-edit/src/util/feature_detection.js 2020-05-02 03:16:21.085621400 +0200
diff -NarU2 codemirror-5.59.3-orig/src/util/feature_detection.js codemirror-5.59.3/src/util/feature_detection.js
--- codemirror-5.59.3-orig/src/util/feature_detection.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/util/feature_detection.js 2021-02-21 20:49:22.191269270 +0000
@@ -25,4 +25,5 @@
}

View File

@@ -1,33 +1,57 @@
diff -NarU2 easymde-orig/gulpfile.js easymde-mod1/gulpfile.js
--- easymde-orig/gulpfile.js 2020-04-06 14:09:36.000000000 +0200
+++ easymde-mod1/gulpfile.js 2020-05-01 14:33:52.260175200 +0200
diff -NarU2 easy-markdown-editor-2.14.0-orig/gulpfile.js easy-markdown-editor-2.14.0/gulpfile.js
--- easy-markdown-editor-2.14.0-orig/gulpfile.js 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/gulpfile.js 2021-02-21 20:55:37.134701007 +0000
@@ -25,5 +25,4 @@
'./node_modules/codemirror/lib/codemirror.css',
'./src/css/*.css',
- './node_modules/codemirror-spell-checker/src/css/spell-checker.css',
];
diff -NarU2 easymde-orig/package.json easymde-mod1/package.json
--- easymde-orig/package.json 2020-04-06 14:09:36.000000000 +0200
+++ easymde-mod1/package.json 2020-05-01 14:33:57.189975800 +0200
diff -NarU2 easy-markdown-editor-2.14.0-orig/package.json easy-markdown-editor-2.14.0/package.json
--- easy-markdown-editor-2.14.0-orig/package.json 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/package.json 2021-02-21 20:55:47.761190082 +0000
@@ -21,5 +21,4 @@
"dependencies": {
"codemirror": "^5.52.2",
"codemirror": "^5.59.2",
- "codemirror-spell-checker": "1.1.2",
"marked": "^0.8.2"
"marked": "^2.0.0"
},
diff -NarU2 easymde-orig/src/js/easymde.js easymde-mod1/src/js/easymde.js
--- easymde-orig/src/js/easymde.js 2020-04-06 14:09:36.000000000 +0200
+++ easymde-mod1/src/js/easymde.js 2020-05-01 14:34:19.878774400 +0200
@@ -11,5 +11,4 @@
diff -NarU2 easy-markdown-editor-2.14.0-orig/src/js/easymde.js easy-markdown-editor-2.14.0/src/js/easymde.js
--- easy-markdown-editor-2.14.0-orig/src/js/easymde.js 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/src/js/easymde.js 2021-02-21 20:57:09.143171536 +0000
@@ -12,5 +12,4 @@
require('codemirror/mode/gfm/gfm.js');
require('codemirror/mode/xml/xml.js');
-var CodeMirrorSpellChecker = require('codemirror-spell-checker');
var marked = require('marked/lib/marked');
@@ -1889,18 +1888,7 @@
@@ -1762,9 +1761,4 @@
options.autosave.uniqueId = options.autosave.unique_id;
- // If overlay mode is specified and combine is not provided, default it to true
- if (options.overlayMode && options.overlayMode.combine === undefined) {
- options.overlayMode.combine = true;
- }
-
// Update this options
this.options = options;
@@ -2003,28 +1997,7 @@
var mode, backdrop;
- // CodeMirror overlay mode
- if (options.overlayMode) {
- CodeMirror.defineMode('overlay-mode', function(config) {
- return CodeMirror.overlayMode(CodeMirror.getMode(config, options.spellChecker !== false ? 'spell-checker' : 'gfm'), options.overlayMode.mode, options.overlayMode.combine);
- });
-
- mode = 'overlay-mode';
- backdrop = options.parsingConfig;
- backdrop.gitHubSpice = false;
- } else {
mode = options.parsingConfig;
mode.name = 'gfm';
mode.gitHubSpice = false;
- }
- if (options.spellChecker !== false) {
- mode = 'spell-checker';
- backdrop = options.parsingConfig;
@@ -37,16 +61,28 @@ diff -NarU2 easymde-orig/src/js/easymde.js easymde-mod1/src/js/easymde.js
- CodeMirrorSpellChecker({
- codeMirrorInstance: CodeMirror,
- });
- } else {
mode = options.parsingConfig;
mode.name = 'gfm';
mode.gitHubSpice = false;
- }
// eslint-disable-next-line no-unused-vars
@@ -1927,5 +1915,4 @@
configureMouse: configureMouse,
inputStyle: (options.inputStyle != undefined) ? options.inputStyle : isMobile() ? 'contenteditable' : 'textarea',
- spellcheck: (options.nativeSpellcheck != undefined) ? options.nativeSpellcheck : true,
});
diff -NarU2 easy-markdown-editor-2.14.0-orig/types/easymde.d.ts easy-markdown-editor-2.14.0/types/easymde.d.ts
--- easy-markdown-editor-2.14.0-orig/types/easymde.d.ts 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/types/easymde.d.ts 2021-02-21 20:57:42.492620979 +0000
@@ -160,9 +160,4 @@
}
- interface OverlayModeOptions {
- mode: CodeMirror.Mode<any>
- combine?: boolean
- }
-
interface Options {
autoDownloadFontAwesome?: boolean;
@@ -214,7 +209,5 @@
promptTexts?: PromptTexts;
- syncSideBySidePreviewScroll?: boolean;
-
- overlayMode?: OverlayModeOptions
+ syncSideBySidePreviewScroll?: boolean
}
}

View File

@@ -86,6 +86,8 @@ function have() {
python -c "import $1; $1; $1.__version__"
}
mv copyparty/web/deps/marked.full.js.gz srv/ || true
. buildenv/bin/activate
have setuptools
have wheel

View File

@@ -35,6 +35,8 @@ ver="$1"
exit 1
}
mv copyparty/web/deps/marked.full.js.gz srv/ || true
mkdir -p dist
zip_path="$(pwd)/dist/copyparty-$ver.zip"
tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz"