Compare commits

..

42 Commits

Author SHA1 Message Date
ed
46e70d50b7 v0.7.0 2021-01-10 17:49:56 +01:00
ed
d64e9b85a7 prefer sqlite over registry snaps 2021-01-10 17:47:27 +01:00
ed
fb853edbe3 prevent index loss on mid-write crash 2021-01-10 17:16:55 +01:00
ed
cc076c1be1 persist/timeout incomplete uploads too 2021-01-10 16:47:35 +01:00
ed
98cc9a6755 mojibake support + exception handling 2021-01-10 09:48:26 +01:00
ed
7bd2b9c23a sqlite3 as up2k db + build index on boot + rproxy ip fix 2021-01-10 09:27:11 +01:00
ed
de724a1ff3 up2k: add volume flag to reject existing files 2021-01-09 15:20:02 +01:00
ed
2163055dae media-player: play links don't scroll on click 2021-01-09 14:40:56 +01:00
ed
93ed0fc10b v0.6.3 2021-01-07 01:09:32 +01:00
ed
0d98cefd40 fix dumb 2021-01-07 01:06:31 +01:00
ed
d58988a033 use sendfile when possible 2021-01-07 00:50:42 +01:00
ed
2acfab1e3f cleanup 2021-01-06 22:54:54 +01:00
ed
b915dfe9a6 nagle adds ~.2sec delay on last packet 2021-01-06 21:08:52 +00:00
ed
25bd5a823e fuse-client: add timestamps to logger 2021-01-06 17:40:42 +01:00
ed
1c35de4716 fuse-client: cache tweaks 2021-01-06 17:22:07 +01:00
ed
4c00435a0a fuse: add windows-explorer settings 2021-01-06 17:18:37 +01:00
ed
844e3079a8 saved for posterity 2021-01-06 17:13:24 +01:00
ed
4778cb5b2c readme: add quickstart 2021-01-02 22:57:48 +01:00
ed
ec5d60b919 fuse-client: fix directory parser 2021-01-01 21:54:56 +01:00
ed
e1f4b960e8 oh no 2020-12-20 02:33:37 +01:00
ed
669e46da54 update TODOs 2020-12-14 09:19:43 +01:00
ed
ba94cc5df7 v0.6.2 2020-12-14 04:28:21 +01:00
ed
d08245c3df v0.6.1 2020-12-14 03:51:24 +01:00
ed
5c18d12cbf self-upgrading upgrader... getting too meta 2020-12-14 03:45:59 +01:00
ed
580a42dec7 sfx-repack: support wget 2020-12-14 02:59:15 +01:00
ed
29286e159b up2k-client: ignore rejected dupes 2020-12-12 00:55:42 +01:00
ed
19bcf90e9f support uploads with huge filenames 2020-12-12 00:35:54 +01:00
ed
dae9c00742 always display world-readable subvolumes 2020-12-04 23:28:18 +01:00
ed
35324ceb7c tests: support windows 2020-12-04 23:26:46 +01:00
ed
5aadd47199 dodge python-bug #7980 2020-12-01 23:20:44 +01:00
ed
7d9057cc62 v0.6.0 2020-12-01 02:58:11 +01:00
ed
c4b322b883 this commit sponsored by eslint 2020-12-01 02:25:46 +01:00
ed
19b09c898a fix sfx repack whoops 2020-11-30 03:27:27 +01:00
ed
eafe2098b6 v0.5.7 2020-11-30 03:01:14 +01:00
ed
2bc6a20d71 md: poll server for changes 2020-11-30 03:00:44 +01:00
ed
8b502a7235 v0.5.6 2020-11-29 19:49:16 +01:00
ed
37567844af md: add render2 plugin func 2020-11-29 19:34:08 +01:00
ed
2f6c4e0e34 refactoring 2020-11-29 19:32:22 +01:00
ed
1c7cc4cb2b ignore border when sizing table 2020-11-29 18:48:55 +01:00
ed
f83db3648e git tag as sfx version 2020-11-28 20:02:20 +01:00
ed
b164aa00d4 md: fix eof scroll glitch 2020-11-27 21:25:52 +01:00
ed
a2d866d0c2 show plugin errors 2020-11-27 21:10:47 +01:00
36 changed files with 2522 additions and 502 deletions

12
.eslintrc.json Normal file
View File

@@ -0,0 +1,12 @@
{
"env": {
"browser": true,
"es2021": true
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaVersion": 12
},
"rules": {
}
}

2
.gitattributes vendored
View File

@@ -1,4 +1,6 @@
* text eol=lf
*.reg text eol=crlf
*.png binary
*.gif binary

4
.vscode/launch.json vendored
View File

@@ -12,10 +12,12 @@
//"-nw",
"-ed",
"-emp",
"-e2d",
"-e2s",
"-a",
"ed:wark",
"-v",
"srv::r:aed"
"srv::r:aed:cnodupe"
]
},
{

View File

@@ -13,6 +13,17 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* code standard: `black`
## quickstart
download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!
running the sfx without arguments (for example double-clicking it on Windows) will let anyone access the current folder; see `-h` for help if you want accounts and volumes etc
you may also want these, especially on servers:
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for legit https)
## notes
* iPhone/iPad: use Firefox to download files
@@ -126,13 +137,14 @@ in the `scripts` folder:
roughly sorted by priority
* up2k handle filename too long
* up2k fails on empty files? alert then stuck
* reduce up2k roundtrips
* start from a chunk index and just go
* terminate client on bad data
* drop onto folders
* look into android thumbnail cache file format
* `os.copy_file_range` for up2k cloning
* support pillow-simd
* cache sha512 chunks on client
* symlink existing files on upload
* comment field
* ~~look into android thumbnail cache file format~~ bad idea
* figure out the deal with pixel3a not being connectable as hotspot
* pixel3a having unpredictable 3sec latency in general :||||

View File

@@ -34,3 +34,8 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
* does the same thing except more correct, `samba` approves
* **supports Linux** -- expect `18 MiB/s` (wait what)
* **supports Macos** -- probably
# copyparty-fuse-streaming.py
* pretend this doesn't exist

1100
bin/copyparty-fuse-streaming.py Executable file

File diff suppressed because it is too large Load Diff

View File

@@ -12,7 +12,7 @@ __url__ = "https://github.com/9001/copyparty/"
mount a copyparty server (local or remote) as a filesystem
usage:
python copyparty-fuse.py ./music http://192.168.1.69:3923/
python copyparty-fuse.py http://192.168.1.69:3923/ ./music
dependencies:
python3 -m pip install --user fusepy
@@ -20,6 +20,10 @@ dependencies:
+ on Macos: https://osxfuse.github.io/
+ on Windows: https://github.com/billziss-gh/winfsp/releases/latest
note:
you probably want to run this on windows clients:
https://github.com/9001/copyparty/blob/master/contrib/explorer-nothumbs-nofoldertypes.reg
get server cert:
awk '/-BEGIN CERTIFICATE-/ {a=1} a; /-END CERTIFICATE-/{exit}' <(openssl s_client -connect 127.0.0.1:3923 </dev/null 2>/dev/null) >cert.pem
"""
@@ -100,7 +104,7 @@ def rice_tid():
def fancy_log(msg):
print("{} {}\n".format(rice_tid(), msg), end="")
print("{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg), end="")
def null_log(msg):
@@ -159,7 +163,7 @@ class RecentLog(object):
thr.start()
def put(self, msg):
msg = "{} {}\n".format(rice_tid(), msg)
msg = "{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg)
if self.f:
fmsg = " ".join([datetime.utcnow().strftime("%H%M%S.%f"), str(msg)])
self.f.write(fmsg.encode("utf-8"))
@@ -367,7 +371,7 @@ class Gateway(object):
ret = []
remainder = b""
ptn = re.compile(
r'^<tr><td>(-|DIR)</td><td><a[^>]* href="([^"]+)"[^>]*>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$'
r'^<tr><td>(-|DIR|<a [^<]+</a>)</td><td><a[^>]* href="([^"]+)"[^>]*>([^<]+)</a></td><td>([^<]+)</td><td>[^<]+</td><td>([^<]+)</td></tr>$'
)
while True:
@@ -405,7 +409,7 @@ class Gateway(object):
info("bad HTML or OS [{}] [{}]".format(fdate, fsize))
# python cannot strptime(1959-01-01) on windows
if ftype == "-":
if ftype != "DIR":
ret.append([fname, self.stat_file(ts, sz), 0])
else:
ret.append([fname, self.stat_dir(ts, sz), 0])
@@ -658,8 +662,18 @@ class CPPF(Operations):
else:
if get2 - get1 <= 1024 * 1024:
h_ofs = get1 - 256 * 1024
h_end = get2 + 1024 * 1024
# unless the request is for the last n bytes of the file,
# grow the start to cache some stuff around the range
if get2 < file_sz - 1:
h_ofs = get1 - 1024 * 256
else:
h_ofs = get1 - 1024 * 32
# likewise grow the end unless start is 0
if get1 > 0:
h_end = get2 + 1024 * 1024
else:
h_end = get2 + 1024 * 64
else:
# big enough, doesn't need pads
h_ofs = get1
@@ -705,6 +719,7 @@ class CPPF(Operations):
self.dircache.append(cn)
self.clean_dircache()
# import pprint; pprint.pprint(ret)
return ret
def readdir(self, path, fh=None):
@@ -802,7 +817,11 @@ class CPPF(Operations):
# dbg("=" + repr(cache_stat))
return cache_stat
info("=ENOENT ({})".format(hexler(path)))
fun = info
if MACOS and path.split('/')[-1].startswith('._'):
fun = dbg
fun("=ENOENT ({})".format(hexler(path)))
raise FuseOSError(errno.ENOENT)
access = None
@@ -906,6 +925,7 @@ class TheArgparseFormatter(
def main():
global info, log, dbg
time.strptime("19970815", "%Y%m%d") # python#7980
# filecache helps for reads that are ~64k or smaller;
# linux generally does 128k so the cache is a slowdown,

View File

@@ -567,6 +567,8 @@ class CPPF(Fuse):
def main():
time.strptime("19970815", "%Y%m%d") # python#7980
server = CPPF()
server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
server.parse(values=server, errex=1)

View File

@@ -9,6 +9,9 @@
* assumes the webserver and copyparty is running on the same server/IP
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript
### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
disables thumbnails and folder-type detection in windows explorer, makes it way faster — especially for slow/networked locations (such as copyparty-fuse)
# OS integration
init-scripts to start copyparty as a service
* [`systemd/copyparty.service`](systemd/copyparty.service)

View File

@@ -0,0 +1,31 @@
Windows Registry Editor Version 5.00
; this will do 3 things, all optional:
; 1) disable thumbnails
; 2) delete all existing folder type settings/detections
; 3) disable folder type detection (force default columns)
;
; this makes the file explorer way faster,
; especially on slow/networked locations
; =====================================================================
; 1) disable thumbnails
[HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Explorer\Advanced]
"IconsOnly"=dword:00000001
; =====================================================================
; 2) delete all existing folder type settings/detections
[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags]
[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\BagMRU]
; =====================================================================
; 3) disable folder type detection
[HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags\AllFolders\Shell]
"FolderType"="NotSpecified"

View File

@@ -9,6 +9,7 @@ __license__ = "MIT"
__url__ = "https://github.com/9001/copyparty/"
import os
import time
import shutil
import filecmp
import locale
@@ -85,6 +86,7 @@ def ensure_cert():
def main():
time.strptime("19970815", "%Y%m%d") # python#7980
if WINDOWS:
os.system("") # enables colors
@@ -103,17 +105,22 @@ def main():
epilog=dedent(
"""
-a takes username:password,
-v takes src:dst:permset:permset:... where "permset" is
accesslevel followed by username (no separator)
-v takes src:dst:permset:permset:cflag:cflag:...
where "permset" is accesslevel followed by username (no separator)
and "cflag" is config flags to set on this volume
list of cflags:
cnodupe rejects existing files (instead of symlinking them)
example:\033[35m
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed \033[36m
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
mount current directory at "/" with
* r (read-only) for everyone
* a (read+write) for ed
mount ../inc at "/dump" with
* w (write-only) for everyone
* a (read+write) for ed \033[0m
* a (read+write) for ed
* reject duplicate files \033[0m
if no accounts or volumes are configured,
current folder will be read/write for everyone
@@ -123,24 +130,26 @@ def main():
"""
),
)
ap.add_argument(
"-c", metavar="PATH", type=str, action="append", help="add config file"
)
# fmt: off
ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind")
ap.add_argument("-p", metavar="PORT", type=int, default=3923, help="port to bind")
ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
ap.add_argument(
"-j", metavar="CORES", type=int, default=1, help="max num cpu cores"
)
ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
ap.add_argument("-q", action="store_true", help="quiet")
ap.add_argument("-ed", action="store_true", help="enable ?dots")
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap.add_argument("-e2d", action="store_true", help="enable up2k database")
ap.add_argument("-e2s", action="store_true", help="enable up2k db-scanner")
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap.add_argument("-nih", action="store_true", help="no info hostname")
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
al = ap.parse_args()
# fmt: on
SvcHub(al).run()

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (0, 5, 5)
CODENAME = "fuse jelly"
BUILD_DT = (2020, 11, 27)
VERSION = (0, 7, 0)
CODENAME = "keeping track"
BUILD_DT = (2021, 1, 10)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -12,11 +12,12 @@ from .util import undot, Pebkac, fsdec, fsenc
class VFS(object):
"""single level in the virtual fs"""
def __init__(self, realpath, vpath, uread=[], uwrite=[]):
def __init__(self, realpath, vpath, uread=[], uwrite=[], flags={}):
self.realpath = realpath # absolute path on host filesystem
self.vpath = vpath # absolute path in the virtual filesystem
self.uread = uread # users who can read this
self.uwrite = uwrite # users who can write this
self.flags = flags # config switches
self.nodes = {} # child nodes
def add(self, src, dst):
@@ -36,6 +37,7 @@ class VFS(object):
"{}/{}".format(self.vpath, name).lstrip("/"),
self.uread,
self.uwrite,
self.flags,
)
self.nodes[name] = vn
return vn.add(src, dst)
@@ -104,7 +106,7 @@ class VFS(object):
real.sort()
if not rem:
for name, vn2 in sorted(self.nodes.items()):
if uname in vn2.uread:
if uname in vn2.uread or "*" in vn2.uread:
virt_vis[name] = vn2
# no vfs nodes in the list of real inodes
@@ -161,7 +163,7 @@ class AuthSrv(object):
yield prev, True
def _parse_config_file(self, fd, user, mread, mwrite, mount):
def _parse_config_file(self, fd, user, mread, mwrite, mflags, mount):
vol_src = None
vol_dst = None
for ln in [x.decode("utf-8").strip() for x in fd]:
@@ -191,6 +193,7 @@ class AuthSrv(object):
mount[vol_dst] = vol_src
mread[vol_dst] = []
mwrite[vol_dst] = []
mflags[vol_dst] = {}
continue
lvl, uname = ln.split(" ")
@@ -198,6 +201,9 @@ class AuthSrv(object):
mread[vol_dst].append(uname)
if lvl in "wa":
mwrite[vol_dst].append(uname)
if lvl == "c":
# config option, currently switches only
mflags[vol_dst][uname] = True
def reload(self):
"""
@@ -210,6 +216,7 @@ class AuthSrv(object):
user = {} # username:password
mread = {} # mountpoint:[username]
mwrite = {} # mountpoint:[username]
mflags = {} # mountpoint:[flag]
mount = {} # dst:src (mountpoint:realpath)
if self.args.a:
@@ -232,9 +239,13 @@ class AuthSrv(object):
mount[dst] = src
mread[dst] = []
mwrite[dst] = []
mflags[dst] = {}
perms = perms.split(":")
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
if lvl == "c":
# config option, currently switches only
mflags[dst][uname] = True
if uname == "":
uname = "*"
if lvl in "ra":
@@ -245,14 +256,15 @@ class AuthSrv(object):
if self.args.c:
for cfg_fn in self.args.c:
with open(cfg_fn, "rb") as f:
self._parse_config_file(f, user, mread, mwrite, mount)
self._parse_config_file(f, user, mread, mwrite, mflags, mount)
self.all_writable = []
if not mount:
# -h says our defaults are CWD at root and read/write for everyone
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
elif "" not in mount:
# there's volumes but no root; make root inaccessible
vfs = VFS(os.path.abspath("."), "", [], [])
vfs = VFS(os.path.abspath("."), "")
maxdepth = 0
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
@@ -262,12 +274,18 @@ class AuthSrv(object):
if dst == "":
# rootfs was mapped; fully replaces the default CWD vfs
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst])
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst], mflags[dst])
continue
v = vfs.add(mount[dst], dst)
v.uread = mread[dst]
v.uwrite = mwrite[dst]
v.flags = mflags[dst]
if v.uwrite:
self.all_writable.append(v)
if vfs.uwrite and vfs not in self.all_writable:
self.all_writable.append(vfs)
missing_users = {}
for d in [mread, mwrite]:

View File

@@ -28,6 +28,7 @@ class HttpCli(object):
self.conn = conn
self.s = conn.s
self.sr = conn.sr
self.ip = conn.addr[0]
self.addr = conn.addr
self.args = conn.args
self.auth = conn.auth
@@ -42,7 +43,7 @@ class HttpCli(object):
self.log_func(self.log_src, msg)
def _check_nonfatal(self, ex):
return ex.code in [404]
return ex.code < 400 or ex.code == 404
def _assert_safe_rem(self, rem):
# sanity check to prevent any disasters
@@ -85,7 +86,8 @@ class HttpCli(object):
v = self.headers.get("x-forwarded-for", None)
if v is not None and self.conn.addr[0] in ["127.0.0.1", "::1"]:
self.log_src = self.conn.set_rproxy(v.split(",")[0])
self.ip = v.split(",")[0]
self.log_src = self.conn.set_rproxy(self.ip)
self.uname = "*"
if "cookie" in self.headers:
@@ -305,7 +307,7 @@ class HttpCli(object):
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
fdir = os.path.join(vfs.realpath, rem)
addr = self.conn.addr[0].replace(":", ".")
addr = self.ip.replace(":", ".")
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
path = os.path.join(fdir, fn)
@@ -384,9 +386,11 @@ class HttpCli(object):
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
body["vdir"] = self.vpath
body["rdir"] = os.path.join(vfs.realpath, rem)
body["addr"] = self.addr[0]
body["vtop"] = vfs.vpath
body["ptop"] = vfs.realpath
body["prel"] = rem
body["addr"] = self.ip
body["flag"] = vfs.flags
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
response = x.get()
@@ -408,7 +412,10 @@ class HttpCli(object):
except KeyError:
raise Pebkac(400, "need hash and wark headers for binary POST")
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", wark, chash)
vfs, _ = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
ptop = vfs.realpath
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", ptop, wark, chash)
response = x.get()
chunksize, cstart, path, lastmod = response
@@ -453,8 +460,8 @@ class HttpCli(object):
self.log("clone {} done".format(cstart[0]))
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", wark, chash)
num_left = x.get()
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
num_left, path = x.get()
if not WINDOWS and num_left == 0:
times = (int(time.time()), int(lastmod))
@@ -568,24 +575,24 @@ class HttpCli(object):
self.log("discarding incoming file without filename")
# fallthrough
fn = os.devnull
if p_file and not nullwrite:
fdir = os.path.join(vfs.realpath, rem)
fn = os.path.join(fdir, sanitize_fn(p_file))
fname = sanitize_fn(p_file)
if not os.path.isdir(fsenc(fdir)):
raise Pebkac(404, "that folder does not exist")
# TODO broker which avoid this race and
# provides a new filename if taken (same as up2k)
if os.path.exists(fsenc(fn)):
fn += ".{:.6f}-{}".format(time.time(), self.addr[0])
# using current-time instead of t0 cause clients
# may reuse a name for multiple files in one post
suffix = ".{:.6f}-{}".format(time.time(), self.ip)
open_args = {"fdir": fdir, "suffix": suffix}
else:
open_args = {}
fname = os.devnull
fdir = ""
try:
with open(fsenc(fn), "wb") as f:
self.log("writing to {0}".format(fn))
with ren_open(fname, "wb", 512 * 1024, **open_args) as f:
f, fname = f["orz"]
self.log("writing to {}/{}".format(fdir, fname))
sz, sha512_hex, _ = hashcopy(self.conn, p_data, f)
if sz == 0:
raise Pebkac(400, "empty files in post")
@@ -594,8 +601,14 @@ class HttpCli(object):
self.conn.nbyte += sz
except Pebkac:
if fn != os.devnull:
os.rename(fsenc(fn), fsenc(fn + ".PARTIAL"))
if fname != os.devnull:
fp = os.path.join(fdir, fname)
suffix = ".PARTIAL"
try:
os.rename(fsenc(fp), fsenc(fp + suffix))
except:
fp = fp[: -len(suffix)]
os.rename(fsenc(fp), fsenc(fp + suffix))
raise
@@ -631,7 +644,7 @@ class HttpCli(object):
"\n".join(
unicode(x)
for x in [
":".join(unicode(x) for x in self.addr),
":".join(unicode(x) for x in [self.ip, self.addr[1]]),
msg.rstrip(),
]
)
@@ -680,7 +693,7 @@ class HttpCli(object):
return True
fp = os.path.join(vfs.realpath, rem)
srv_lastmod = -1
srv_lastmod = srv_lastmod3 = -1
try:
st = os.stat(fsenc(fp))
srv_lastmod = st.st_mtime
@@ -731,7 +744,7 @@ class HttpCli(object):
if p_field != "body":
raise Pebkac(400, "expected body, got {}".format(p_field))
with open(fp, "wb") as f:
with open(fp, "wb", 512 * 1024) as f:
sz, sha512, _ = hashcopy(self.conn, p_data, f)
new_lastmod = os.stat(fsenc(fp)).st_mtime
@@ -756,9 +769,12 @@ class HttpCli(object):
cli_dt = time.strptime(cli_lastmod, "%a, %d %b %Y %H:%M:%S GMT")
cli_ts = calendar.timegm(cli_dt)
return file_lastmod, int(file_ts) > int(cli_ts)
except:
self.log("bad lastmod format: {}".format(cli_lastmod))
self.log(" expected format: {}".format(file_lastmod))
except Exception as ex:
self.log(
"lastmod {}\nremote: [{}]\n local: [{}]".format(
repr(ex), cli_lastmod, file_lastmod
)
)
return file_lastmod, file_lastmod != cli_lastmod
return file_lastmod, True
@@ -875,6 +891,7 @@ class HttpCli(object):
logtail += " [\033[36m{}-{}\033[0m]".format(lower, upper)
use_sendfile = False
if decompress:
open_func = gzip.open
open_args = [fsenc(fs_path), "rb"]
@@ -884,6 +901,8 @@ class HttpCli(object):
open_func = open
# 512 kB is optimal for huge files, use 64k
open_args = [fsenc(fs_path), "rb", 64 * 1024]
if hasattr(os, "sendfile"):
use_sendfile = not self.args.no_sendfile
#
# send reply
@@ -906,24 +925,13 @@ class HttpCli(object):
ret = True
with open_func(*open_args) as f:
remains = upper - lower
f.seek(lower)
while remains > 0:
# time.sleep(0.01)
buf = f.read(4096)
if not buf:
break
if use_sendfile:
remains = sendfile_kern(lower, upper, f, self.s)
else:
remains = sendfile_py(lower, upper, f, self.s)
if remains < len(buf):
buf = buf[:remains]
try:
self.s.sendall(buf)
remains -= len(buf)
except:
logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
ret = False
break
if remains > 0:
logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
spd = self._spd((upper - lower) - remains)
self.log("{}, {}".format(logmsg, spd))
@@ -964,6 +972,7 @@ class HttpCli(object):
"title": html_escape(self.vpath),
"lastmod": int(ts_md * 1000),
"md_plug": "true" if self.args.emp else "false",
"md_chk_rate": self.args.mcr,
"md": "",
}
sz_html = len(template.render(**targs).encode("utf-8"))
@@ -1018,6 +1027,10 @@ class HttpCli(object):
if abspath.endswith(".md") and "raw" not in self.uparam:
return self.tx_md(abspath)
bad = "{0}.hist{0}up2k.".format(os.sep)
if abspath.endswith(bad + "db") or abspath.endswith(bad + "snap"):
raise Pebkac(403)
return self.tx_file(abspath)
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname)

View File

@@ -65,6 +65,7 @@ class HttpConn(object):
color = 34
self.rproxy = ip
self.ip = ip
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
return self.log_src

View File

@@ -9,6 +9,7 @@ from datetime import datetime, timedelta
import calendar
from .__init__ import PY2, WINDOWS, MACOS, VT100
from .authsrv import AuthSrv
from .tcpsrv import TcpSrv
from .up2k import Up2k
from .util import mp
@@ -38,6 +39,10 @@ class SvcHub(object):
self.tcpsrv = TcpSrv(self)
self.up2k = Up2k(self)
if self.args.e2d and self.args.e2s:
auth = AuthSrv(self.args, self.log)
self.up2k.build_indexes(auth.all_writable)
# decide which worker impl to use
if self.check_mp_enable():
from .broker_mp import BrokerMp as Broker

View File

@@ -36,6 +36,7 @@ class TcpSrv(object):
self.srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
try:
self.srv.bind((self.args.i, self.args.p))
except (OSError, socket.error) as ex:

View File

@@ -6,14 +6,25 @@ import os
import re
import time
import math
import json
import gzip
import stat
import shutil
import base64
import hashlib
import threading
from copy import deepcopy
from .__init__ import WINDOWS
from .util import Pebkac, Queue, fsenc, sanitize_fn
from .__init__ import WINDOWS, PY2
from .util import Pebkac, Queue, fsdec, fsenc, sanitize_fn, ren_open, atomic_move
HAVE_SQLITE3 = False
try:
import sqlite3
HAVE_SQLITE3 = True
except:
pass
class Up2k(object):
@@ -22,20 +33,21 @@ class Up2k(object):
* documentation
* registry persistence
* ~/.config flatfiles for active jobs
* wark->path database for finished uploads
"""
def __init__(self, broker):
self.broker = broker
self.args = broker.args
self.log = broker.log
self.persist = self.args.e2d
# config
self.salt = "hunter2" # TODO: config
# state
self.registry = {}
self.mutex = threading.Lock()
self.registry = {}
self.db = {}
if WINDOWS:
# usually fails to set lastmod too quickly
@@ -44,54 +56,291 @@ class Up2k(object):
thr.daemon = True
thr.start()
if self.persist:
thr = threading.Thread(target=self._snapshot)
thr.daemon = True
thr.start()
# static
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
if self.persist and not HAVE_SQLITE3:
m = "could not initialize sqlite3, will use in-memory registry only"
self.log("up2k", m)
def _vis_job_progress(self, job):
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
path = os.path.join(job["ptop"], job["prel"], job["name"])
return "{:5.1f}% {}".format(perc, path)
def _vis_reg_progress(self, reg):
ret = []
for _, job in reg.items():
ret.append(self._vis_job_progress(job))
return ret
def register_vpath(self, ptop):
with self.mutex:
if ptop in self.registry:
return None
reg = {}
path = os.path.join(ptop, ".hist", "up2k.snap")
if self.persist and os.path.exists(path):
with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8")
reg = json.loads(j)
for _, job in reg.items():
job["poke"] = time.time()
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
m = [m] + self._vis_reg_progress(reg)
self.log("up2k", "\n".join(m))
self.registry[ptop] = reg
if not self.persist or not HAVE_SQLITE3:
return None
try:
os.mkdir(os.path.join(ptop, ".hist"))
except:
pass
db_path = os.path.join(ptop, ".hist", "up2k.db")
if ptop in self.db:
# self.db[ptop].close()
return None
try:
db = self._open_db(db_path)
self.db[ptop] = db
return db
except Exception as ex:
m = "failed to open [{}]: {}".format(ptop, repr(ex))
self.log("up2k", m)
return None
def build_indexes(self, writeables):
tops = [d.realpath for d in writeables]
for top in tops:
db = self.register_vpath(top)
if db:
# can be symlink so don't `and d.startswith(top)``
excl = set([d for d in tops if d != top])
self._build_dir([db, 0], top, excl, top)
self._drop_lost(db, top)
db.commit()
def _build_dir(self, dbw, top, excl, cdir):
try:
inodes = [fsdec(x) for x in os.listdir(fsenc(cdir))]
except Exception as ex:
self.log("up2k", "listdir: " + repr(ex))
return
histdir = os.path.join(top, ".hist")
for inode in inodes:
abspath = os.path.join(cdir, inode)
try:
inf = os.stat(fsenc(abspath))
except Exception as ex:
self.log("up2k", "stat: " + repr(ex))
continue
if stat.S_ISDIR(inf.st_mode):
if abspath in excl or abspath == histdir:
continue
# self.log("up2k", " dir: {}".format(abspath))
self._build_dir(dbw, top, excl, abspath)
else:
# self.log("up2k", "file: {}".format(abspath))
rp = abspath[len(top) :].replace("\\", "/").strip("/")
c = dbw[0].execute("select * from up where rp = ?", (rp,))
in_db = list(c.fetchall())
if in_db:
_, dts, dsz, _ = in_db[0]
if len(in_db) > 1:
m = "WARN: multiple entries: [{}] => [{}] ({})"
self.log("up2k", m.format(top, rp, len(in_db)))
dts = -1
if dts == inf.st_mtime and dsz == inf.st_size:
continue
m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
top, rp, dts, inf.st_mtime, dsz, inf.st_size
)
self.log("up2k", m)
self.db_rm(dbw[0], rp)
dbw[1] += 1
in_db = None
self.log("up2k", "file: {}".format(abspath))
try:
hashes = self._hashlist_from_file(abspath)
except Exception as ex:
self.log("up2k", "hash: " + repr(ex))
continue
wark = self._wark_from_hashlist(inf.st_size, hashes)
self.db_add(dbw[0], wark, rp, inf.st_mtime, inf.st_size)
dbw[1] += 1
if dbw[1] > 1024:
dbw[0].commit()
dbw[1] = 0
def _drop_lost(self, db, top):
rm = []
c = db.execute("select * from up")
for dwark, dts, dsz, drp in c:
abspath = os.path.join(top, drp)
try:
if not os.path.exists(fsenc(abspath)):
rm.append(drp)
except Exception as ex:
self.log("up2k", "stat-rm: " + repr(ex))
if not rm:
return
self.log("up2k", "forgetting {} deleted files".format(len(rm)))
for rp in rm:
self.db_rm(db, rp)
def _open_db(self, db_path):
conn = sqlite3.connect(db_path, check_same_thread=False)
try:
c = conn.execute(r"select * from kv where k = 'sver'")
rows = c.fetchall()
if rows:
ver = rows[0][1]
else:
self.log("up2k", "WARN: no sver in kv, DB corrupt?")
ver = "unknown"
if ver == "1":
try:
nfiles = next(conn.execute("select count(w) from up"))[0]
self.log("up2k", "found DB at {} |{}|".format(db_path, nfiles))
return conn
except Exception as ex:
m = "WARN: could not list files, DB corrupt?\n " + repr(ex)
self.log("up2k", m)
m = "REPLACING unsupported DB (v.{}) at {}".format(ver, db_path)
self.log("up2k", m)
conn.close()
os.unlink(db_path)
conn = sqlite3.connect(db_path, check_same_thread=False)
except:
pass
# sqlite is variable-width only, no point in using char/nchar/varchar
for cmd in [
r"create table kv (k text, v text)",
r"create table up (w text, mt int, sz int, rp text)",
r"insert into kv values ('sver', '1')",
r"create index up_w on up(w)",
]:
conn.execute(cmd)
conn.commit()
self.log("up2k", "created DB at {}".format(db_path))
return conn
def handle_json(self, cj):
self.register_vpath(cj["ptop"])
cj["name"] = sanitize_fn(cj["name"])
cj["poke"] = time.time()
wark = self._get_wark(cj)
now = time.time()
job = None
with self.mutex:
# TODO use registry persistence here to symlink any matching wark
if wark in self.registry:
job = self.registry[wark]
if job["rdir"] != cj["rdir"] or job["name"] != cj["name"]:
src = os.path.join(job["rdir"], job["name"])
dst = os.path.join(cj["rdir"], cj["name"])
db = self.db.get(cj["ptop"], None)
reg = self.registry[cj["ptop"]]
if db:
cur = db.execute(r"select * from up where w = ?", (wark,))
for _, dtime, dsize, dp_rel in cur:
dp_abs = os.path.join(cj["ptop"], dp_rel).replace("\\", "/")
# relying on path.exists to return false on broken symlinks
if os.path.exists(fsenc(dp_abs)):
try:
prel, name = dp_rel.rsplit("/", 1)
except:
prel = ""
name = dp_rel
job = {
"name": name,
"prel": prel,
"vtop": cj["vtop"],
"ptop": cj["ptop"],
"flag": cj["flag"],
"size": dsize,
"lmod": dtime,
"hash": [],
"need": [],
}
break
if job and wark in reg:
del reg[wark]
if job or wark in reg:
job = job or reg[wark]
if job["prel"] != cj["prel"] or job["name"] != cj["name"]:
src = os.path.join(job["ptop"], job["prel"], job["name"])
dst = os.path.join(cj["ptop"], cj["prel"], cj["name"])
vsrc = os.path.join(job["vtop"], job["prel"], job["name"])
vsrc = vsrc.replace("\\", "/") # just for prints anyways
if job["need"]:
self.log("up2k", "unfinished:\n {0}\n {1}".format(src, dst))
err = "partial upload exists at a different location; please resume uploading here instead:\n{0}{1} ".format(
job["vdir"], job["name"]
)
err = "partial upload exists at a different location; please resume uploading here instead:\n"
err += vsrc + " "
raise Pebkac(400, err)
elif "nodupe" in job["flag"]:
self.log("up2k", "dupe-reject:\n {0}\n {1}".format(src, dst))
err = "upload rejected, file already exists:\n " + vsrc + " "
raise Pebkac(400, err)
else:
# symlink to the client-provided name,
# returning the previous upload info
job = deepcopy(job)
suffix = self._suffix(dst, now, job["addr"])
job["name"] = cj["name"] + suffix
self._symlink(src, dst + suffix)
else:
for k in ["ptop", "vtop", "prel"]:
job[k] = cj[k]
pdir = os.path.join(cj["ptop"], cj["prel"])
job["name"] = self._untaken(pdir, cj["name"], now, cj["addr"])
dst = os.path.join(job["ptop"], job["prel"], job["name"])
os.unlink(fsenc(dst)) # TODO ed pls
self._symlink(src, dst)
if not job:
job = {
"wark": wark,
"t0": now,
"addr": cj["addr"],
"vdir": cj["vdir"],
"rdir": cj["rdir"],
# client-provided, sanitized by _get_wark:
"name": cj["name"],
"size": cj["size"],
"lmod": cj["lmod"],
"hash": deepcopy(cj["hash"]),
"need": [],
}
path = os.path.join(job["rdir"], job["name"])
job["name"] += self._suffix(path, now, cj["addr"])
# client-provided, sanitized by _get_wark: name, size, lmod
for k in [
"addr",
"vtop",
"ptop",
"prel",
"flag",
"name",
"size",
"lmod",
]:
job[k] = cj[k]
# one chunk may occur multiple times in a file;
# filter to unique values for the list of missing chunks
# (preserve order to reduce disk thrashing)
job["need"] = []
lut = {}
for k in cj["hash"]:
if k not in lut:
@@ -108,13 +357,12 @@ class Up2k(object):
"wark": wark,
}
def _suffix(self, fpath, ts, ip):
def _untaken(self, fdir, fname, ts, ip):
# TODO broker which avoid this race and
# provides a new filename if taken (same as bup)
if not os.path.exists(fsenc(fpath)):
return ""
return ".{:.6f}-{}".format(ts, ip)
suffix = ".{:.6f}-{}".format(ts, ip)
with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
return f["orz"][1]
def _symlink(self, src, dst):
# TODO store this in linktab so we never delete src if there are links to it
@@ -141,40 +389,58 @@ class Up2k(object):
lsrc = "../" * (len(lsrc) - 1) + "/".join(lsrc)
os.symlink(fsenc(lsrc), fsenc(ldst))
except (AttributeError, OSError) as ex:
self.log("up2k", "cannot symlink; creating copy")
self.log("up2k", "cannot symlink; creating copy: " + repr(ex))
shutil.copy2(fsenc(src), fsenc(dst))
def handle_chunk(self, wark, chash):
def handle_chunk(self, ptop, wark, chash):
with self.mutex:
job = self.registry.get(wark)
job = self.registry[ptop].get(wark, None)
if not job:
raise Pebkac(404, "unknown wark")
raise Pebkac(400, "unknown wark")
if chash not in job["need"]:
raise Pebkac(200, "already got that but thanks??")
nchunk = [n for n, v in enumerate(job["hash"]) if v == chash]
if not nchunk:
raise Pebkac(404, "unknown chunk")
raise Pebkac(400, "unknown chunk")
job["poke"] = time.time()
chunksize = self._get_chunksize(job["size"])
ofs = [chunksize * x for x in nchunk]
path = os.path.join(job["rdir"], job["name"])
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
return [chunksize, ofs, path, job["lmod"]]
def confirm_chunk(self, wark, chash):
def confirm_chunk(self, ptop, wark, chash):
with self.mutex:
job = self.registry[wark]
job = self.registry[ptop][wark]
pdir = os.path.join(job["ptop"], job["prel"])
src = os.path.join(pdir, job["tnam"])
dst = os.path.join(pdir, job["name"])
job["need"].remove(chash)
ret = len(job["need"])
if ret > 0:
return ret, src
if WINDOWS and ret == 0:
path = os.path.join(job["rdir"], job["name"])
self.lastmod_q.put([path, (int(time.time()), int(job["lmod"]))])
atomic_move(src, dst)
return ret
if WINDOWS:
self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))])
db = self.db.get(job["ptop"], None)
if db:
rp = os.path.join(job["prel"], job["name"]).replace("\\", "/")
self.db_rm(db, rp)
self.db_add(db, job["wark"], rp, job["lmod"], job["size"])
db.commit()
del self.registry[ptop][wark]
# in-memory registry is reserved for unfinished uploads
return ret, dst
def _get_chunksize(self, filesize):
chunksize = 1024 * 1024
@@ -188,6 +454,14 @@ class Up2k(object):
chunksize += stepsize
stepsize *= mul
def db_rm(self, db, rp):
    """drop the index row for relative path *rp* from the up table"""
    params = (rp,)
    db.execute("delete from up where rp = ?", params)
def db_add(self, db, wark, rp, ts, sz):
    """insert one completed upload into the up table"""
    row = (wark, ts, sz, rp)
    db.execute("insert into up values (?,?,?,?)", row)
def _get_wark(self, cj):
if len(cj["name"]) > 1024 or len(cj["hash"]) > 512 * 1024: # 16TiB
raise Pebkac(400, "name or numchunks not according to spec")
@@ -204,9 +478,13 @@ class Up2k(object):
except:
cj["lmod"] = int(time.time())
# server-reproducible file identifier, independent of name or location
ident = [self.salt, str(cj["size"])]
ident.extend(cj["hash"])
wark = self._wark_from_hashlist(cj["size"], cj["hash"])
return wark
def _wark_from_hashlist(self, filesize, hashes):
""" server-reproducible file identifier, independent of name or location """
ident = [self.salt, str(filesize)]
ident.extend(hashes)
ident = "\n".join(ident)
hasher = hashlib.sha512()
@@ -216,10 +494,38 @@ class Up2k(object):
wark = base64.urlsafe_b64encode(digest)
return wark.decode("utf-8").rstrip("=")
def _hashlist_from_file(self, path):
    """hash *path* one chunk at a time, returning the list of
    truncated urlsafe-b64 sha512 digests (same format as the
    client-provided hashlist)"""
    remaining = os.path.getsize(path)
    chunksz = self._get_chunksize(remaining)
    hashes = []
    with open(path, "rb", 512 * 1024) as f:
        while remaining > 0:
            h = hashlib.sha512()
            chunk_rem = min(chunksz, remaining)
            remaining -= chunk_rem
            # feed the chunk to the hasher in 64k slices
            while chunk_rem > 0:
                buf = f.read(min(chunk_rem, 64 * 1024))
                if not buf:
                    raise Exception("EOF at " + str(f.tell()))

                h.update(buf)
                chunk_rem -= len(buf)

            b64 = base64.urlsafe_b64encode(h.digest()[:32])
            hashes.append(b64.decode("utf-8").rstrip("="))

    return hashes
def _new_upload(self, job):
self.registry[job["wark"]] = job
path = os.path.join(job["rdir"], job["name"])
with open(fsenc(path), "wb") as f:
self.registry[job["ptop"]][job["wark"]] = job
pdir = os.path.join(job["ptop"], job["prel"])
job["name"] = self._untaken(pdir, job["name"], job["t0"], job["addr"])
tnam = job["name"] + ".PARTIAL"
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
f, job["tnam"] = f["orz"]
f.seek(job["size"] - 1)
f.write(b"e")
@@ -236,3 +542,53 @@ class Up2k(object):
os.utime(fsenc(path), times)
except:
self.log("lmod", "failed to utime ({}, {})".format(path, times))
def _snapshot(self):
    """background loop: periodically persist each volume's
    unfinished-upload registry to disk via _snap_reg"""
    snap_every = 30  # persist unfinished uploads index every 30 sec
    drop_after = 3600  # drop unfinished uploads after 1 hour inactivity
    prev = {}
    while True:
        time.sleep(snap_every)
        with self.mutex:
            for ptop, reg in self.registry.items():
                self._snap_reg(prev, ptop, reg, drop_after)
def _snap_reg(self, prev, k, reg, discard_interval):
    """prune abandoned uploads from registry *reg* (volume path *k*)
    and persist it to <k>/.hist/up2k.snap when it has changed;
    *prev* maps volume path -> etag of the last snapshot written
    (None once an empty registry's snapshot file has been removed)"""
    now = time.time()
    # jobs not poked within the cutoff are considered abandoned
    rm = [x for x in reg.values() if now - x["poke"] > discard_interval]
    if rm:
        m = "dropping {} abandoned uploads in {}".format(len(rm), k)
        vis = [self._vis_job_progress(x) for x in rm]
        self.log("up2k", "\n".join([m] + vis))
        for job in rm:
            del reg[job["wark"]]
            try:
                # remove the placeholder zero-byte file (keep the PARTIAL)
                path = os.path.join(job["ptop"], job["prel"], job["name"])
                if os.path.getsize(path) == 0:
                    os.unlink(path)
            except:
                # best-effort cleanup; file may already be gone
                pass

    path = os.path.join(k, ".hist", "up2k.snap")
    if not reg:
        # registry became empty: delete any stale snapshot, once
        if k not in prev or prev[k] is not None:
            prev[k] = None
            if os.path.exists(path):
                os.unlink(path)
        return

    # cheap change-detector: job count + most recent activity
    newest = max(x["poke"] for _, x in reg.items()) if reg else 0
    etag = [len(reg), newest]
    if etag == prev.get(k, None):
        return

    # write to a pid-suffixed tempfile and rename over the old snapshot,
    # so a mid-write crash cannot destroy the previous index
    path2 = "{}.{}".format(path, os.getpid())
    j = json.dumps(reg, indent=2, sort_keys=True).encode("utf-8")
    with gzip.GzipFile(path2, "wb") as f:
        f.write(j)

    atomic_move(path2, path)

    self.log("up2k", "snap: {} |{}|".format(path, len(reg.keys())))
    prev[k] = etag

View File

@@ -2,14 +2,17 @@
from __future__ import print_function, unicode_literals
import re
import os
import sys
import time
import base64
import select
import struct
import hashlib
import platform
import threading
import mimetypes
import contextlib
import subprocess as sp # nosec
from .__init__ import PY2, WINDOWS
@@ -96,6 +99,80 @@ class Unrecv(object):
self.buf = buf + self.buf
@contextlib.contextmanager
def ren_open(fname, *args, **kwargs):
    """open *fname*, progressively shortening the name if the
    filesystem rejects it as too long; yields {"orz": [fileobj, final_name]}

    kwargs:
      fdir:   directory to open the file inside (fname is then relative)
      suffix: appended to the name if the file already exists
    """
    import errno

    fdir = kwargs.pop("fdir", None)
    suffix = kwargs.pop("suffix", None)

    if fname == os.devnull:
        with open(fname, *args, **kwargs) as f:
            yield {"orz": [f, fname]}
            return

    orig_name = fname
    bname = fname
    ext = ""
    while True:
        ofs = bname.rfind(".")
        if ofs < 0 or ofs < len(bname) - 7:
            # doesn't look like an extension anymore
            break

        ext = bname[ofs:] + ext
        bname = bname[:ofs]

    b64 = ""
    while True:
        try:
            if fdir:
                fpath = os.path.join(fdir, fname)
            else:
                fpath = fname

            if suffix and os.path.exists(fpath):
                fpath += suffix
                fname += suffix
                ext += suffix

            with open(fsenc(fpath), *args, **kwargs) as f:
                if b64:
                    # leave a breadcrumb mapping the truncated
                    # filename back to the original one
                    fp2 = "fn-trunc.{}.txt".format(b64)
                    if fdir:
                        # BUGFIX: join() would raise when fdir is None
                        fp2 = os.path.join(fdir, fp2)

                    with open(fsenc(fp2), "wb") as f2:
                        f2.write(orig_name.encode("utf-8"))

                yield {"orz": [f, fname]}
                return

        except OSError as ex_:
            ex = ex_
            # BUGFIX: was a hard-coded 36 which is linux-specific;
            # use the symbolic constant so other platforms work too
            if ex.errno != errno.ENAMETOOLONG:
                raise

        if not b64:
            b64 = (bname + ext).encode("utf-8", "replace")
            b64 = hashlib.sha512(b64).digest()[:12]
            b64 = base64.urlsafe_b64encode(b64).decode("utf-8").rstrip("=")

        badlen = len(fname)
        while len(fname) >= badlen:
            if len(bname) < 8:
                raise ex

            if len(bname) > len(ext):
                # drop the last letter of the filename
                bname = bname[:-1]
            else:
                try:
                    # drop the leftmost sub-extension
                    _, ext = ext.split(".", 1)
                except:
                    # okay do the first letter then
                    ext = "." + ext[2:]

            fname = "{}~{}{}".format(bname, b64, ext)
class MultipartParser(object):
def __init__(self, log_func, sr, http_headers):
self.sr = sr
@@ -472,6 +549,16 @@ else:
fsdec = w8dec
def atomic_move(src, dst):
    """move src onto dst, replacing dst if it exists"""
    if PY2:
        # py2 lacks os.replace; emulate with two separate
        # syscalls (so not actually atomic there)
        if os.path.exists(dst):
            os.unlink(dst)

        os.rename(src, dst)
    else:
        os.replace(src, dst)
def read_socket(sr, total_size):
remains = total_size
while remains > 0:
@@ -515,6 +602,46 @@ def hashcopy(actor, fin, fout):
return tlen, hashobj.hexdigest(), digest_b64
def sendfile_py(lower, upper, f, s):
    """userspace copy of the byte range [lower, upper) from file *f*
    to socket *s*; returns the number of bytes NOT sent (0 on success)
    """
    remains = upper - lower
    f.seek(lower)
    while remains > 0:
        buf = f.read(min(4096, remains))
        if not buf:
            # file ended early; report what's left undelivered
            return remains

        try:
            s.sendall(buf)
            remains -= len(buf)
        except Exception:
            # client probably disconnected; was a bare except which
            # would also swallow KeyboardInterrupt/SystemExit
            return remains

    return 0
def sendfile_kern(lower, upper, f, s):
    """zero-copy transfer of the byte range [lower, upper) from file *f*
    to socket *s* via os.sendfile; returns the number of bytes NOT
    sent (0 on success)
    """
    out_fd = s.fileno()
    in_fd = f.fileno()
    ofs = lower
    while ofs < upper:
        try:
            req = min(2 ** 30, upper - ofs)
            # wait (up to 10s) for the socket to become writable
            # before asking the kernel to push the next slab
            select.select([], [out_fd], [], 10)
            n = os.sendfile(out_fd, in_fd, ofs, req)
        except Exception as ex:
            # print("sendfile: " + repr(ex))
            n = 0  # treat any failure as zero progress -> bail below

        if n <= 0:
            return upper - ofs

        ofs += n
        # print("sendfile: ok, sent {} now, {} total, {} remains".format(n, ofs - lower, upper - ofs))

    return 0
def unescape_cookie(orig):
# mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn
ret = ""
@@ -595,3 +722,6 @@ class Pebkac(Exception):
def __init__(self, code, msg=None):
    """http-error exception; *code* is the status to reply with,
    *msg* defaults to the generic text for that status"""
    super(Pebkac, self).__init__(msg or HTTPCODE[code])
    self.code = code
def __repr__(self):
    """debug-friendly representation: status code plus exception args"""
    return "Pebkac(%s, %r)" % (self.code, self.args)

12
copyparty/web/Makefile Normal file
View File

@@ -0,0 +1,12 @@
# run me to zopfli all the static files
# which should help on really slow connections
# but then why are you using copyparty in the first place
pk: $(addsuffix .gz, $(wildcard *.js *.css))
un: $(addsuffix .un, $(wildcard *.gz))
%.gz: %
pigz -11 -J 34 -I 5730 $<
%.un: %
pigz -d $<

View File

@@ -68,6 +68,8 @@
</div>
</div>
<script src="/.cpr/util.js{{ ts }}"></script>
{%- if can_read %}
<script src="/.cpr/browser.js{{ ts }}"></script>
{%- endif %}

View File

@@ -1,117 +1,25 @@
"use strict";
// error handler for mobile devices
// fatal-error handler: replaces the page body with *msg* rendered
// as html (croak below is the plaintext variant)
function hcroak(msg) {
    document.body.innerHTML = msg;
    window.onerror = undefined;
    throw 'fatal_err';
}
// fatal-error handler: replaces the page body with *msg* as plain text
function croak(msg) {
    document.body.textContent = msg;
    window.onerror = undefined;
    throw msg;
}
// html-escape the four characters that can break markup
function esc(txt) {
    var tab = {
        '&': '&amp;',
        '"': '&quot;',
        '<': '&lt;',
        '>': '&gt;'
    };
    return txt.replace(/[&"<>]/g, function (c) {
        return tab[c];
    });
}
// global error handler: dumps the exception, location and any
// stacktrace into the page itself, so bugs can be reported from
// devices that have no js console
window.onerror = function (msg, url, lineNo, columnNo, error) {
    window.onerror = undefined;  // don't recurse if the dump itself throws
    var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
        esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];

    if (error) {
        // collect whichever detail fields this browser provides
        var find = ['desc', 'stack', 'trace'];
        for (var a = 0; a < find.length; a++)
            if (String(error[find[a]]) !== 'undefined')
                html.push('<h2>' + find[a] + '</h2>' +
                    esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
    }
    document.body.style.fontSize = '0.8em';
    document.body.style.padding = '0 1em 1em 1em';
    hcroak(html.join('\n'));
};
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
// polyfill for browsers without the es6 method
if (!String.prototype.endsWith) {
    String.prototype.endsWith = function (search, this_len) {
        if (this_len === undefined || this_len > this.length) {
            this_len = this.length;
        }
        return this.substring(this_len - search.length, this_len) === search;
    };
}
// https://stackoverflow.com/a/950146
// dynamically load *url* as a script tag, invoking *cb* once loaded
// (onreadystatechange covers old IE, onload everything else)
function import_js(url, cb) {
    var head = document.head || document.getElementsByTagName('head')[0];
    var script = document.createElement('script');
    script.type = 'text/javascript';
    script.src = url;
    script.onreadystatechange = cb;
    script.onload = cb;
    head.appendChild(script);
}
// shorthand for document.getElementById
function o(id) {
    return document.getElementById(id);
}
window.onerror = vis_exh;
function dbg(msg) {
o('path').innerHTML = msg;
ebi('path').innerHTML = msg;
}
function ev(e) {
e = e || window.event;
e.preventDefault ? e.preventDefault() : (e.returnValue = false);
if (e.preventDefault)
e.preventDefault()
if (e.stopPropagation)
e.stopPropagation();
e.returnValue = false;
return e;
}
// sort the rows of *table* by column *col*; a repeat click on the
// column whose header already has class sort1 flips the direction
function sortTable(table, col) {
    var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows
        th = table.tHead.rows[0].cells,
        tr = Array.prototype.slice.call(tb.rows, 0),
        i, reverse = th[col].className == 'sort1' ? -1 : 1;

    // mark the active sort column in the header
    for (var a = 0, thl = th.length; a < thl; a++)
        th[a].className = '';

    th[col].className = 'sort' + reverse;
    // the header cell's sort attribute selects the comparator
    var stype = th[col].getAttribute('sort');
    tr = tr.sort(function (a, b) {
        var v1 = a.cells[col].textContent.trim();
        var v2 = b.cells[col].textContent.trim();
        if (stype == 'int') {
            // strip thousands-separators before numeric compare
            v1 = parseInt(v1.replace(/,/g, ''));
            v2 = parseInt(v2.replace(/,/g, ''));
            return reverse * (v1 - v2);
        }
        return reverse * (v1.localeCompare(v2));
    });
    // re-appending each row moves it to the end, in sorted order
    for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]);
}
// attach sortTable click-handlers to every header cell of *table*
function makeSortable(table) {
    var th = table.tHead, i;
    th && (th = th.rows[0]) && (th = th.cells);
    if (th) i = th.length;
    else return; // if no `<thead>` then do nothing
    while (--i >= 0) (function (i) {
        // iife pins the column index for each handler
        th[i].onclick = function () {
            sortTable(table, i);
        };
    }(i));
}
makeSortable(o('files'));
makeSortable(ebi('files'));
// extract songs + add play column
@@ -124,9 +32,9 @@ var mp = (function () {
'tracks': tracks,
'cover_url': ''
};
var re_audio = new RegExp('\.(opus|ogg|m4a|aac|mp3|wav|flac)$', 'i');
var re_audio = /\.(opus|ogg|m4a|aac|mp3|wav|flac)$/i;
var trs = document.getElementById('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
var trs = ebi('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
for (var a = 0, aa = trs.length; a < aa; a++) {
var tds = trs[a].getElementsByTagName('td');
var link = tds[1].getElementsByTagName('a')[0];
@@ -142,7 +50,7 @@ var mp = (function () {
}
for (var a = 0, aa = tracks.length; a < aa; a++)
o('trk' + a).onclick = ev_play;
ebi('trk' + a).onclick = ev_play;
ret.vol = localStorage.getItem('vol');
if (ret.vol !== null)
@@ -169,8 +77,8 @@ var mp = (function () {
// toggle player widget
var widget = (function () {
var ret = {};
var widget = document.getElementById('widget');
var wtoggle = document.getElementById('wtoggle');
var widget = ebi('widget');
var wtoggle = ebi('wtoggle');
var touchmode = false;
var side_open = false;
var was_paused = true;
@@ -199,7 +107,7 @@ var widget = (function () {
ret.paused = function (paused) {
if (was_paused != paused) {
was_paused = paused;
o('bplay').innerHTML = paused ? '▶' : '⏸';
ebi('bplay').innerHTML = paused ? '▶' : '⏸';
}
};
var click_handler = function (e) {
@@ -223,8 +131,8 @@ var widget = (function () {
// buffer/position bar
var pbar = (function () {
var r = {};
r.bcan = o('barbuf');
r.pcan = o('barpos');
r.bcan = ebi('barbuf');
r.pcan = ebi('barpos');
r.bctx = r.bcan.getContext('2d');
r.pctx = r.pcan.getContext('2d');
@@ -289,7 +197,7 @@ var pbar = (function () {
// volume bar
var vbar = (function () {
var r = {};
r.can = o('pvol');
r.can = ebi('pvol');
r.ctx = r.can.getContext('2d');
var bctx = r.ctx;
@@ -386,7 +294,7 @@ var vbar = (function () {
else
play(0);
};
o('bplay').onclick = function (e) {
ebi('bplay').onclick = function (e) {
ev(e);
if (mp.au) {
if (mp.au.paused)
@@ -397,15 +305,15 @@ var vbar = (function () {
else
play(0);
};
o('bprev').onclick = function (e) {
ebi('bprev').onclick = function (e) {
ev(e);
bskip(-1);
};
o('bnext').onclick = function (e) {
ebi('bnext').onclick = function (e) {
ev(e);
bskip(1);
};
o('barpos').onclick = function (e) {
ebi('barpos').onclick = function (e) {
if (!mp.au) {
//dbg((new Date()).getTime());
return play(0);
@@ -414,8 +322,12 @@ var vbar = (function () {
var rect = pbar.pcan.getBoundingClientRect();
var x = e.clientX - rect.left;
var mul = x * 1.0 / rect.width;
var seek = mp.au.duration * mul;
console.log('seek: ' + seek);
if (!isFinite(seek))
return;
mp.au.currentTime = mp.au.duration * mul;
mp.au.currentTime = seek;
if (mp.au === mp.au_native)
// hack: ogv.js breaks on .play() during playback
@@ -471,7 +383,7 @@ function ev_play(e) {
function setclass(id, clas) {
o(id).setAttribute('class', clas);
ebi(id).setAttribute('class', clas);
}
@@ -542,7 +454,8 @@ function play(tid, call_depth) {
mp.au.tid = tid;
mp.au.src = url;
mp.au.volume = mp.expvol();
setclass('trk' + tid, 'play act');
var oid = 'trk' + tid;
setclass(oid, 'play act');
try {
if (hack_attempt_play)
@@ -551,7 +464,11 @@ function play(tid, call_depth) {
if (mp.au.paused)
autoplay_blocked();
location.hash = 'trk' + tid;
var o = ebi(oid);
o.setAttribute('id', 'thx_js');
location.hash = oid;
o.setAttribute('id', oid);
pbar.drawbuf();
return true;
}
@@ -567,7 +484,6 @@ function play(tid, call_depth) {
function evau_error(e) {
var err = '';
var eplaya = (e && e.target) || (window.event && window.event.srcElement);
var url = eplaya.src;
switch (eplaya.error.code) {
case eplaya.error.MEDIA_ERR_ABORTED:
@@ -608,20 +524,20 @@ function show_modal(html) {
// hide fullscreen message
function unblocked() {
var dom = o('blocked');
var dom = ebi('blocked');
if (dom)
dom.parentNode.removeChild(dom);
}
// show ui to manually start playback of a linked song
function autoplay_blocked(tid) {
function autoplay_blocked() {
show_modal(
'<div id="blk_play"><a href="#" id="blk_go"></a></div>' +
'<div id="blk_abrt"><a href="#" id="blk_na">Cancel<br />(show file list)</a></div>');
var go = o('blk_go');
var na = o('blk_na');
var go = ebi('blk_go');
var na = ebi('blk_na');
var fn = mp.tracks[mp.au.tid].split(/\//).pop();
fn = decodeURIComponent(fn.replace(/\+/g, ' '));

View File

@@ -126,7 +126,8 @@ write markdown (most html is 🙆 too)
var last_modified = {{ lastmod }};
var md_opt = {
link_md_as_html: false,
allow_plugins: {{ md_plug }}
allow_plugins: {{ md_plug }},
modpoll_freq: {{ md_chk_rate }}
};
(function () {
@@ -144,17 +145,11 @@ var md_opt = {
toggle();
})();
if (!String.startsWith) {
String.prototype.startsWith = function(s, i) {
i = i>0 ? i|0 : 0;
return this.substring(i, i + s.length) === s;
};
}
</script>
<script src="/.cpr/util.js"></script>
<script src="/.cpr/deps/marked.full.js"></script>
<script src="/.cpr/md.js"></script>
{%- if edit %}
<script src="/.cpr/md2.js"></script>
<script src="/.cpr/md2.js"></script>
{%- endif %}
</body></html>

View File

@@ -1,10 +1,12 @@
var dom_toc = document.getElementById('toc');
var dom_wrap = document.getElementById('mw');
var dom_hbar = document.getElementById('mh');
var dom_nav = document.getElementById('mn');
var dom_pre = document.getElementById('mp');
var dom_src = document.getElementById('mt');
var dom_navtgl = document.getElementById('navtoggle');
"use strict";
var dom_toc = ebi('toc');
var dom_wrap = ebi('mw');
var dom_hbar = ebi('mh');
var dom_nav = ebi('mn');
var dom_pre = ebi('mp');
var dom_src = ebi('mt');
var dom_navtgl = ebi('navtoggle');
// chrome 49 needs this
@@ -34,7 +36,7 @@ function cls(dom, name, add) {
}
function static(obj) {
function statify(obj) {
return JSON.parse(JSON.stringify(obj));
}
@@ -158,6 +160,46 @@ function copydom(src, dst, lv) {
}
// show (or clear, when *ex* is null) a plugin-error banner in the
// navbar; *js* is the plugin source, used to show the offending line
function md_plug_err(ex, js) {
    // always remove any previous banner first
    var errbox = ebi('md_errbox');
    if (errbox)
        errbox.parentNode.removeChild(errbox);

    if (!ex)
        return;

    var msg = (ex + '').split('\n')[0];
    var ln = ex.lineNumber;  // NOTE(review): non-standard property; presumably absent on some browsers
    var o = null;
    if (ln) {
        msg = "Line " + ln + ", " + msg;
        var lns = js.split('\n');
        if (ln < lns.length) {
            // show the offending source line beneath the message
            o = document.createElement('span');
            o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block';
            o.textContent = lns[ln - 1];
        }
    }
    errbox = document.createElement('div');
    errbox.setAttribute('id', 'md_errbox');
    errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
    errbox.textContent = msg;
    // clicking the banner pops up the full stacktrace
    errbox.onclick = function () {
        alert('' + ex.stack);
    };
    if (o) {
        errbox.appendChild(o);
        errbox.style.padding = '.25em .5em';
    }
    dom_nav.appendChild(errbox);
    try {
        console.trace();
    }
    catch (ex2) { }
}
function load_plug(md_text, plug_type) {
if (!md_opt.allow_plugins)
return md_text;
@@ -177,7 +219,14 @@ function load_plug(md_text, plug_type) {
var old_plug = md_plug[plug_type];
if (!old_plug || old_plug[1] != js) {
js = 'const x = { ' + js + ' }; x;';
var x = eval(js);
try {
var x = eval(js);
}
catch (ex) {
md_plug[plug_type] = null;
md_plug_err(ex, js);
return md;
}
if (x['ctor']) {
x['ctor']();
delete x['ctor'];
@@ -191,20 +240,30 @@ function load_plug(md_text, plug_type) {
function convert_markdown(md_text, dest_dom) {
md_text = md_text.replace(/\r/g, '');
md_plug_err(null);
md_text = load_plug(md_text, 'pre');
md_text = load_plug(md_text, 'post');
marked.setOptions({
var marked_opts = {
//headerPrefix: 'h-',
breaks: true,
gfm: true
});
};
if (md_plug['pre']) {
marked.use(md_plug['pre'][0]);
var ext = md_plug['pre'];
if (ext)
Object.assign(marked_opts, ext[0]);
try {
var md_html = marked(md_text, marked_opts);
}
catch (ex) {
if (ext)
md_plug_err(ex, ext[1]);
var md_html = marked(md_text);
throw ex;
}
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
var nodes = md_dom.getElementsByTagName('a');
@@ -240,7 +299,7 @@ function convert_markdown(md_text, dest_dom) {
}
// separate <code> for each line in <pre>
var nodes = md_dom.getElementsByTagName('pre');
nodes = md_dom.getElementsByTagName('pre');
for (var a = nodes.length - 1; a >= 0; a--) {
var el = nodes[a];
@@ -286,15 +345,29 @@ function convert_markdown(md_text, dest_dom) {
el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>';
}
if (md_plug['post'])
md_plug['post'][0].render(md_dom);
ext = md_plug['post'];
if (ext && ext[0].render)
try {
ext[0].render(md_dom);
}
catch (ex) {
md_plug_err(ex, ext[1]);
}
copydom(md_dom, dest_dom, 0);
if (ext && ext[0].render2)
try {
ext[0].render2(dest_dom);
}
catch (ex) {
md_plug_err(ex, ext[1]);
}
}
function init_toc() {
var loader = document.getElementById('ml');
var loader = ebi('ml');
loader.parentNode.removeChild(loader);
var anchors = []; // list of toc entries, complex objects

View File

@@ -77,32 +77,52 @@ html.dark #mt {
background: #f97;
border-radius: .15em;
}
html.dark #save.force-save {
color: #fca;
background: #720;
}
#save.disabled {
opacity: .4;
}
#helpbox,
#toast {
background: #f7f7f7;
border-radius: .4em;
z-index: 9001;
}
#helpbox {
display: none;
position: fixed;
background: #f7f7f7;
box-shadow: 0 .5em 2em #777;
border-radius: .4em;
padding: 2em;
top: 4em;
overflow-y: auto;
box-shadow: 0 .5em 2em #777;
height: calc(100% - 12em);
left: calc(50% - 15em);
right: 0;
width: 30em;
z-index: 9001;
}
#helpclose {
display: block;
}
html.dark #helpbox {
background: #222;
box-shadow: 0 .5em 2em #444;
}
html.dark #helpbox,
html.dark #toast {
background: #222;
border: 1px solid #079;
border-width: 1px 0;
}
#toast {
font-weight: bold;
text-align: center;
padding: .6em 0;
position: fixed;
z-index: 9001;
top: 30%;
transition: opacity 0.2s ease-in-out;
opacity: 1;
}
# mt {opacity: .5;top:1px}

View File

@@ -1,3 +1,6 @@
"use strict";
// server state
var server_md = dom_src.value;
@@ -8,15 +11,15 @@ var js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
// dom nodes
var dom_swrap = document.getElementById('mtw');
var dom_sbs = document.getElementById('sbs');
var dom_nsbs = document.getElementById('nsbs');
var dom_tbox = document.getElementById('toolsbox');
var dom_swrap = ebi('mtw');
var dom_sbs = ebi('sbs');
var dom_nsbs = ebi('nsbs');
var dom_tbox = ebi('toolsbox');
var dom_ref = (function () {
var d = document.createElement('div');
d.setAttribute('id', 'mtr');
dom_swrap.appendChild(d);
d = document.getElementById('mtr');
d = ebi('mtr');
// hide behind the textarea (offsetTop is not computed if display:none)
dom_src.style.zIndex = '4';
d.style.zIndex = '3';
@@ -105,7 +108,7 @@ var draw_md = (function () {
map_src = genmap(dom_ref, map_src);
map_pre = genmap(dom_pre, map_pre);
cls(document.getElementById('save'), 'disabled', src == server_md);
cls(ebi('save'), 'disabled', src == server_md);
var t1 = new Date().getTime();
delay = t1 - t0 > 100 ? 25 : 1;
@@ -141,7 +144,7 @@ redraw = (function () {
onresize();
}
function modetoggle() {
mode = dom_nsbs.innerHTML;
var mode = dom_nsbs.innerHTML;
dom_nsbs.innerHTML = mode == 'editor' ? 'preview' : 'editor';
mode += ' single';
dom_wrap.setAttribute('class', mode);
@@ -177,7 +180,7 @@ redraw = (function () {
y += src.clientHeight / 2;
var sy1 = -1, sy2 = -1, dy1 = -1, dy2 = -1;
for (var a = 1; a < nlines + 1; a++) {
if (srcmap[a] === null || dstmap[a] === null)
if (srcmap[a] == null || dstmap[a] == null)
continue;
if (srcmap[a] > y) {
@@ -220,14 +223,108 @@ redraw = (function () {
})();
// modification checker
// polls the server copy of the document and warns before the local
// editor silently diverges from it; skip_one suppresses the poll that
// would race with our own save, disabled stops polling after a diff
function Modpoll() {
    this.skip_one = true;
    this.disabled = false;

    this.periodic = function () {
        var that = this;
        setTimeout(function () {
            that.periodic();
        }, 1000 * md_opt.modpoll_freq);

        // figure out if this round should be skipped
        var skip = null;
        if (ebi('toast'))
            skip = 'toast';
        else if (this.skip_one)
            skip = 'saved';
        else if (this.disabled)
            skip = 'disabled';

        if (skip) {
            console.log('modpoll skip, ' + skip);
            this.skip_one = false;
            return;
        }

        console.log('modpoll...');
        // cachebusted raw fetch of the current document
        var url = (document.location + '').split('?')[0] + '?raw&_=' + new Date().getTime();
        var xhr = new XMLHttpRequest();
        xhr.modpoll = this;
        xhr.open('GET', url, true);
        xhr.responseType = 'text';
        xhr.onreadystatechange = this.cb;
        xhr.send();
    }

    this.cb = function () {
        if (this.modpoll.disabled || this.modpoll.skip_one) {
            console.log('modpoll abort');
            return;
        }

        if (this.readyState != XMLHttpRequest.DONE)
            return;

        if (this.status !== 200) {
            console.log('modpoll err ' + this.status + ": " + this.responseText);
            return;
        }

        if (!this.responseText)
            return;

        // compare with normalized line-endings on both sides
        var server_ref = server_md.replace(/\r/g, '');
        var server_now = this.responseText.replace(/\r/g, '');
        if (server_ref != server_now) {
            console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
            this.modpoll.disabled = true;
            var msg = [
                "The document has changed on the server.<br />" +
                "The changes will NOT be loaded into your editor automatically.",
                "Press F5 or CTRL-R to refresh the page,<br />" +
                "replacing your document with the server copy.",
                // BUGFIX: typo "contnue" in the user-facing message
                "You can click this message to ignore and continue."
            ];
            return toast(false, "box-shadow:0 1em 2em rgba(64,64,64,0.8);font-weight:normal",
                36, "<p>" + msg.join('</p>\n<p>') + '</p>');
        }

        console.log('modpoll eq');
    }

    if (md_opt.modpoll_freq > 0)
        this.periodic();

    return this;
}
var modpoll = new Modpoll();
window.onbeforeunload = function (e) {
if ((ebi("save").getAttribute('class') + '').indexOf('disabled') >= 0)
return; //nice (todo)
e.preventDefault(); //ff
e.returnValue = ''; //chrome
};
// save handler
function save(e) {
if (e) e.preventDefault();
var save_btn = document.getElementById("save"),
var save_btn = ebi("save"),
save_cls = save_btn.getAttribute('class') + '';
if (save_cls.indexOf('disabled') >= 0) {
toast('font-size:2em;color:#fc6;width:9em;', 'no changes');
toast(true, ";font-size:2em;color:#c90", 9, "no changes");
return;
}
@@ -251,6 +348,8 @@ function save(e) {
xhr.onreadystatechange = save_cb;
xhr.btn = save_btn;
xhr.txt = txt;
modpoll.skip_one = true; // skip one iteration while we save
xhr.send(fd);
}
@@ -344,23 +443,44 @@ function savechk_cb() {
last_modified = this.lastmod;
server_md = this.txt;
draw_md();
toast('font-size:6em;font-family:serif;color:#cf6;width:4em;',
toast(true, ";font-size:6em;font-family:serif;color:#9b4", 4,
'OK✔<span style="font-size:.2em;color:#999;position:absolute">' + this.ntry + '</span>');
modpoll.disabled = false;
}
function toast(style, msg) {
var ok = document.createElement('div');
style += 'font-weight:bold;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1';
function toast(autoclose, style, width, msg) {
var ok = ebi("toast");
if (ok)
ok.parentNode.removeChild(ok);
style = "width:" + width + "em;left:calc(50% - " + (width / 2) + "em);" + style;
ok = document.createElement('div');
ok.setAttribute('id', 'toast');
ok.setAttribute('style', style);
ok.innerHTML = msg;
var parent = document.getElementById('m');
var parent = ebi('m');
document.documentElement.appendChild(ok);
setTimeout(function () {
ok.style.opacity = 0;
}, 500);
setTimeout(function () {
ok.parentNode.removeChild(ok);
}, 750);
var hide = function (delay) {
delay = delay || 0;
setTimeout(function () {
ok.style.opacity = 0;
}, delay);
setTimeout(function () {
if (ok.parentNode)
ok.parentNode.removeChild(ok);
}, delay + 250);
}
ok.onclick = function () {
hide(0);
};
if (autoclose)
hide(500);
}
@@ -540,6 +660,10 @@ function md_backspace() {
if (/^\s*$/.test(left))
return true;
// same if selection
if (o0 != dom_src.selectionEnd)
return true;
// same if line is all-whitespace or non-markup
var v = m[0].replace(/[^ ]/g, " ");
if (v === m[0] || v.length !== left.length)
@@ -623,7 +747,8 @@ function fmt_table(e) {
lpipe = tab[1].indexOf('|') < tab[1].indexOf('-'),
rpipe = tab[1].lastIndexOf('|') > tab[1].lastIndexOf('-'),
re_lpipe = lpipe ? /^\s*\|\s*/ : /^\s*/,
re_rpipe = rpipe ? /\s*\|\s*$/ : /\s*$/;
re_rpipe = rpipe ? /\s*\|\s*$/ : /\s*$/,
ncols;
// the second row defines the table,
// need to process that first
@@ -680,7 +805,8 @@ function fmt_table(e) {
for (var col = 0; col < ncols; col++) {
var max = 0;
for (var row = 0; row < tab.length; row++)
max = Math.max(max, tab[row][col].length);
if (row != 1)
max = Math.max(max, tab[row][col].length);
var s = '';
for (var n = 0; n < max; n++)
@@ -747,9 +873,8 @@ function mark_uni(e) {
dom_tbox.setAttribute('class', '');
var txt = dom_src.value,
ptn = new RegExp('([^' + js_uni_whitelist + ']+)', 'g');
mod = txt.replace(/\r/g, "").replace(ptn, "\u2588\u2770$1\u2771");
ptn = new RegExp('([^' + js_uni_whitelist + ']+)', 'g'),
mod = txt.replace(/\r/g, "").replace(ptn, "\u2588\u2770$1\u2771");
if (txt == mod) {
alert('no results; no modifications were made');
@@ -785,7 +910,12 @@ function iter_uni(e) {
// configure whitelist
function cfg_uni(e) {
if (e) e.preventDefault();
esc_uni_whitelist = prompt("unicode whitelist", esc_uni_whitelist);
var reply = prompt("unicode whitelist", esc_uni_whitelist);
if (reply === null)
return;
esc_uni_whitelist = reply;
js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
}
@@ -802,7 +932,7 @@ function cfg_uni(e) {
return false;
}
if (ev.code == "Escape" || kc == 27) {
var d = document.getElementById('helpclose');
var d = ebi('helpclose');
if (d)
d.click();
}
@@ -859,22 +989,22 @@ function cfg_uni(e) {
}
}
document.onkeydown = keydown;
document.getElementById('save').onclick = save;
ebi('save').onclick = save;
})();
document.getElementById('tools').onclick = function (e) {
ebi('tools').onclick = function (e) {
if (e) e.preventDefault();
var is_open = dom_tbox.getAttribute('class') != 'open';
dom_tbox.setAttribute('class', is_open ? 'open' : '');
};
document.getElementById('help').onclick = function (e) {
ebi('help').onclick = function (e) {
if (e) e.preventDefault();
dom_tbox.setAttribute('class', '');
var dom = document.getElementById('helpbox');
var dom = ebi('helpbox');
var dtxt = dom.getElementsByTagName('textarea');
if (dtxt.length > 0) {
convert_markdown(dtxt[0].value, dom);
@@ -882,16 +1012,16 @@ document.getElementById('help').onclick = function (e) {
}
dom.style.display = 'block';
document.getElementById('helpclose').onclick = function () {
ebi('helpclose').onclick = function () {
dom.style.display = 'none';
};
};
document.getElementById('fmt_table').onclick = fmt_table;
document.getElementById('mark_uni').onclick = mark_uni;
document.getElementById('iter_uni').onclick = iter_uni;
document.getElementById('cfg_uni').onclick = cfg_uni;
ebi('fmt_table').onclick = fmt_table;
ebi('mark_uni').onclick = mark_uni;
ebi('iter_uni').onclick = iter_uni;
ebi('cfg_uni').onclick = cfg_uni;
// blame steen
@@ -999,13 +1129,12 @@ action_stack = (function () {
ref = newtxt;
dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length);
if (hist.un.length > 0)
dbg(static(hist.un.slice(-1)[0]));
dbg(statify(hist.un.slice(-1)[0]));
if (hist.re.length > 0)
dbg(static(hist.re.slice(-1)[0]));
dbg(statify(hist.re.slice(-1)[0]));
}
return {
push: push,
undo: undo,
redo: redo,
push: schedule_push,
@@ -1015,7 +1144,7 @@ action_stack = (function () {
})();
/*
document.getElementById('help').onclick = function () {
ebi('help').onclick = function () {
var c1 = getComputedStyle(dom_src).cssText.split(';');
var c2 = getComputedStyle(dom_ref).cssText.split(';');
var max = Math.min(c1.length, c2.length);

View File

@@ -25,7 +25,8 @@
var last_modified = {{ lastmod }};
var md_opt = {
link_md_as_html: false,
allow_plugins: {{ md_plug }}
allow_plugins: {{ md_plug }},
modpoll_freq: {{ md_chk_rate }}
};
var lightswitch = (function () {
@@ -42,6 +43,7 @@ var lightswitch = (function () {
})();
</script>
<script src="/.cpr/util.js"></script>
<script src="/.cpr/deps/easymde.js"></script>
<script src="/.cpr/mde.js"></script>
</body></html>

View File

@@ -1,7 +1,9 @@
var dom_wrap = document.getElementById('mw');
var dom_nav = document.getElementById('mn');
var dom_doc = document.getElementById('m');
var dom_md = document.getElementById('mt');
"use strict";
var dom_wrap = ebi('mw');
var dom_nav = ebi('mn');
var dom_doc = ebi('m');
var dom_md = ebi('mt');
(function () {
var n = document.location + '';
@@ -63,7 +65,7 @@ var mde = (function () {
mde.codemirror.on("change", function () {
md_changed(mde);
});
var loader = document.getElementById('ml');
var loader = ebi('ml');
loader.parentNode.removeChild(loader);
return mde;
})();
@@ -213,7 +215,7 @@ function save_chk() {
var ok = document.createElement('div');
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
ok.innerHTML = 'OK✔';
var parent = document.getElementById('m');
var parent = ebi('m');
document.documentElement.appendChild(ok);
setTimeout(function () {
ok.style.opacity = 0;

View File

@@ -1,61 +1,6 @@
"use strict";
// error handler for mobile devices
function hcroak(msg) {
document.body.innerHTML = msg;
window.onerror = undefined;
throw 'fatal_err';
}
function croak(msg) {
document.body.textContent = msg;
window.onerror = undefined;
throw msg;
}
function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) {
return {
'&': '&amp;',
'"': '&quot;',
'<': '&lt;',
'>': '&gt;'
}[c];
});
}
window.onerror = function (msg, url, lineNo, columnNo, error) {
window.onerror = undefined;
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
if (error) {
var find = ['desc', 'stack', 'trace'];
for (var a = 0; a < find.length; a++)
if (String(error[find[a]]) !== 'undefined')
html.push('<h2>' + find[a] + '</h2>' +
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
}
document.body.style.fontSize = '0.8em';
document.body.style.padding = '0 1em 1em 1em';
hcroak(html.join('\n'));
};
// https://stackoverflow.com/a/950146
function import_js(url, cb) {
var head = document.head || document.getElementsByTagName('head')[0];
var script = document.createElement('script');
script.type = 'text/javascript';
script.src = url;
script.onreadystatechange = cb;
script.onload = cb;
head.appendChild(script);
}
function o(id) {
return document.getElementById(id);
}
window.onerror = vis_exh;
(function () {
@@ -88,12 +33,12 @@ function goto(dest) {
for (var a = obj.length - 1; a >= 0; a--)
obj[a].classList.remove('act');
var obj = document.querySelectorAll('#ops>a');
obj = document.querySelectorAll('#ops>a');
for (var a = obj.length - 1; a >= 0; a--)
obj[a].classList.remove('act');
if (dest) {
document.getElementById('op_' + dest).classList.add('act');
ebi('op_' + dest).classList.add('act');
document.querySelector('#ops>a[data-dest=' + dest + ']').classList.add('act');
var fn = window['goto_' + dest];
@@ -121,7 +66,7 @@ function goto_up2k() {
if (op !== null && op !== '.')
goto(op);
}
document.getElementById('ops').style.display = 'block';
ebi('ops').style.display = 'block';
})();
@@ -150,21 +95,21 @@ function up2k_init(have_crypto) {
// show modal message
function showmodal(msg) {
o('u2notbtn').innerHTML = msg;
o('u2btn').style.display = 'none';
o('u2notbtn').style.display = 'block';
o('u2conf').style.opacity = '0.5';
ebi('u2notbtn').innerHTML = msg;
ebi('u2btn').style.display = 'none';
ebi('u2notbtn').style.display = 'block';
ebi('u2conf').style.opacity = '0.5';
}
// hide modal message
function unmodal() {
o('u2notbtn').style.display = 'none';
o('u2btn').style.display = 'block';
o('u2conf').style.opacity = '1';
o('u2notbtn').innerHTML = '';
ebi('u2notbtn').style.display = 'none';
ebi('u2btn').style.display = 'block';
ebi('u2conf').style.opacity = '1';
ebi('u2notbtn').innerHTML = '';
}
var post_url = o('op_bup').getElementsByTagName('form')[0].getAttribute('action');
var post_url = ebi('op_bup').getElementsByTagName('form')[0].getAttribute('action');
if (post_url && post_url.charAt(post_url.length - 1) !== '/')
post_url += '/';
@@ -181,25 +126,25 @@ function up2k_init(have_crypto) {
import_js('/.cpr/deps/sha512.js', unmodal);
if (is_https)
o('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best';
ebi('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best';
else
o('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance';
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance';
}
};
}
// show uploader if the user only has write-access
if (!o('files'))
if (!ebi('files'))
goto('up2k');
// shows or clears an error message in the basic uploader ui
function setmsg(msg) {
if (msg !== undefined) {
o('u2err').setAttribute('class', 'err');
o('u2err').innerHTML = msg;
ebi('u2err').setAttribute('class', 'err');
ebi('u2err').innerHTML = msg;
}
else {
o('u2err').setAttribute('class', '');
o('u2err').innerHTML = '';
ebi('u2err').setAttribute('class', '');
ebi('u2err').innerHTML = '';
}
}
@@ -210,7 +155,7 @@ function up2k_init(have_crypto) {
}
// handle user intent to use the basic uploader instead
o('u2nope').onclick = function (e) {
ebi('u2nope').onclick = function (e) {
e.preventDefault();
setmsg('');
goto('bup');
@@ -229,9 +174,9 @@ function up2k_init(have_crypto) {
function cfg_get(name) {
var val = localStorage.getItem(name);
if (val === null)
return parseInt(o(name).value);
return parseInt(ebi(name).value);
o(name).value = val;
ebi(name).value = val;
return val;
}
@@ -242,7 +187,7 @@ function up2k_init(have_crypto) {
else
val = (val == '1');
o(name).checked = val;
ebi(name).checked = val;
return val;
}
@@ -250,7 +195,7 @@ function up2k_init(have_crypto) {
localStorage.setItem(
name, val ? '1' : '0');
o(name).checked = val;
ebi(name).checked = val;
return val;
}
@@ -284,9 +229,9 @@ function up2k_init(have_crypto) {
return un2k("this is the basic uploader; up2k needs at least<br />chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1");
function nav() {
o('file' + fdom_ctr).click();
ebi('file' + fdom_ctr).click();
}
o('u2btn').addEventListener('click', nav, false);
ebi('u2btn').addEventListener('click', nav, false);
function ondrag(ev) {
ev.stopPropagation();
@@ -294,8 +239,8 @@ function up2k_init(have_crypto) {
ev.dataTransfer.dropEffect = 'copy';
ev.dataTransfer.effectAllowed = 'copy';
}
o('u2btn').addEventListener('dragover', ondrag, false);
o('u2btn').addEventListener('dragenter', ondrag, false);
ebi('u2btn').addEventListener('dragover', ondrag, false);
ebi('u2btn').addEventListener('dragenter', ondrag, false);
function gotfile(ev) {
ev.stopPropagation();
@@ -357,7 +302,7 @@ function up2k_init(have_crypto) {
var tr = document.createElement('tr');
tr.innerHTML = '<td id="f{0}n"></td><td id="f{0}t">hashing</td><td id="f{0}p" class="prog"></td>'.format(st.files.length);
tr.getElementsByTagName('td')[0].textContent = entry.name;
o('u2tab').appendChild(tr);
ebi('u2tab').appendChild(tr);
st.files.push(entry);
st.todo.hash.push(entry);
@@ -374,14 +319,14 @@ function up2k_init(have_crypto) {
alert(msg);
}
}
o('u2btn').addEventListener('drop', gotfile, false);
ebi('u2btn').addEventListener('drop', gotfile, false);
function more_one_file() {
fdom_ctr++;
var elm = document.createElement('div')
elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" />'.format(fdom_ctr);
o('u2form').appendChild(elm);
o('file' + fdom_ctr).addEventListener('change', gotfile, false);
ebi('u2form').appendChild(elm);
ebi('file' + fdom_ctr).addEventListener('change', gotfile, false);
}
more_one_file();
@@ -451,17 +396,6 @@ function up2k_init(have_crypto) {
/// hashing
//
// https://gist.github.com/jonleighton/958841
function buf2b64_maybe_fucky(buffer) {
var ret = '';
var view = new DataView(buffer);
for (var i = 0; i < view.byteLength; i++) {
ret += String.fromCharCode(view.getUint8(i));
}
return window.btoa(ret).replace(
/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
}
// https://gist.github.com/jonleighton/958841
function buf2b64(arrayBuffer) {
var base64 = '';
@@ -502,20 +436,6 @@ function up2k_init(have_crypto) {
return base64;
}
// https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/digest
function buf2hex(buffer) {
var hexCodes = [];
var view = new DataView(buffer);
for (var i = 0; i < view.byteLength; i += 4) {
var value = view.getUint32(i) // 4 bytes per iter
var stringValue = value.toString(16) // doesn't pad
var padding = '00000000'
var paddedValue = (padding + stringValue).slice(-padding.length)
hexCodes.push(paddedValue);
}
return hexCodes.join("");
}
function get_chunksize(filesize) {
var chunksize = 1024 * 1024;
var stepsize = 512 * 1024;
@@ -602,7 +522,7 @@ function up2k_init(have_crypto) {
pb_html += '<div id="f{0}p{1}" style="width:{2}%"><div></div></div>'.format(
t.n, a, pb_perc);
o('f{0}p'.format(t.n)).innerHTML = pb_html;
ebi('f{0}p'.format(t.n)).innerHTML = pb_html;
var reader = new FileReader();
@@ -677,7 +597,7 @@ function up2k_init(have_crypto) {
alert('{0} ms, {1} MB/s\n'.format(t.t2 - t.t1, spd.toFixed(3)) + t.hash.join('\n'));
}
o('f{0}t'.format(t.n)).innerHTML = 'connecting';
ebi('f{0}t'.format(t.n)).innerHTML = 'connecting';
st.busy.hash.splice(st.busy.hash.indexOf(t), 1);
st.todo.handshake.push(t);
};
@@ -706,7 +626,7 @@ function up2k_init(have_crypto) {
if (response.name !== t.name) {
// file exists; server renamed us
t.name = response.name;
o('f{0}n'.format(t.n)).textContent = t.name;
ebi('f{0}n'.format(t.n)).textContent = t.name;
}
t.postlist = [];
@@ -736,23 +656,41 @@ function up2k_init(have_crypto) {
msg = 'uploading';
done = false;
}
o('f{0}t'.format(t.n)).innerHTML = msg;
ebi('f{0}t'.format(t.n)).innerHTML = msg;
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
if (done) {
var spd1 = (t.size / ((t.t2 - t.t1) / 1000.)) / (1024 * 1024.);
var spd2 = (t.size / ((t.t3 - t.t2) / 1000.)) / (1024 * 1024.);
o('f{0}p'.format(t.n)).innerHTML = 'hash {0}, up {1} MB/s'.format(
ebi('f{0}p'.format(t.n)).innerHTML = 'hash {0}, up {1} MB/s'.format(
spd1.toFixed(2), spd2.toFixed(2));
}
tasker();
}
else
else {
var err = "";
var rsp = (xhr.responseText + '');
if (rsp.indexOf('partial upload exists') !== -1 ||
rsp.indexOf('file already exists') !== -1) {
err = rsp;
var ofs = err.lastIndexOf(' : ');
if (ofs > 0)
err = err.slice(0, ofs);
}
if (err != "") {
ebi('f{0}t'.format(t.n)).innerHTML = "ERROR";
ebi('f{0}p'.format(t.n)).innerHTML = err;
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
tasker();
return;
}
alert("server broke (error {0}):\n\"{1}\"\n".format(
xhr.status,
(xhr.response && xhr.response.err) ||
(xhr.responseText && xhr.responseText) ||
"no further information"));
}
};
xhr.open('POST', post_url + 'handshake.php', true);
xhr.responseType = 'text';
@@ -803,7 +741,7 @@ function up2k_init(have_crypto) {
t.postlist.splice(t.postlist.indexOf(npart), 1);
if (t.postlist.length == 0) {
t.t3 = new Date().getTime();
o('f{0}t'.format(t.n)).innerHTML = 'verifying';
ebi('f{0}t'.format(t.n)).innerHTML = 'verifying';
st.todo.handshake.push(t);
}
tasker();
@@ -834,7 +772,7 @@ function up2k_init(have_crypto) {
//
function prog(nfile, nchunk, color, percent) {
var n1 = o('f{0}p{1}'.format(nfile, nchunk));
var n1 = ebi('f{0}p{1}'.format(nfile, nchunk));
var n2 = n1.getElementsByTagName('div')[0];
if (percent === undefined) {
n1.style.background = color;
@@ -857,7 +795,7 @@ function up2k_init(have_crypto) {
dir.preventDefault();
} catch (ex) { }
var obj = o('nthread');
var obj = ebi('nthread');
if (dir.target) {
obj.style.background = '#922';
var v = Math.floor(parseInt(obj.value));
@@ -892,19 +830,19 @@ function up2k_init(have_crypto) {
this.click();
}
o('nthread_add').onclick = function (ev) {
ebi('nthread_add').onclick = function (ev) {
ev.preventDefault();
bumpthread(1);
};
o('nthread_sub').onclick = function (ev) {
ebi('nthread_sub').onclick = function (ev) {
ev.preventDefault();
bumpthread(-1);
};
o('nthread').addEventListener('input', bumpthread, false);
o('multitask').addEventListener('click', tgl_multitask, false);
ebi('nthread').addEventListener('input', bumpthread, false);
ebi('multitask').addEventListener('click', tgl_multitask, false);
var nodes = o('u2conf').getElementsByTagName('a');
var nodes = ebi('u2conf').getElementsByTagName('a');
for (var a = nodes.length - 1; a >= 0; a--)
nodes[a].addEventListener('touchend', nop, false);

109
copyparty/web/util.js Normal file
View File

@@ -0,0 +1,109 @@
"use strict";
// error handler for mobile devices:
// replace the whole page body with the error html, then die
function hcroak(msg) {
    document.body.innerHTML = msg;
    window.onerror = undefined; // don't recurse into the handler
    throw 'fatal_err';
}
// plain-text variant of hcroak; rethrows the message itself
function croak(msg) {
    var b = document.body;
    b.textContent = msg;
    window.onerror = undefined; // don't recurse into the handler
    throw msg;
}
// html-escape the four characters that matter in markup/attributes
function esc(txt) {
    var tab = {
        '&': '&amp;',
        '"': '&quot;',
        '<': '&lt;',
        '>': '&gt;'
    };
    return txt.replace(/[&"<>]/g, function (ch) {
        return tab[ch];
    });
}
// window.onerror handler: render the uncaught error visibly in the page
// (useful on mobile browsers with no devtools), then hcroak()
function vis_exh(msg, url, lineNo, columnNo, error) {
    window.onerror = undefined; // one report is enough
    var parts = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
        esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
    if (error) {
        // dump whichever extra fields the browser attached
        var keys = ['desc', 'stack', 'trace'];
        for (var i = 0; i < keys.length; i++) {
            var v = String(error[keys[i]]);
            if (v !== 'undefined')
                parts.push('<h2>' + keys[i] + '</h2>' +
                    esc(v).replace(/\n/g, '<br />\n'));
        }
    }
    document.body.style.fontSize = '0.8em';
    document.body.style.padding = '0 1em 1em 1em';
    hcroak(parts.join('\n'));
}
// short alias for document.getElementById, used all over the frontend
function ebi(id) {
    return document.getElementById(id);
}
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
if (!String.prototype.endsWith) {
    String.prototype.endsWith = function (search, this_len) {
        // clamp the optional end-position to the string length
        var end = (this_len === undefined || this_len > this.length) ?
            this.length : this_len;
        return this.substring(end - search.length, end) === search;
    };
}
// startsWith polyfill for pre-ES6 browsers.
// BUGFIX: the guard used to test `String.startsWith` (always undefined,
// since the method lives on the prototype), so the shim unconditionally
// replaced the native implementation; test the prototype like the
// endsWith polyfill above does
if (!String.prototype.startsWith) {
    String.prototype.startsWith = function (s, i) {
        i = i > 0 ? i | 0 : 0; // negative/NaN start becomes 0
        return this.substring(i, i + s.length) === s;
    };
}
// https://stackoverflow.com/a/950146
// dynamically load a script and invoke cb when it is ready
function import_js(url, cb) {
    var parent = document.head || document.getElementsByTagName('head')[0];
    var tag = document.createElement('script');
    tag.type = 'text/javascript';
    tag.src = url;
    // onreadystatechange covers old IE, onload everything else
    tag.onreadystatechange = cb;
    tag.onload = cb;
    parent.appendChild(tag);
}
// sort the rows of `table` by column `col`; clicking the same column
// again flips the direction (tracked via the header cell's class,
// 'sort1' = currently ascending, 'sort-1' = descending)
function sortTable(table, col) {
var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows
th = table.tHead.rows[0].cells,
tr = Array.prototype.slice.call(tb.rows, 0),
i, reverse = th[col].className == 'sort1' ? -1 : 1;
// clear the sort marker on every header, then tag the active column
for (var a = 0, thl = th.length; a < thl; a++)
th[a].className = '';
th[col].className = 'sort' + reverse;
// an optional sort="int" attribute on the <th> selects numeric compare
var stype = th[col].getAttribute('sort');
tr = tr.sort(function (a, b) {
var v1 = a.cells[col].textContent.trim();
var v2 = b.cells[col].textContent.trim();
if (stype == 'int') {
// strip thousands-separators before parsing
v1 = parseInt(v1.replace(/,/g, ''));
v2 = parseInt(v2.replace(/,/g, ''));
return reverse * (v1 - v2);
}
return reverse * (v1.localeCompare(v2));
});
// re-append in sorted order; appendChild moves the existing nodes
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]);
}
// wire every header cell of `table` to sortTable on click
function makeSortable(table) {
    var head = table.tHead;
    head = head && head.rows[0];
    var cells = head && head.cells;
    if (!cells)
        return; // no `<thead>` then do nothing
    for (var col = cells.length - 1; col >= 0; col--) (function (c) {
        // IIFE pins the column index for the click closure
        cells[c].onclick = function () {
            sortTable(table, c);
        };
    })(col);
}

View File

@@ -1,4 +1,5 @@
#!/bin/bash
repacker=1
set -e
# -- download latest copyparty (source.tgz and sfx),
@@ -19,19 +20,32 @@ set -e
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py
command -v gtar && tar() { gtar "$@"; }
command -v gsed && sed() { gsed "$@"; }
td="$(mktemp -d)"
od="$(pwd)"
cd "$td"
pwd
# debug: if cache exists, use that instead of bothering github
dl_text() {
command -v curl && exec curl "$@"
exec wget -O- "$@"
}
dl_files() {
command -v curl && exec curl -L --remote-name-all "$@"
exec wget "$@"
}
export -f dl_files
# if cache exists, use that instead of bothering github
cache="$od/.copyparty-repack.cache"
[ -e "$cache" ] &&
tar -xvf "$cache" ||
tar -xf "$cache" ||
{
# get download links from github
curl https://api.github.com/repos/9001/copyparty/releases/latest |
dl_text https://api.github.com/repos/9001/copyparty/releases/latest |
(
# prefer jq if available
jq -r '.assets[]|select(.name|test("-sfx|tar.gz")).browser_download_url' ||
@@ -40,10 +54,10 @@ cache="$od/.copyparty-repack.cache"
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
) |
tee /dev/stderr |
tr -d '\r' | tr '\n' '\0' | xargs -0 curl -L --remote-name-all
tr -d '\r' | tr '\n' '\0' |
xargs -0 bash -c 'dl_files "$@"' _
# debug: create cache
#tar -czvf "$cache" *
tar -czf "$cache" *
}
@@ -56,10 +70,21 @@ mv copyparty-*.tar.gz copyparty-extras/
# unpack the source code
( cd copyparty-extras/
tar -xvf *.tar.gz
tar -xf *.tar.gz
)
# use repacker from release if that is newer
p_other=copyparty-extras/copyparty-*/scripts/copyparty-repack.sh
other=$(awk -F= 'BEGIN{v=-1} NR<10&&/^repacker=/{v=$NF} END{print v}' <$p_other)
[ $repacker -lt $other ] &&
cat $p_other >"$od/$0" && cd "$od" && rm -rf "$td" && exec "$0" "$@"
# now drop the cache
rm -f "$cache"
# fix permissions
chmod 755 \
copyparty-extras/sfx-full/* \
@@ -86,8 +111,10 @@ rm -rf copyparty-{0..9}*.*.*{0..9}
)
# and include the repacker itself too
cp -pv "$od/$0" copyparty-extras/
# and include the repacker itself too
cp -av "$od/$0" copyparty-extras/ ||
cp -av "$0" copyparty-extras/ ||
true
# create the bundle

View File

@@ -4,10 +4,10 @@ import os
import time
"""
mkdir -p /dev/shm/fusefuzz/{r,v}
PYTHONPATH=.. python3 -m copyparty -v /dev/shm/fusefuzz/r::r -i 127.0.0.1
../bin/copyparty-fuse.py /dev/shm/fusefuzz/v http://127.0.0.1:3923/ 2 0
(d="$PWD"; cd /dev/shm/fusefuzz && "$d"/fusefuzz.py)
td=/dev/shm/; [ -e $td ] || td=$HOME; mkdir -p $td/fusefuzz/{r,v}
PYTHONPATH=.. python3 -m copyparty -v $td/fusefuzz/r::r -i 127.0.0.1
../bin/copyparty-fuse.py http://127.0.0.1:3923/ $td/fusefuzz/v -cf 2 -cd 0.5
(d="$PWD"; cd $td/fusefuzz && "$d"/fusefuzz.py)
"""

View File

@@ -94,8 +94,39 @@ cd sfx
rm -f ../tar
}
ver="$(awk '/^VERSION *= \(/ {
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < ../copyparty/__version__.py)"
ver=
git describe --tags >/dev/null 2>/dev/null && {
git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//; s/-g?/./g')";
t_ver=
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+$' && {
# short format (exact version number)
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g')";
}
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+-[0-9]+-g[0-9a-f]+$' && {
# long format (unreleased commit)
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g; s/(.*) (.*)/\1 "\2"/')"
}
[ -z "$t_ver" ] && {
printf 'unexpected git version format: [%s]\n' "$git_ver"
exit 1
}
dt="$(git log -1 --format=%cd --date=format:'%Y,%m,%d' | sed -E 's/,0?/, /g')"
printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt"
sed -ri '
s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/;
s/^(S_VERSION =)(.*)/#\1\2\n\1 "'"$ver"'"/;
s/^(BUILD_DT =)(.*)/#\1\2\n\1 ('"$dt"')/;
' copyparty/__version__.py
}
[ -z "$ver" ] &&
ver="$(awk '/^VERSION *= \(/ {
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"
ts=$(date -u +%s)
hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)

View File

@@ -16,15 +16,15 @@ which md5sum 2>/dev/null >/dev/null &&
ver="$1"
[[ "x$ver" == x ]] &&
[ "x$ver" = x ] &&
{
echo "need argument 1: version"
echo
exit 1
}
[[ -e copyparty/__main__.py ]] || cd ..
[[ -e copyparty/__main__.py ]] ||
[ -e copyparty/__main__.py ] || cd ..
[ -e copyparty/__main__.py ] ||
{
echo "run me from within the project root folder"
echo
@@ -35,8 +35,8 @@ mkdir -p dist
zip_path="$(pwd)/dist/copyparty-$ver.zip"
tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz"
[[ -e "$zip_path" ]] ||
[[ -e "$tgz_path" ]] &&
[ -e "$zip_path" ] ||
[ -e "$tgz_path" ] &&
{
echo "found existing archives for this version"
echo " $zip_path"

View File

@@ -18,6 +18,8 @@ this one becomes a hyperlink to ./except/ thanks to
it is a passthrough to the markdown extension api, see https://marked.js.org/using_pro
in addition to the markdown extension functions, `ctor` will be called on document init
### these/
and this one becomes ./except/these/
@@ -36,6 +38,13 @@ whic hshoud be ./except/also-this.md
# ok
now for another extension type, `copyparty_post` which is called to manipulate the generated dom instead
`copyparty_post` can have the following functions, all optional
* `ctor` is called on document init
* `render` is called when the dom is done but still in-memory
* `render2` is called with the live browser dom as-displayed
## post example
the values in the `ex:` columns are linkified to `example.com/$value`
| ex:foo | bar | ex:baz |
@@ -43,6 +52,8 @@ the values in the `ex:` columns are linkified to `example.com/$value`
| asdf | nice | fgsfds |
| more one row | hi hello | aaa |
and the table can be sorted by clicking the headers
the difference is that with `copyparty_pre` you'll probably break various copyparty features but if you use `copyparty_post` then future copyparty versions will probably break you
@@ -123,5 +134,8 @@ render(dom) {
}
}
}
},
render2(dom) {
window.makeSortable(dom.getElementsByTagName('table')[0]);
}
```

View File

@@ -6,6 +6,7 @@ import os
import time
import json
import shutil
import tempfile
import unittest
import subprocess as sp # nosec
@@ -31,9 +32,6 @@ class TestVFS(unittest.TestCase):
response = self.unfoo(response)
self.assertEqual(util.undot(query), response)
def absify(self, root, names):
return ["{}/{}".format(root, x).replace("//", "/") for x in names]
def ls(self, vfs, vpath, uname):
"""helper for resolving and listing a folder"""
vn, rem = vfs.get(vpath, uname, True, False)
@@ -60,23 +58,31 @@ class TestVFS(unittest.TestCase):
if os.path.exists("/Volumes"):
devname, _ = self.chkcmd("hdiutil", "attach", "-nomount", "ram://8192")
devname = devname.strip()
print("devname: [{}]".format(devname))
for _ in range(10):
try:
_, _ = self.chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
_, _ = self.chkcmd(
"diskutil", "eraseVolume", "HFS+", "cptd", devname
)
return "/Volumes/cptd"
except:
print('lol macos')
except Exception as ex:
print(repr(ex))
time.sleep(0.25)
raise Exception("ramdisk creation failed")
raise Exception("TODO support windows")
ret = os.path.join(tempfile.gettempdir(), "copyparty-test")
try:
os.mkdir(ret)
finally:
return ret
def log(self, src, msg):
pass
def test(self):
td = self.get_ramdisk() + "/vfs"
td = os.path.join(self.get_ramdisk(), "vfs")
try:
shutil.rmtree(td)
except OSError:
@@ -107,7 +113,7 @@ class TestVFS(unittest.TestCase):
vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), self.log).vfs
self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td + "/a/ab")
self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab"))
self.assertEqual(vfs.uread, ["*"])
self.assertEqual(vfs.uwrite, [])
@@ -117,7 +123,7 @@ class TestVFS(unittest.TestCase):
).vfs
self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td + "/a/aa")
self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa"))
self.assertEqual(vfs.uread, ["*"])
self.assertEqual(vfs.uwrite, [])
@@ -146,42 +152,63 @@ class TestVFS(unittest.TestCase):
n = n.nodes["acb"]
self.assertEqual(n.nodes, {})
self.assertEqual(n.vpath, "a/ac/acb")
self.assertEqual(n.realpath, td + "/a/ac/acb")
self.assertEqual(n.realpath, os.path.join(td, "a", "ac", "acb"))
self.assertEqual(n.uread, ["k"])
self.assertEqual(n.uwrite, ["*", "k"])
# something funky about the windows path normalization,
# doesn't really matter but makes the test messy, TODO?
fsdir, real, virt = self.ls(vfs, "/", "*")
self.assertEqual(fsdir, td)
self.assertEqual(real, ["b", "c"])
self.assertEqual(list(virt), ["a"])
fsdir, real, virt = self.ls(vfs, "a", "*")
self.assertEqual(fsdir, td + "/a")
self.assertEqual(fsdir, os.path.join(td, "a"))
self.assertEqual(real, ["aa", "ab"])
self.assertEqual(list(virt), ["ac"])
fsdir, real, virt = self.ls(vfs, "a/ab", "*")
self.assertEqual(fsdir, td + "/a/ab")
self.assertEqual(fsdir, os.path.join(td, "a", "ab"))
self.assertEqual(real, ["aba", "abb", "abc"])
self.assertEqual(list(virt), [])
fsdir, real, virt = self.ls(vfs, "a/ac", "*")
self.assertEqual(fsdir, td + "/a/ac")
self.assertEqual(fsdir, os.path.join(td, "a", "ac"))
self.assertEqual(real, ["aca", "acc"])
self.assertEqual(list(virt), [])
fsdir, real, virt = self.ls(vfs, "a/ac", "k")
self.assertEqual(fsdir, td + "/a/ac")
self.assertEqual(fsdir, os.path.join(td, "a", "ac"))
self.assertEqual(real, ["aca", "acc"])
self.assertEqual(list(virt), ["acb"])
self.assertRaises(util.Pebkac, vfs.get, "a/ac/acb", "*", True, False)
fsdir, real, virt = self.ls(vfs, "a/ac/acb", "k")
self.assertEqual(fsdir, td + "/a/ac/acb")
self.assertEqual(fsdir, os.path.join(td, "a", "ac", "acb"))
self.assertEqual(real, ["acba", "acbb", "acbc"])
self.assertEqual(list(virt), [])
# admin-only rootfs with all-read-only subfolder
vfs = AuthSrv(Namespace(c=None, a=["k:k"], v=[".::ak", "a:a:r"]), self.log,).vfs
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td)
self.assertEqual(vfs.uread, ["k"])
self.assertEqual(vfs.uwrite, ["k"])
n = vfs.nodes["a"]
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a")
self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertEqual(n.uread, ["*"])
self.assertEqual(n.uwrite, [])
self.assertEqual(vfs.can_access("/", "*"), [False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True])
self.assertEqual(vfs.can_access("/a", "*"), [True, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False])
# breadth-first construction
vfs = AuthSrv(
Namespace(
@@ -215,20 +242,20 @@ class TestVFS(unittest.TestCase):
self.assertEqual(list(v1), ["a"])
fsp, r1, v1 = self.ls(vfs, "a", "*")
self.assertEqual(fsp, td + "/a")
self.assertEqual(fsp, os.path.join(td, "a"))
self.assertEqual(r1, ["aa", "ab"])
self.assertEqual(list(v1), ["ac"])
fsp1, r1, v1 = self.ls(vfs, "a/ac", "*")
fsp2, r2, v2 = self.ls(vfs, "b", "*")
self.assertEqual(fsp1, td + "/b")
self.assertEqual(fsp2, td + "/b")
self.assertEqual(fsp1, os.path.join(td, "b"))
self.assertEqual(fsp2, os.path.join(td, "b"))
self.assertEqual(r1, ["ba", "bb", "bc"])
self.assertEqual(r1, r2)
self.assertEqual(list(v1), list(v2))
# config file parser
cfg_path = self.get_ramdisk() + "/test.cfg"
cfg_path = os.path.join(self.get_ramdisk(), "test.cfg")
with open(cfg_path, "wb") as f:
f.write(
dedent(
@@ -256,10 +283,11 @@ class TestVFS(unittest.TestCase):
self.assertEqual(len(n.nodes), 1)
n = n.nodes["dst"]
self.assertEqual(n.vpath, "dst")
self.assertEqual(n.realpath, td + "/src")
self.assertEqual(n.realpath, os.path.join(td, "src"))
self.assertEqual(n.uread, ["a", "asd"])
self.assertEqual(n.uwrite, ["asd"])
self.assertEqual(len(n.nodes), 0)
os.chdir(tempfile.gettempdir())
shutil.rmtree(td)
os.unlink(cfg_path)