Compare commits

...

26 Commits

Author SHA1 Message Date
ed  1195b8f17e  v0.10.3  2021-03-29 04:47:59 +02:00
ed  28dce13776  no load-balancer spam when -q  2021-03-28 03:06:52 +02:00
ed  431f20177a  make tar 6x faster (1.8 GiB/s)  2021-03-28 01:50:16 +01:00
ed  87aff54d9d  v0.10.2  2021-03-27 18:03:33 +01:00
ed  f50462de82  persist lead-column sort  2021-03-27 17:56:21 +01:00
ed  9bda8c7eb6  better errlog name  2021-03-27 17:38:59 +01:00
ed  e83c63d239  fix unix permissions in zip files  2021-03-27 17:28:25 +01:00
ed  b38533b0cc  recover from file access errors when zipping  2021-03-27 17:16:59 +01:00
ed  5ccca3fbd5  more  2021-03-27 16:12:47 +01:00
ed  9e850fc3ab  zip selection  2021-03-27 15:48:52 +01:00
ed  ffbfcd7e00  h  2021-03-27 03:35:57 +01:00
ed  5ea7590748  readme: mention zip configs  2021-03-27 03:34:03 +01:00
ed  290c3bc2bb  reclining  2021-03-27 03:07:44 +01:00
ed  b12131e91c  v0.10.1  2021-03-27 02:44:40 +01:00
ed  3b354447b0  v0.10.0  2021-03-27 02:08:07 +01:00
ed  d09ec6feaa  tehe  2021-03-27 01:49:58 +01:00
ed  21405c3fda  be nice to windows  2021-03-27 01:43:02 +01:00
ed  13e5c96cab  finish adding zip-crc (semi-streaming)  2021-03-27 01:27:12 +01:00
ed  426687b75e  archive format selection in browser  2021-03-27 01:10:05 +01:00
ed  c8f59fb978  up2k: add folder upload  2021-03-27 00:20:42 +01:00
ed  871dde79a9  download as tar + utf8 zip + optimize walk  2021-03-26 20:43:25 +01:00
ed  e14d81bc6f  fix utf8 content-disposition  2021-03-26 02:54:19 +01:00
ed  514d046d1f  download folders as zip  2021-03-26 01:51:38 +01:00
ed  4ed9528d36  5x faster reply on 1st req on new conns  2021-03-25 19:29:16 +01:00
ed  625560e642  steal from diodes  2021-03-25 02:59:04 +01:00
ed  73ebd917d1  i know too much about zip now  2021-03-25 02:31:25 +01:00
22 changed files with 947 additions and 87 deletions

View File

@@ -21,6 +21,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [status](#status)
* [bugs](#bugs)
* [usage](#usage)
* [zip downloads](#zip-downloads)
* [searching](#searching)
* [search configuration](#search-configuration)
* [metadata from audio files](#metadata-from-audio-files)
@@ -72,7 +73,7 @@ you may also want these, especially on servers:
* ☑ symlink/discard existing files (content-matching)
* download
* ☑ single files in browser
* folders as zip files
* folders as zip / tar files
* ☑ FUSE client (read-only)
* browser
* ☑ tree-view
@@ -95,6 +96,7 @@ summary: it works! you can use it! (but technically not even close to beta)
* Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* Windows: python 2.7 cannot handle filenames with mojibake
* probably more, pls let me know
@@ -108,6 +110,23 @@ the browser has the following hotkeys
* `P` parent folder
## zip downloads
the `zip` link next to folders can produce various types of zip/tar files using these alternatives in the browser settings tab:
| name | url-suffix | description |
|--|--|--|
| `tar` | `?tar` | plain gnutar, works great with `curl \| tar -xv` |
| `zip` | `?zip=utf8` | works everywhere, glitchy filenames on win7 and older |
| `zip_dos` | `?zip` | traditional cp437 (no unicode) to fix glitchy filenames |
| `zip_crc` | `?zip=crc` | cp437 with crc32 computed early for truly ancient software |
* hidden files (dotfiles) are excluded unless `-ed`
* the up2k.db is always excluded
* `zip_crc` will take longer to download since the server has to read each file twice
* please let me know if you find a program old enough to actually need this
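
the same url-suffixes also work outside the browser; as a rough sketch (the host, port, and folder path below are assumptions for illustration, not part of this changeset), streaming a folder as a plain tar from python could look like this:

```python
# hypothetical example: fetch a folder as an uncompressed tar stream by
# appending the ?tar url-suffix from the table above; server address and
# folder name are made up for illustration
import shutil
import urllib.request

url = "http://127.0.0.1:3923/music/?tar"  # assumed host:port and folder
with urllib.request.urlopen(url) as resp, open("music.tar", "wb") as out:
    # the archive is generated on the fly, so just stream it to disk
    shutil.copyfileobj(resp, out)
```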
# searching
when started with `-e2dsa` copyparty will scan/index all your files. This avoids duplicates on upload, and also makes the volumes searchable through the web-ui:

View File

@@ -177,11 +177,14 @@ def sighandler(signal=None, frame=None):
print("\n".join(msg))
def main():
def main(argv=None):
time.strptime("19970815", "%Y%m%d") # python#7980
if WINDOWS:
os.system("rem") # enables colors
if argv is None:
argv = sys.argv
desc = py_desc().replace("[", "\033[1;30m[")
f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0m\n'
@@ -194,13 +197,13 @@ def main():
deprecated = [["-e2s", "-e2ds"]]
for dk, nk in deprecated:
try:
idx = sys.argv.index(dk)
idx = argv.index(dk)
except:
continue
msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
print(msg.format(dk, nk))
sys.argv[idx] = nk
argv[idx] = nk
time.sleep(2)
ap = argparse.ArgumentParser(
@@ -261,6 +264,7 @@ def main():
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap.add_argument("-nih", action="store_true", help="no info hostname")
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
ap.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile (for debugging)")
ap.add_argument("--no-scandir", action="store_true", help="disable scandir (for debugging)")
ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms")
@@ -289,7 +293,7 @@ def main():
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
al = ap.parse_args()
al = ap.parse_args(args=argv[1:])
# fmt: on
# propagate implications

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (0, 9, 13)
CODENAME = "the strongest music server"
BUILD_DT = (2021, 3, 23)
VERSION = (0, 10, 3)
CODENAME = "zip it"
BUILD_DT = (2021, 3, 29)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import re
import os
import sys
import stat
import threading
from .__init__ import PY2, WINDOWS
@@ -53,6 +54,7 @@ class VFS(object):
self.uwrite,
self.flags,
)
self._trk(vn)
self.nodes[name] = vn
return self._trk(vn.add(src, dst))
@@ -127,6 +129,73 @@ class VFS(object):
return [abspath, real, virt_vis]
def walk(self, rel, rem, uname, dots, scandir, lstat=False):
"""
recursively yields from ./rem;
rel is a unix-style user-defined vpath (not vfs-related)
"""
fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, lstat)
rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
rfiles.sort()
rdirs.sort()
yield rel, fsroot, rfiles, rdirs, vfs_virt
for rdir, _ in rdirs:
if not dots and rdir.startswith("."):
continue
wrel = (rel + "/" + rdir).lstrip("/")
wrem = (rem + "/" + rdir).lstrip("/")
for x in self.walk(wrel, wrem, uname, scandir, lstat):
yield x
for n, vfs in sorted(vfs_virt.items()):
if not dots and n.startswith("."):
continue
wrel = (rel + "/" + n).lstrip("/")
for x in vfs.walk(wrel, "", uname, scandir, lstat):
yield x
def zipgen(self, vrem, flt, uname, dots, scandir):
if flt:
flt = {k: True for k in flt}
for vpath, apath, files, rd, vd in self.walk("", vrem, uname, dots, scandir):
if flt:
files = [x for x in files if x[0] in flt]
rd = [x for x in rd if x[0] in flt]
vd = {x: y for x, y in vd.items() if x in flt}
flt = None
# print(repr([vpath, apath, [x[0] for x in files]]))
fnames = [n[0] for n in files]
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
apaths = [os.path.join(apath, n) for n in fnames]
files = list(zip(vpaths, apaths, files))
if not dots:
# dotfile filtering based on vpath (intended visibility)
files = [x for x in files if "/." not in "/" + x[0]]
rm = [x for x in rd if x[0].startswith(".")]
for x in rm:
rd.remove(x)
rm = [k for k in vd.keys() if k.startswith(".")]
for x in rm:
del vd[x]
# up2k filtering based on actual abspath
files = [x for x in files if "{0}.hist{0}up2k.".format(os.sep) not in x[1]]
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
yield f
def user_tree(self, uname, readable=False, writable=False):
ret = []
opt1 = readable and (uname in self.uread or "*" in self.uread)

View File

@@ -51,7 +51,7 @@ class BrokerMp(object):
self.procs.append(proc)
proc.start()
if True:
if not self.args.q:
thr = threading.Thread(target=self.debug_load_balancer)
thr.daemon = True
thr.start()

View File

@@ -7,6 +7,7 @@ import gzip
import time
import copy
import json
import string
import socket
import ctypes
from datetime import datetime
@@ -14,6 +15,8 @@ import calendar
from .__init__ import E, PY2, WINDOWS
from .util import * # noqa # pylint: disable=unused-wildcard-import
from .szip import StreamZip
from .star import StreamTar
if not PY2:
unicode = str
@@ -52,6 +55,10 @@ class HttpCli(object):
if rem.startswith("/") or rem.startswith("../") or "/../" in rem:
raise Exception("that was close")
def j2(self, name, **kwargs):
tpl = self.conn.hsrv.j2[name]
return tpl.render(**kwargs) if kwargs else tpl
def run(self):
"""returns true if connection can be reused"""
self.keepalive = False
@@ -154,7 +161,9 @@ class HttpCli(object):
try:
# self.log("pebkac at httpcli.run #2: " + repr(ex))
self.keepalive = self._check_nonfatal(ex)
self.loud_reply("{}: {}".format(str(ex), self.vpath), status=ex.code)
self.log("{}\033[0m, {}".format(str(ex), self.vpath), 3)
msg = "<pre>{}\r\nURL: {}\r\n".format(str(ex), self.vpath)
self.reply(msg.encode("utf-8", "replace"), status=ex.code)
return self.keepalive
except Pebkac:
return False
@@ -388,8 +397,30 @@ class HttpCli(object):
if act == "tput":
return self.handle_text_upload()
if act == "zip":
return self.handle_zip_post()
raise Pebkac(422, 'invalid action "{}"'.format(act))
def handle_zip_post(self):
for k in ["zip", "tar"]:
v = self.uparam.get(k)
if v is not None:
break
if v is None:
raise Pebkac(422, "need zip or tar keyword")
vn, rem = self.auth.vfs.get(self.vpath, self.uname, True, False)
items = self.parser.require("files", 1024 * 1024)
if not items:
raise Pebkac(422, "need files list")
items = items.replace("\r", "").split("\n")
items = [unquotep(x) for x in items if items]
return self.tx_zip(k, v, vn, rem, items, self.args.ed)
def handle_post_json(self):
try:
remains = int(self.headers["content-length"])
@@ -417,15 +448,18 @@ class HttpCli(object):
if "srch" in self.uparam or "srch" in body:
return self.handle_search(body)
# prefer this over undot; no reason to allow traversal
if "/" in body["name"]:
raise Pebkac(400, "folders verboten")
# up2k-php compat
for k in "chunkpit.php", "handshake.php":
if self.vpath.endswith(k):
self.vpath = self.vpath[: -len(k)]
sub = None
name = undot(body["name"])
if "/" in name:
sub, name = name.rsplit("/", 1)
self.vpath = "/".join([self.vpath, sub]).strip("/")
body["name"] = name
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
body["vtop"] = vfs.vpath
@@ -434,12 +468,22 @@ class HttpCli(object):
body["addr"] = self.ip
body["vcfg"] = vfs.flags
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
response = x.get()
response = json.dumps(response)
if sub:
try:
dst = os.path.join(vfs.realpath, rem)
os.makedirs(dst)
except:
if not os.path.isdir(dst):
raise Pebkac(400, "some file got your folder name")
self.log(response)
self.reply(response.encode("utf-8"), mime="application/json")
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
ret = x.get()
if sub:
ret["name"] = "/".join([sub, ret["name"]])
ret = json.dumps(ret)
self.log(ret)
self.reply(ret.encode("utf-8"), mime="application/json")
return True
def handle_search(self, body):
@@ -580,7 +624,7 @@ class HttpCli(object):
pwd = "x" # nosec
h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)}
html = self.conn.tpl_msg.render(h1=msg, h2='<a href="/">ack</a>', redir="/")
html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
self.reply(html.encode("utf-8"), headers=h)
return True
@@ -611,7 +655,8 @@ class HttpCli(object):
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
esc_paths = [quotep(vpath), html_escape(vpath)]
html = self.conn.tpl_msg.render(
html = self.j2(
"msg",
h2='<a href="/{}">go to /{}</a>'.format(*esc_paths),
pre="aight",
click=True,
@@ -643,7 +688,8 @@ class HttpCli(object):
f.write(b"`GRUNNUR`\n")
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
html = self.conn.tpl_msg.render(
html = self.j2(
"msg",
h2='<a href="/{}?edit">go to /{}?edit</a>'.format(
quotep(vpath), html_escape(vpath)
),
@@ -749,7 +795,8 @@ class HttpCli(object):
).encode("utf-8")
)
html = self.conn.tpl_msg.render(
html = self.j2(
"msg",
h2='<a href="/{}">return to /{}</a>'.format(
quotep(self.vpath), html_escape(self.vpath)
),
@@ -1037,16 +1084,75 @@ class HttpCli(object):
self.log("{}, {}".format(logmsg, spd))
return ret
def tx_zip(self, fmt, uarg, vn, rem, items, dots):
if self.args.no_zip:
raise Pebkac(400, "not enabled")
logmsg = "{:4} {} ".format("", self.req)
self.keepalive = False
if not uarg:
uarg = ""
if fmt == "tar":
mime = "application/x-tar"
packer = StreamTar
else:
mime = "application/zip"
packer = StreamZip
fn = items[0] if items and items[0] else self.vpath
if fn:
fn = fn.rstrip("/").split("/")[-1]
else:
fn = self.headers.get("host", "hey")
afn = "".join(
[x if x in (string.ascii_letters + string.digits) else "_" for x in fn]
)
bascii = unicode(string.ascii_letters + string.digits).encode("utf-8")
ufn = fn.encode("utf-8", "xmlcharrefreplace")
if PY2:
ufn = [unicode(x) if x in bascii else "%{:02x}".format(ord(x)) for x in ufn]
else:
ufn = [
chr(x).encode("utf-8")
if x in bascii
else "%{:02x}".format(x).encode("ascii")
for x in ufn
]
ufn = b"".join(ufn).decode("ascii")
cdis = "attachment; filename=\"{}.{}\"; filename*=UTF-8''{}.{}"
cdis = cdis.format(afn, fmt, ufn, fmt)
self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis})
fgen = vn.zipgen(rem, items, self.uname, dots, not self.args.no_scandir)
# for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]}))
bgen = packer(fgen, utf8="utf" in uarg, pre_crc="crc" in uarg)
bsent = 0
for buf in bgen.gen():
if not buf:
break
try:
self.s.sendall(buf)
bsent += len(buf)
except:
logmsg += " \033[31m" + unicode(bsent) + "\033[0m"
break
spd = self._spd(bsent)
self.log("{}, {}".format(logmsg, spd))
return True
def tx_md(self, fs_path):
logmsg = "{:4} {} ".format("", self.req)
if "edit2" in self.uparam:
html_path = "web/mde.html"
template = self.conn.tpl_mde
else:
html_path = "web/md.html"
template = self.conn.tpl_md
html_path = os.path.join(E.mod, html_path)
tpl = "mde" if "edit2" in self.uparam else "md"
html_path = os.path.join(E.mod, "web", "{}.html".format(tpl))
template = self.j2(tpl)
st = os.stat(fsenc(fs_path))
# sz_md = st.st_size
@@ -1098,7 +1204,7 @@ class HttpCli(object):
def tx_mounts(self):
rvol = [x + "/" if x else x for x in self.rvol]
wvol = [x + "/" if x else x for x in self.wvol]
html = self.conn.tpl_mounts.render(this=self, rvol=rvol, wvol=wvol)
html = self.j2("splash", this=self, rvol=rvol, wvol=wvol)
self.reply(html.encode("utf-8"))
return True
@@ -1187,6 +1293,11 @@ class HttpCli(object):
return self.tx_file(abspath)
for k in ["zip", "tar"]:
v = self.uparam.get(k)
if v is not None:
return self.tx_zip(k, v, vn, rem, [], self.args.ed)
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
stats = {k: v for k, v in vfs_ls}
vfs_ls = [x[0] for x in vfs_ls]
@@ -1247,8 +1358,11 @@ class HttpCli(object):
is_dir = stat.S_ISDIR(inf.st_mode)
if is_dir:
margin = "DIR"
href += "/"
if self.args.no_zip:
margin = "DIR"
else:
margin = '<a href="{}?zip">zip</a>'.format(quotep(href))
elif fn in hist:
margin = '<a href="{}.hist/{}">#{}</a>'.format(
base, html_escape(hist[fn][2], quote=True), hist[fn][0]
@@ -1372,7 +1486,8 @@ class HttpCli(object):
dirs.extend(files)
html = self.conn.tpl_browser.render(
html = self.j2(
"browser",
vdir=quotep(self.vpath),
vpnodes=vpnodes,
files=dirs,
@@ -1384,6 +1499,7 @@ class HttpCli(object):
),
have_up2k_idx=("e2d" in vn.flags),
have_tags_idx=("e2t" in vn.flags),
have_zip=(not self.args.no_zip),
logues=logues,
title=html_escape(self.vpath),
srv_info=srv_info,

View File

@@ -12,23 +12,6 @@ try:
except:
HAVE_SSL = False
try:
import jinja2
except ImportError:
print(
"""\033[1;31m
you do not have jinja2 installed,\033[33m
choose one of these:\033[0m
* apt install python-jinja2
* {} -m pip install --user jinja2
* (try another python version, if you have one)
* (try copyparty.sfx instead)
""".format(
os.path.basename(sys.executable)
)
)
sys.exit(1)
from .__init__ import E
from .util import Unrecv
from .httpcli import HttpCli
@@ -57,14 +40,6 @@ class HttpConn(object):
self.log_func = hsrv.log
self.set_rproxy()
env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
self.tpl_mounts = env.get_template("splash.html")
self.tpl_browser = env.get_template("browser.html")
self.tpl_msg = env.get_template("msg.html")
self.tpl_md = env.get_template("md.html")
self.tpl_mde = env.get_template("mde.html")
def set_rproxy(self, ip=None):
if ip is None:
color = 36
@@ -112,7 +87,9 @@ class HttpConn(object):
err = "need at least 4 bytes in the first packet; got {}".format(
len(method)
)
self.log(err)
if method:
self.log(err)
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
return

View File

@@ -2,10 +2,28 @@
from __future__ import print_function, unicode_literals
import os
import sys
import time
import socket
import threading
try:
import jinja2
except ImportError:
print(
"""\033[1;31m
you do not have jinja2 installed,\033[33m
choose one of these:\033[0m
* apt install python-jinja2
* {} -m pip install --user jinja2
* (try another python version, if you have one)
* (try copyparty.sfx instead)
""".format(
os.path.basename(sys.executable)
)
)
sys.exit(1)
from .__init__ import E, MACOS
from .httpconn import HttpConn
from .authsrv import AuthSrv
@@ -30,6 +48,13 @@ class HttpSrv(object):
self.workload_thr_alive = False
self.auth = AuthSrv(self.args, self.log)
env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
self.j2 = {
x: env.get_template(x + ".html")
for x in ["splash", "browser", "msg", "md", "mde"]
}
cert_path = os.path.join(E.cfg, "cert.pem")
if os.path.exists(cert_path):
self.cert_path = cert_path

copyparty/star.py (new file, 95 lines)
View File

@@ -0,0 +1,95 @@
import os
import tarfile
import threading
from .sutil import errdesc
from .util import Queue, fsenc
class QFile(object):
"""file-like object which buffers writes into a queue"""
def __init__(self):
self.q = Queue(64)
self.bq = []
self.nq = 0
def write(self, buf):
if buf is None or self.nq >= 240 * 1024:
self.q.put(b"".join(self.bq))
self.bq = []
self.nq = 0
if buf is None:
self.q.put(None)
else:
self.bq.append(buf)
self.nq += len(buf)
class StreamTar(object):
"""construct in-memory tar file from the given path"""
def __init__(self, fgen, **kwargs):
self.ci = 0
self.co = 0
self.qfile = QFile()
self.fgen = fgen
self.errf = None
# python 3.8 changed to PAX_FORMAT as default,
# waste of space and don't care about the new features
fmt = tarfile.GNU_FORMAT
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt)
w = threading.Thread(target=self._gen)
w.daemon = True
w.start()
def gen(self):
while True:
buf = self.qfile.q.get()
if not buf:
break
self.co += len(buf)
yield buf
yield None
if self.errf:
os.unlink(self.errf["ap"])
def ser(self, f):
name = f["vp"]
src = f["ap"]
fsi = f["st"]
inf = tarfile.TarInfo(name=name)
inf.mode = fsi.st_mode
inf.size = fsi.st_size
inf.mtime = fsi.st_mtime
inf.uid = 0
inf.gid = 0
self.ci += inf.size
with open(fsenc(src), "rb", 512 * 1024) as f:
self.tar.addfile(inf, f)
def _gen(self):
errors = []
for f in self.fgen:
if "err" in f:
errors.append([f["vp"], f["err"]])
continue
try:
self.ser(f)
except Exception as ex:
errors.append([f["vp"], repr(ex)])
if errors:
self.errf = errdesc(errors)
self.ser(self.errf)
self.tar.close()
self.qfile.write(None)

copyparty/sutil.py (new file, 25 lines)
View File

@@ -0,0 +1,25 @@
import os
import time
import tempfile
from datetime import datetime
def errdesc(errors):
report = ["copyparty failed to add the following files to the archive:", ""]
for fn, err in errors:
report.extend([" file: {}".format(fn), "error: {}".format(err), ""])
with tempfile.NamedTemporaryFile(prefix="copyparty-", delete=False) as tf:
tf_path = tf.name
tf.write("\r\n".join(report).encode("utf-8", "replace"))
dt = datetime.utcfromtimestamp(time.time())
dt = dt.strftime("%Y-%m%d-%H%M%S")
os.chmod(tf_path, 0o444)
return {
"vp": "archive-errors-{}.txt".format(dt),
"ap": tf_path,
"st": os.stat(tf_path),
}

copyparty/szip.py (new file, 271 lines)
View File

@@ -0,0 +1,271 @@
import os
import time
import zlib
import struct
from datetime import datetime
from .sutil import errdesc
from .util import yieldfile, sanitize_fn
def dostime2unix(buf):
t, d = struct.unpack("<HH", buf)
ts = (t & 0x1F) * 2
tm = (t >> 5) & 0x3F
th = t >> 11
dd = d & 0x1F
dm = (d >> 5) & 0xF
dy = (d >> 9) + 1980
tt = (dy, dm, dd, th, tm, ts)
tf = "{:04d}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}"
iso = tf.format(*tt)
dt = datetime.strptime(iso, "%Y-%m-%d %H:%M:%S")
return int(dt.timestamp())
def unixtime2dos(ts):
tt = time.gmtime(ts)
dy, dm, dd, th, tm, ts = list(tt)[:6]
bd = ((dy - 1980) << 9) + (dm << 5) + dd
bt = (th << 11) + (tm << 5) + ts // 2
return struct.pack("<HH", bt, bd)
def gen_fdesc(sz, crc32, z64):
ret = b"\x50\x4b\x07\x08"
fmt = "<LQQ" if z64 else "<LLL"
ret += struct.pack(fmt, crc32, sz, sz)
return ret
def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
"""
does regular file headers
and the central directory meme if h_pos is set
(h_pos = absolute position of the regular header)
"""
# appnote 4.5 / zip 3.0 (2008) / unzip 6.0 (2009) says to add z64
# extinfo for values which exceed H, but that becomes an off-by-one
# (can't tell if it was clamped or exactly maxval), make it obvious
z64 = sz >= 0xFFFFFFFF
z64v = [sz, sz] if z64 else []
if h_pos and h_pos >= 0xFFFFFFFF:
# central, also consider ptr to original header
z64v.append(h_pos)
# confusingly this doesn't bump if h_pos
req_ver = b"\x2d\x00" if z64 else b"\x0a\x00"
if crc32:
crc32 = struct.pack("<L", crc32)
else:
crc32 = b"\x00" * 4
if h_pos is None:
# 4b magic, 2b min-ver
ret = b"\x50\x4b\x03\x04" + req_ver
else:
# 4b magic, 2b spec-ver, 2b min-ver
ret = b"\x50\x4b\x01\x02\x1e\x03" + req_ver
ret += b"\x00" if pre_crc else b"\x08" # streaming
ret += b"\x08" if utf8 else b"\x00" # appnote 6.3.2 (2007)
# 2b compression, 4b time, 4b crc
ret += b"\x00\x00" + unixtime2dos(lastmod) + crc32
# spec says to put zeros when !crc if bit3 (streaming)
# however infozip does actual sz and it even works on winxp
# (same reasoning for z64 extradata later)
vsz = 0xFFFFFFFF if z64 else sz
ret += struct.pack("<LL", vsz, vsz)
# windows support (the "?" replace below too)
fn = sanitize_fn(fn, "/")
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
z64_len = len(z64v) * 8 + 4 if z64v else 0
ret += struct.pack("<HH", len(bfn), z64_len)
if h_pos is not None:
# 2b comment, 2b diskno
ret += b"\x00" * 4
# 2b internal.attr, 4b external.attr
# infozip-macos: 0100 0000 a481 file:644
# infozip-macos: 0100 0100 0080 file:000
ret += b"\x01\x00\x00\x00\xa4\x81"
# 4b local-header-ofs
ret += struct.pack("<L", min(h_pos, 0xFFFFFFFF))
ret += bfn
if z64v:
ret += struct.pack("<HH" + "Q" * len(z64v), 1, len(z64v) * 8, *z64v)
return ret
def gen_ecdr(items, cdir_pos, cdir_end):
"""
summary of all file headers,
usually the zipfile footer unless something clamps
"""
ret = b"\x50\x4b\x05\x06"
# 2b ndisk, 2b disk0
ret += b"\x00" * 4
cdir_sz = cdir_end - cdir_pos
nitems = min(0xFFFF, len(items))
csz = min(0xFFFFFFFF, cdir_sz)
cpos = min(0xFFFFFFFF, cdir_pos)
need_64 = nitems == 0xFFFF or 0xFFFFFFFF in [csz, cpos]
# 2b tnfiles, 2b dnfiles, 4b dir sz, 4b dir pos
ret += struct.pack("<HHLL", nitems, nitems, csz, cpos)
# 2b comment length
ret += b"\x00\x00"
return [ret, need_64]
def gen_ecdr64(items, cdir_pos, cdir_end):
"""
z64 end of central directory
added when numfiles or a headerptr clamps
"""
ret = b"\x50\x4b\x06\x06"
# 8b own length from hereon
ret += b"\x2c" + b"\x00" * 7
# 2b spec-ver, 2b min-ver
ret += b"\x1e\x03\x2d\x00"
# 4b ndisk, 4b disk0
ret += b"\x00" * 8
# 8b tnfiles, 8b dnfiles, 8b dir sz, 8b dir pos
cdir_sz = cdir_end - cdir_pos
ret += struct.pack("<QQQQ", len(items), len(items), cdir_sz, cdir_pos)
return ret
def gen_ecdr64_loc(ecdr64_pos):
"""
z64 end of central directory locator
points to ecdr64
why
"""
ret = b"\x50\x4b\x06\x07"
# 4b cdisk, 8b start of ecdr64, 4b ndisks
ret += struct.pack("<LQL", 0, ecdr64_pos, 1)
return ret
class StreamZip(object):
def __init__(self, fgen, utf8=False, pre_crc=False):
self.fgen = fgen
self.utf8 = utf8
self.pre_crc = pre_crc
self.pos = 0
self.items = []
def _ct(self, buf):
self.pos += len(buf)
return buf
def ser(self, f):
name = f["vp"]
src = f["ap"]
st = f["st"]
sz = st.st_size
ts = st.st_mtime + 1
crc = None
if self.pre_crc:
crc = 0
for buf in yieldfile(src):
crc = zlib.crc32(buf, crc)
crc &= 0xFFFFFFFF
h_pos = self.pos
buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc)
yield self._ct(buf)
crc = crc or 0
for buf in yieldfile(src):
if not self.pre_crc:
crc = zlib.crc32(buf, crc)
yield self._ct(buf)
crc &= 0xFFFFFFFF
self.items.append([name, sz, ts, crc, h_pos])
z64 = sz >= 4 * 1024 * 1024 * 1024
if z64 or not self.pre_crc:
buf = gen_fdesc(sz, crc, z64)
yield self._ct(buf)
def gen(self):
errors = []
for f in self.fgen:
if "err" in f:
errors.append([f["vp"], f["err"]])
continue
try:
for x in self.ser(f):
yield x
except Exception as ex:
errors.append([f["vp"], repr(ex)])
if errors:
errf = errdesc(errors)
print(repr(errf))
for x in self.ser(errf):
yield x
cdir_pos = self.pos
for name, sz, ts, crc, h_pos in self.items:
buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
yield self._ct(buf)
cdir_end = self.pos
_, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end)
if need_64:
ecdir64_pos = self.pos
buf = gen_ecdr64(self.items, cdir_pos, cdir_end)
yield self._ct(buf)
buf = gen_ecdr64_loc(ecdir64_pos)
yield self._ct(buf)
ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
yield self._ct(ecdr)
if errors:
os.unlink(errf["ap"])

View File

@@ -232,7 +232,8 @@ class Up2k(object):
(ft if v is True else ff if v is False else fv).format(k, str(v))
for k, v in flags.items()
]
self.log(" ".join(sorted(a)) + "\033[0m")
if a:
self.log(" ".join(sorted(a)) + "\033[0m")
reg = {}
path = os.path.join(ptop, ".hist", "up2k.snap")
@@ -1309,6 +1310,7 @@ class Up2k(object):
self.log("no cursor to write tags with??", c=1)
continue
# TODO is undef if vol 404 on startup
entags = self.entags[ptop]
if not entags:
self.log("no entags okay.jpg", c=3)

View File

@@ -576,11 +576,12 @@ def undot(path):
return "/".join(ret)
def sanitize_fn(fn):
fn = fn.replace("\\", "/").split("/")[-1]
def sanitize_fn(fn, ok=""):
if "/" not in ok:
fn = fn.replace("\\", "/").split("/")[-1]
if WINDOWS:
for bad, good in [
for bad, good in [x for x in [
["<", ""],
[">", ""],
[":", ""],
@@ -590,7 +591,7 @@ def sanitize_fn(fn):
["|", ""],
["?", ""],
["*", ""],
]:
] if x[0] not in ok]:
fn = fn.replace(bad, good)
bad = ["con", "prn", "aux", "nul"]
@@ -780,6 +781,16 @@ def read_socket_chunked(sr, log=None):
sr.recv(2) # \r\n after each chunk too
def yieldfile(fn):
with open(fsenc(fn), "rb", 512 * 1024) as f:
while True:
buf = f.read(64 * 1024)
if not buf:
break
yield buf
def hashcopy(actor, fin, fout):
u32_lim = int((2 ** 31) * 0.9)
hashobj = hashlib.sha512()

View File

@@ -182,6 +182,11 @@ a, #files tbody div a:last-child {
color: #840;
text-shadow: 0 0 .3em #b80;
}
#files tbody tr.sel td {
background: #80b;
color: #fff;
border-color: #a3d;
}
#blocked {
position: fixed;
top: 0;
@@ -268,6 +273,25 @@ a, #files tbody div a:last-child {
padding: .2em 0 0 .07em;
color: #fff;
}
#wtoggle>span {
display: none;
}
#wtoggle.sel {
width: 4.27em;
}
#wtoggle.sel>span {
display: inline-block;
line-height: 0;
}
#wtoggle.sel>span a {
font-size: .4em;
margin: -.3em 0;
position: relative;
display: inline-block;
}
#wtoggle.sel>span #selzip {
top: -.6em;
}
#barpos,
#barbuf {
position: absolute;

View File

@@ -41,10 +41,12 @@
<div id="op_cfg" class="opview opbox">
<h3>key notation</h3>
<div id="key_notation"></div>
{%- if have_zip %}
<h3>folder download</h3>
<div id="arc_fmt"></div>
{%- endif %}
<h3>tooltips</h3>
<div>
<a id="tooltips" class="tglbtn" href="#">enable</a>
</div>
<div><a id="tooltips" class="tglbtn" href="#">enable</a></div>
</div>
<h1 id="path">
@@ -70,7 +72,7 @@
<table id="files">
<thead>
<tr>
<th></th>
<th name="lead"><span>c</span></th>
<th name="href"><span>File Name</span></th>
<th name="sz" sort="int"><span>Size</span></th>
{%- for k in taglist %}
@@ -110,7 +112,14 @@
{%- endif %}
<div id="widget">
<div id="wtoggle"></div>
<div id="wtoggle">
<span>
<a href="#" id="selall">sel.<br />all</a>
<a href="#" id="selinv">sel.<br />inv.</a>
<a href="#" id="selzip">zip</a>
</span>
</div>
<div id="widgeti">
<div id="pctl"><a href="#" id="bprev"></a><a href="#" id="bplay"></a><a href="#" id="bnext"></a></div>
<canvas id="pvol" width="288" height="38"></canvas>

View File

@@ -833,7 +833,7 @@ document.onkeydown = function (e) {
v = r.tags[k] || "";
if (k == ".dur") {
var sv = s2ms(v);
var sv = v ? s2ms(v) : "";
nodes[nodes.length - 1] += '</td><td sortv="' + v + '">' + sv;
continue;
}
@@ -1091,6 +1091,9 @@ var treectl = (function () {
try {
for (var a = sopts.length - 1; a >= 0; a--) {
var name = sopts[a][0], rev = sopts[a][1], typ = sopts[a][2];
if (!name)
continue;
if (name.indexOf('tags/') == -1) {
nodes.sort(function (v1, v2) {
if (!v1[name]) return -1 * rev;
@@ -1125,7 +1128,7 @@ var treectl = (function () {
v = (r.tags || {})[k] || "";
if (k == ".dur") {
var sv = s2ms(v);
var sv = v ? s2ms(v) : "";
ln[ln.length - 1] += '</td><td sortv="' + v + '">' + sv;
continue;
}
@@ -1149,6 +1152,7 @@ var treectl = (function () {
filecols.set_style();
mukey.render();
msel.render();
reload_tree();
reload_browser();
}
@@ -1304,7 +1308,7 @@ function find_file_col(txt) {
function mk_files_header(taglist) {
var html = [
'<thead>',
'<th></th>',
'<th name="lead"><span>c</span></th>',
'<th name="href"><span>File Name</span></th>',
'<th name="sz" sort="int"><span>Size</span></th>'
];
@@ -1407,8 +1411,8 @@ var filecols = (function () {
if (!min)
for (var a = 0, aa = rows.length; a < aa; a++) {
var c = rows[a].cells[i];
if (c)
var v = c.textContent = s2ms(c.textContent);
if (c && c.textContent)
c.textContent = s2ms(c.textContent);
}
}
catch (ex) { }
@@ -1559,6 +1563,141 @@ function addcrc() {
})();
var arcfmt = (function () {
if (!ebi('arc_fmt'))
return { "render": function () { } };
var html = [],
arcfmts = ["tar", "zip", "zip_dos", "zip_crc"],
arcv = ["tar", "zip=utf8", "zip", "zip=crc"];
for (var a = 0; a < arcfmts.length; a++) {
var k = arcfmts[a];
html.push(
'<span><input type="radio" name="arcfmt" value="' + k + '" id="arcfmt_' + k + '">' +
'<label for="arcfmt_' + k + '">' + k + '</label></span>');
}
ebi('arc_fmt').innerHTML = html.join('\n');
var fmt = sread("arc_fmt") || "zip";
ebi('arcfmt_' + fmt).checked = true;
function render() {
var arg = arcv[arcfmts.indexOf(fmt)],
tds = document.querySelectorAll('#files tbody td:first-child a');
for (var a = 0, aa = tds.length; a < aa; a++) {
var o = tds[a], txt = o.textContent, href = o.getAttribute('href');
if (txt != 'tar' && txt != 'zip')
continue;
var ofs = href.lastIndexOf('?');
if (ofs < 0)
throw 'missing arg in url';
o.setAttribute("href", href.slice(0, ofs + 1) + arg);
o.textContent = fmt.split('_')[0];
}
ebi('selzip').textContent = fmt.split('_')[0];
ebi('selzip').setAttribute('fmt', arg);
}
function try_render() {
try {
render();
}
catch (ex) {
console.log("arcfmt failed: " + ex);
}
}
function change_fmt(e) {
ev(e);
fmt = this.getAttribute('value');
swrite("arc_fmt", fmt);
try_render();
}
var o = document.querySelectorAll('#arc_fmt input');
for (var a = 0; a < o.length; a++) {
o[a].onchange = change_fmt;
}
return {
"render": try_render
};
})();
var msel = (function () {
function getsel() {
var names = [];
var links = document.querySelectorAll('#files tbody tr.sel td:nth-child(2) a');
for (var a = 0, aa = links.length; a < aa; a++)
names.push(links[a].getAttribute('href').replace(/\/$/, "").split('/').slice(-1));
return names;
}
function selui() {
var fun = getsel().length ? "add" : "remove";
ebi('wtoggle').classList[fun]('sel');
}
function seltgl(e) {
ev(e);
var tr = this.parentNode;
tr.classList.toggle('sel');
selui();
}
function evsel(e, fun) {
ev(e);
var trs = document.querySelectorAll('#files tbody tr');
for (var a = 0, aa = trs.length; a < aa; a++)
trs[a].classList[fun]('sel');
selui();
}
ebi('selall').onclick = function (e) {
evsel(e, "add");
};
ebi('selinv').onclick = function (e) {
evsel(e, "toggle");
};
ebi('selzip').onclick = function (e) {
ev(e);
var names = getsel();
var arg = ebi('selzip').getAttribute('fmt');
var txt = names.join('\n');
var frm = document.createElement('form');
frm.setAttribute('action', '?' + arg);
frm.setAttribute('method', 'post');
frm.setAttribute('target', '_blank');
frm.setAttribute('enctype', 'multipart/form-data');
frm.innerHTML = '<input name="act" value="zip" />' +
'<textarea name="files" id="ziptxt"></textarea>';
frm.style.display = 'none';
var oldform = document.querySelector('#widgeti>form');
if (oldform)
oldform.parentNode.removeChild(oldform);
ebi('widgeti').appendChild(frm);
var obj = ebi('ziptxt');
obj.value = txt;
console.log(txt);
frm.submit();
};
function render() {
var tds = document.querySelectorAll('#files tbody td+td+td');
for (var a = 0, aa = tds.length; a < aa; a++) {
tds[a].onclick = seltgl;
}
arcfmt.render();
}
return {
"render": render
};
})();
function ev_row_tgl(e) {
ev(e);
filecols.toggle(this.parentElement.parentElement.getElementsByTagName('span')[0].textContent);
@@ -1611,3 +1750,4 @@ function reload_browser(not_mp) {
}
reload_browser(true);
mukey.render();
msel.render();

View File

@@ -147,7 +147,7 @@ var md_opt = {
</script>
<script src="/.cpr/util.js"></script>
<script src="/.cpr/deps/marked.full.js"></script>
<script src="/.cpr/deps/marked.js"></script>
<script src="/.cpr/md.js"></script>
{%- if edit %}
<script src="/.cpr/md2.js"></script>

View File

@@ -278,18 +278,27 @@ function up2k_init(have_crypto) {
}
else files = e.target.files;
if (files.length == 0)
if (!files || files.length == 0)
return alert('no files selected??');
more_one_file();
var bad_files = [];
var good_files = [];
var dirs = [];
for (var a = 0; a < files.length; a++) {
var fobj = files[a];
if (is_itemlist) {
if (fobj.kind !== 'file')
continue;
try {
var wi = fobj.webkitGetAsEntry();
if (wi.isDirectory) {
dirs.push(wi);
continue;
}
}
catch (ex) { }
fobj = fobj.getAsFile();
}
try {
@@ -300,12 +309,69 @@ function up2k_init(have_crypto) {
bad_files.push(fobj.name);
continue;
}
good_files.push(fobj);
good_files.push([fobj, fobj.name]);
}
if (dirs) {
return read_dirs(null, [], dirs, good_files, bad_files);
}
}
function read_dirs(rd, pf, dirs, good, bad) {
if (!dirs.length) {
if (!pf.length)
return gotallfiles(good, bad);
console.log("retry pf, " + pf.length);
setTimeout(function () {
read_dirs(rd, pf, dirs, good, bad);
}, 50);
return;
}
if (!rd)
rd = dirs[0].createReader();
rd.readEntries(function (ents) {
var ngot = 0;
ents.forEach(function (dn) {
if (dn.isDirectory) {
dirs.push(dn);
}
else {
var name = dn.fullPath;
if (name.indexOf('/') === 0)
name = name.slice(1);
pf.push(name);
dn.file(function (fobj) {
var idx = pf.indexOf(name);
pf.splice(idx, 1);
try {
if (fobj.size > 0) {
good.push([fobj, name]);
return;
}
}
catch (ex) { }
bad.push(name);
});
}
ngot += 1;
});
// console.log("ngot: " + ngot);
if (!ngot) {
dirs.shift();
rd = null;
}
return read_dirs(rd, pf, dirs, good, bad);
});
}
function gotallfiles(good_files, bad_files) {
if (bad_files.length > 0) {
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length);
for (var a = 0; a < bad_files.length; a++)
var ntot = bad_files.length + good_files.length;
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, ntot);
for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++)
msg += '-- ' + bad_files[a] + '\n';
if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))
@@ -315,21 +381,21 @@ function up2k_init(have_crypto) {
}
var msg = ['upload these ' + good_files.length + ' files?'];
for (var a = 0; a < good_files.length; a++)
msg.push(good_files[a].name);
for (var a = 0, aa = Math.min(20, good_files.length); a < aa; a++)
msg.push(good_files[a][1]);
if (ask_up && !fsearch && !confirm(msg.join('\n')))
return;
for (var a = 0; a < good_files.length; a++) {
var fobj = good_files[a];
var fobj = good_files[a][0];
var now = new Date().getTime();
var lmod = fobj.lastModified || now;
var entry = {
"n": parseInt(st.files.length.toString()),
"t0": now, // TODO remove probably
"t0": now,
"fobj": fobj,
"name": fobj.name,
"name": good_files[a][1],
"size": fobj.size,
"lmod": lmod / 1000,
"purl": get_evpath(),

View File

@@ -88,7 +88,7 @@
width: 30em;
}
#u2conf.has_btn {
width: 46em;
width: 48em;
}
#u2conf * {
text-align: center;

View File

@@ -73,7 +73,8 @@
<div id="u2btn_ct">
<div id="u2btn">
<span id="u2bm"></span><br />
drop files here<br />
drag/drop files<br />
and folders here<br />
(or click me)
</div>
</div>

View File

@@ -237,7 +237,10 @@ function goto(dest) {
goto();
var op = sread('opmode');
if (op !== null && op !== '.')
goto(op);
try {
goto(op);
}
catch (ex) { }
})();

View File

@@ -83,6 +83,9 @@ sqlite3 up2k.db 'select mt1.w, mt1.k, mt1.v, mt2.v from mt mt1 inner join mt mt2
time sqlite3 up2k.db 'select mt1.w from mt mt1 inner join mt mt2 on mt1.w = mt2.w where mt1.k = +mt2.k and mt1.rowid != mt2.rowid' > warks
cat warks | while IFS= read -r x; do sqlite3 up2k.db "delete from mt where w = '$x'"; done
# dump all dbs
find -iname up2k.db | while IFS= read -r x; do sqlite3 "$x" 'select substr(w,1,12), rd, fn from up' | sed -r 's/\|/ \| /g' | while IFS= read -r y; do printf '%s | %s\n' "$x" "$y"; done; done
##
## media