mirror of https://github.com/9001/copyparty.git
(synced 2025-10-31 12:03:32 +00:00)

Compare commits

20 Commits
| SHA1 |
|---|
| b12131e91c |
| 3b354447b0 |
| d09ec6feaa |
| 21405c3fda |
| 13e5c96cab |
| 426687b75e |
| c8f59fb978 |
| 871dde79a9 |
| e14d81bc6f |
| 514d046d1f |
| 4ed9528d36 |
| 625560e642 |
| 73ebd917d1 |
| cd3e0afad2 |
| d8d1f94a86 |
| 00dfd8cfd1 |
| 273de6db31 |
| c6c0eeb0ff |
| e70c74a3b5 |
| f7d939eeab | 
| @@ -72,7 +72,7 @@ you may also want these, especially on servers: | ||||
|   * ☑ symlink/discard existing files (content-matching) | ||||
| * download | ||||
|   * ☑ single files in browser | ||||
|   * ✖ folders as zip files | ||||
|   * ☑ folders as zip / tar files | ||||
|   * ☑ FUSE client (read-only) | ||||
| * browser | ||||
|   * ☑ tree-view | ||||
|   | ||||
| @@ -261,6 +261,7 @@ def main(): | ||||
|     ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)") | ||||
|     ap.add_argument("-nih", action="store_true", help="no info hostname") | ||||
|     ap.add_argument("-nid", action="store_true", help="no info disk-usage") | ||||
|     ap.add_argument("--no-zip", action="store_true", help="disable download as zip/tar") | ||||
|     ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile (for debugging)") | ||||
|     ap.add_argument("--no-scandir", action="store_true", help="disable scandir (for debugging)") | ||||
|     ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms") | ||||
|   | ||||
| @@ -1,8 +1,8 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| VERSION = (0, 9, 10) | ||||
| CODENAME = "the strongest music server" | ||||
| BUILD_DT = (2021, 3, 21) | ||||
| VERSION = (0, 10, 1) | ||||
| CODENAME = "zip it" | ||||
| BUILD_DT = (2021, 3, 27) | ||||
|  | ||||
| S_VERSION = ".".join(map(str, VERSION)) | ||||
| S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT) | ||||
|   | ||||
| @@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals | ||||
| import re | ||||
| import os | ||||
| import sys | ||||
| import stat | ||||
| import threading | ||||
|  | ||||
| from .__init__ import PY2, WINDOWS | ||||
| @@ -22,6 +23,14 @@ class VFS(object): | ||||
|         self.nodes = {}  # child nodes | ||||
|         self.all_vols = {vpath: self}  # flattened recursive | ||||
|  | ||||
|     def __repr__(self): | ||||
|         return "VFS({})".format( | ||||
|             ", ".join( | ||||
|                 "{}={!r}".format(k, self.__dict__[k]) | ||||
|                 for k in "realpath vpath uread uwrite flags".split() | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|     def _trk(self, vol): | ||||
|         self.all_vols[vol.vpath] = vol | ||||
|         return vol | ||||
| @@ -45,6 +54,7 @@ class VFS(object): | ||||
|                 self.uwrite, | ||||
|                 self.flags, | ||||
|             ) | ||||
|             self._trk(vn) | ||||
|             self.nodes[name] = vn | ||||
|             return self._trk(vn.add(src, dst)) | ||||
|  | ||||
| @@ -119,6 +129,80 @@ class VFS(object): | ||||
|  | ||||
|         return [abspath, real, virt_vis] | ||||
|  | ||||
|     def walk(self, rel, rem, uname, dots, scandir, lstat=False): | ||||
|         """ | ||||
|         recursively yields from ./rem; | ||||
|         rel is a unix-style user-defined vpath (not vfs-related) | ||||
|         """ | ||||
|  | ||||
|         fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, lstat) | ||||
|         rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)] | ||||
|         rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)] | ||||
|  | ||||
|         rfiles.sort() | ||||
|         rdirs.sort() | ||||
|  | ||||
|         yield rel, fsroot, rfiles, rdirs, vfs_virt | ||||
|  | ||||
|         for rdir, _ in rdirs: | ||||
|             if not dots and rdir.startswith("."): | ||||
|                 continue | ||||
|  | ||||
|             wrel = (rel + "/" + rdir).lstrip("/") | ||||
|             wrem = (rem + "/" + rdir).lstrip("/") | ||||
|             for x in self.walk(wrel, wrem, uname, dots, scandir, lstat): | ||||
|                 yield x | ||||
|  | ||||
|         for n, vfs in sorted(vfs_virt.items()): | ||||
|             if not dots and n.startswith("."): | ||||
|                 continue | ||||
|  | ||||
|             wrel = (rel + "/" + n).lstrip("/") | ||||
|             for x in vfs.walk(wrel, "", uname, dots, scandir, lstat): | ||||
|                 yield x | ||||
|  | ||||
|     def zipgen(self, vrem, rems, uname, dots, scandir): | ||||
|         vtops = [["", [self, vrem]]] | ||||
|         if rems: | ||||
|             # list of subfolders to zip was provided, | ||||
|             # add all the ones uname is allowed to access | ||||
|             vtops = [] | ||||
|             for rem in rems: | ||||
|                 try: | ||||
|                     d = rem if not vrem else vrem + "/" + rem | ||||
|                     vn = self.get(d, uname, True, False) | ||||
|                     vtops.append([rem, vn]) | ||||
|                 except: | ||||
|                     pass | ||||
|  | ||||
|         for rel, (vn, rem) in vtops: | ||||
|             for vpath, apath, files, rd, vd in vn.walk(rel, rem, uname, dots, scandir): | ||||
|                 # print(repr([vpath, apath, [x[0] for x in files]])) | ||||
|                 fnames = [n[0] for n in files] | ||||
|                 vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames | ||||
|                 apaths = [os.path.join(apath, n) for n in fnames] | ||||
|                 files = list(zip(vpaths, apaths, files)) | ||||
|  | ||||
|                 if not dots: | ||||
|                     # dotfile filtering based on vpath (intended visibility) | ||||
|                     files = [x for x in files if "/." not in "/" + x[0]] | ||||
|  | ||||
|                     rm = [x for x in rd if x[0].startswith(".")] | ||||
|                     for x in rm: | ||||
|                         rd.remove(x) | ||||
|  | ||||
|                     rm = [k for k in vd.keys() if k.startswith(".")] | ||||
|                     for x in rm: | ||||
|                         del vd[x] | ||||
|  | ||||
|                 # up2k filtering based on actual abspath | ||||
|                 files = [ | ||||
|                     x for x in files if "{0}.hist{0}up2k.".format(os.sep) not in x[1] | ||||
|                 ] | ||||
|  | ||||
|                 for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]: | ||||
|                     yield f | ||||
|  | ||||
|     def user_tree(self, uname, readable=False, writable=False): | ||||
|         ret = [] | ||||
|         opt1 = readable and (uname in self.uread or "*" in self.uread) | ||||
| @@ -343,6 +427,21 @@ class AuthSrv(object): | ||||
|             # append parsers from argv to volume-flags | ||||
|             self._read_volflag(vol.flags, "mtp", self.args.mtp, True) | ||||
|  | ||||
|             # d2d drops all database features for a volume | ||||
|             for grp, rm in [["d2d", "e2d"], ["d2t", "e2t"]]: | ||||
|                 if not vol.flags.get(grp, False): | ||||
|                     continue | ||||
|  | ||||
|                 vol.flags["d2t"] = True | ||||
|                 vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)} | ||||
|  | ||||
|             # mt* needs e2t so drop those too | ||||
|             for grp, rm in [["e2t", "mt"]]: | ||||
|                 if vol.flags.get(grp, False): | ||||
|                     continue | ||||
|  | ||||
|                 vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)} | ||||
|  | ||||
|             # verify tags mentioned by -mt[mp] are used by -mte | ||||
|             local_mtp = {} | ||||
|             local_only_mtp = {} | ||||
|   | ||||
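
The new `zipgen()` above is what feeds both archive packers: for every file it yields a dict with `vp` (the path the entry gets inside the archive), `ap` (the absolute path on disk) and `st` (the stat result used for size and mtime). A minimal stand-in generator with the same shape, useful for exercising the packers outside of copyparty; the plain `os.walk` below is an illustration of that contract, not the VFS logic in the diff:

```python
# illustrative stand-in for zipgen(): same vp/ap/st dict contract, plain os.walk
import os

def fake_fgen(root):
    for dirpath, _, filenames in os.walk(root):
        for fn in sorted(filenames):
            ap = os.path.join(dirpath, fn)
            vp = os.path.relpath(ap, root).replace(os.sep, "/")
            yield {"vp": vp, "ap": ap, "st": os.stat(ap)}

for f in fake_fgen("."):
    print(f["vp"], f["st"].st_size)
```
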
| @@ -7,6 +7,7 @@ import gzip | ||||
| import time | ||||
| import copy | ||||
| import json | ||||
| import string | ||||
| import socket | ||||
| import ctypes | ||||
| from datetime import datetime | ||||
| @@ -14,6 +15,8 @@ import calendar | ||||
|  | ||||
| from .__init__ import E, PY2, WINDOWS | ||||
| from .util import *  # noqa  # pylint: disable=unused-wildcard-import | ||||
| from .szip import StreamZip | ||||
| from .star import StreamTar | ||||
|  | ||||
| if not PY2: | ||||
|     unicode = str | ||||
| @@ -52,6 +55,10 @@ class HttpCli(object): | ||||
|         if rem.startswith("/") or rem.startswith("../") or "/../" in rem: | ||||
|             raise Exception("that was close") | ||||
|  | ||||
|     def j2(self, name, **kwargs): | ||||
|         tpl = self.conn.hsrv.j2[name] | ||||
|         return tpl.render(**kwargs) if kwargs else tpl | ||||
|  | ||||
|     def run(self): | ||||
|         """returns true if connection can be reused""" | ||||
|         self.keepalive = False | ||||
| @@ -154,7 +161,9 @@ class HttpCli(object): | ||||
|             try: | ||||
|                 # self.log("pebkac at httpcli.run #2: " + repr(ex)) | ||||
|                 self.keepalive = self._check_nonfatal(ex) | ||||
|                 self.loud_reply("{}: {}".format(str(ex), self.vpath), status=ex.code) | ||||
|                 self.log("{}\033[0m: {}".format(str(ex), self.vpath), 3) | ||||
|                 msg = "<pre>{}: {}\r\n".format(str(ex), self.vpath) | ||||
|                 self.reply(msg.encode("utf-8", "replace"), status=ex.code) | ||||
|                 return self.keepalive | ||||
|             except Pebkac: | ||||
|                 return False | ||||
| @@ -417,15 +426,18 @@ class HttpCli(object): | ||||
|         if "srch" in self.uparam or "srch" in body: | ||||
|             return self.handle_search(body) | ||||
|  | ||||
|         # prefer this over undot; no reason to allow traversal | ||||
|         if "/" in body["name"]: | ||||
|             raise Pebkac(400, "folders verboten") | ||||
|  | ||||
|         # up2k-php compat | ||||
|         for k in "chunkpit.php", "handshake.php": | ||||
|             if self.vpath.endswith(k): | ||||
|                 self.vpath = self.vpath[: -len(k)] | ||||
|  | ||||
|         sub = None | ||||
|         name = undot(body["name"]) | ||||
|         if "/" in name: | ||||
|             sub, name = name.rsplit("/", 1) | ||||
|             self.vpath = "/".join([self.vpath, sub]).strip("/") | ||||
|             body["name"] = name | ||||
|  | ||||
|         vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True) | ||||
|  | ||||
|         body["vtop"] = vfs.vpath | ||||
| @@ -434,12 +446,22 @@ class HttpCli(object): | ||||
|         body["addr"] = self.ip | ||||
|         body["vcfg"] = vfs.flags | ||||
|  | ||||
|         x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body) | ||||
|         response = x.get() | ||||
|         response = json.dumps(response) | ||||
|         if sub: | ||||
|             try: | ||||
|                 dst = os.path.join(vfs.realpath, rem) | ||||
|                 os.makedirs(dst) | ||||
|             except: | ||||
|                 if not os.path.isdir(dst): | ||||
|                     raise Pebkac(400, "some file got your folder name") | ||||
|  | ||||
|         self.log(response) | ||||
|         self.reply(response.encode("utf-8"), mime="application/json") | ||||
|         x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body) | ||||
|         ret = x.get() | ||||
|         if sub: | ||||
|             ret["name"] = "/".join([sub, ret["name"]]) | ||||
|  | ||||
|         ret = json.dumps(ret) | ||||
|         self.log(ret) | ||||
|         self.reply(ret.encode("utf-8"), mime="application/json") | ||||
|         return True | ||||
|  | ||||
|     def handle_search(self, body): | ||||
| @@ -580,7 +602,7 @@ class HttpCli(object): | ||||
|             pwd = "x"  # nosec | ||||
|  | ||||
|         h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)} | ||||
|         html = self.conn.tpl_msg.render(h1=msg, h2='<a href="/">ack</a>', redir="/") | ||||
|         html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/") | ||||
|         self.reply(html.encode("utf-8"), headers=h) | ||||
|         return True | ||||
|  | ||||
| @@ -611,7 +633,8 @@ class HttpCli(object): | ||||
|  | ||||
|         vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/") | ||||
|         esc_paths = [quotep(vpath), html_escape(vpath)] | ||||
|         html = self.conn.tpl_msg.render( | ||||
|         html = self.j2( | ||||
|             "msg", | ||||
|             h2='<a href="/{}">go to /{}</a>'.format(*esc_paths), | ||||
|             pre="aight", | ||||
|             click=True, | ||||
| @@ -643,7 +666,8 @@ class HttpCli(object): | ||||
|                 f.write(b"`GRUNNUR`\n") | ||||
|  | ||||
|         vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/") | ||||
|         html = self.conn.tpl_msg.render( | ||||
|         html = self.j2( | ||||
|             "msg", | ||||
|             h2='<a href="/{}?edit">go to /{}?edit</a>'.format( | ||||
|                 quotep(vpath), html_escape(vpath) | ||||
|             ), | ||||
| @@ -749,7 +773,8 @@ class HttpCli(object): | ||||
|                     ).encode("utf-8") | ||||
|                 ) | ||||
|  | ||||
|         html = self.conn.tpl_msg.render( | ||||
|         html = self.j2( | ||||
|             "msg", | ||||
|             h2='<a href="/{}">return to /{}</a>'.format( | ||||
|                 quotep(self.vpath), html_escape(self.vpath) | ||||
|             ), | ||||
| @@ -1037,16 +1062,75 @@ class HttpCli(object): | ||||
|         self.log("{},  {}".format(logmsg, spd)) | ||||
|         return ret | ||||
|  | ||||
|     def tx_zip(self, fmt, uarg, vn, rem, items, dots): | ||||
|         if self.args.no_zip: | ||||
|             raise Pebkac(400, "not enabled") | ||||
|  | ||||
|         logmsg = "{:4} {} ".format("", self.req) | ||||
|         self.keepalive = False | ||||
|  | ||||
|         if not uarg: | ||||
|             uarg = "" | ||||
|  | ||||
|         if fmt == "tar": | ||||
|             mime = "application/x-tar" | ||||
|             packer = StreamTar | ||||
|         else: | ||||
|             mime = "application/zip" | ||||
|             packer = StreamZip | ||||
|  | ||||
|         fn = items[0] if items and items[0] else self.vpath | ||||
|         if fn: | ||||
|             fn = fn.rstrip("/").split("/")[-1] | ||||
|         else: | ||||
|             fn = self.headers.get("host", "hey") | ||||
|  | ||||
|         afn = "".join( | ||||
|             [x if x in (string.ascii_letters + string.digits) else "_" for x in fn] | ||||
|         ) | ||||
|  | ||||
|         bascii = unicode(string.ascii_letters + string.digits).encode("utf-8") | ||||
|         ufn = fn.encode("utf-8", "xmlcharrefreplace") | ||||
|         if PY2: | ||||
|             ufn = [unicode(x) if x in bascii else "%{:02x}".format(ord(x)) for x in ufn] | ||||
|         else: | ||||
|             ufn = [ | ||||
|                 chr(x).encode("utf-8") | ||||
|                 if x in bascii | ||||
|                 else "%{:02x}".format(x).encode("ascii") | ||||
|                 for x in ufn | ||||
|             ] | ||||
|         ufn = b"".join(ufn).decode("ascii") | ||||
|  | ||||
|         cdis = "attachment; filename=\"{}.{}\"; filename*=UTF-8''{}.{}" | ||||
|         cdis = cdis.format(afn, fmt, ufn, fmt) | ||||
|         self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis}) | ||||
|  | ||||
|         fgen = vn.zipgen(rem, items, self.uname, dots, not self.args.no_scandir) | ||||
|         # for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]})) | ||||
|         bgen = packer(fgen, utf8="utf" in uarg, pre_crc="crc" in uarg) | ||||
|         bsent = 0 | ||||
|         for buf in bgen.gen(): | ||||
|             if not buf: | ||||
|                 break | ||||
|  | ||||
|             try: | ||||
|                 self.s.sendall(buf) | ||||
|                 bsent += len(buf) | ||||
|             except: | ||||
|                 logmsg += " \033[31m" + unicode(bsent) + "\033[0m" | ||||
|                 break | ||||
|  | ||||
|         spd = self._spd(bsent) | ||||
|         self.log("{},  {}".format(logmsg, spd)) | ||||
|         return True | ||||
|  | ||||
|     def tx_md(self, fs_path): | ||||
|         logmsg = "{:4} {} ".format("", self.req) | ||||
|         if "edit2" in self.uparam: | ||||
|             html_path = "web/mde.html" | ||||
|             template = self.conn.tpl_mde | ||||
|         else: | ||||
|             html_path = "web/md.html" | ||||
|             template = self.conn.tpl_md | ||||
|  | ||||
|         html_path = os.path.join(E.mod, html_path) | ||||
|         tpl = "mde" if "edit2" in self.uparam else "md" | ||||
|         html_path = os.path.join(E.mod, "web", "{}.html".format(tpl)) | ||||
|         template = self.j2(tpl) | ||||
|  | ||||
|         st = os.stat(fsenc(fs_path)) | ||||
|         # sz_md = st.st_size | ||||
| @@ -1098,7 +1182,7 @@ class HttpCli(object): | ||||
|     def tx_mounts(self): | ||||
|         rvol = [x + "/" if x else x for x in self.rvol] | ||||
|         wvol = [x + "/" if x else x for x in self.wvol] | ||||
|         html = self.conn.tpl_mounts.render(this=self, rvol=rvol, wvol=wvol) | ||||
|         html = self.j2("splash", this=self, rvol=rvol, wvol=wvol) | ||||
|         self.reply(html.encode("utf-8")) | ||||
|         return True | ||||
|  | ||||
| @@ -1187,6 +1271,11 @@ class HttpCli(object): | ||||
|  | ||||
|             return self.tx_file(abspath) | ||||
|  | ||||
|         for k in ["zip", "tar"]: | ||||
|             v = self.uparam.get(k) | ||||
|             if v is not None: | ||||
|                 return self.tx_zip(k, v, vn, rem, [], self.args.ed) | ||||
|  | ||||
|         fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir) | ||||
|         stats = {k: v for k, v in vfs_ls} | ||||
|         vfs_ls = [x[0] for x in vfs_ls] | ||||
| @@ -1247,8 +1336,11 @@ class HttpCli(object): | ||||
|  | ||||
|             is_dir = stat.S_ISDIR(inf.st_mode) | ||||
|             if is_dir: | ||||
|                 margin = "DIR" | ||||
|                 href += "/" | ||||
|                 if self.args.no_zip: | ||||
|                     margin = "DIR" | ||||
|                 else: | ||||
|                     margin = '<a href="{}?zip">zip</a>'.format(quotep(href)) | ||||
|             elif fn in hist: | ||||
|                 margin = '<a href="{}.hist/{}">#{}</a>'.format( | ||||
|                     base, html_escape(hist[fn][2], quote=True), hist[fn][0] | ||||
| @@ -1295,7 +1387,7 @@ class HttpCli(object): | ||||
|  | ||||
|                 tags = {} | ||||
|                 f["tags"] = tags | ||||
|                  | ||||
|  | ||||
|                 if not r: | ||||
|                     continue | ||||
|  | ||||
| @@ -1306,7 +1398,7 @@ class HttpCli(object): | ||||
|                     tags[k] = v | ||||
|  | ||||
|         if icur: | ||||
|             taglist = [k for k in vn.flags["mte"].split(",") if k in taglist] | ||||
|             taglist = [k for k in vn.flags.get("mte", "").split(",") if k in taglist] | ||||
|             for f in dirs: | ||||
|                 f["tags"] = {} | ||||
|  | ||||
| @@ -1372,16 +1464,20 @@ class HttpCli(object): | ||||
|  | ||||
|         dirs.extend(files) | ||||
|  | ||||
|         html = self.conn.tpl_browser.render( | ||||
|         html = self.j2( | ||||
|             "browser", | ||||
|             vdir=quotep(self.vpath), | ||||
|             vpnodes=vpnodes, | ||||
|             files=dirs, | ||||
|             ts=ts, | ||||
|             perms=json.dumps(perms), | ||||
|             taglist=taglist, | ||||
|             tag_order=json.dumps(vn.flags["mte"].split(",")), | ||||
|             tag_order=json.dumps( | ||||
|                 vn.flags["mte"].split(",") if "mte" in vn.flags else [] | ||||
|             ), | ||||
|             have_up2k_idx=("e2d" in vn.flags), | ||||
|             have_tags_idx=("e2t" in vn.flags), | ||||
|             have_zip=(not self.args.no_zip), | ||||
|             logues=logues, | ||||
|             title=html_escape(self.vpath), | ||||
|             srv_info=srv_info, | ||||
|   | ||||
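
The `Content-Disposition` header built in `tx_zip()` carries both an ASCII-only `filename` fallback and an RFC 5987 style `filename*=UTF-8''...` value, hand-rolled so the same code runs on py2 and py3. A rough py3-only equivalent, using `urllib.parse.quote` instead of the manual byte-by-byte percent-encoding (a simplification for illustration, not the code in the commit):

```python
# sketch of the download-name header tx_zip builds; quote() stands in for the
# manual percent-encoding used in the actual (py2-compatible) code
import string
from urllib.parse import quote

def content_disposition(fn, fmt="zip"):
    safe = string.ascii_letters + string.digits
    afn = "".join(c if c in safe else "_" for c in fn)  # ASCII fallback name
    ufn = quote(fn, safe=safe)                          # RFC 5987 filename*
    return "attachment; filename=\"{0}.{1}\"; filename*=UTF-8''{2}.{1}".format(afn, fmt, ufn)

print(content_disposition("música"))
# attachment; filename="m_sica.zip"; filename*=UTF-8''m%C3%BAsica.zip
```
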
| @@ -12,23 +12,6 @@ try: | ||||
| except: | ||||
|     HAVE_SSL = False | ||||
|  | ||||
| try: | ||||
|     import jinja2 | ||||
| except ImportError: | ||||
|     print( | ||||
|         """\033[1;31m | ||||
|   you do not have jinja2 installed,\033[33m | ||||
|   choose one of these:\033[0m | ||||
|    * apt install python-jinja2 | ||||
|    * {} -m pip install --user jinja2 | ||||
|    * (try another python version, if you have one) | ||||
|    * (try copyparty.sfx instead) | ||||
| """.format( | ||||
|             os.path.basename(sys.executable) | ||||
|         ) | ||||
|     ) | ||||
|     sys.exit(1) | ||||
|  | ||||
| from .__init__ import E | ||||
| from .util import Unrecv | ||||
| from .httpcli import HttpCli | ||||
| @@ -57,14 +40,6 @@ class HttpConn(object): | ||||
|         self.log_func = hsrv.log | ||||
|         self.set_rproxy() | ||||
|  | ||||
|         env = jinja2.Environment() | ||||
|         env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web")) | ||||
|         self.tpl_mounts = env.get_template("splash.html") | ||||
|         self.tpl_browser = env.get_template("browser.html") | ||||
|         self.tpl_msg = env.get_template("msg.html") | ||||
|         self.tpl_md = env.get_template("md.html") | ||||
|         self.tpl_mde = env.get_template("mde.html") | ||||
|  | ||||
|     def set_rproxy(self, ip=None): | ||||
|         if ip is None: | ||||
|             color = 36 | ||||
|   | ||||
| @@ -2,10 +2,28 @@ | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| import time | ||||
| import socket | ||||
| import threading | ||||
|  | ||||
| try: | ||||
|     import jinja2 | ||||
| except ImportError: | ||||
|     print( | ||||
|         """\033[1;31m | ||||
|   you do not have jinja2 installed,\033[33m | ||||
|   choose one of these:\033[0m | ||||
|    * apt install python-jinja2 | ||||
|    * {} -m pip install --user jinja2 | ||||
|    * (try another python version, if you have one) | ||||
|    * (try copyparty.sfx instead) | ||||
| """.format( | ||||
|             os.path.basename(sys.executable) | ||||
|         ) | ||||
|     ) | ||||
|     sys.exit(1) | ||||
|  | ||||
| from .__init__ import E, MACOS | ||||
| from .httpconn import HttpConn | ||||
| from .authsrv import AuthSrv | ||||
| @@ -30,6 +48,13 @@ class HttpSrv(object): | ||||
|         self.workload_thr_alive = False | ||||
|         self.auth = AuthSrv(self.args, self.log) | ||||
|  | ||||
|         env = jinja2.Environment() | ||||
|         env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web")) | ||||
|         self.j2 = { | ||||
|             x: env.get_template(x + ".html") | ||||
|             for x in ["splash", "browser", "msg", "md", "mde"] | ||||
|         } | ||||
|  | ||||
|         cert_path = os.path.join(E.cfg, "cert.pem") | ||||
|         if os.path.exists(cert_path): | ||||
|             self.cert_path = cert_path | ||||
|   | ||||

copyparty/star.py (new file, 64 lines)

							| @@ -0,0 +1,64 @@ | ||||
| import tarfile | ||||
| import threading | ||||
|  | ||||
| from .util import Queue, fsenc | ||||
|  | ||||
|  | ||||
| class QFile(object): | ||||
|     """file-like object which buffers writes into a queue""" | ||||
|  | ||||
|     def __init__(self): | ||||
|         self.q = Queue(64) | ||||
|  | ||||
|     def write(self, buf): | ||||
|         self.q.put(buf) | ||||
|  | ||||
|  | ||||
| class StreamTar(object): | ||||
|     """construct in-memory tar file from the given path""" | ||||
|  | ||||
|     def __init__(self, fgen, **kwargs): | ||||
|         self.ci = 0 | ||||
|         self.co = 0 | ||||
|         self.qfile = QFile() | ||||
|         self.fgen = fgen | ||||
|  | ||||
|         # python 3.8 changed to PAX_FORMAT as default, | ||||
|         # waste of space and don't care about the new features | ||||
|         fmt = tarfile.GNU_FORMAT | ||||
|         self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt) | ||||
|  | ||||
|         w = threading.Thread(target=self._gen) | ||||
|         w.daemon = True | ||||
|         w.start() | ||||
|  | ||||
|     def gen(self): | ||||
|         while True: | ||||
|             buf = self.qfile.q.get() | ||||
|             if buf is None: | ||||
|                 break | ||||
|  | ||||
|             self.co += len(buf) | ||||
|             yield buf | ||||
|  | ||||
|         yield None | ||||
|  | ||||
|     def _gen(self): | ||||
|         for f in self.fgen: | ||||
|             name = f["vp"] | ||||
|             src = f["ap"] | ||||
|             fsi = f["st"] | ||||
|  | ||||
|             inf = tarfile.TarInfo(name=name) | ||||
|             inf.mode = fsi.st_mode | ||||
|             inf.size = fsi.st_size | ||||
|             inf.mtime = fsi.st_mtime | ||||
|             inf.uid = 0 | ||||
|             inf.gid = 0 | ||||
|  | ||||
|             self.ci += inf.size | ||||
|             with open(fsenc(src), "rb", 512 * 1024) as f: | ||||
|                 self.tar.addfile(inf, f) | ||||
|  | ||||
|         self.tar.close() | ||||
|         self.qfile.q.put(None) | ||||
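
`StreamTar` works by handing `tarfile` a tiny file-like object whose `write()` pushes chunks onto a bounded queue; a daemon thread drives `tarfile` while `gen()` drains the queue, so bytes can go out on the socket as soon as they are produced and nothing is spooled to disk. The same pattern in isolation, standard library only (not the copyparty classes themselves, and the filename in the usage comment is a placeholder):

```python
# minimal queue-backed streaming-tar sketch; stdlib only, mirrors the QFile idea above
import queue
import tarfile
import threading

class QFile(object):
    """file-like object whose writes land on a bounded queue"""
    def __init__(self):
        self.q = queue.Queue(64)

    def write(self, buf):
        self.q.put(bytes(buf))
        return len(buf)

def stream_tar(paths):
    qf = QFile()

    def pack():
        with tarfile.open(fileobj=qf, mode="w|") as tar:
            for p in paths:
                tar.add(p)
        qf.q.put(None)  # sentinel: archive finished

    threading.Thread(target=pack, daemon=True).start()
    while True:
        buf = qf.q.get()
        if buf is None:
            break
        yield buf

# usage (placeholder path):
#   total = sum(len(b) for b in stream_tar(["README.md"]))
```
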

copyparty/szip.py (new file, 244 lines)

							| @@ -0,0 +1,244 @@ | ||||
| import time | ||||
| import zlib | ||||
| import struct | ||||
| from datetime import datetime | ||||
|  | ||||
| from .util import yieldfile, sanitize_fn | ||||
|  | ||||
|  | ||||
| def dostime2unix(buf): | ||||
|     t, d = struct.unpack("<HH", buf) | ||||
|  | ||||
|     ts = (t & 0x1F) * 2 | ||||
|     tm = (t >> 5) & 0x3F | ||||
|     th = t >> 11 | ||||
|  | ||||
|     dd = d & 0x1F | ||||
|     dm = (d >> 5) & 0xF | ||||
|     dy = (d >> 9) + 1980 | ||||
|  | ||||
|     tt = (dy, dm, dd, th, tm, ts) | ||||
|     tf = "{:04d}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}" | ||||
|     iso = tf.format(*tt) | ||||
|  | ||||
|     dt = datetime.strptime(iso, "%Y-%m-%d %H:%M:%S") | ||||
|     return int(dt.timestamp()) | ||||
|  | ||||
|  | ||||
| def unixtime2dos(ts): | ||||
|     tt = time.gmtime(ts) | ||||
|     dy, dm, dd, th, tm, ts = list(tt)[:6] | ||||
|  | ||||
|     bd = ((dy - 1980) << 9) + (dm << 5) + dd | ||||
|     bt = (th << 11) + (tm << 5) + ts // 2 | ||||
|     return struct.pack("<HH", bt, bd) | ||||
|  | ||||
|  | ||||
| def gen_fdesc(sz, crc32, z64): | ||||
|     ret = b"\x50\x4b\x07\x08" | ||||
|     fmt = "<LQQ" if z64 else "<LLL" | ||||
|     ret += struct.pack(fmt, crc32, sz, sz) | ||||
|     return ret | ||||
|  | ||||
|  | ||||
| def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc): | ||||
|     """ | ||||
|     does regular file headers | ||||
|     and the central directory meme if h_pos is set | ||||
|     (h_pos = absolute position of the regular header) | ||||
|     """ | ||||
|  | ||||
|     # appnote 4.5 / zip 3.0 (2008) / unzip 6.0 (2009) says to add z64 | ||||
|     # extinfo for values which exceed H, but that becomes an off-by-one | ||||
|     # (can't tell if it was clamped or exactly maxval), make it obvious | ||||
|     z64 = sz >= 0xFFFFFFFF | ||||
|     z64v = [sz, sz] if z64 else [] | ||||
|     if h_pos and h_pos >= 0xFFFFFFFF: | ||||
|         # central, also consider ptr to original header | ||||
|         z64v.append(h_pos) | ||||
|  | ||||
|     # confusingly this doesn't bump if h_pos | ||||
|     req_ver = b"\x2d\x00" if z64 else b"\x0a\x00" | ||||
|  | ||||
|     if crc32: | ||||
|         crc32 = struct.pack("<L", crc32) | ||||
|     else: | ||||
|         crc32 = b"\x00" * 4 | ||||
|  | ||||
|     if h_pos is None: | ||||
|         # 4b magic, 2b min-ver | ||||
|         ret = b"\x50\x4b\x03\x04" + req_ver | ||||
|     else: | ||||
|         # 4b magic, 2b spec-ver, 2b min-ver | ||||
|         ret = b"\x50\x4b\x01\x02\x1e\x03" + req_ver | ||||
|  | ||||
|     ret += b"\x00" if pre_crc else b"\x08"  # streaming | ||||
|     ret += b"\x08" if utf8 else b"\x00"  # appnote 6.3.2 (2007) | ||||
|  | ||||
|     # 2b compression, 4b time, 4b crc | ||||
|     ret += b"\x00\x00" + unixtime2dos(lastmod) + crc32 | ||||
|  | ||||
|     # spec says to put zeros when !crc if bit3 (streaming) | ||||
|     # however infozip does actual sz and it even works on winxp | ||||
|     # (same reasoning for z64 extradata later) | ||||
|     vsz = 0xFFFFFFFF if z64 else sz | ||||
|     ret += struct.pack("<LL", vsz, vsz) | ||||
|  | ||||
|     # windows support (the "?" replace below too) | ||||
|     fn = sanitize_fn(fn, "/") | ||||
|     bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_") | ||||
|  | ||||
|     z64_len = len(z64v) * 8 + 4 if z64v else 0 | ||||
|     ret += struct.pack("<HH", len(bfn), z64_len) | ||||
|  | ||||
|     if h_pos is not None: | ||||
|         # 2b comment, 2b diskno, 2b internal.attr, | ||||
|         # 4b external.attr (infozip-linux: 0000(a481|ff81)) idk | ||||
|         ret += b"\x00" * 10 | ||||
|  | ||||
|         # 4b local-header-ofs | ||||
|         ret += struct.pack("<L", min(h_pos, 0xFFFFFFFF)) | ||||
|  | ||||
|     ret += bfn | ||||
|  | ||||
|     if z64v: | ||||
|         ret += struct.pack("<HH" + "Q" * len(z64v), 1, len(z64v) * 8, *z64v) | ||||
|  | ||||
|     return ret | ||||
|  | ||||
|  | ||||
| def gen_ecdr(items, cdir_pos, cdir_end): | ||||
|     """ | ||||
|     summary of all file headers, | ||||
|     usually the zipfile footer unless something clamps | ||||
|     """ | ||||
|  | ||||
|     ret = b"\x50\x4b\x05\x06" | ||||
|  | ||||
|     # 2b ndisk, 2b disk0 | ||||
|     ret += b"\x00" * 4 | ||||
|  | ||||
|     cdir_sz = cdir_end - cdir_pos | ||||
|  | ||||
|     nitems = min(0xFFFF, len(items)) | ||||
|     csz = min(0xFFFFFFFF, cdir_sz) | ||||
|     cpos = min(0xFFFFFFFF, cdir_pos) | ||||
|  | ||||
|     need_64 = nitems == 0xFFFF or 0xFFFFFFFF in [csz, cpos] | ||||
|  | ||||
|     # 2b tnfiles, 2b dnfiles, 4b dir sz, 4b dir pos | ||||
|     ret += struct.pack("<HHLL", nitems, nitems, csz, cpos) | ||||
|  | ||||
|     # 2b comment length | ||||
|     ret += b"\x00\x00" | ||||
|  | ||||
|     return [ret, need_64] | ||||
|  | ||||
|  | ||||
| def gen_ecdr64(items, cdir_pos, cdir_end): | ||||
|     """ | ||||
|     z64 end of central directory | ||||
|     added when numfiles or a headerptr clamps | ||||
|     """ | ||||
|  | ||||
|     ret = b"\x50\x4b\x06\x06" | ||||
|  | ||||
|     # 8b own length from hereon | ||||
|     ret += b"\x2c" + b"\x00" * 7 | ||||
|  | ||||
|     # 2b spec-ver, 2b min-ver | ||||
|     ret += b"\x1e\x03\x2d\x00" | ||||
|  | ||||
|     # 4b ndisk, 4b disk0 | ||||
|     ret += b"\x00" * 8 | ||||
|  | ||||
|     # 8b tnfiles, 8b dnfiles, 8b dir sz, 8b dir pos | ||||
|     cdir_sz = cdir_end - cdir_pos | ||||
|     ret += struct.pack("<QQQQ", len(items), len(items), cdir_sz, cdir_pos) | ||||
|  | ||||
|     return ret | ||||
|  | ||||
|  | ||||
| def gen_ecdr64_loc(ecdr64_pos): | ||||
|     """ | ||||
|     z64 end of central directory locator | ||||
|     points to ecdr64 | ||||
|     why | ||||
|     """ | ||||
|  | ||||
|     ret = b"\x50\x4b\x06\x07" | ||||
|  | ||||
|     # 4b cdisk, 8b start of ecdr64, 4b ndisks | ||||
|     ret += struct.pack("<LQL", 0, ecdr64_pos, 1) | ||||
|  | ||||
|     return ret | ||||
|  | ||||
|  | ||||
| class StreamZip(object): | ||||
|     def __init__(self, fgen, utf8=False, pre_crc=False): | ||||
|         self.fgen = fgen | ||||
|         self.utf8 = utf8 | ||||
|         self.pre_crc = pre_crc | ||||
|  | ||||
|         self.pos = 0 | ||||
|         self.items = [] | ||||
|  | ||||
|     def _ct(self, buf): | ||||
|         self.pos += len(buf) | ||||
|         return buf | ||||
|  | ||||
|     def gen(self): | ||||
|         for f in self.fgen: | ||||
|             name = f["vp"] | ||||
|             src = f["ap"] | ||||
|             st = f["st"] | ||||
|  | ||||
|             sz = st.st_size | ||||
|             ts = st.st_mtime + 1 | ||||
|  | ||||
|             crc = None | ||||
|             if self.pre_crc: | ||||
|                 crc = 0 | ||||
|                 for buf in yieldfile(src): | ||||
|                     crc = zlib.crc32(buf, crc) | ||||
|  | ||||
|                 crc &= 0xFFFFFFFF | ||||
|  | ||||
|             h_pos = self.pos | ||||
|             buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc) | ||||
|             yield self._ct(buf) | ||||
|  | ||||
|             crc = crc or 0 | ||||
|             for buf in yieldfile(src): | ||||
|                 if not self.pre_crc: | ||||
|                     crc = zlib.crc32(buf, crc) | ||||
|  | ||||
|                 yield self._ct(buf) | ||||
|  | ||||
|             crc &= 0xFFFFFFFF | ||||
|  | ||||
|             self.items.append([name, sz, ts, crc, h_pos]) | ||||
|  | ||||
|             z64 = sz >= 4 * 1024 * 1024 * 1024 | ||||
|  | ||||
|             if z64 or not self.pre_crc: | ||||
|                 buf = gen_fdesc(sz, crc, z64) | ||||
|                 yield self._ct(buf) | ||||
|  | ||||
|         cdir_pos = self.pos | ||||
|         for name, sz, ts, crc, h_pos in self.items: | ||||
|             buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc) | ||||
|             yield self._ct(buf) | ||||
|         cdir_end = self.pos | ||||
|  | ||||
|         _, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end) | ||||
|         if need_64: | ||||
|             ecdir64_pos = self.pos | ||||
|             buf = gen_ecdr64(self.items, cdir_pos, cdir_end) | ||||
|             yield self._ct(buf) | ||||
|  | ||||
|             buf = gen_ecdr64_loc(ecdir64_pos) | ||||
|             yield self._ct(buf) | ||||
|  | ||||
|         ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end) | ||||
|         yield self._ct(ecdr) | ||||
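
For reference, the DOS date/time packing used in the local and central headers has a 1980 epoch and 2-second resolution. The round trip below uses the same bit layout as `unixtime2dos`/`dostime2unix` above, restated here so it runs standalone, and shows an odd second being truncated:

```python
# DOS timestamp round-trip with the same bit layout as szip.py; note the 2s resolution
import struct
import time

def unixtime2dos(ts):
    dy, dm, dd, th, tm, ts2 = list(time.gmtime(ts))[:6]
    bd = ((dy - 1980) << 9) + (dm << 5) + dd
    bt = (th << 11) + (tm << 5) + ts2 // 2
    return struct.pack("<HH", bt, bd)

t, d = struct.unpack("<HH", unixtime2dos(1616889599))  # 2021-03-27 23:59:59 UTC
print((d >> 9) + 1980, (d >> 5) & 0xF, d & 0x1F)  # -> 2021 3 27
print(t >> 11, (t >> 5) & 0x3F, (t & 0x1F) * 2)   # -> 23 59 58 (odd second lost)
```
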
| @@ -225,9 +225,15 @@ class Up2k(object): | ||||
|  | ||||
|             _, flags = self._expr_idx_filter(flags) | ||||
|  | ||||
|             a = "\033[0;36m{}:\033[1;30m{}" | ||||
|             a = [a.format(k, v) for k, v in sorted(flags.items())] | ||||
|             self.log(" ".join(a) + "\033[0m") | ||||
|             ft = "\033[0;32m{}{:.0}" | ||||
|             ff = "\033[0;35m{}{:.0}" | ||||
|             fv = "\033[0;36m{}:\033[1;30m{}" | ||||
|             a = [ | ||||
|                 (ft if v is True else ff if v is False else fv).format(k, str(v)) | ||||
|                 for k, v in flags.items() | ||||
|             ] | ||||
|             if a: | ||||
|                 self.log(" ".join(sorted(a)) + "\033[0m") | ||||
|  | ||||
|             reg = {} | ||||
|             path = os.path.join(ptop, ".hist", "up2k.snap") | ||||
| @@ -282,9 +288,12 @@ class Up2k(object): | ||||
|         dbw = [reg[0], 0, time.time()] | ||||
|         self.pp.n = next(dbw[0].execute("select count(w) from up"))[0] | ||||
|  | ||||
|         # can be symlink so don't `and d.startswith(top)`` | ||||
|         excl = set([d.realpath for d in all_vols if d != vol]) | ||||
|         n_add = self._build_dir(dbw, top, excl, top) | ||||
|         excl = [ | ||||
|             vol.realpath + "/" + d.vpath[len(vol.vpath) :].lstrip("/") | ||||
|             for d in all_vols | ||||
|             if d != vol and (d.vpath.startswith(vol.vpath + "/") or not vol.vpath) | ||||
|         ] | ||||
|         n_add = self._build_dir(dbw, top, set(excl), top) | ||||
|         n_rm = self._drop_lost(dbw[0], top) | ||||
|         if dbw[1]: | ||||
|             self.log("commit {} new files".format(dbw[1])) | ||||
|   | ||||
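
The new exclusion list only skips subdirectories that are shadowed by volumes mounted inside the volume currently being indexed, and it expresses them as paths under that volume's realpath, so symlinked volume roots no longer defeat the check. A toy run of the same comprehension; the `Vol` class and every path below are made up purely for illustration:

```python
# hypothetical volume layout, only to show what the exclusion comprehension yields
class Vol(object):
    def __init__(self, vpath, realpath):
        self.vpath, self.realpath = vpath, realpath

vol = Vol("music", "/mnt/music")  # the volume currently being indexed
all_vols = [Vol("", "/srv/share"), vol, Vol("music/flac", "/mnt/flac"), Vol("pics", "/mnt/pics")]

excl = [
    vol.realpath + "/" + d.vpath[len(vol.vpath):].lstrip("/")
    for d in all_vols
    if d != vol and (d.vpath.startswith(vol.vpath + "/") or not vol.vpath)
]
print(excl)  # ['/mnt/music/flac'] -- the nested volume, expressed under this volume's root
```
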
| @@ -576,11 +576,12 @@ def undot(path): | ||||
|     return "/".join(ret) | ||||
|  | ||||
|  | ||||
| def sanitize_fn(fn): | ||||
|     fn = fn.replace("\\", "/").split("/")[-1] | ||||
| def sanitize_fn(fn, ok=""): | ||||
|     if "/" not in ok: | ||||
|         fn = fn.replace("\\", "/").split("/")[-1] | ||||
|  | ||||
|     if WINDOWS: | ||||
|         for bad, good in [ | ||||
|         for bad, good in [x for x in [ | ||||
|             ["<", "＜"], | ||||
|             [">", "＞"], | ||||
|             [":", "："], | ||||
| @@ -590,7 +591,7 @@ def sanitize_fn(fn): | ||||
|             ["|", "｜"], | ||||
|             ["?", "？"], | ||||
|             ["*", "＊"], | ||||
|         ]: | ||||
|         ] if x[0] not in ok]: | ||||
|             fn = fn.replace(bad, good) | ||||
|  | ||||
|         bad = ["con", "prn", "aux", "nul"] | ||||
| @@ -780,6 +781,16 @@ def read_socket_chunked(sr, log=None): | ||||
|         sr.recv(2)  # \r\n after each chunk too | ||||
|  | ||||
|  | ||||
| def yieldfile(fn): | ||||
|     with open(fsenc(fn), "rb", 512 * 1024) as f: | ||||
|         while True: | ||||
|             buf = f.read(64 * 1024) | ||||
|             if not buf: | ||||
|                 break | ||||
|  | ||||
|             yield buf | ||||
|  | ||||
|  | ||||
| def hashcopy(actor, fin, fout): | ||||
|     u32_lim = int((2 ** 31) * 0.9) | ||||
|     hashobj = hashlib.sha512() | ||||
|   | ||||
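
`yieldfile()` is the 64 KiB chunked reader that `StreamZip` runs twice per file when `pre_crc` is enabled: once to compute the CRC up front and once for the entry body. The accumulation itself is just `zlib.crc32` carried across chunks and masked to 32 bits at the end; the same pattern as a standalone helper (the path in the usage comment is a placeholder):

```python
# the crc pattern StreamZip applies over yieldfile(), written as a standalone helper
import zlib

def crc32_of(path, chunk=64 * 1024):
    crc = 0
    with open(path, "rb") as f:
        while True:
            buf = f.read(chunk)
            if not buf:
                break
            crc = zlib.crc32(buf, crc)
    return crc & 0xFFFFFFFF

# usage (placeholder path): print("{:08x}".format(crc32_of("some_file.bin")))
```
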
| @@ -41,10 +41,12 @@ | ||||
|     <div id="op_cfg" class="opview opbox"> | ||||
|         <h3>key notation</h3> | ||||
|         <div id="key_notation"></div> | ||||
|         {%- if have_zip %} | ||||
|         <h3>folder download</h3> | ||||
|         <div id="arc_fmt"></div> | ||||
|         {%- endif %} | ||||
|         <h3>tooltips</h3> | ||||
|         <div> | ||||
|             <a id="tooltips" class="tglbtn" href="#">enable</a> | ||||
|         </div> | ||||
|         <div><a id="tooltips" class="tglbtn" href="#">enable</a></div> | ||||
|     </div> | ||||
|      | ||||
|     <h1 id="path"> | ||||
|   | ||||
| @@ -1149,6 +1149,7 @@ var treectl = (function () { | ||||
|  | ||||
| 		filecols.set_style(); | ||||
| 		mukey.render(); | ||||
| 		arcfmt.render(); | ||||
| 		reload_tree(); | ||||
| 		reload_browser(); | ||||
| 	} | ||||
| @@ -1559,6 +1560,70 @@ function addcrc() { | ||||
| })(); | ||||
|  | ||||
|  | ||||
| var arcfmt = (function () { | ||||
| 	if (!ebi('arc_fmt')) | ||||
| 		return { "render": function () { } }; | ||||
|  | ||||
| 	var html = [], | ||||
| 		arcfmts = ["tar", "zip", "zip_dos", "zip_crc"], | ||||
| 		arcv = ["tar", "zip=utf8", "zip", "zip=crc"]; | ||||
|  | ||||
| 	for (var a = 0; a < arcfmts.length; a++) { | ||||
| 		var k = arcfmts[a]; | ||||
| 		html.push( | ||||
| 			'<span><input type="radio" name="arcfmt" value="' + k + '" id="arcfmt_' + k + '">' + | ||||
| 			'<label for="arcfmt_' + k + '">' + k + '</label></span>'); | ||||
| 	} | ||||
| 	ebi('arc_fmt').innerHTML = html.join('\n'); | ||||
|  | ||||
| 	var fmt = sread("arc_fmt") || "zip"; | ||||
| 	ebi('arcfmt_' + fmt).checked = true; | ||||
|  | ||||
| 	function render() { | ||||
| 		var arg = arcv[arcfmts.indexOf(fmt)], | ||||
| 			tds = document.querySelectorAll('#files tbody td:first-child a'); | ||||
|  | ||||
| 		for (var a = 0, aa = tds.length; a < aa; a++) { | ||||
| 			var o = tds[a], txt = o.textContent, href = o.getAttribute('href'); | ||||
| 			if (txt != 'tar' && txt != 'zip') | ||||
| 				continue; | ||||
|  | ||||
| 			var ofs = href.lastIndexOf('?'); | ||||
| 			if (ofs < 0) | ||||
| 				throw 'missing arg in url'; | ||||
|  | ||||
| 			o.setAttribute("href", href.slice(0, ofs + 1) + arg); | ||||
| 			o.textContent = fmt.split('_')[0]; | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	function try_render() { | ||||
| 		try { | ||||
| 			render(); | ||||
| 		} | ||||
| 		catch (ex) { | ||||
| 			console.log("arcfmt failed: " + ex); | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	function change_fmt(e) { | ||||
| 		ev(e); | ||||
| 		fmt = this.getAttribute('value'); | ||||
| 		swrite("arc_fmt", fmt); | ||||
| 		try_render(); | ||||
| 	} | ||||
|  | ||||
| 	var o = document.querySelectorAll('#arc_fmt input'); | ||||
| 	for (var a = 0; a < o.length; a++) { | ||||
| 		o[a].onchange = change_fmt; | ||||
| 	} | ||||
|  | ||||
| 	return { | ||||
| 		"render": try_render | ||||
| 	}; | ||||
| })(); | ||||
|  | ||||
|  | ||||
| function ev_row_tgl(e) { | ||||
| 	ev(e); | ||||
| 	filecols.toggle(this.parentElement.parentElement.getElementsByTagName('span')[0].textContent); | ||||
| @@ -1611,3 +1676,4 @@ function reload_browser(not_mp) { | ||||
| } | ||||
| reload_browser(true); | ||||
| mukey.render(); | ||||
| arcfmt.render(); | ||||
|   | ||||
| @@ -147,7 +147,7 @@ var md_opt = { | ||||
|  | ||||
| 	</script> | ||||
|     <script src="/.cpr/util.js"></script> | ||||
| 	<script src="/.cpr/deps/marked.full.js"></script> | ||||
| 	<script src="/.cpr/deps/marked.js"></script> | ||||
| 	<script src="/.cpr/md.js"></script> | ||||
| 	{%- if edit %} | ||||
| 	<script src="/.cpr/md2.js"></script> | ||||
|   | ||||
| @@ -278,18 +278,27 @@ function up2k_init(have_crypto) { | ||||
|         } | ||||
|         else files = e.target.files; | ||||
|  | ||||
|         if (files.length == 0) | ||||
|         if (!files || files.length == 0) | ||||
|             return alert('no files selected??'); | ||||
|  | ||||
|         more_one_file(); | ||||
|         var bad_files = []; | ||||
|         var good_files = []; | ||||
|         var dirs = []; | ||||
|         for (var a = 0; a < files.length; a++) { | ||||
|             var fobj = files[a]; | ||||
|             if (is_itemlist) { | ||||
|                 if (fobj.kind !== 'file') | ||||
|                     continue; | ||||
|  | ||||
|                 try { | ||||
|                     var wi = fobj.webkitGetAsEntry(); | ||||
|                     if (wi.isDirectory) { | ||||
|                         dirs.push(wi); | ||||
|                         continue; | ||||
|                     } | ||||
|                 } | ||||
|                 catch (ex) { } | ||||
|                 fobj = fobj.getAsFile(); | ||||
|             } | ||||
|             try { | ||||
| @@ -300,12 +309,69 @@ function up2k_init(have_crypto) { | ||||
|                 bad_files.push(fobj.name); | ||||
|                 continue; | ||||
|             } | ||||
|             good_files.push(fobj); | ||||
|             good_files.push([fobj, fobj.name]); | ||||
|         } | ||||
|         if (dirs) { | ||||
|             return read_dirs(null, [], dirs, good_files, bad_files); | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     function read_dirs(rd, pf, dirs, good, bad) { | ||||
|         if (!dirs.length) { | ||||
|             if (!pf.length) | ||||
|                 return gotallfiles(good, bad); | ||||
|  | ||||
|             console.log("retry pf, " + pf.length); | ||||
|             setTimeout(function () { | ||||
|                 read_dirs(rd, pf, dirs, good, bad); | ||||
|             }, 50); | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         if (!rd) | ||||
|             rd = dirs[0].createReader(); | ||||
|  | ||||
|         rd.readEntries(function (ents) { | ||||
|             var ngot = 0; | ||||
|             ents.forEach(function (dn) { | ||||
|                 if (dn.isDirectory) { | ||||
|                     dirs.push(dn); | ||||
|                 } | ||||
|                 else { | ||||
|                     var name = dn.fullPath; | ||||
|                     if (name.indexOf('/') === 0) | ||||
|                         name = name.slice(1); | ||||
|  | ||||
|                     pf.push(name); | ||||
|                     dn.file(function (fobj) { | ||||
|                         var idx = pf.indexOf(name); | ||||
|                         pf.splice(idx, 1); | ||||
|                         try { | ||||
|                             if (fobj.size > 0) { | ||||
|                                 good.push([fobj, name]); | ||||
|                                 return; | ||||
|                             } | ||||
|                         } | ||||
|                         catch (ex) { } | ||||
|                         bad.push(name); | ||||
|                     }); | ||||
|                 } | ||||
|                 ngot += 1; | ||||
|             }); | ||||
|             // console.log("ngot: " + ngot); | ||||
|             if (!ngot) { | ||||
|                 dirs.shift(); | ||||
|                 rd = null; | ||||
|             } | ||||
|             return read_dirs(rd, pf, dirs, good, bad); | ||||
|         }); | ||||
|     } | ||||
|  | ||||
|     function gotallfiles(good_files, bad_files) { | ||||
|         if (bad_files.length > 0) { | ||||
|             var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length); | ||||
|             for (var a = 0; a < bad_files.length; a++) | ||||
|             var ntot = bad_files.length + good_files.length; | ||||
|             var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, ntot); | ||||
|             for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++) | ||||
|                 msg += '-- ' + bad_files[a] + '\n'; | ||||
|  | ||||
|             if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent)) | ||||
| @@ -315,21 +381,21 @@ function up2k_init(have_crypto) { | ||||
|         } | ||||
|  | ||||
|         var msg = ['upload these ' + good_files.length + ' files?']; | ||||
|         for (var a = 0; a < good_files.length; a++) | ||||
|             msg.push(good_files[a].name); | ||||
|         for (var a = 0, aa = Math.min(20, good_files.length); a < aa; a++) | ||||
|             msg.push(good_files[a][1]); | ||||
|  | ||||
|         if (ask_up && !fsearch && !confirm(msg.join('\n'))) | ||||
|             return; | ||||
|  | ||||
|         for (var a = 0; a < good_files.length; a++) { | ||||
|             var fobj = good_files[a]; | ||||
|             var fobj = good_files[a][0]; | ||||
|             var now = new Date().getTime(); | ||||
|             var lmod = fobj.lastModified || now; | ||||
|             var entry = { | ||||
|                 "n": parseInt(st.files.length.toString()), | ||||
|                 "t0": now,  // TODO remove probably | ||||
|                 "t0": now, | ||||
|                 "fobj": fobj, | ||||
|                 "name": fobj.name, | ||||
|                 "name": good_files[a][1], | ||||
|                 "size": fobj.size, | ||||
|                 "lmod": lmod / 1000, | ||||
|                 "purl": get_evpath(), | ||||
|   | ||||
| @@ -88,7 +88,7 @@ | ||||
| 	width: 30em; | ||||
| } | ||||
| #u2conf.has_btn { | ||||
| 	width: 46em; | ||||
| 	width: 48em; | ||||
| } | ||||
| #u2conf * { | ||||
| 	text-align: center; | ||||
|   | ||||
| @@ -73,7 +73,8 @@ | ||||
|             <div id="u2btn_ct"> | ||||
|                 <div id="u2btn"> | ||||
|                     <span id="u2bm"></span><br /> | ||||
|                     drop files here<br /> | ||||
|                     drag/drop files<br /> | ||||
|                     and folders here<br /> | ||||
|                     (or click me) | ||||
|                 </div> | ||||
|             </div> | ||||
|   | ||||
| @@ -237,7 +237,10 @@ function goto(dest) { | ||||
|     goto(); | ||||
|     var op = sread('opmode'); | ||||
|     if (op !== null && op !== '.') | ||||
|         goto(op); | ||||
|         try { | ||||
|             goto(op); | ||||
|         } | ||||
|         catch (ex) { } | ||||
| })(); | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -28,6 +28,9 @@ gtar=$(command -v gtar || command -v gnutar) || true | ||||
| 	unexpand() { gunexpand "$@"; } | ||||
| 	command -v grealpath >/dev/null && | ||||
| 		realpath() { grealpath "$@"; } | ||||
|  | ||||
| 	[ -e /opt/local/bin/bzip2 ] && | ||||
| 		bzip2() { /opt/local/bin/bzip2 "$@"; } | ||||
| } | ||||
| pybin=$(command -v python3 || command -v python) || { | ||||
| 	echo need python | ||||
| @@ -42,11 +45,15 @@ pybin=$(command -v python3 || command -v python) || { | ||||
| 	exit 1 | ||||
| } | ||||
|  | ||||
| do_sh=1 | ||||
| do_py=1 | ||||
| while [ ! -z "$1" ]; do | ||||
| 	[ "$1" = clean  ] && clean=1  && shift && continue | ||||
| 	[ "$1" = re     ] && repack=1 && shift && continue | ||||
| 	[ "$1" = no-ogv ] && no_ogv=1 && shift && continue | ||||
| 	[ "$1" = no-cm  ] && no_cm=1  && shift && continue | ||||
| 	[ "$1" = no-sh  ] && do_sh=   && shift && continue | ||||
| 	[ "$1" = no-py  ] && do_py=   && shift && continue | ||||
| 	break | ||||
| done | ||||
|  | ||||
| @@ -199,25 +206,36 @@ tar -cf tar "${args[@]}" --numeric-owner copyparty dep-j2 | ||||
|  | ||||
| echo compressing tar | ||||
| # detect best level; bzip2 -7 is usually better than -9 | ||||
| for n in {2..9}; do cp tar t.$n; bzip2 -$n t.$n & done; wait; mv -v $(ls -1S t.*.bz2 | tail -n 1) tar.bz2 | ||||
| for n in {2..9}; do cp tar t.$n;  xz -ze$n t.$n & done; wait; mv -v $(ls -1S t.*.xz  | tail -n 1) tar.xz | ||||
| rm t.* | ||||
| [ $do_py ] && { for n in {2..9}; do cp tar t.$n; bzip2 -$n t.$n & done; wait; mv -v $(ls -1S t.*.bz2 | tail -n 1) tar.bz2; } | ||||
| [ $do_sh ] && { for n in {2..9}; do cp tar t.$n;  xz -ze$n t.$n & done; wait; mv -v $(ls -1S t.*.xz  | tail -n 1) tar.xz; } | ||||
| rm t.* || true | ||||
| exts=() | ||||
|  | ||||
|  | ||||
| [ $do_sh ] && { | ||||
| exts+=(sh) | ||||
| echo creating unix sfx | ||||
| ( | ||||
| 	sed "s/PACK_TS/$ts/; s/PACK_HTS/$hts/; s/CPP_VER/$ver/" <../scripts/sfx.sh | | ||||
| 	grep -E '^sfx_eof$' -B 9001; | ||||
| 	cat tar.xz | ||||
| ) >$sfx_out.sh | ||||
| } | ||||
|  | ||||
|  | ||||
| [ $do_py ] && { | ||||
| exts+=(py) | ||||
| echo creating generic sfx | ||||
| $pybin ../scripts/sfx.py --sfx-make tar.bz2 $ver $ts | ||||
| mv sfx.out $sfx_out.py | ||||
| chmod 755 $sfx_out.* | ||||
| } | ||||
|  | ||||
|  | ||||
| printf "done:\n" | ||||
| printf "  %s\n" "$(realpath $sfx_out)."{sh,py} | ||||
| # rm -rf * | ||||
| for ext in ${exts[@]}; do | ||||
| 	printf "  %s\n" "$(realpath $sfx_out)."$ext | ||||
| done | ||||
|  | ||||
| # apk add bash python3 tar xz bzip2 | ||||
| # while true; do ./make-sfx.sh; for f in ..//dist/copyparty-sfx.{sh,py}; do mv $f $f.$(wc -c <$f | awk '{print$1}'); done; done | ||||
|   | ||||