Compare commits

...

18 Commits

Author SHA1 Message Date
ed
83fec3cca7 v0.10.5 2021-03-31 01:28:58 +02:00
ed
3cefc99b7d search fixes 2021-03-31 01:20:09 +02:00
ed
3a38dcbc05 v0.10.4 2021-03-29 20:53:20 +02:00
ed
7ff08bce57 browser: stable sort 2021-03-29 20:08:32 +02:00
ed
fd490af434 explain the jank 2021-03-29 06:11:33 +02:00
ed
1195b8f17e v0.10.3 2021-03-29 04:47:59 +02:00
ed
28dce13776 no load-balancer spam when -q 2021-03-28 03:06:52 +02:00
ed
431f20177a make tar 6x faster (1.8 GiB/s) 2021-03-28 01:50:16 +01:00
ed
87aff54d9d v0.10.2 2021-03-27 18:03:33 +01:00
ed
f50462de82 persist lead-column sort 2021-03-27 17:56:21 +01:00
ed
9bda8c7eb6 better errlog name 2021-03-27 17:38:59 +01:00
ed
e83c63d239 fix unix permissions in zip files 2021-03-27 17:28:25 +01:00
ed
b38533b0cc recover from file access errors when zipping 2021-03-27 17:16:59 +01:00
ed
5ccca3fbd5 more 2021-03-27 16:12:47 +01:00
ed
9e850fc3ab zip selection 2021-03-27 15:48:52 +01:00
ed
ffbfcd7e00 h 2021-03-27 03:35:57 +01:00
ed
5ea7590748 readme: mention zip configs 2021-03-27 03:34:03 +01:00
ed
290c3bc2bb reclining 2021-03-27 03:07:44 +01:00
18 changed files with 427 additions and 135 deletions

View File

@@ -21,6 +21,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [status](#status)
* [bugs](#bugs)
* [usage](#usage)
* [zip downloads](#zip-downloads)
* [searching](#searching)
* [search configuration](#search-configuration)
* [metadata from audio files](#metadata-from-audio-files)
@@ -95,6 +96,8 @@ summary: it works! you can use it! (but technically not even close to beta)
* Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* Windows: python 2.7 cannot handle filenames with mojibake
* hiding the contents at url `/d1/d2/d3` using `-v :d1/d2/d3:cd2d` has the side-effect of creating databases (for files/tags) inside folders d1 and d2, and those databases take precedence over the main db at the top of the vfs - this means all files in d2 and below will be reindexed unless you already had a vfs entry at or below d2
* probably more, pls let me know
@@ -108,6 +111,23 @@ the browser has the following hotkeys
* `P` parent folder
## zip downloads
the `zip` link next to folders can produce various types of zip/tar files using these alternatives in the browser settings tab:
| name | url-suffix | description |
|--|--|--|
| `tar` | `?tar` | plain gnutar, works great with `curl \| tar -xv` |
| `zip` | `?zip=utf8` | works everywhere, glitchy filenames on win7 and older |
| `zip_dos` | `?zip` | traditional cp437 (no unicode) to fix glitchy filenames |
| `zip_crc` | `?zip=crc` | cp437 with crc32 computed early for truly ancient software |
* hidden files (dotfiles) are excluded unless `-ed`
* the up2k.db is always excluded
* `zip_crc` will take longer to download since the server has to read each file twice
* please let me know if you find a program old enough to actually need this
# searching
when started with `-e2dsa` copyparty will scan/index all your files. This avoids duplicates on upload, and also makes the volumes searchable through the web-ui:

View File

@@ -177,11 +177,14 @@ def sighandler(signal=None, frame=None):
print("\n".join(msg))
def main():
def main(argv=None):
time.strptime("19970815", "%Y%m%d") # python#7980
if WINDOWS:
os.system("rem") # enables colors
if argv is None:
argv = sys.argv
desc = py_desc().replace("[", "\033[1;30m[")
f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0m\n'
@@ -194,13 +197,13 @@ def main():
deprecated = [["-e2s", "-e2ds"]]
for dk, nk in deprecated:
try:
idx = sys.argv.index(dk)
idx = argv.index(dk)
except:
continue
msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
print(msg.format(dk, nk))
sys.argv[idx] = nk
argv[idx] = nk
time.sleep(2)
ap = argparse.ArgumentParser(
@@ -290,7 +293,7 @@ def main():
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
al = ap.parse_args()
al = ap.parse_args(args=argv[1:])
# fmt: on
# propagate implications

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (0, 10, 1)
VERSION = (0, 10, 5)
CODENAME = "zip it"
BUILD_DT = (2021, 3, 27)
BUILD_DT = (2021, 3, 31)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -161,47 +161,40 @@ class VFS(object):
for x in vfs.walk(wrel, "", uname, scandir, lstat):
yield x
def zipgen(self, vrem, rems, uname, dots, scandir):
vtops = [["", [self, vrem]]]
if rems:
# list of subfolders to zip was provided,
# add all the ones uname is allowed to access
vtops = []
for rem in rems:
try:
d = rem if not vrem else vrem + "/" + rem
vn = self.get(d, uname, True, False)
vtops.append([rem, vn])
except:
pass
def zipgen(self, vrem, flt, uname, dots, scandir):
if flt:
flt = {k: True for k in flt}
for rel, (vn, rem) in vtops:
for vpath, apath, files, rd, vd in vn.walk(rel, rem, uname, dots, scandir):
# print(repr([vpath, apath, [x[0] for x in files]]))
fnames = [n[0] for n in files]
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
apaths = [os.path.join(apath, n) for n in fnames]
files = list(zip(vpaths, apaths, files))
for vpath, apath, files, rd, vd in self.walk("", vrem, uname, dots, scandir):
if flt:
files = [x for x in files if x[0] in flt]
rd = [x for x in rd if x[0] in flt]
vd = {x: y for x, y in vd.items() if x in flt}
flt = None
if not dots:
# dotfile filtering based on vpath (intended visibility)
files = [x for x in files if "/." not in "/" + x[0]]
# print(repr([vpath, apath, [x[0] for x in files]]))
fnames = [n[0] for n in files]
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
apaths = [os.path.join(apath, n) for n in fnames]
files = list(zip(vpaths, apaths, files))
rm = [x for x in rd if x[0].startswith(".")]
for x in rm:
rd.remove(x)
if not dots:
# dotfile filtering based on vpath (intended visibility)
files = [x for x in files if "/." not in "/" + x[0]]
rm = [k for k in vd.keys() if k.startswith(".")]
for x in rm:
del vd[x]
rm = [x for x in rd if x[0].startswith(".")]
for x in rm:
rd.remove(x)
# up2k filtering based on actual abspath
files = [
x for x in files if "{0}.hist{0}up2k.".format(os.sep) not in x[1]
]
rm = [k for k in vd.keys() if k.startswith(".")]
for x in rm:
del vd[x]
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
yield f
# up2k filtering based on actual abspath
files = [x for x in files if "{0}.hist{0}up2k.".format(os.sep) not in x[1]]
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
yield f
def user_tree(self, uname, readable=False, writable=False):
ret = []

View File

@@ -51,7 +51,7 @@ class BrokerMp(object):
self.procs.append(proc)
proc.start()
if True:
if not self.args.q:
thr = threading.Thread(target=self.debug_load_balancer)
thr.daemon = True
thr.start()

View File

@@ -161,8 +161,8 @@ class HttpCli(object):
try:
# self.log("pebkac at httpcli.run #2: " + repr(ex))
self.keepalive = self._check_nonfatal(ex)
self.log("{}\033[0m: {}".format(str(ex), self.vpath), 3)
msg = "<pre>{}: {}\r\n".format(str(ex), self.vpath)
self.log("{}\033[0m, {}".format(str(ex), self.vpath), 3)
msg = "<pre>{}\r\nURL: {}\r\n".format(str(ex), self.vpath)
self.reply(msg.encode("utf-8", "replace"), status=ex.code)
return self.keepalive
except Pebkac:
@@ -397,8 +397,30 @@ class HttpCli(object):
if act == "tput":
return self.handle_text_upload()
if act == "zip":
return self.handle_zip_post()
raise Pebkac(422, 'invalid action "{}"'.format(act))
def handle_zip_post(self):
for k in ["zip", "tar"]:
v = self.uparam.get(k)
if v is not None:
break
if v is None:
raise Pebkac(422, "need zip or tar keyword")
vn, rem = self.auth.vfs.get(self.vpath, self.uname, True, False)
items = self.parser.require("files", 1024 * 1024)
if not items:
raise Pebkac(422, "need files list")
items = items.replace("\r", "").split("\n")
items = [unquotep(x) for x in items if items]
return self.tx_zip(k, v, vn, rem, items, self.args.ed)
def handle_post_json(self):
try:
remains = int(self.headers["content-length"])

View File

@@ -87,7 +87,9 @@ class HttpConn(object):
err = "need at least 4 bytes in the first packet; got {}".format(
len(method)
)
self.log(err)
if method:
self.log(err)
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
return

View File

@@ -1,6 +1,8 @@
import os
import tarfile
import threading
from .sutil import errdesc
from .util import Queue, fsenc
@@ -9,9 +11,20 @@ class QFile(object):
def __init__(self):
self.q = Queue(64)
self.bq = []
self.nq = 0
def write(self, buf):
self.q.put(buf)
if buf is None or self.nq >= 240 * 1024:
self.q.put(b"".join(self.bq))
self.bq = []
self.nq = 0
if buf is None:
self.q.put(None)
else:
self.bq.append(buf)
self.nq += len(buf)
class StreamTar(object):
@@ -22,6 +35,7 @@ class StreamTar(object):
self.co = 0
self.qfile = QFile()
self.fgen = fgen
self.errf = None
# python 3.8 changed to PAX_FORMAT as default,
# waste of space and don't care about the new features
@@ -35,30 +49,47 @@ class StreamTar(object):
def gen(self):
while True:
buf = self.qfile.q.get()
if buf is None:
if not buf:
break
self.co += len(buf)
yield buf
yield None
if self.errf:
os.unlink(self.errf["ap"])
def ser(self, f):
name = f["vp"]
src = f["ap"]
fsi = f["st"]
inf = tarfile.TarInfo(name=name)
inf.mode = fsi.st_mode
inf.size = fsi.st_size
inf.mtime = fsi.st_mtime
inf.uid = 0
inf.gid = 0
self.ci += inf.size
with open(fsenc(src), "rb", 512 * 1024) as f:
self.tar.addfile(inf, f)
def _gen(self):
errors = []
for f in self.fgen:
name = f["vp"]
src = f["ap"]
fsi = f["st"]
if "err" in f:
errors.append([f["vp"], f["err"]])
continue
inf = tarfile.TarInfo(name=name)
inf.mode = fsi.st_mode
inf.size = fsi.st_size
inf.mtime = fsi.st_mtime
inf.uid = 0
inf.gid = 0
try:
self.ser(f)
except Exception as ex:
errors.append([f["vp"], repr(ex)])
self.ci += inf.size
with open(fsenc(src), "rb", 512 * 1024) as f:
self.tar.addfile(inf, f)
if errors:
self.errf = errdesc(errors)
self.ser(self.errf)
self.tar.close()
self.qfile.q.put(None)
self.qfile.write(None)

25
copyparty/sutil.py Normal file
View File

@@ -0,0 +1,25 @@
import os
import time
import tempfile
from datetime import datetime
def errdesc(errors):
report = ["copyparty failed to add the following files to the archive:", ""]
for fn, err in errors:
report.extend([" file: {}".format(fn), "error: {}".format(err), ""])
with tempfile.NamedTemporaryFile(prefix="copyparty-", delete=False) as tf:
tf_path = tf.name
tf.write("\r\n".join(report).encode("utf-8", "replace"))
dt = datetime.utcfromtimestamp(time.time())
dt = dt.strftime("%Y-%m%d-%H%M%S")
os.chmod(tf_path, 0o444)
return {
"vp": "archive-errors-{}.txt".format(dt),
"ap": tf_path,
"st": os.stat(tf_path),
}

View File

@@ -1,8 +1,10 @@
import os
import time
import zlib
import struct
from datetime import datetime
from .sutil import errdesc
from .util import yieldfile, sanitize_fn
@@ -92,9 +94,13 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
ret += struct.pack("<HH", len(bfn), z64_len)
if h_pos is not None:
# 2b comment, 2b diskno, 2b internal.attr,
# 4b external.attr (infozip-linux: 0000(a481|ff81)) idk
ret += b"\x00" * 10
# 2b comment, 2b diskno
ret += b"\x00" * 4
# 2b internal.attr, 4b external.attr
# infozip-macos: 0100 0000 a481 file:644
# infozip-macos: 0100 0100 0080 file:000
ret += b"\x01\x00\x00\x00\xa4\x81"
# 4b local-header-ofs
ret += struct.pack("<L", min(h_pos, 0xFFFFFFFF))
@@ -187,43 +193,61 @@ class StreamZip(object):
self.pos += len(buf)
return buf
def gen(self):
for f in self.fgen:
name = f["vp"]
src = f["ap"]
st = f["st"]
def ser(self, f):
name = f["vp"]
src = f["ap"]
st = f["st"]
sz = st.st_size
ts = st.st_mtime + 1
sz = st.st_size
ts = st.st_mtime + 1
crc = None
if self.pre_crc:
crc = 0
for buf in yieldfile(src):
crc = zlib.crc32(buf, crc)
crc &= 0xFFFFFFFF
h_pos = self.pos
buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc)
yield self._ct(buf)
crc = crc or 0
crc = None
if self.pre_crc:
crc = 0
for buf in yieldfile(src):
if not self.pre_crc:
crc = zlib.crc32(buf, crc)
yield self._ct(buf)
crc = zlib.crc32(buf, crc)
crc &= 0xFFFFFFFF
self.items.append([name, sz, ts, crc, h_pos])
h_pos = self.pos
buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc)
yield self._ct(buf)
z64 = sz >= 4 * 1024 * 1024 * 1024
crc = crc or 0
for buf in yieldfile(src):
if not self.pre_crc:
crc = zlib.crc32(buf, crc)
if z64 or not self.pre_crc:
buf = gen_fdesc(sz, crc, z64)
yield self._ct(buf)
yield self._ct(buf)
crc &= 0xFFFFFFFF
self.items.append([name, sz, ts, crc, h_pos])
z64 = sz >= 4 * 1024 * 1024 * 1024
if z64 or not self.pre_crc:
buf = gen_fdesc(sz, crc, z64)
yield self._ct(buf)
def gen(self):
errors = []
for f in self.fgen:
if "err" in f:
errors.append([f["vp"], f["err"]])
continue
try:
for x in self.ser(f):
yield x
except Exception as ex:
errors.append([f["vp"], repr(ex)])
if errors:
errf = errdesc(errors)
print(repr(errf))
for x in self.ser(errf):
yield x
cdir_pos = self.pos
for name, sz, ts, crc, h_pos in self.items:
@@ -242,3 +266,6 @@ class StreamZip(object):
ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
yield self._ct(ecdr)
if errors:
os.unlink(errf["ap"])

View File

@@ -1310,6 +1310,7 @@ class Up2k(object):
self.log("no cursor to write tags with??", c=1)
continue
# TODO is undef if vol 404 on startup
entags = self.entags[ptop]
if not entags:
self.log("no entags okay.jpg", c=3)

View File

@@ -182,6 +182,11 @@ a, #files tbody div a:last-child {
color: #840;
text-shadow: 0 0 .3em #b80;
}
#files tbody tr.sel td {
background: #80b;
color: #fff;
border-color: #a3d;
}
#blocked {
position: fixed;
top: 0;
@@ -268,6 +273,25 @@ a, #files tbody div a:last-child {
padding: .2em 0 0 .07em;
color: #fff;
}
#wtoggle>span {
display: none;
}
#wtoggle.sel {
width: 4.27em;
}
#wtoggle.sel>span {
display: inline-block;
line-height: 0;
}
#wtoggle.sel>span a {
font-size: .4em;
margin: -.3em 0;
position: relative;
display: inline-block;
}
#wtoggle.sel>span #selzip {
top: -.6em;
}
#barpos,
#barbuf {
position: absolute;

View File

@@ -72,7 +72,7 @@
<table id="files">
<thead>
<tr>
<th></th>
<th name="lead"><span>c</span></th>
<th name="href"><span>File Name</span></th>
<th name="sz" sort="int"><span>Size</span></th>
{%- for k in taglist %}
@@ -112,7 +112,14 @@
{%- endif %}
<div id="widget">
<div id="wtoggle"></div>
<div id="wtoggle">
<span>
<a href="#" id="selall">sel.<br />all</a>
<a href="#" id="selinv">sel.<br />inv.</a>
<a href="#" id="selzip">zip</a>
</span>
</div>
<div id="widgeti">
<div id="pctl"><a href="#" id="bprev"></a><a href="#" id="bplay"></a><a href="#" id="bnext"></a></div>
<canvas id="pvol" width="288" height="38"></canvas>

View File

@@ -753,7 +753,7 @@ document.onkeydown = function (e) {
clearTimeout(search_timeout);
var now = new Date().getTime();
if (now - search_in_progress > 30 * 1000)
search_timeout = setTimeout(do_search, 100);
search_timeout = setTimeout(do_search, 200);
}
function do_search() {
@@ -796,6 +796,8 @@ document.onkeydown = function (e) {
var res = JSON.parse(this.responseText),
tagord = res.tag_order;
sortfiles(res.hits);
var ofiles = ebi('files');
if (ofiles.getAttribute('ts') > this.ts)
return;
@@ -814,7 +816,7 @@ document.onkeydown = function (e) {
var html = mk_files_header(tagord);
html.push('<tbody>');
html.push('<tr><td>-</td><td colspan="42"><a href="#" id="unsearch">close search results</a></td></tr>');
html.push('<tr><td>-</td><td colspan="42"><a href="#" id="unsearch">! close search results</a></td></tr>');
for (var a = 0; a < res.hits.length; a++) {
var r = res.hits[a],
ts = parseInt(r.ts),
@@ -833,7 +835,7 @@ document.onkeydown = function (e) {
v = r.tags[k] || "";
if (k == ".dur") {
var sv = s2ms(v);
var sv = v ? s2ms(v) : "";
nodes[nodes.length - 1] += '</td><td sortv="' + v + '">' + sv;
continue;
}
@@ -1085,32 +1087,8 @@ var treectl = (function () {
}
ebi('srv_info').innerHTML = '<span>' + res.srvinf + '</span>';
var nodes = res.dirs.concat(res.files),
sopts = jread('fsort', []);
try {
for (var a = sopts.length - 1; a >= 0; a--) {
var name = sopts[a][0], rev = sopts[a][1], typ = sopts[a][2];
if (name.indexOf('tags/') == -1) {
nodes.sort(function (v1, v2) {
if (!v1[name]) return -1 * rev;
if (!v2[name]) return 1 * rev;
return rev * (typ == 'int' ? (v1[name] - v2[name]) : (v1[name].localeCompare(v2[name])));
});
}
else {
name = name.slice(5);
nodes.sort(function (v1, v2) {
if (!v1.tags[name]) return -1 * rev;
if (!v2.tags[name]) return 1 * rev;
return rev * (typ == 'int' ? (v1.tags[name] - v2.tags[name]) : (v1.tags[name].localeCompare(v2.tags[name])));
});
}
}
}
catch (ex) {
console.log("failed to apply sort config: " + ex);
}
var nodes = res.dirs.concat(res.files);
nodes = sortfiles(nodes);
var top = this.top;
var html = mk_files_header(res.taglist);
@@ -1125,7 +1103,7 @@ var treectl = (function () {
v = (r.tags || {})[k] || "";
if (k == ".dur") {
var sv = s2ms(v);
var sv = v ? s2ms(v) : "";
ln[ln.length - 1] += '</td><td sortv="' + v + '">' + sv;
continue;
}
@@ -1149,7 +1127,7 @@ var treectl = (function () {
filecols.set_style();
mukey.render();
arcfmt.render();
msel.render();
reload_tree();
reload_browser();
}
@@ -1305,7 +1283,7 @@ function find_file_col(txt) {
function mk_files_header(taglist) {
var html = [
'<thead>',
'<th></th>',
'<th name="lead"><span>c</span></th>',
'<th name="href"><span>File Name</span></th>',
'<th name="sz" sort="int"><span>Size</span></th>'
];
@@ -1408,8 +1386,8 @@ var filecols = (function () {
if (!min)
for (var a = 0, aa = rows.length; a < aa; a++) {
var c = rows[a].cells[i];
if (c)
var v = c.textContent = s2ms(c.textContent);
if (c && c.textContent)
c.textContent = s2ms(c.textContent);
}
}
catch (ex) { }
@@ -1480,8 +1458,11 @@ var mukey = (function () {
}
function render() {
var ci = find_file_col('Key'),
i = ci[0],
var ci = find_file_col('Key');
if (!ci)
return;
var i = ci[0],
min = ci[1],
rows = ebi('files').tBodies[0].rows;
@@ -1530,7 +1511,10 @@ var mukey = (function () {
function addcrc() {
var links = document.querySelectorAll('#files>tbody>tr>td:nth-child(2)>a');
var links = document.querySelectorAll(
'#files>tbody>tr>td:nth-child(2)>' + (
ebi('unsearch') ? 'div>a:last-child' : 'a'));
for (var a = 0, aa = links.length; a < aa; a++)
if (!links[a].getAttribute('id'))
links[a].setAttribute('id', 'f-' + crc32(links[a].textContent));
@@ -1595,6 +1579,8 @@ var arcfmt = (function () {
o.setAttribute("href", href.slice(0, ofs + 1) + arg);
o.textContent = fmt.split('_')[0];
}
ebi('selzip').textContent = fmt.split('_')[0];
ebi('selzip').setAttribute('fmt', arg);
}
function try_render() {
@@ -1624,6 +1610,75 @@ var arcfmt = (function () {
})();
var msel = (function () {
function getsel() {
var names = [];
var links = document.querySelectorAll('#files tbody tr.sel td:nth-child(2) a');
for (var a = 0, aa = links.length; a < aa; a++)
names.push(links[a].getAttribute('href').replace(/\/$/, "").split('/').slice(-1));
return names;
}
function selui() {
var fun = getsel().length ? "add" : "remove";
ebi('wtoggle').classList[fun]('sel');
}
function seltgl(e) {
ev(e);
var tr = this.parentNode;
tr.classList.toggle('sel');
selui();
}
function evsel(e, fun) {
ev(e);
var trs = document.querySelectorAll('#files tbody tr');
for (var a = 0, aa = trs.length; a < aa; a++)
trs[a].classList[fun]('sel');
selui();
}
ebi('selall').onclick = function (e) {
evsel(e, "add");
};
ebi('selinv').onclick = function (e) {
evsel(e, "toggle");
};
ebi('selzip').onclick = function (e) {
ev(e);
var names = getsel();
var arg = ebi('selzip').getAttribute('fmt');
var txt = names.join('\n');
var frm = document.createElement('form');
frm.setAttribute('action', '?' + arg);
frm.setAttribute('method', 'post');
frm.setAttribute('target', '_blank');
frm.setAttribute('enctype', 'multipart/form-data');
frm.innerHTML = '<input name="act" value="zip" />' +
'<textarea name="files" id="ziptxt"></textarea>';
frm.style.display = 'none';
var oldform = document.querySelector('#widgeti>form');
if (oldform)
oldform.parentNode.removeChild(oldform);
ebi('widgeti').appendChild(frm);
var obj = ebi('ziptxt');
obj.value = txt;
console.log(txt);
frm.submit();
};
function render() {
var tds = document.querySelectorAll('#files tbody td+td+td');
for (var a = 0, aa = tds.length; a < aa; a++) {
tds[a].onclick = seltgl;
}
arcfmt.render();
}
return {
"render": render
};
})();
function ev_row_tgl(e) {
ev(e);
filecols.toggle(this.parentElement.parentElement.getElementsByTagName('span')[0].textContent);
@@ -1676,4 +1731,4 @@ function reload_browser(not_mp) {
}
reload_browser(true);
mukey.render();
arcfmt.render();
msel.render();

View File

@@ -113,6 +113,75 @@ function crc32(str) {
};
function sortfiles(nodes) {
var sopts = jread('fsort', [["lead", -1, ""], ["href", 1, ""]]);
try {
var is_srch = false;
if (nodes[0]['rp']) {
is_srch = true;
for (var b = 0, bb = nodes.length; b < bb; b++)
nodes[b].ext = nodes[b].rp.split('.').pop();
for (var b = 0; b < sopts.length; b++)
if (sopts[b][0] == 'href')
sopts[b][0] = 'rp';
}
for (var a = sopts.length - 1; a >= 0; a--) {
var name = sopts[a][0], rev = sopts[a][1], typ = sopts[a][2];
if (!name)
continue;
if (name.indexOf('tags/') === 0) {
name = name.slice(5);
for (var b = 0, bb = nodes.length; b < bb; b++)
nodes[b]._sv = nodes[b].tags[name];
}
else {
for (var b = 0, bb = nodes.length; b < bb; b++) {
var v = nodes[b][name];
if ((v + '').indexOf('<a ') === 0)
v = v.split('>')[1];
else if (name == "href" && v)
v = uricom_dec(v)[0]
nodes[b]._sv = v;
}
}
var onodes = nodes.map((x) => x);
nodes.sort(function (n1, n2) {
var v1 = n1._sv,
v2 = n2._sv;
if (v1 === undefined) {
if (v2 === undefined) {
return onodes.indexOf(n1) - onodes.indexOf(n2);
}
return -1 * rev;
}
if (v2 === undefined) return 1 * rev;
var ret = rev * (typ == 'int' ? (v1 - v2) : (v1.localeCompare(v2)));
if (ret === 0)
ret = onodes.indexOf(n1) - onodes.indexOf(n2);
return ret;
});
}
for (var b = 0, bb = nodes.length; b < bb; b++) {
delete nodes[b]._sv;
if (is_srch)
delete nodes[b].ext;
}
}
catch (ex) {
console.log("failed to apply sort config: " + ex);
}
return nodes;
}
function sortTable(table, col, cb) {
var tb = table.tBodies[0],
th = table.tHead.rows[0].cells,
@@ -186,7 +255,6 @@ function makeSortable(table, cb) {
}
(function () {
var ops = document.querySelectorAll('#ops>a');
for (var a = 0; a < ops.length; a++) {

View File

@@ -83,6 +83,9 @@ sqlite3 up2k.db 'select mt1.w, mt1.k, mt1.v, mt2.v from mt mt1 inner join mt mt2
time sqlite3 up2k.db 'select mt1.w from mt mt1 inner join mt mt2 on mt1.w = mt2.w where mt1.k = +mt2.k and mt1.rowid != mt2.rowid' > warks
cat warks | while IFS= read -r x; do sqlite3 up2k.db "delete from mt where w = '$x'"; done
# dump all dbs
find -iname up2k.db | while IFS= read -r x; do sqlite3 "$x" 'select substr(w,1,12), rd, fn from up' | sed -r 's/\|/ \| /g' | while IFS= read -r y; do printf '%s | %s\n' "$x" "$y"; done; done
##
## media
@@ -126,6 +129,15 @@ pip install virtualenv
# readme toc
cat README.md | awk '!/^#/{next} {lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab)} {printf "%" ((lv-1)*4+1) "s [%s](#%s)\n", "*",$0,bab}'
# fix firefox phantom breakpoints,
# suggestions from bugtracker, doesn't work (debugger is not attachable)
devtools settings >> advanced >> enable browser chrome debugging + enable remote debugging
burger > developer >> browser toolbox (ctrl-alt-shift-i)
iframe btn topright >> chrome://devtools/content/debugger/index.html
dbg.asyncStore.pendingBreakpoints = {}
# fix firefox phantom breakpoints
about:config >> devtools.debugger.prefs-schema-version = -1
##
## http 206

View File

@@ -161,7 +161,7 @@ find .. -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
find .. -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
echo use smol web deps
rm -f copyparty/web/deps/*.full.*
rm -f copyparty/web/deps/*.full.* copyparty/web/{Makefile,splash.js}
# it's fine dw
grep -lE '\.full\.(js|css)' copyparty/web/* |

View File

@@ -18,7 +18,9 @@ from copyparty import util
class Cfg(Namespace):
def __init__(self, a=[], v=[], c=None):
ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr mte".split()}
ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
ex["mtp"] = []
ex["mte"] = "a"
super(Cfg, self).__init__(a=a, v=v, c=c, **ex)