mirror of https://github.com/9001/copyparty.git (synced 2025-10-24 16:43:55 +00:00)

Compare commits (20 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 0061d29534 |  |
|  | a891f34a93 |  |
|  | d6a1e62a95 |  |
|  | cda36ea8b4 |  |
|  | 909a76434a |  |
|  | 39348ef659 |  |
|  | 99d30edef3 |  |
|  | b63ab15bf9 |  |
|  | 485cb4495c |  |
|  | df018eb1f2 |  |
|  | 49aa47a9b8 |  |
|  | 7d20eb202a |  |
|  | c533da9129 |  |
|  | 5cba31a814 |  |
|  | 1d824cb26c |  |
|  | 83b903d60e |  |
|  | 9c8ccabe8e |  |
|  | b1f2c4e70d |  |
|  | 273ca0c8da |  |
|  | d6f516b34f |  |
README.md (19)
@@ -397,6 +397,8 @@ quick summary of more eccentric web-browsers trying to view a directory index:
* cross-platform python client available in [./bin/](bin/)
* [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)

* sharex (screenshot utility): see [./contrib/sharex.sxcu](contrib/#sharexsxcu)

copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads:

    b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
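For reference, a minimal Python sketch of the same verification; it assumes (as the one-liner above does) that the checksum to compare against is the first 43 characters of the base64-encoded SHA-512 digest:

```python
import base64
import hashlib

def b512(path, n=43):
    # hash the file that was uploaded (reads it fully into memory; fine for small files)
    with open(path, "rb") as f:
        digest = hashlib.sha512(f.read()).digest()
    # base64-encode the raw digest and keep the first n characters,
    # mirroring `base64 | head -c43` in the shell one-liner above
    return base64.b64encode(digest).decode("ascii")[:n]

print(b512("some-upload.bin"))  # compare against what copyparty returned
```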
@@ -520,20 +522,25 @@ in the `scripts` folder:

roughly sorted by priority

* separate sqlite table per tag
* audio fingerprinting
* readme.md as epilogue
* single sha512 across all up2k chunks? maybe
* reduce up2k roundtrips
  * start from a chunk index and just go
  * terminate client on bad data
* `os.copy_file_range` for up2k cloning
* single sha512 across all up2k chunks? maybe
* figure out the deal with pixel3a not being connectable as hotspot
  * pixel3a having unpredictable 3sec latency in general :||||

discarded ideas

* separate sqlite table per tag
  * performance fixed by skipping some indexes (`+mt.k`)
* audio fingerprinting
  * only makes sense if there can be a wasm client and that doesn't exist yet (except for olaf which is agpl hence counts as not existing)
* `os.copy_file_range` for up2k cloning
  * almost never hit this path anyways
* up2k partials ui
  * feels like there isn't much point
* cache sha512 chunks on client
  * too dangerous
* comment field
  * nah
* look into android thumbnail cache file format
  * absolutely not
@@ -9,6 +9,16 @@
* assumes the webserver and copyparty are running on the same server/IP
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript

### [`sharex.sxcu`](sharex.sxcu)
* sharex config file to upload screenshots and grab the URL
* `RequestURL`: full URL to the target folder
* `pw`: password (remove the `pw` line if anon-write)

however if your copyparty is behind a reverse-proxy, you may want to use [`sharex-html.sxcu`](sharex-html.sxcu) instead:
* `RequestURL`: full URL to the target folder
* `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$`
* `pw`: password (remove `Parameters` if anon-write)

### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
* disables thumbnails and folder-type detection in windows explorer
* makes it way faster (especially for slow/networked locations, such as copyparty-fuse)
contrib/sharex-html.sxcu (new file, 19)
@@ -0,0 +1,19 @@
{
  "Version": "13.5.0",
  "Name": "copyparty-html",
  "DestinationType": "ImageUploader",
  "RequestMethod": "POST",
  "RequestURL": "http://127.0.0.1:3923/sharex",
  "Parameters": {
    "pw": "wark"
  },
  "Body": "MultipartFormData",
  "Arguments": {
    "act": "bput"
  },
  "FileFormName": "f",
  "RegexList": [
    "bytes // <a href=\"/([^\"]+)\""
  ],
  "URL": "http://127.0.0.1:3923/$regex:1|1$"
}
contrib/sharex.sxcu (new file, 17)
@@ -0,0 +1,17 @@
{
  "Version": "13.5.0",
  "Name": "copyparty",
  "DestinationType": "ImageUploader",
  "RequestMethod": "POST",
  "RequestURL": "http://127.0.0.1:3923/sharex",
  "Parameters": {
    "pw": "wark",
    "j": null
  },
  "Body": "MultipartFormData",
  "Arguments": {
    "act": "bput"
  },
  "FileFormName": "f",
  "URL": "$json:files[0].url$"
}
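As a rough illustration of what this config tells ShareX to do, here is a hedged Python sketch of the same request; the URL, `pw` value and field names are taken from the config above, and the `requests` library is an assumption (it is not something copyparty ships or requires):

```python
import requests  # assumed third-party dependency, purely for illustration

# multipart form-data POST to /sharex, password in the query string;
# the empty "j" parameter asks copyparty for a JSON reply instead of HTML
with open("screenshot.png", "rb") as f:
    resp = requests.post(
        "http://127.0.0.1:3923/sharex",
        params={"pw": "wark", "j": ""},
        data={"act": "bput"},   # matches "Arguments" above
        files={"f": f},         # matches "FileFormName": "f"
    )

# ShareX extracts the link with $json:files[0].url$; same idea here
print(resp.json()["files"][0]["url"])
```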
@@ -225,6 +225,19 @@ def run_argparse(argv, formatter):
          --ciphers help = available ssl/tls ciphers,
          --ssl-ver help = available ssl/tls versions,
            default is what python considers safe, usually >= TLS1

          values for --ls:
            "USR" is a user to browse as; * is anonymous, ** is all users
            "VOL" is a single volume to scan, default is * (all vols)
            "FLAG" is flags;
              "v" in addition to realpaths, print usernames and vpaths
              "ln" only prints symlinks leaving the volume mountpoint
              "p" exits 1 if any such symlinks are found
              "r" resumes startup after the listing
            examples:
              --ls '**'          # list all files which are possible to read
              --ls '**,*,ln'     # check for dangerous symlinks
              --ls '**,*,ln,p,r' # check, then start normally if safe
          """
    ),
)
@@ -288,6 +301,7 @@ def run_argparse(argv, formatter):
    ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")

    ap2 = ap.add_argument_group('debug options')
    ap2.add_argument("--ls", metavar="U[,V[,F]]", help="scan all volumes")
    ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
    ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
    ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
@@ -1,8 +1,8 @@
# coding: utf-8

VERSION = (0, 11, 4)
VERSION = (0, 11, 8)
CODENAME = "the grid"
BUILD_DT = (2021, 6, 1)
BUILD_DT = (2021, 6, 6)

S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
@@ -228,21 +228,19 @@ class VFS(object):
        for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
            yield f

    def user_tree(self, uname, readable=False, writable=False, admin=False):
        ret = []
        opt1 = readable and (uname in self.uread or "*" in self.uread)
        opt2 = writable and (uname in self.uwrite or "*" in self.uwrite)
        if admin:
            if opt1 and opt2:
                ret.append(self.vpath)
        else:
            if opt1 or opt2:
                ret.append(self.vpath)
    def user_tree(self, uname, readable, writable, admin):
        is_readable = False
        if uname in self.uread or "*" in self.uread:
            readable.append(self.vpath)
            is_readable = True

        if uname in self.uwrite or "*" in self.uwrite:
            writable.append(self.vpath)
            if is_readable:
                admin.append(self.vpath)

        for _, vn in sorted(self.nodes.items()):
            ret.extend(vn.user_tree(uname, readable, writable, admin))

        return ret
            vn.user_tree(uname, readable, writable, admin)


class AuthSrv(object):
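In short, `user_tree` no longer builds and returns a list per call; the caller now owns the three lists and passes them in, and the tree fills all of them in a single pass. A small sketch of the new calling convention (mirroring the `HttpCli` change further down; the user name and `vfs` object are placeholders):

```python
# hypothetical setup: `vfs` is the root VFS node, "ed" is some authenticated user
rvol, wvol, avol = [], [], []
vfs.user_tree("ed", rvol, wvol, avol)
# rvol/wvol now hold the volumes "ed" can read/write,
# avol the ones that are both readable and writable
```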
@@ -559,3 +557,90 @@ class AuthSrv(object):

        # import pprint
        # pprint.pprint({"usr": user, "rd": mread, "wr": mwrite, "mnt": mount})

    def dbg_ls(self):
        users = self.args.ls
        vols = "*"
        flags = []

        try:
            users, vols = users.split(",", 1)
        except:
            pass

        try:
            vols, flags = vols.split(",", 1)
            flags = flags.split(",")
        except:
            pass

        if users == "**":
            users = list(self.user.keys()) + ["*"]
        else:
            users = [users]

        for u in users:
            if u not in self.user and u != "*":
                raise Exception("user not found: " + u)

        if vols == "*":
            vols = ["/" + x for x in self.vfs.all_vols.keys()]
        else:
            vols = [vols]

        for v in vols:
            if not v.startswith("/"):
                raise Exception("volumes must start with /")

            if v[1:] not in self.vfs.all_vols:
                raise Exception("volume not found: " + v)

        self.log({"users": users, "vols": vols, "flags": flags})
        for k, v in self.vfs.all_vols.items():
            self.log("/{}: read({}) write({})".format(k, v.uread, v.uwrite))

        flag_v = "v" in flags
        flag_ln = "ln" in flags
        flag_p = "p" in flags
        flag_r = "r" in flags

        n_bads = 0
        for v in vols:
            v = v[1:]
            vtop = "/{}/".format(v) if v else "/"
            for u in users:
                self.log("checking /{} as {}".format(v, u))
                try:
                    vn, _ = self.vfs.get(v, u, True, False)
                except:
                    continue

                atop = vn.realpath
                g = vn.walk("", "", u, True, not self.args.no_scandir, lstat=False)
                for vpath, apath, files, _, _ in g:
                    fnames = [n[0] for n in files]
                    vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
                    vpaths = [vtop + x for x in vpaths]
                    apaths = [os.path.join(apath, n) for n in fnames]
                    files = list(zip(vpaths, apaths))

                    if flag_ln:
                        files = [x for x in files if not x[1].startswith(atop + os.sep)]
                        n_bads += len(files)

                    if flag_v:
                        msg = [
                            '# user "{}", vpath "{}"\n{}'.format(u, vp, ap)
                            for vp, ap in files
                        ]
                    else:
                        msg = [x[1] for x in files]

                    if msg:
                        nuprint("\n".join(msg))

        if n_bads and flag_p:
            raise Exception("found symlink leaving volume, and strict is set")

        if not flag_r:
            sys.exit(0)
@@ -153,10 +153,8 @@ class HttpCli(object):

        pwd = uparam.get("pw")
        self.uname = self.auth.iuser.get(pwd, "*")
        if self.uname:
            self.rvol = self.auth.vfs.user_tree(self.uname, readable=True)
            self.wvol = self.auth.vfs.user_tree(self.uname, writable=True)
            self.avol = self.auth.vfs.user_tree(self.uname, True, True, True)
        self.rvol, self.wvol, self.avol = [[], [], []]
        self.auth.vfs.user_tree(self.uname, self.rvol, self.wvol, self.avol)

        ua = self.headers.get("user-agent", "")
        self.is_rclone = ua.startswith("rclone/")
@@ -851,14 +849,28 @@ class HttpCli(object):
            status = "ERROR"

        msg = "{} // {} bytes // {:.3f} MiB/s\n".format(status, sz_total, spd)
        jmsg = {"status": status, "sz": sz_total, "mbps": round(spd, 3), "files": []}

        for sz, sha512, ofn, lfn in files:
            vpath = self.vpath + "/" + lfn
            vpath = (self.vpath + "/" if self.vpath else "") + lfn
            msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format(
                sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True)
            )
            # truncated SHA-512 prevents length extension attacks;
            # using SHA-512/224, optionally SHA-512/256 = :64
            jpart = {
                "url": "{}://{}/{}".format(
                    "https" if self.tls else "http",
                    self.headers.get("host", "copyparty"),
                    vpath,
                ),
                "sha512": sha512[:56],
                "sz": sz,
                "fn": lfn,
                "fn_orig": ofn,
                "path": vpath,
            }
            jmsg["files"].append(jpart)

        vspd = self._spd(sz_total, False)
        self.log("{} {}".format(vspd, msg))
@@ -870,7 +882,12 @@ class HttpCli(object):
            ft = "{}\n{}\n{}\n".format(ft, msg.rstrip(), errmsg)
            f.write(ft.encode("utf-8"))

        self.redirect(self.vpath, msg=msg, flavor="return to", click=False)
        if "j" in self.uparam:
            jtxt = json.dumps(jmsg, indent=2, sort_keys=True)
            self.reply(jtxt.encode("utf-8", "replace"), mime="application/json")
        else:
            self.redirect(self.vpath, msg=msg, flavor="return to", click=False)

        self.parser.drop()
        return True
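Tying this back to the `j` parameter in `sharex.sxcu`: a JSON reply produced by the code in the two hunks above would have roughly this shape (field names come from the `jmsg`/`jpart` dicts, the values here are invented):

```python
# illustrative only; shown as a Python literal of the decoded reply
{
    "status": "OK",
    "sz": 172044,
    "mbps": 11.234,
    "files": [
        {
            "url": "http://127.0.0.1:3923/sharex/screenshot.png",
            "sha512": "(first 56 characters of the truncated digest)",
            "sz": 172044,
            "fn": "screenshot.png",
            "fn_orig": "screenshot.png",
            "path": "sharex/screenshot.png",
        }
    ],
}
```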
@@ -39,6 +39,8 @@ class SvcHub(object):

        # jank goes here
        auth = AuthSrv(self.args, self.log, False)
        if args.ls:
            auth.dbg_ls()

        # initiate all services to manage
        self.tcpsrv = TcpSrv(self)
@@ -129,7 +129,7 @@ class ThumbSrv(object):

            msg = "cannot create video thumbnails because some of the required programs are not available: "
            msg += ", ".join(missing)
            self.log(msg, c=1)
            self.log(msg, c=3)

        t = threading.Thread(target=self.cleaner)
        t.daemon = True
@@ -126,10 +126,11 @@ class U2idx(object):
                joins += "inner join mt mt{} on {} = {} ".format(
                    mt_ctr, mt_keycmp, mt_keycmp2
                )
                mt_keycmp = mt_keycmp2
                if v == "tags":
                    v = "mt{0}.v".format(mt_ctr)
                else:
                    v = "mt{0}.k = '{1}' and mt{0}.v".format(mt_ctr, v)
                    v = "+mt{0}.k = '{1}' and mt{0}.v".format(mt_ctr, v)

            else:
                raise Pebkac(400, "invalid key [" + v + "]")
@@ -195,7 +196,7 @@ class U2idx(object):
        thr.daemon = True
        thr.start()

        if not uq:
        if not uq or not uv:
            q = "select * from up"
            v = ()
        else:
@@ -552,9 +552,10 @@ class Up2k(object):
                    last_write = time.time()
                    n_buf = 0

        self._stop_mpool(mpool)
        with self.mutex:
            n_add += len(self._flush_mpool(c3))
        if mpool:
            self._stop_mpool(mpool)
            with self.mutex:
                n_add += len(self._flush_mpool(c3))

        conn.commit()
        c3.close()
@@ -262,6 +262,11 @@ def ren_open(fname, *args, **kwargs):
        yield {"orz": [f, fname]}
        return

    if suffix:
        ext = fname.split(".")[-1]
        if len(ext) < 7:
            suffix += "." + ext

    orig_name = fname
    bname = fname
    ext = ""
@@ -803,7 +803,10 @@ var thegrid = (function () {
            r.sz = v;
            swrite('gridsz', r.sz);
        }
        document.documentElement.style.setProperty('--grid-sz', r.sz + 'em');
        try {
            document.documentElement.style.setProperty('--grid-sz', r.sz + 'em');
        }
        catch (ex) { }
    }
    setsz();
@@ -820,10 +823,18 @@ var thegrid = (function () {
        this.setAttribute('class', tr.getAttribute('class'));
    }

    function bgopen(e) {
        ev(e);
        var url = this.getAttribute('href');
        window.open(url, '_blank');
    }

    r.loadsel = function () {
        var ths = QSA('#ggrid>a');
        var ths = QSA('#ggrid>a'),
            have_sel = !!QS('#files tr.sel');

        for (var a = 0, aa = ths.length; a < aa; a++) {
            ths[a].onclick = r.sel ? seltgl : null;
            ths[a].onclick = r.sel ? seltgl : have_sel ? bgopen : null;
            ths[a].setAttribute('class', ebi(ths[a].getAttribute('ref')).parentNode.parentNode.getAttribute('class'));
        }
        var uns = QS('#ggrid a[ref="unsearch"]');
@@ -965,7 +976,7 @@ document.onkeydown = function (e) {
    if (k == 'KeyT')
        return ebi('thumbs').click();

    if (window['thegrid'] && thegrid.en) {
    if (thegrid.en) {
        if (k == 'KeyS')
            return ebi('gridsel').click();
@@ -1437,7 +1448,7 @@ var treectl = (function () {
        if (hpush)
            get_tree('.', xhr.top);

        enspin('#files');
        enspin(thegrid.en ? '#gfiles' : '#files');
    }

    function treegrow(e) {
@@ -1517,6 +1528,7 @@ var treectl = (function () {

        apply_perms(res.perms);
        despin('#files');
        despin('#gfiles');

        ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : "";
        ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : "";
@@ -2051,8 +2063,7 @@ var msel = (function () {
    }
    function selui() {
        clmod(ebi('wtoggle'), 'sel', getsel().length);
        if (window['thegrid'])
            thegrid.loadsel();
        thegrid.loadsel();
    }
    function seltgl(e) {
        ev(e);
@@ -17,6 +17,7 @@ function goto_up2k() {
// chrome requires https to use crypto.subtle,
// usually it's undefined but some chromes throw on invoke
var up2k = null;
var sha_js = window.WebAssembly ? 'hw' : 'ac'; // ff53,c57,sa11
try {
    var cf = crypto.subtle || crypto.webkitSubtle;
    cf.digest('SHA-512', new Uint8Array(1)).then(
@@ -430,13 +431,15 @@ function up2k_init(subtle) {
    // upload ui hidden by default, clicking the header shows it
    function init_deps() {
        if (!subtle && !window.asmCrypto) {
            showmodal('<h1>loading sha512.js</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
            import_js('/.cpr/deps/sha512.js', unmodal);
            var fn = 'sha512.' + sha_js + '.js';
            showmodal('<h1>loading ' + fn + '</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
            import_js('/.cpr/deps/' + fn, unmodal);

            if (is_https)
                ebi('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best';
            else
                ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance';
                ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance <span style="color:#' +
                    (sha_js == 'ac' ? 'c84">(expecting 20' : '8a5">(but dont worry too much, expect 100') + ' MiB/s)</span>';
        }
    }
@@ -886,6 +889,10 @@ function up2k_init(subtle) {
        return base64;
    }

    function hex2u8(txt) {
        return new Uint8Array(txt.match(/.{2}/g).map(function (b) { return parseInt(b, 16); }));
    }

    function get_chunksize(filesize) {
        var chunksize = 1024 * 1024,
            stepsize = 512 * 1024;
@@ -987,10 +994,18 @@ function up2k_init(subtle) {
        if (subtle)
            subtle.digest('SHA-512', buf).then(hash_done);
        else setTimeout(function () {
            var hasher = new asmCrypto.Sha512();
            hasher.process(new Uint8Array(buf));
            hasher.finish();
            hash_done(hasher.result);
            var u8buf = new Uint8Array(buf);
            if (sha_js == 'hw') {
                hashwasm.sha512(u8buf).then(function (v) {
                    hash_done(hex2u8(v))
                });
            }
            else {
                var hasher = new asmCrypto.Sha512();
                hasher.process(u8buf);
                hasher.finish();
                hash_done(hasher.result);
            }
        }, 1);
    };
@@ -238,6 +238,10 @@
    color: #fff;
    font-style: italic;
}
#u2foot span {
    color: #999;
    font-size: .9em;
}
#u2footfoot {
    margin-bottom: -1em;
}
@@ -9,6 +9,12 @@ ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
    ver_zopfli=1.0.3


# TODO
# sha512.hw.js https://github.com/Daninet/hash-wasm
# sha512.kc.js https://github.com/chm-diederichs/sha3-wasm
# awk '/HMAC state/{o=1} /var HEAP/{o=0} /function hmac_reset/{o=1} /return \{/{o=0} /var __extends =/{o=1} /var Hash =/{o=0} /hmac_|pbkdf2_/{next} o{next} {gsub(/IllegalStateError/,"Exception")} {sub(/^ +/,"");sub(/^\/\/ .*/,"");sub(/;$/," ;")} 1' <sha512.ac.js.orig >sha512.ac.js; for fn in sha512.ac.js.orig sha512.ac.js; do wc -c <$fn; wc -c <$fn.gz ; for n in {1..9}; do printf '%8d %d bz\n' $(bzip2 -c$n <$fn | wc -c) $n; done; done


# download;
# the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
RUN mkdir -p /z/dist/no-pk \
scripts/install-githooks.sh (new executable file, 12)
@@ -0,0 +1,12 @@
#!/bin/bash
set -ex

[ -e setup.py ] || ..
[ -e setup.py ] || {
    echo u wot
    exit 1
}

cd .git/hooks
rm -f pre-commit
ln -s ../../scripts/run-tests.sh pre-commit
@@ -204,7 +204,7 @@ for d in copyparty dep-j2; do find $d -type f; done |
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1

(grep -vE 'gz$' list1; grep -E 'gz$' list1) >list
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1) >list || true

echo creating tar
args=(--owner=1000 --group=1000)
scripts/profile.py (new file, 34)
@@ -0,0 +1,34 @@
#!/usr/bin/env python3

import sys

sys.path.insert(0, ".")
cmd = sys.argv[1]

if cmd == "cpp":
    from copyparty.__main__ import main

    argv = ["__main__", "-v", "srv::r", "-v", "../../yt:yt:r"]
    main(argv=argv)

elif cmd == "test":
    from unittest import main

    argv = ["__main__", "discover", "-s", "tests"]
    main(module=None, argv=argv)

else:
    raise Exception()

# import dis; print(dis.dis(main))


# macos:
# option1) python3.9 -m pip install --user -U vmprof==0.4.9
# option2) python3.9 -m pip install --user -U https://github.com/vmprof/vmprof-python/archive/refs/heads/master.zip
#
# python -m vmprof -o prof --lines ./scripts/profile.py test

# linux: ~/.local/bin/vmprofshow prof tree | grep -vF '[1m 0.'
# macos: ~/Library/Python/3.9/bin/vmprofshow prof tree | grep -vF '[1m 0.'
# win: %appdata%\..\Roaming\Python\Python39\Scripts\vmprofshow.exe prof tree
scripts/run-tests.sh (new executable file, 12)
@@ -0,0 +1,12 @@
#!/bin/bash
set -ex

pids=()
for py in python{2,3}; do
    $py -m unittest discover -s tests >/dev/null &
    pids+=($!)
done

for pid in ${pids[@]}; do
    wait $pid
done
@@ -8,13 +8,13 @@ import time
import shutil
import pprint
import tarfile
import tempfile
import unittest

from argparse import Namespace
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli

from tests import util as tu
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli


def hdr(query):
@@ -42,13 +42,15 @@ class Cfg(Namespace):


class TestHttpCli(unittest.TestCase):
    def test(self):
        td = os.path.join(tu.get_ramdisk(), "vfs")
        try:
            shutil.rmtree(td)
        except OSError:
            pass
    def setUp(self):
        self.td = tu.get_ramdisk()

    def tearDown(self):
        os.chdir(tempfile.gettempdir())
        shutil.rmtree(self.td)

    def test(self):
        td = os.path.join(self.td, "vfs")
        os.mkdir(td)
        os.chdir(td)
@@ -7,13 +7,12 @@ import json
import shutil
import tempfile
import unittest

from textwrap import dedent
from argparse import Namespace
from copyparty.authsrv import AuthSrv
from copyparty import util

from tests import util as tu
from copyparty.authsrv import AuthSrv
from copyparty import util


class Cfg(Namespace):
@@ -25,6 +24,13 @@ class Cfg(Namespace):


class TestVFS(unittest.TestCase):
    def setUp(self):
        self.td = tu.get_ramdisk()

    def tearDown(self):
        os.chdir(tempfile.gettempdir())
        shutil.rmtree(self.td)

    def dump(self, vfs):
        print(json.dumps(vfs, indent=4, sort_keys=True, default=lambda o: o.__dict__))
@@ -55,12 +61,7 @@ class TestVFS(unittest.TestCase):
            pass

    def test(self):
        td = os.path.join(tu.get_ramdisk(), "vfs")
        try:
            shutil.rmtree(td)
        except OSError:
            pass

        td = os.path.join(self.td, "vfs")
        os.mkdir(td)
        os.chdir(td)
@@ -227,7 +228,7 @@ class TestVFS(unittest.TestCase):
        self.assertEqual(list(v1), list(v2))

        # config file parser
        cfg_path = os.path.join(tu.get_ramdisk(), "test.cfg")
        cfg_path = os.path.join(self.td, "test.cfg")
        with open(cfg_path, "wb") as f:
            f.write(
                dedent(
||||
self.assertEqual(n.uwrite, ["asd"])
|
||||
self.assertEqual(len(n.nodes), 0)
|
||||
|
||||
os.chdir(tempfile.gettempdir())
|
||||
shutil.rmtree(td)
|
||||
os.unlink(cfg_path)
|
||||
|
||||
@@ -1,16 +1,36 @@
import os
import sys
import time
import shutil
import jinja2
import tempfile
import platform
import subprocess as sp

from copyparty.util import Unrecv

WINDOWS = platform.system() == "Windows"
ANYWIN = WINDOWS or sys.platform in ["msys"]
MACOS = platform.system() == "Darwin"

J2_ENV = jinja2.Environment(loader=jinja2.BaseLoader)
J2_FILES = J2_ENV.from_string("{{ files|join('\n') }}")


def nah(*a, **ka):
    return False


if MACOS:
    import posixpath

    posixpath.islink = nah
    os.path.islink = nah
    # 25% faster; until any tests do symlink stuff


from copyparty.util import Unrecv


def runcmd(*argv):
    p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
    stdout, stderr = p.communicate()
@@ -28,18 +48,25 @@ def chkcmd(*argv):


def get_ramdisk():
    def subdir(top):
        ret = os.path.join(top, "cptd-{}".format(os.getpid()))
        shutil.rmtree(ret, True)
        os.mkdir(ret)
        return ret

    for vol in ["/dev/shm", "/Volumes/cptd"]: # nosec (singleton test)
        if os.path.exists(vol):
            return vol
            return subdir(vol)

    if os.path.exists("/Volumes"):
        devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://32768")
        # hdiutil eject /Volumes/cptd/
        devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://65536")
        devname = devname.strip()
        print("devname: [{}]".format(devname))
        for _ in range(10):
            try:
                _, _ = chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
                return "/Volumes/cptd"
                return subdir("/Volumes/cptd")
            except Exception as ex:
                print(repr(ex))
                time.sleep(0.25)
@@ -50,7 +77,7 @@ def get_ramdisk():
    try:
        os.mkdir(ret)
    finally:
        return ret
        return subdir(ret)


class NullBroker(object):