Compare commits

...

50 Commits

Author SHA1 Message Date
ed a359d64d44 v0.11.11 2021-06-08 23:43:00 +02:00
ed 22396e8c33 zopfli js/css 2021-06-08 23:19:35 +02:00
ed 5ded5a4516 alphabetical up2k indexing 2021-06-08 21:42:08 +02:00
ed 79c7639aaf haha memes 2021-06-08 21:10:25 +02:00
ed 5bbf875385 fuse-client: print python version 2021-06-08 20:19:51 +02:00
ed 5e159432af vscode: support running with -jN 2021-06-08 20:18:24 +02:00
ed 1d6ae409f6 count expenses when sending files 2021-06-08 20:17:53 +02:00
ed 9d729d3d1a add thread names 2021-06-08 20:14:23 +02:00
ed 4dd5d4e1b7 when rootless, blank instead of block rootdir 2021-06-08 18:35:55 +02:00
ed acd8149479 dont track workloads unless multiprocessing 2021-06-08 18:01:59 +02:00
ed b97a1088fa v0.11.10 2021-06-08 09:41:31 +02:00
ed b77bed3324 fix terminating tls connections wow 2021-06-08 09:40:49 +02:00
ed a2b7c85a1f forgot what version was running on a box 2021-06-08 00:01:08 +02:00
ed b28533f850 v0.11.9 2021-06-07 20:22:10 +02:00
ed bd8c7e538a sfx.sh: use system jinja2 when available 2021-06-07 20:09:45 +02:00
ed 89e48cff24 detect recursive symlinks 2021-06-07 20:09:18 +02:00
ed ae90a7b7b6 mention firefox funny 2021-06-07 02:10:54 +02:00
ed 6fc1be04da support windows-py3.5 2021-06-06 21:10:53 +02:00
ed 0061d29534 v0.11.8 2021-06-06 19:09:55 +02:00
ed a891f34a93 update sharex example 2021-06-06 19:06:33 +02:00
ed d6a1e62a95 append file-ext when avoiding name collisions 2021-06-06 18:53:32 +02:00
ed cda36ea8b4 support json replies from bput 2021-06-06 18:47:21 +02:00
ed 909a76434a a 2021-06-06 03:07:11 +02:00
ed 39348ef659 add sharex example 2021-06-06 02:53:01 +02:00
ed 99d30edef3 v0.11.7 2021-06-05 03:33:29 +02:00
ed b63ab15bf9 gallery links in new tab if a selection is atcive 2021-06-05 03:27:44 +02:00
ed 485cb4495c minify asmcrypto a bit 2021-06-05 03:25:54 +02:00
ed df018eb1f2 add colors 2021-06-05 01:34:39 +02:00
ed 49aa47a9b8 way faster sha512 wasm fallback 2021-06-05 01:14:16 +02:00
ed 7d20eb202a optimize 2021-06-04 19:35:08 +02:00
ed c533da9129 fix single-threaded mtag 2021-06-04 19:00:24 +02:00
ed 5cba31a814 spin on thumbnails too 2021-06-04 17:38:57 +02:00
ed 1d824cb26c add volume lister / containment checker 2021-06-04 02:23:46 +02:00
ed 83b903d60e readme: update todos 2021-06-02 09:42:33 +02:00
ed 9c8ccabe8e v0.11.6 2021-06-01 08:25:35 +02:00
ed b1f2c4e70d gain 1000x performance with one weird trick 2021-06-01 06:17:46 +00:00
ed 273ca0c8da run tests on commit 2021-06-01 05:49:41 +02:00
ed d6f516b34f pypi exclusive 2021-06-01 04:14:23 +02:00
ed 83127858ca v0.11.4 2021-06-01 03:55:51 +02:00
ed d89329757e fix permission check in tar/zip generator (gdi) 2021-06-01 03:55:31 +02:00
ed 49ffec5320 v0.11.3 2021-06-01 03:11:02 +02:00
ed 2eaae2b66a fix youtube query example 2021-06-01 02:53:54 +02:00
ed ea4441e25c v0.11.2 2021-06-01 02:47:37 +02:00
ed e5f34042f9 more precise volume state in admin panel 2021-06-01 02:32:53 +02:00
ed 271096874a fix adv and date handling in query lang 2021-06-01 02:10:17 +02:00
ed 8efd780a72 thumbnail cleaner too noisy 2021-06-01 01:51:03 +02:00
ed 41bcf7308d fix search results as thumbnails 2021-06-01 01:41:36 +02:00
ed d102bb3199 fix on-upload hasher (0.11.1 regression) 2021-06-01 01:20:34 +02:00
ed d0bed95415 search: add a query language 2021-06-01 01:16:40 +02:00
ed 2528729971 add dbtool 2021-05-30 16:49:08 +00:00
41 changed files with 1071 additions and 316 deletions

24 .vscode/launch.py vendored
View File

@@ -3,14 +3,12 @@
# launches 10x faster than mspython debugpy
# and is stoppable with ^C
import re
import os
import sys
import shlex
sys.path.insert(0, os.getcwd())
import jstyleson
from copyparty.__main__ import main as copyparty
import subprocess as sp
with open(".vscode/launch.json", "r", encoding="utf-8") as f:
tj = f.read()
@@ -25,11 +23,19 @@ except:
pass
argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv]
try:
copyparty(["a"] + argv)
except SystemExit as ex:
if ex.code:
raise
if re.search(" -j ?[0-9]", " ".join(argv)):
argv = [sys.executable, "-m", "copyparty"] + argv
sp.check_call(argv)
else:
sys.path.insert(0, os.getcwd())
from copyparty.__main__ import main as copyparty
try:
copyparty(["a"] + argv)
except SystemExit as ex:
if ex.code:
raise
print("\n\033[32mokke\033[0m")
sys.exit(1)

View File

@@ -68,12 +68,16 @@ you may also want these, especially on servers:
## notes
general:
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
* because no browsers currently implement the media-query to do this properly orz
browser-specific:
* iPhone/iPad: use Firefox to download files
* Android-Chrome: increase "parallel uploads" for higher speed (android bug)
* Android-Firefox: takes a while to select files (their fix for ☝️)
* Desktop-Firefox: ~~may use gigabytes of RAM if your files are massive~~ *seems to be OK now*
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
* because no browsers currently implement the media-query to do this properly orz
* Desktop-Firefox: may stop you from deleting folders you've uploaded until you visit `about:memory` and click `Minimize memory usage`
## status
@@ -397,6 +401,8 @@ quick summary of more eccentric web-browsers trying to view a directory index:
* cross-platform python client available in [./bin/](bin/)
* [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
* sharex (screenshot utility): see [./contrib/sharex.sxcu](contrib/#sharexsxcu)
copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads:
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
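for reference, a rough python equivalent of the `b512` shell function above (a sketch, not part of the repo; same standard-base64 alphabet and 43-char truncation as the one-liner):

```
# sketch of the b512() one-liner: sha512 the file, base64 the raw
# digest, keep the first 43 characters; reads the whole file at once,
# so chunk it yourself for very large uploads
import base64
import hashlib
import sys

def b512(path):
    with open(path, "rb") as f:
        digest = hashlib.sha512(f.read()).digest()
    return base64.b64encode(digest).decode("ascii")[:43]

if __name__ == "__main__":
    print(b512(sys.argv[1]))
```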
@@ -520,20 +526,25 @@ in the `scripts` folder:
roughly sorted by priority
* separate sqlite table per tag
* audio fingerprinting
* readme.md as epilogue
* single sha512 across all up2k chunks? maybe
* reduce up2k roundtrips
* start from a chunk index and just go
* terminate client on bad data
* `os.copy_file_range` for up2k cloning
* single sha512 across all up2k chunks? maybe
* figure out the deal with pixel3a not being connectable as hotspot
* pixel3a having unpredictable 3sec latency in general :||||
discarded ideas
* separate sqlite table per tag
* performance fixed by skipping some indexes (`+mt.k`)
* audio fingerprinting
* only makes sense if there can be a wasm client and that doesn't exist yet (except for olaf which is agpl hence counts as not existing)
* `os.copy_file_range` for up2k cloning
* almost never hit this path anyways
* up2k partials ui
* feels like there isn't much point
* cache sha512 chunks on client
* too dangerous
* comment field
* nah
* look into android thumbnail cache file format
* absolutely not

View File

@@ -45,3 +45,18 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
# [`mtag/`](mtag/)
* standalone programs which perform misc. file analysis
* copyparty can Popen programs like these during file indexing to collect additional metadata
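a minimal sketch of such a parser, assuming the convention that it gets the media file's path as its only argument and prints the tag value on stdout; the bundled parsers in this folder are the authoritative examples:

```
#!/usr/bin/env python3
# hypothetical -mtp parser sketch; prints one "tag" (file size in MiB)
# for the file given as argv[1] -- the real in/out contract may differ,
# so check the bundled parsers before writing your own
import os
import sys

def main():
    path = sys.argv[1]
    print(round(os.path.getsize(path) / (1024 * 1024), 1))

if __name__ == "__main__":
    main()
```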
# [`dbtool.py`](dbtool.py)
upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty recommends wiping the DB and reindexing because it now collects additional metadata during analysis, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
for that example (upgrading to v0.11.0), first move the old db aside, launch copyparty, let it rebuild the db until the point where it starts running mtp (colored messages as it adds the mtp tags), then CTRL-C and patch in the old mtp tags from the old db instead
so assuming you have `-mtp` parsers to provide the tags `key` and `.bpm`:
```
~/bin/dbtool.py -ls up2k.db
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -cmp
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy key
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy .bpm -vac
```

View File

@@ -54,6 +54,12 @@ MACOS = platform.system() == "Darwin"
info = log = dbg = None
print("{} v{} @ {}".format(
platform.python_implementation(),
".".join([str(x) for x in sys.version_info]),
sys.executable))
try:
from fuse import FUSE, FuseOSError, Operations
except:

198 bin/dbtool.py Executable file
View File

@@ -0,0 +1,198 @@
#!/usr/bin/env python3
import os
import sys
import sqlite3
import argparse
DB_VER = 3
def die(msg):
print("\033[31m\n" + msg + "\n\033[0m")
sys.exit(1)
def read_ver(db):
for tab in ["ki", "kv"]:
try:
c = db.execute(r"select v from {} where k = 'sver'".format(tab))
except:
continue
rows = c.fetchall()
if rows:
return int(rows[0][0])
return "corrupt"
def ls(db):
nfiles = next(db.execute("select count(w) from up"))[0]
ntags = next(db.execute("select count(w) from mt"))[0]
print(f"{nfiles} files")
print(f"{ntags} tags\n")
print("number of occurences for each tag,")
print(" 'x' = file has no tags")
print(" 't:mtp' = the mtp flag (file not mtp processed yet)")
print()
for k, nk in db.execute("select k, count(k) from mt group by k order by k"):
print(f"{nk:9} {k}")
def compare(n1, d1, n2, d2, verbose):
nt = next(d1.execute("select count(w) from up"))[0]
n = 0
miss = 0
for w, rd, fn in d1.execute("select w, rd, fn from up"):
n += 1
if n % 25_000 == 0:
m = f"\033[36mchecked {n:,} of {nt:,} files in {n1} against {n2}\033[0m"
print(m)
q = "select w from up where substr(w,1,16) = ?"
hit = d2.execute(q, (w[:16],)).fetchone()
if not hit:
miss += 1
if verbose:
print(f"file in {n1} missing in {n2}: [{w}] {rd}/{fn}")
print(f" {miss} files in {n1} missing in {n2}\n")
nt = next(d1.execute("select count(w) from mt"))[0]
n = 0
miss = {}
nmiss = 0
for w, k, v in d1.execute("select * from mt"):
n += 1
if n % 100_000 == 0:
m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m"
print(m)
v2 = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
if v2:
v2 = v2[0]
# if v != v2 and v2 and k in [".bpm", "key"] and n2 == "src":
# print(f"{w} [{rd}/{fn}] {k} = [{v}] / [{v2}]")
if v2 is not None:
if k.startswith("."):
try:
diff = abs(float(v) - float(v2))
if diff > float(v) / 0.9:
v2 = None
else:
v2 = v
except:
pass
if v != v2:
v2 = None
if v2 is None:
nmiss += 1
try:
miss[k] += 1
except:
miss[k] = 1
if verbose:
q = "select rd, fn from up where substr(w,1,16) = ?"
rd, fn = d1.execute(q, (w,)).fetchone()
print(f"missing in {n2}: [{w}] [{rd}/{fn}] {k} = {v}")
for k, v in sorted(miss.items()):
if v:
print(f"{n1} has {v:6} more {k:<6} tags than {n2}")
print(f"in total, {nmiss} missing tags in {n2}\n")
def copy_mtp(d1, d2, tag, rm):
nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0]
n = 0
ndone = 0
for w, k, v in d1.execute("select * from mt where k = ?", (tag,)):
n += 1
if n % 25_000 == 0:
m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ndone} copied\033[0m"
print(m)
hit = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
if hit:
hit = hit[0]
if hit != v:
ndone += 1
if hit is not None:
d2.execute("delete from mt where w = ? and +k = ?", (w, k))
d2.execute("insert into mt values (?,?,?)", (w, k, v))
if rm:
d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w,))
d2.commit()
print(f"copied {ndone} {tag} tags over")
def main():
os.system("")
print()
ap = argparse.ArgumentParser()
ap.add_argument("db", help="database to work on")
ap.add_argument("-src", metavar="DB", type=str, help="database to copy from")
ap2 = ap.add_argument_group("informational / read-only stuff")
ap2.add_argument("-v", action="store_true", help="verbose")
ap2.add_argument("-ls", action="store_true", help="list summary for db")
ap2.add_argument("-cmp", action="store_true", help="compare databases")
ap2 = ap.add_argument_group("options which modify target db")
ap2.add_argument("-copy", metavar="TAG", type=str, help="mtp tag to copy over")
ap2.add_argument(
"-rm-mtp-flag",
action="store_true",
help="when an mtp tag is copied over, also mark that as done, so copyparty won't run mtp on it",
)
ap2.add_argument("-vac", action="store_true", help="optimize DB")
ar = ap.parse_args()
for v in [ar.db, ar.src]:
if v and not os.path.exists(v):
die("database must exist")
db = sqlite3.connect(ar.db)
ds = sqlite3.connect(ar.src) if ar.src else None
for d, n in [[ds, "src"], [db, "dst"]]:
if not d:
continue
ver = read_ver(d)
if ver == "corrupt":
die("{} database appears to be corrupt, sorry")
if ver != DB_VER:
m = f"{n} db is version {ver}, this tool only supports version {DB_VER}, please upgrade it with copyparty first"
die(m)
if ar.ls:
ls(db)
if ar.cmp:
if not ds:
die("need src db to compare against")
compare("src", ds, "dst", db, ar.v)
compare("dst", db, "src", ds, ar.v)
if ar.copy:
copy_mtp(ds, db, ar.copy, ar.rm_mtp_flag)
if __name__ == "__main__":
main()

View File

@@ -9,6 +9,16 @@
* assumes the webserver and copyparty are running on the same server/IP
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript
### [`sharex.sxcu`](sharex.sxcu)
* sharex config file to upload screenshots and grab the URL
* `RequestURL`: full URL to the target folder
* `pw`: password (remove the `pw` line if anon-write)
however if your copyparty is behind a reverse-proxy, you may want to use [`sharex-html.sxcu`](sharex-html.sxcu) instead:
* `RequestURL`: full URL to the target folder
* `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$`
* `pw`: password (remove `Parameters` if anon-write)
### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
* disables thumbnails and folder-type detection in windows explorer
* makes it way faster (especially for slow/networked locations (such as copyparty-fuse))

19 contrib/sharex-html.sxcu Normal file
View File

@@ -0,0 +1,19 @@
{
"Version": "13.5.0",
"Name": "copyparty-html",
"DestinationType": "ImageUploader",
"RequestMethod": "POST",
"RequestURL": "http://127.0.0.1:3923/sharex",
"Parameters": {
"pw": "wark"
},
"Body": "MultipartFormData",
"Arguments": {
"act": "bput"
},
"FileFormName": "f",
"RegexList": [
"bytes // <a href=\"/([^\"]+)\""
],
"URL": "http://127.0.0.1:3923/$regex:1|1$"
}

17 contrib/sharex.sxcu Normal file
View File

@@ -0,0 +1,17 @@
{
"Version": "13.5.0",
"Name": "copyparty",
"DestinationType": "ImageUploader",
"RequestMethod": "POST",
"RequestURL": "http://127.0.0.1:3923/sharex",
"Parameters": {
"pw": "wark",
"j": null
},
"Body": "MultipartFormData",
"Arguments": {
"act": "bput"
},
"FileFormName": "f",
"URL": "$json:files[0].url$"
}
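for reference, a rough python sketch of the upload these configs perform; the `pw`/`j` query params and the `act`/`f` form fields mirror the file above, while the URL, password, and filename are placeholders:

```
# hedged sketch of a sharex-style upload using requests;
# ?j asks for a json reply, act=bput selects the basic-upload handler,
# and "f" is the FileFormName from the sxcu config
import requests

def bput(path, url="http://127.0.0.1:3923/sharex", pw="wark"):
    with open(path, "rb") as f:
        r = requests.post(
            url,
            params={"pw": pw, "j": ""},
            data={"act": "bput"},
            files={"f": f},
        )
    r.raise_for_status()
    return r.json()["files"][0]["url"]

if __name__ == "__main__":
    print(bput("screenshot.png"))  # placeholder filename
```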

View File

@@ -225,6 +225,20 @@ def run_argparse(argv, formatter):
--ciphers help = available ssl/tls ciphers,
--ssl-ver help = available ssl/tls versions,
default is what python considers safe, usually >= TLS1
values for --ls:
"USR" is a user to browse as; * is anonymous, ** is all users
"VOL" is a single volume to scan, default is * (all vols)
"FLAG" is flags;
"v" in addition to realpaths, print usernames and vpaths
"ln" only prints symlinks leaving the volume mountpoint
"p" exits 1 if any such symlinks are found
"r" resumes startup after the listing
examples:
--ls '**' # list all files which are possible to read
--ls '**,*,ln' # check for dangerous symlinks
--ls '**,*,ln,p,r' # check, then start normally if safe
\033[0m
"""
),
)
@@ -261,7 +275,7 @@ def run_argparse(argv, formatter):
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=1800, help="cleanup interval")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
ap2 = ap.add_argument_group('database options')
@@ -288,6 +302,7 @@ def run_argparse(argv, formatter):
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
ap2 = ap.add_argument_group('debug options')
ap2.add_argument("--ls", metavar="U[,V[,F]]", help="scan all volumes")
ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
@@ -363,6 +378,9 @@ def main(argv=None):
+ " (if you crash with codec errors then that is why)"
)
if WINDOWS and sys.version_info < (3, 6):
al.no_scandir = True
# signal.signal(signal.SIGINT, sighandler)
SvcHub(al).run()
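as a hedged illustration of the `--ls` startup check described in the help text above, a wrapper script could refuse to start the server if any symlink escapes a volume; the volume spec below is made up:

```
# hypothetical pre-flight wrapper: --ls "**,*,ln,p" lists symlinks that
# leave a volume (checked as every user) and exits nonzero if any are
# found; the volume definition is only an example
import subprocess
import sys

base = [sys.executable, "-m", "copyparty", "-v", "srv::r"]

subprocess.check_call(base + ["--ls", "**,*,ln,p"])  # aborts here if unsafe
subprocess.check_call(base)  # check passed; start normally
```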

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (0, 11, 1)
VERSION = (0, 11, 11)
CODENAME = "the grid"
BUILD_DT = (2021, 5, 29)
BUILD_DT = (2021, 6, 8)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -7,7 +7,7 @@ import sys
import stat
import threading
from .__init__ import PY2, WINDOWS
from .__init__ import WINDOWS
from .util import IMPLICATIONS, undot, Pebkac, fsdec, fsenc, statdir, nuprint
@@ -22,7 +22,7 @@ class VFS(object):
self.uadm = uadm # users who are regular admins
self.flags = flags # config switches
self.nodes = {} # child nodes
self.all_vols = {vpath: self} # flattened recursive
self.all_vols = {vpath: self} if realpath else {} # flattened recursive
def __repr__(self):
return "VFS({})".format(
@@ -96,6 +96,7 @@ class VFS(object):
]
def get(self, vpath, uname, will_read, will_write):
# type: (str, str, bool, bool) -> tuple[VFS, str]
"""returns [vfsnode,fs_remainder] if user has the requested permissions"""
vn, rem = self._find(vpath)
@@ -135,7 +136,8 @@ class VFS(object):
#
return os.path.realpath(rp)
def ls(self, rem, uname, scandir, lstat=False):
def ls(self, rem, uname, scandir, incl_wo=False, lstat=False):
# type: (str, str, bool, bool, bool) -> tuple[str, str, dict[str, VFS]]
"""return user-readable [fsdir,real,virt] items at vpath"""
virt_vis = {} # nodes readable by user
abspath = self.canonical(rem)
@@ -143,12 +145,12 @@ class VFS(object):
real.sort()
if not rem:
for name, vn2 in sorted(self.nodes.items()):
if (
uname in vn2.uread
or "*" in vn2.uread
or uname in vn2.uwrite
or "*" in vn2.uwrite
):
ok = uname in vn2.uread or "*" in vn2.uread
if not ok and incl_wo:
ok = uname in vn2.uwrite or "*" in vn2.uwrite
if ok:
virt_vis[name] = vn2
# no vfs nodes in the list of real inodes
@@ -156,13 +158,21 @@ class VFS(object):
return [abspath, real, virt_vis]
def walk(self, rel, rem, uname, dots, scandir, lstat=False):
def walk(self, rel, rem, seen, uname, dots, scandir, lstat):
"""
recursively yields from ./rem;
rel is a unix-style user-defined vpath (not vfs-related)
"""
fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, lstat)
fsroot, vfs_ls, vfs_virt = self.ls(
rem, uname, scandir, incl_wo=False, lstat=lstat
)
if seen and not fsroot.startswith(seen[-1]) and fsroot in seen:
print("bailing from symlink loop,\n {}\n {}".format(seen[-1], fsroot))
return
seen = seen[:] + [fsroot]
rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
@@ -177,7 +187,7 @@ class VFS(object):
wrel = (rel + "/" + rdir).lstrip("/")
wrem = (rem + "/" + rdir).lstrip("/")
for x in self.walk(wrel, wrem, uname, scandir, lstat):
for x in self.walk(wrel, wrem, seen, uname, dots, scandir, lstat):
yield x
for n, vfs in sorted(vfs_virt.items()):
@@ -185,14 +195,16 @@ class VFS(object):
continue
wrel = (rel + "/" + n).lstrip("/")
for x in vfs.walk(wrel, "", uname, scandir, lstat):
for x in vfs.walk(wrel, "", seen, uname, dots, scandir, lstat):
yield x
def zipgen(self, vrem, flt, uname, dots, scandir):
if flt:
flt = {k: True for k in flt}
for vpath, apath, files, rd, vd in self.walk("", vrem, uname, dots, scandir):
for vpath, apath, files, rd, vd in self.walk(
"", vrem, [], uname, dots, scandir, False
):
if flt:
files = [x for x in files if x[0] in flt]
@@ -228,21 +240,19 @@ class VFS(object):
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
yield f
def user_tree(self, uname, readable=False, writable=False, admin=False):
ret = []
opt1 = readable and (uname in self.uread or "*" in self.uread)
opt2 = writable and (uname in self.uwrite or "*" in self.uwrite)
if admin:
if opt1 and opt2:
ret.append(self.vpath)
else:
if opt1 or opt2:
ret.append(self.vpath)
def user_tree(self, uname, readable, writable, admin):
is_readable = False
if uname in self.uread or "*" in self.uread:
readable.append(self.vpath)
is_readable = True
if uname in self.uwrite or "*" in self.uwrite:
writable.append(self.vpath)
if is_readable:
admin.append(self.vpath)
for _, vn in sorted(self.nodes.items()):
ret.extend(vn.user_tree(uname, readable, writable, admin))
return ret
vn.user_tree(uname, readable, writable, admin)
class AuthSrv(object):
@@ -420,7 +430,7 @@ class AuthSrv(object):
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
elif "" not in mount:
# there's volumes but no root; make root inaccessible
vfs = VFS(os.path.abspath("."), "")
vfs = VFS(None, "")
vfs.flags["d2d"] = True
maxdepth = 0
@@ -559,3 +569,90 @@ class AuthSrv(object):
# import pprint
# pprint.pprint({"usr": user, "rd": mread, "wr": mwrite, "mnt": mount})
def dbg_ls(self):
users = self.args.ls
vols = "*"
flags = []
try:
users, vols = users.split(",", 1)
except:
pass
try:
vols, flags = vols.split(",", 1)
flags = flags.split(",")
except:
pass
if users == "**":
users = list(self.user.keys()) + ["*"]
else:
users = [users]
for u in users:
if u not in self.user and u != "*":
raise Exception("user not found: " + u)
if vols == "*":
vols = ["/" + x for x in self.vfs.all_vols.keys()]
else:
vols = [vols]
for v in vols:
if not v.startswith("/"):
raise Exception("volumes must start with /")
if v[1:] not in self.vfs.all_vols:
raise Exception("volume not found: " + v)
self.log({"users": users, "vols": vols, "flags": flags})
for k, v in self.vfs.all_vols.items():
self.log("/{}: read({}) write({})".format(k, v.uread, v.uwrite))
flag_v = "v" in flags
flag_ln = "ln" in flags
flag_p = "p" in flags
flag_r = "r" in flags
n_bads = 0
for v in vols:
v = v[1:]
vtop = "/{}/".format(v) if v else "/"
for u in users:
self.log("checking /{} as {}".format(v, u))
try:
vn, _ = self.vfs.get(v, u, True, False)
except:
continue
atop = vn.realpath
g = vn.walk("", "", [], u, True, not self.args.no_scandir, False)
for vpath, apath, files, _, _ in g:
fnames = [n[0] for n in files]
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
vpaths = [vtop + x for x in vpaths]
apaths = [os.path.join(apath, n) for n in fnames]
files = [[vpath + "/", apath + os.sep]] + list(zip(vpaths, apaths))
if flag_ln:
files = [x for x in files if not x[1].startswith(atop + os.sep)]
n_bads += len(files)
if flag_v:
msg = [
'# user "{}", vpath "{}"\n{}'.format(u, vp, ap)
for vp, ap in files
]
else:
msg = [x[1] for x in files]
if msg:
nuprint("\n".join(msg))
if n_bads and flag_p:
raise Exception("found symlink leaving volume, and strict is set")
if not flag_r:
sys.exit(0)

View File

@@ -44,7 +44,9 @@ class BrokerMp(object):
proc.clients = {}
proc.workload = 0
thr = threading.Thread(target=self.collector, args=(proc,))
thr = threading.Thread(
target=self.collector, args=(proc,), name="mp-collector"
)
thr.daemon = True
thr.start()
@@ -52,14 +54,19 @@ class BrokerMp(object):
proc.start()
if not self.args.q:
thr = threading.Thread(target=self.debug_load_balancer)
thr = threading.Thread(
target=self.debug_load_balancer, name="mp-dbg-loadbalancer"
)
thr.daemon = True
thr.start()
def shutdown(self):
self.log("broker", "shutting down")
for proc in self.procs:
thr = threading.Thread(target=proc.q_pend.put([0, "shutdown", []]))
for n, proc in enumerate(self.procs):
thr = threading.Thread(
target=proc.q_pend.put([0, "shutdown", []]),
name="mp-shutdown-{}-{}".format(n, len(self.procs)),
)
thr.start()
with self.mutex:

View File

@@ -27,7 +27,7 @@ class MpWorker(object):
self.retpend = {}
self.retpend_mutex = threading.Lock()
self.mutex = threading.Lock()
self.workload_thr_active = False
self.workload_thr_alive = False
# we inherited signal_handler from parent,
# replace it with something harmless
@@ -35,12 +35,12 @@ class MpWorker(object):
signal.signal(signal.SIGINT, self.signal_handler)
# instantiate all services here (TODO: inheritance?)
self.httpsrv = HttpSrv(self)
self.httpsrv = HttpSrv(self, True)
self.httpsrv.disconnect_func = self.httpdrop
# on winxp and some other platforms,
# use thr.join() to block all signals
thr = threading.Thread(target=self.main)
thr = threading.Thread(target=self.main, name="mpw-main")
thr.daemon = True
thr.start()
thr.join()
@@ -75,13 +75,15 @@ class MpWorker(object):
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
self.httpsrv.accept(sck, addr)
with self.mutex:
if not self.workload_thr_active:
if not self.workload_thr_alive:
self.workload_thr_alive = True
thr = threading.Thread(target=self.thr_workload)
thr = threading.Thread(
target=self.thr_workload, name="mpw-workload"
)
thr.daemon = True
thr.start()

View File

@@ -16,6 +16,7 @@ import calendar
from .__init__ import E, PY2, WINDOWS, ANYWIN
from .util import * # noqa # pylint: disable=unused-wildcard-import
from .authsrv import AuthSrv
from .szip import StreamZip
from .star import StreamTar
@@ -35,12 +36,13 @@ class HttpCli(object):
def __init__(self, conn):
self.t0 = time.time()
self.conn = conn
self.s = conn.s
self.sr = conn.sr
self.s = conn.s # type: socket
self.sr = conn.sr # type: Unrecv
self.ip = conn.addr[0]
self.addr = conn.addr
self.addr = conn.addr # type: tuple[str, int]
self.args = conn.args
self.auth = conn.auth
self.is_mp = conn.is_mp
self.auth = conn.auth # type: AuthSrv
self.ico = conn.ico
self.thumbcli = conn.thumbcli
self.log_func = conn.log_func
@@ -153,10 +155,8 @@ class HttpCli(object):
pwd = uparam.get("pw")
self.uname = self.auth.iuser.get(pwd, "*")
if self.uname:
self.rvol = self.auth.vfs.user_tree(self.uname, readable=True)
self.wvol = self.auth.vfs.user_tree(self.uname, writable=True)
self.avol = self.auth.vfs.user_tree(self.uname, True, True, True)
self.rvol, self.wvol, self.avol = [[], [], []]
self.auth.vfs.user_tree(self.uname, self.rvol, self.wvol, self.avol)
ua = self.headers.get("user-agent", "")
self.is_rclone = ua.startswith("rclone/")
@@ -508,6 +508,7 @@ class HttpCli(object):
items = items.replace("\r", "").split("\n")
items = [unquotep(x) for x in items if items]
self.parser.drop()
return self.tx_zip(k, v, vn, rem, items, self.args.ed)
def handle_post_json(self):
@@ -600,8 +601,9 @@ class HttpCli(object):
taglist = {}
else:
# search by query params
self.log("qj: " + repr(body))
hits, taglist = idx.search(vols, body)
q = body["q"]
self.log("qj: " + q)
hits, taglist = idx.search(vols, q)
msg = len(hits)
idx.p_end = time.time()
@@ -850,14 +852,28 @@ class HttpCli(object):
status = "ERROR"
msg = "{} // {} bytes // {:.3f} MiB/s\n".format(status, sz_total, spd)
jmsg = {"status": status, "sz": sz_total, "mbps": round(spd, 3), "files": []}
for sz, sha512, ofn, lfn in files:
vpath = self.vpath + "/" + lfn
vpath = (self.vpath + "/" if self.vpath else "") + lfn
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format(
sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True)
)
# truncated SHA-512 prevents length extension attacks;
# using SHA-512/224, optionally SHA-512/256 = :64
jpart = {
"url": "{}://{}/{}".format(
"https" if self.tls else "http",
self.headers.get("host", "copyparty"),
vpath,
),
"sha512": sha512[:56],
"sz": sz,
"fn": lfn,
"fn_orig": ofn,
"path": vpath,
}
jmsg["files"].append(jpart)
vspd = self._spd(sz_total, False)
self.log("{} {}".format(vspd, msg))
@@ -869,7 +885,12 @@ class HttpCli(object):
ft = "{}\n{}\n{}\n".format(ft, msg.rstrip(), errmsg)
f.write(ft.encode("utf-8"))
self.redirect(self.vpath, msg=msg, flavor="return to", click=False)
if "j" in self.uparam:
jtxt = json.dumps(jmsg, indent=2, sort_keys=True)
self.reply(jtxt.encode("utf-8", "replace"), mime="application/json")
else:
self.redirect(self.vpath, msg=msg, flavor="return to", click=False)
self.parser.drop()
return True
@@ -973,6 +994,8 @@ class HttpCli(object):
cli_lastmod = self.headers.get("if-modified-since")
if cli_lastmod:
try:
# some browser append "; length=573"
cli_lastmod = cli_lastmod.split(";")[0].strip()
cli_dt = time.strptime(cli_lastmod, HTTP_TS_FMT)
cli_ts = calendar.timegm(cli_dt)
return file_lastmod, int(file_ts) > int(cli_ts)
@@ -1142,7 +1165,8 @@ class HttpCli(object):
if use_sendfile:
remains = sendfile_kern(lower, upper, f, self.s)
else:
remains = sendfile_py(lower, upper, f, self.s)
actor = self.conn if self.is_mp else None
remains = sendfile_py(lower, upper, f, self.s, actor)
if remains > 0:
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
@@ -1310,20 +1334,23 @@ class HttpCli(object):
def tx_mounts(self):
suf = self.urlq(rm=["h"])
rvol = [x + "/" if x else x for x in self.rvol]
wvol = [x + "/" if x else x for x in self.wvol]
rvol, wvol, avol = [
[("/" + x).rstrip("/") + "/" for x in y]
for y in [self.rvol, self.wvol, self.avol]
]
vstate = {}
if self.avol and not self.args.no_rescan:
x = self.conn.hsrv.broker.put(True, "up2k.get_volstate")
vstate = json.loads(x.get())
vstate = {("/" + k).rstrip("/") + "/": v for k, v in vstate.items()}
html = self.j2(
"splash",
this=self,
rvol=rvol,
wvol=wvol,
avol=self.avol,
avol=avol,
vstate=vstate,
url_suf=suf,
)
@@ -1355,15 +1382,31 @@ class HttpCli(object):
if self.args.no_stack:
raise Pebkac(403, "disabled by argv")
ret = []
threads = {}
names = dict([(t.ident, t.name) for t in threading.enumerate()])
for tid, stack in sys._current_frames().items():
ret.append("\n\n# {} ({:x})".format(names.get(tid), tid))
name = "{} ({:x})".format(names.get(tid), tid)
threads[name] = stack
rret = []
bret = []
for name, stack in sorted(threads.items()):
ret = ["\n\n# {}".format(name)]
pad = None
for fn, lno, name, line in traceback.extract_stack(stack):
fn = os.sep.join(fn.split(os.sep)[-3:])
ret.append('File: "{}", line {}, in {}'.format(fn, lno, name))
if line:
ret.append(" " + str(line.strip()))
if "self.not_empty.wait()" in line:
pad = " " * 4
if pad:
bret += [ret[0]] + [pad + x for x in ret[1:]]
else:
rret += ret
ret = rret + bret
ret = ("<pre>" + "\n".join(ret)).encode("utf-8")
self.reply(ret)
@@ -1396,7 +1439,9 @@ class HttpCli(object):
try:
vn, rem = self.auth.vfs.get(top, self.uname, True, False)
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
fsroot, vfs_ls, vfs_virt = vn.ls(
rem, self.uname, not self.args.no_scandir, incl_wo=True
)
except:
vfs_ls = []
vfs_virt = {}
@@ -1561,7 +1606,9 @@ class HttpCli(object):
if v is not None:
return self.tx_zip(k, v, vn, rem, [], self.args.ed)
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
fsroot, vfs_ls, vfs_virt = vn.ls(
rem, self.uname, not self.args.no_scandir, incl_wo=True
)
stats = {k: v for k, v in vfs_ls}
vfs_ls = [x[0] for x in vfs_ls]
vfs_ls.extend(vfs_virt.keys())

View File

@@ -35,6 +35,7 @@ class HttpConn(object):
self.args = hsrv.args
self.auth = hsrv.auth
self.is_mp = hsrv.is_mp
self.cert_path = hsrv.cert_path
enth = HAVE_PIL and not self.args.no_thumb
@@ -174,6 +175,11 @@ class HttpConn(object):
self.sr = Unrecv(self.s)
while True:
if self.is_mp:
self.workload += 50
if self.workload >= 2 ** 31:
self.workload = 100
cli = HttpCli(self)
if not cli.run():
return

View File

@@ -25,8 +25,8 @@ except ImportError:
sys.exit(1)
from .__init__ import E, MACOS
from .httpconn import HttpConn
from .authsrv import AuthSrv
from .httpconn import HttpConn
class HttpSrv(object):
@@ -35,8 +35,9 @@ class HttpSrv(object):
relying on MpSrv for performance (HttpSrv is just plain threads)
"""
def __init__(self, broker):
def __init__(self, broker, is_mp=False):
self.broker = broker
self.is_mp = is_mp
self.args = broker.args
self.log = broker.log
@@ -66,7 +67,11 @@ class HttpSrv(object):
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30")
thr = threading.Thread(target=self.thr_client, args=(sck, addr))
thr = threading.Thread(
target=self.thr_client,
args=(sck, addr),
name="httpsrv-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]),
)
thr.daemon = True
thr.start()
@@ -84,13 +89,16 @@ class HttpSrv(object):
cli = HttpConn(sck, addr, self)
with self.mutex:
self.clients[cli] = 0
self.workload += 50
if not self.workload_thr_alive:
self.workload_thr_alive = True
thr = threading.Thread(target=self.thr_workload)
thr.daemon = True
thr.start()
if self.is_mp:
self.workload += 50
if not self.workload_thr_alive:
self.workload_thr_alive = True
thr = threading.Thread(
target=self.thr_workload, name="httpsrv-workload"
)
thr.daemon = True
thr.start()
try:
if self.args.log_conn:
@@ -99,6 +107,7 @@ class HttpSrv(object):
cli.run()
finally:
sck = cli.s
if self.args.log_conn:
self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30")

View File

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import hashlib
import colorsys

View File

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import tarfile
import threading
@@ -42,7 +45,7 @@ class StreamTar(object):
fmt = tarfile.GNU_FORMAT
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt)
w = threading.Thread(target=self._gen)
w = threading.Thread(target=self._gen, name="star-gen")
w.daemon = True
w.start()

View File

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import time
import tempfile

View File

@@ -39,6 +39,8 @@ class SvcHub(object):
# jank goes here
auth = AuthSrv(self.args, self.log, False)
if args.ls:
auth.dbg_ls()
# initiate all services to manage
self.tcpsrv = TcpSrv(self)
@@ -69,7 +71,7 @@ class SvcHub(object):
self.broker = Broker(self)
def run(self):
thr = threading.Thread(target=self.tcpsrv.run)
thr = threading.Thread(target=self.tcpsrv.run, name="svchub-main")
thr.daemon = True
thr.start()
@@ -93,9 +95,11 @@ class SvcHub(object):
break
if n == 3:
print("waiting for thumbsrv...")
print("waiting for thumbsrv (10sec)...")
print("nailed it")
print("nailed it", end="")
finally:
print("\033[0m")
def _log_disabled(self, src, msg, c=0):
pass

View File

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import time
import zlib

View File

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import time

View File

@@ -1,3 +1,6 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import sys
import time
@@ -114,8 +117,10 @@ class ThumbSrv(object):
self.stopping = False
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
self.q = Queue(self.nthr * 4)
for _ in range(self.nthr):
t = threading.Thread(target=self.worker)
for n in range(self.nthr):
t = threading.Thread(
target=self.worker, name="thumb-{}-{}".format(n, self.nthr)
)
t.daemon = True
t.start()
@@ -129,9 +134,9 @@ class ThumbSrv(object):
msg = "cannot create video thumbnails because some of the required programs are not available: "
msg += ", ".join(missing)
self.log(msg, c=1)
self.log(msg, c=3)
t = threading.Thread(target=self.cleaner)
t = threading.Thread(target=self.cleaner, name="thumb-cleaner")
t.daemon = True
t.start()
@@ -316,10 +321,10 @@ class ThumbSrv(object):
time.sleep(interval)
for vol in self.vols:
vol += "/.hist/th"
self.log("cln {}/".format(vol))
self.log("\033[Jcln {}/\033[A".format(vol))
self.clean(vol)
self.log("cln ok")
self.log("\033[Jcln ok")
def clean(self, vol):
# self.log("cln {}".format(vol))

View File

@@ -47,11 +47,11 @@ class U2idx(object):
fhash = body["hash"]
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
uq = "substr(w,1,16) = ? and w = ?"
uq = "where substr(w,1,16) = ? and w = ?"
uv = [wark[:16], wark]
try:
return self.run_query(vols, uq, uv, {})[0]
return self.run_query(vols, uq, uv)[0]
except Exception as ex:
raise Pebkac(500, repr(ex))
@@ -67,37 +67,121 @@ class U2idx(object):
self.cur[ptop] = cur
return cur
def search(self, vols, body):
def search(self, vols, uq):
"""search by query params"""
if not HAVE_SQLITE3:
return []
qobj = {}
_conv_sz(qobj, body, "sz_min", "up.sz >= ?")
_conv_sz(qobj, body, "sz_max", "up.sz <= ?")
_conv_dt(qobj, body, "dt_min", "up.mt >= ?")
_conv_dt(qobj, body, "dt_max", "up.mt <= ?")
for seg, dk in [["path", "up.rd"], ["name", "up.fn"]]:
if seg in body:
_conv_txt(qobj, body, seg, dk)
q = ""
va = []
joins = ""
is_key = True
is_size = False
is_date = False
kw_key = ["(", ")", "and ", "or ", "not "]
kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "]
ptn_mt = re.compile(r"^\.?[a-z]+$")
mt_ctr = 0
mt_keycmp = "substr(up.w,1,16)"
mt_keycmp2 = None
uq, uv = _sqlize(qobj)
while True:
uq = uq.strip()
if not uq:
break
qobj = {}
if "tags" in body:
_conv_txt(qobj, body, "tags", "mt.v")
ok = False
for kw in kw_key + kw_val:
if uq.startswith(kw):
is_key = kw in kw_key
uq = uq[len(kw) :]
ok = True
q += kw
break
if "adv" in body:
_conv_adv(qobj, body, "adv")
if ok:
continue
v, uq = (uq + " ").split(" ", 1)
if is_key:
is_key = False
if v == "size":
v = "up.sz"
is_size = True
elif v == "date":
v = "up.mt"
is_date = True
elif v == "path":
v = "up.rd"
elif v == "name":
v = "up.fn"
elif v == "tags" or ptn_mt.match(v):
mt_ctr += 1
mt_keycmp2 = "mt{}.w".format(mt_ctr)
joins += "inner join mt mt{} on {} = {} ".format(
mt_ctr, mt_keycmp, mt_keycmp2
)
mt_keycmp = mt_keycmp2
if v == "tags":
v = "mt{0}.v".format(mt_ctr)
else:
v = "+mt{0}.k = '{1}' and mt{0}.v".format(mt_ctr, v)
else:
raise Pebkac(400, "invalid key [" + v + "]")
q += v + " "
continue
head = ""
tail = ""
if is_date:
is_date = False
v = v.upper().rstrip("Z").replace(",", " ").replace("T", " ")
while " " in v:
v = v.replace(" ", " ")
for fmt in [
"%Y-%m-%d %H:%M:%S",
"%Y-%m-%d %H:%M",
"%Y-%m-%d %H",
"%Y-%m-%d",
]:
try:
v = datetime.strptime(v, fmt).timestamp()
break
except:
pass
elif is_size:
is_size = False
v = int(float(v) * 1024 * 1024)
else:
if v.startswith("*"):
head = "'%'||"
v = v[1:]
if v.endswith("*"):
tail = "||'%'"
v = v[:-1]
q += " {}?{} ".format(head, tail)
va.append(v)
is_key = True
try:
return self.run_query(vols, uq, uv, qobj)
return self.run_query(vols, joins + "where " + q, va)
except Exception as ex:
raise Pebkac(500, repr(ex))
def run_query(self, vols, uq, uv, targs):
self.log("qs: {} {} , {}".format(uq, repr(uv), repr(targs)))
def run_query(self, vols, uq, uv):
done_flag = []
self.active_id = "{:.6f}_{}".format(
time.time(), threading.current_thread().ident
@@ -108,39 +192,19 @@ class U2idx(object):
self.active_id,
done_flag,
),
name="u2idx-terminator",
)
thr.daemon = True
thr.start()
if not targs:
if not uq:
q = "select * from up"
v = ()
else:
q = "select * from up where " + uq
v = tuple(uv)
if not uq or not uv:
q = "select * from up"
v = ()
else:
q = "select up.* from up"
keycmp = "substr(up.w,1,16)"
where = []
v = []
ctr = 0
for tq, tv in sorted(targs.items()):
ctr += 1
tq = tq.split("\n")[0]
keycmp2 = "mt{}.w".format(ctr)
q += " inner join mt mt{} on {} = {}".format(ctr, keycmp, keycmp2)
keycmp = keycmp2
where.append(tq.replace("mt.", keycmp[:-1]))
v.append(tv)
q = "select up.* from up " + uq
v = tuple(uv)
if uq:
where.append(uq)
v.extend(uv)
q += " where " + (" and ".join(where))
# self.log("q2: {} {}".format(q, repr(v)))
self.log("qs: {!r} {!r}".format(q, v))
ret = []
lim = 1000
@@ -163,7 +227,7 @@ class U2idx(object):
if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn)
rp = "/".join([vtop, rd, fn])
rp = "/".join([x for x in [vtop, rd, fn] if x])
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
for hit in sret:
@@ -204,78 +268,3 @@ def _open(ptop):
db_path = os.path.join(ptop, ".hist", "up2k.db")
if os.path.exists(db_path):
return sqlite3.connect(db_path).cursor()
def _conv_sz(q, body, k, sql):
if k in body:
q[sql] = int(float(body[k]) * 1024 * 1024)
def _conv_dt(q, body, k, sql):
if k not in body:
return
v = body[k].upper().rstrip("Z").replace(",", " ").replace("T", " ")
while " " in v:
v = v.replace(" ", " ")
for fmt in ["%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M", "%Y-%m-%d %H", "%Y-%m-%d"]:
try:
ts = datetime.strptime(v, fmt).timestamp()
break
except:
ts = None
if ts:
q[sql] = ts
def _conv_txt(q, body, k, sql):
for v in body[k].split(" "):
inv = ""
if v.startswith("-"):
inv = "not"
v = v[1:]
if not v:
continue
head = "'%'||"
if v.startswith("^"):
head = ""
v = v[1:]
tail = "||'%'"
if v.endswith("$"):
tail = ""
v = v[:-1]
qk = "{} {} like {}?{}".format(sql, inv, head, tail)
q[qk + "\n" + v] = u8safe(v)
def _conv_adv(q, body, k):
ptn = re.compile(r"^(\.?[a-z]+) *(==?|!=|<=?|>=?) *(.*)$")
parts = body[k].split(" ")
parts = [x.strip() for x in parts if x.strip()]
for part in parts:
m = ptn.match(part)
if not m:
p = html_escape(part)
raise Pebkac(400, "invalid argument [" + p + "]")
k, op, v = m.groups()
qk = "mt.k = '{}' and mt.v {} ?".format(k, op)
q[qk + "\n" + v] = u8safe(v)
def _sqlize(qobj):
keys = []
values = []
for k, v in sorted(qobj.items()):
keys.append(k.split("\n")[0])
values.append(v)
return " and ".join(keys), values

View File

@@ -83,7 +83,7 @@ class Up2k(object):
if ANYWIN:
# usually fails to set lastmod too quickly
self.lastmod_q = Queue()
thr = threading.Thread(target=self._lastmodder)
thr = threading.Thread(target=self._lastmodder, name="up2k-lastmod")
thr.daemon = True
thr.start()
@@ -96,7 +96,9 @@ class Up2k(object):
if self.args.no_fastboot:
self.deferred_init(all_vols)
else:
t = threading.Thread(target=self.deferred_init, args=(all_vols,))
t = threading.Thread(
target=self.deferred_init, args=(all_vols,), name="up2k-deferred-init"
)
t.daemon = True
t.start()
@@ -104,20 +106,20 @@ class Up2k(object):
have_e2d = self.init_indexes(all_vols)
if have_e2d:
thr = threading.Thread(target=self._snapshot)
thr = threading.Thread(target=self._snapshot, name="up2k-snapshot")
thr.daemon = True
thr.start()
thr = threading.Thread(target=self._hasher)
thr = threading.Thread(target=self._hasher, name="up2k-hasher")
thr.daemon = True
thr.start()
if self.mtag:
thr = threading.Thread(target=self._tagger)
thr = threading.Thread(target=self._tagger, name="up2k-tagger")
thr.daemon = True
thr.start()
thr = threading.Thread(target=self._run_all_mtp)
thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-init")
thr.daemon = True
thr.start()
@@ -132,7 +134,11 @@ class Up2k(object):
return "cannot initiate; scan is already in progress"
args = (all_vols, scan_vols)
t = threading.Thread(target=self.init_indexes, args=args)
t = threading.Thread(
target=self.init_indexes,
args=args,
name="up2k-rescan-{}".format(scan_vols[0]),
)
t.daemon = True
t.start()
return None
@@ -181,17 +187,22 @@ class Up2k(object):
for vol in vols:
try:
os.listdir(vol.realpath)
if not self.register_vpath(vol.realpath, vol.flags):
raise Exception()
if vol.vpath in scan_vols or not scan_vols:
live_vols.append(vol)
if vol.vpath not in self.volstate:
self.volstate[vol.vpath] = "OFFLINE (not initialized)"
except:
self.volstate[vol.vpath] = "OFFLINE (cannot access folder)"
self.log("cannot access " + vol.realpath, c=1)
continue
if scan_vols and vol.vpath not in scan_vols:
continue
if not self.register_vpath(vol.realpath, vol.flags):
# self.log("db not enabled for {}".format(m, vol.realpath))
pass
continue
live_vols.append(vol)
if vol.vpath not in self.volstate:
self.volstate[vol.vpath] = "OFFLINE (pending initialization)"
vols = live_vols
need_vac = {}
@@ -268,7 +279,7 @@ class Up2k(object):
if self.mtag:
m = "online (running mtp)"
if scan_vols:
thr = threading.Thread(target=self._run_all_mtp)
thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-scan")
thr.daemon = True
else:
del self.pp
@@ -285,7 +296,10 @@ class Up2k(object):
def register_vpath(self, ptop, flags):
db_path = os.path.join(ptop, ".hist", "up2k.db")
if ptop in self.registry:
return [self.cur[ptop], db_path]
try:
return [self.cur[ptop], db_path]
except:
return None
_, flags = self._expr_idx_filter(flags)
@@ -367,7 +381,8 @@ class Up2k(object):
self.pp.msg = "a{} {}".format(self.pp.n, cdir)
histdir = os.path.join(top, ".hist")
ret = 0
for iname, inf in statdir(self.log, not self.args.no_scandir, False, cdir):
g = statdir(self.log, not self.args.no_scandir, False, cdir)
for iname, inf in sorted(g):
abspath = os.path.join(cdir, iname)
lmod = int(inf.st_mtime)
if stat.S_ISDIR(inf.st_mode):
@@ -549,9 +564,10 @@ class Up2k(object):
last_write = time.time()
n_buf = 0
self._stop_mpool(mpool)
with self.mutex:
n_add += len(self._flush_mpool(c3))
if mpool:
self._stop_mpool(mpool)
with self.mutex:
n_add += len(self._flush_mpool(c3))
conn.commit()
c3.close()
@@ -585,7 +601,8 @@ class Up2k(object):
del self.pp
for k in list(self.volstate.keys()):
self.volstate[k] = "online, idle"
if "OFFLINE" not in self.volstate[k]:
self.volstate[k] = "online, idle"
def _run_one_mtp(self, ptop):
entags = self.entags[ptop]
@@ -748,7 +765,9 @@ class Up2k(object):
mpool = Queue(nw)
for _ in range(nw):
thr = threading.Thread(target=self._tag_thr, args=(mpool,))
thr = threading.Thread(
target=self._tag_thr, args=(mpool,), name="up2k-mpool"
)
thr.daemon = True
thr.start()
@@ -1240,12 +1259,15 @@ class Up2k(object):
return wark
def _hashlist_from_file(self, path):
pp = self.pp if hasattr(self, "pp") else None
fsz = os.path.getsize(fsenc(path))
csz = up2k_chunksize(fsz)
ret = []
with open(fsenc(path), "rb", 512 * 1024) as f:
while fsz > 0:
self.pp.msg = "{} MB, {}".format(int(fsz / 1024 / 1024), path)
if pp:
pp.msg = "{} MB, {}".format(int(fsz / 1024 / 1024), path)
hashobj = hashlib.sha512()
rem = min(csz, fsz)
fsz -= rem

View File

@@ -193,7 +193,7 @@ class ProgressPrinter(threading.Thread):
"""
def __init__(self):
threading.Thread.__init__(self)
threading.Thread.__init__(self, name="pp")
self.daemon = True
self.msg = None
self.end = False
@@ -208,6 +208,8 @@ class ProgressPrinter(threading.Thread):
msg = self.msg
uprint(" {}\033[K\r".format(msg))
if PY2:
sys.stdout.flush()
print("\033[K", end="")
sys.stdout.flush() # necessary on win10 even w/ stderr btw
@@ -262,6 +264,11 @@ def ren_open(fname, *args, **kwargs):
yield {"orz": [f, fname]}
return
if suffix:
ext = fname.split(".")[-1]
if len(ext) < 7:
suffix += "." + ext
orig_name = fname
bname = fname
ext = ""
@@ -847,13 +854,14 @@ def yieldfile(fn):
def hashcopy(actor, fin, fout):
u32_lim = int((2 ** 31) * 0.9)
is_mp = actor.is_mp
hashobj = hashlib.sha512()
tlen = 0
for buf in fin:
actor.workload += 1
if actor.workload > u32_lim:
actor.workload = 100 # prevent overflow
if is_mp:
actor.workload += 1
if actor.workload > 2 ** 31:
actor.workload = 100
tlen += len(buf)
hashobj.update(buf)
@@ -865,12 +873,17 @@ def hashcopy(actor, fin, fout):
return tlen, hashobj.hexdigest(), digest_b64
def sendfile_py(lower, upper, f, s):
def sendfile_py(lower, upper, f, s, actor=None):
remains = upper - lower
f.seek(lower)
while remains > 0:
if actor:
actor.workload += 1
if actor.workload > 2 ** 31:
actor.workload = 100
# time.sleep(0.01)
buf = f.read(min(4096, remains))
buf = f.read(min(1024 * 32, remains))
if not buf:
return remains

View File

@@ -529,6 +529,17 @@ input[type="checkbox"]:checked+label {
height: 1em;
margin: .2em 0 -1em 1.6em;
}
#tq_raw {
width: calc(100% - 2em);
margin: .3em 0 0 1.4em;
}
#tq_raw td+td {
width: 100%;
}
#op_search #q_raw {
width: 100%;
display: block;
}
#files td div span {
color: #fff;
padding: 0 .4em;

View File

@@ -803,7 +803,10 @@ var thegrid = (function () {
r.sz = v;
swrite('gridsz', r.sz);
}
document.documentElement.style.setProperty('--grid-sz', r.sz + 'em');
try {
document.documentElement.style.setProperty('--grid-sz', r.sz + 'em');
}
catch (ex) { }
}
setsz();
@@ -820,12 +823,25 @@ var thegrid = (function () {
this.setAttribute('class', tr.getAttribute('class'));
}
function bgopen(e) {
ev(e);
var url = this.getAttribute('href');
window.open(url, '_blank');
}
r.loadsel = function () {
var ths = QSA('#ggrid>a');
var ths = QSA('#ggrid>a'),
have_sel = !!QS('#files tr.sel');
for (var a = 0, aa = ths.length; a < aa; a++) {
ths[a].onclick = r.sel ? seltgl : null;
ths[a].onclick = r.sel ? seltgl : have_sel ? bgopen : null;
ths[a].setAttribute('class', ebi(ths[a].getAttribute('ref')).parentNode.parentNode.getAttribute('class'));
}
var uns = QS('#ggrid a[ref="unsearch"]');
if (uns)
uns.onclick = function () {
ebi('unsearch').click();
};
}
function loadgrid() {
@@ -836,9 +852,9 @@ var thegrid = (function () {
return r.loadsel();
var html = [];
var tr = lfiles.tBodies[0].rows;
for (var a = 0; a < tr.length; a++) {
var ao = tr[a].cells[1].firstChild,
var files = QSA('#files>tbody>tr>td:nth-child(2) a[id]');
for (var a = 0, aa = files.length; a < aa; a++) {
var ao = files[a],
href = esc(ao.getAttribute('href')),
ref = ao.getAttribute('id'),
isdir = href.split('?')[0].slice(-1)[0] == '/',
@@ -960,7 +976,7 @@ document.onkeydown = function (e) {
if (k == 'KeyT')
return ebi('thumbs').click();
if (window['thegrid'] && thegrid.en) {
if (thegrid.en) {
if (k == 'KeyS')
return ebi('gridsel').click();
@@ -1026,6 +1042,7 @@ document.onkeydown = function (e) {
for (var a = 0; a < trs.length; a += 2) {
html.push('<table>' + (trs[a].concat(trs[a + 1])).join('\n') + '</table>');
}
html.push('<table id="tq_raw"><tr><td>raw</td><td><input id="q_raw" type="text" name="q" /></td></tr></table>');
ebi('srch_form').innerHTML = html.join('\n');
var o = QSA('#op_search input');
@@ -1050,33 +1067,83 @@ document.onkeydown = function (e) {
var chk = ebi(id.slice(0, -1) + 'c');
chk.checked = ((v + '').length > 0);
}
if (id != "q_raw")
encode_query();
clearTimeout(search_timeout);
if (Date.now() - search_in_progress > 30 * 1000)
search_timeout = setTimeout(do_search, 200);
}
function encode_query() {
var q = '';
for (var a = 0; a < sconf.length; a++) {
for (var b = 1; b < sconf[a].length; b++) {
var k = sconf[a][b][0],
chk = 'srch_' + k + 'c',
tvs = ebi('srch_' + k + 'v').value.split(/ /g);
if (!ebi(chk).checked)
continue;
for (var c = 0; c < tvs.length; c++) {
var tv = tvs[c];
if (!tv.length)
break;
q += ' and ';
if (k == 'adv') {
q += tv.replace(/ /g, " and ").replace(/([=!><]=?)/, " $1 ");
continue;
}
if (k.length == 3) {
q += k.replace(/sz/, 'size').replace(/dt/, 'date').replace(/l$/, ' >= ').replace(/u$/, ' <= ') + tv;
continue;
}
if (k == 'path' || k == 'name' || k == 'tags') {
var not = ' ';
if (tv.slice(0, 1) == '-') {
tv = tv.slice(1);
not = ' not ';
}
if (tv.slice(0, 1) == '^') {
tv = tv.slice(1);
}
else {
tv = '*' + tv;
}
if (tv.slice(-1) == '$') {
tv = tv.slice(0, -1);
}
else {
tv += '*';
}
q += k + not + 'like ' + tv;
}
}
}
}
ebi('q_raw').value = q.slice(5);
}
function do_search() {
search_in_progress = Date.now();
srch_msg(false, "searching...");
clearTimeout(search_timeout);
var params = {},
o = QSA('#op_search input[type="text"]');
for (var a = 0; a < o.length; a++) {
var chk = ebi(o[a].getAttribute('id').slice(0, -1) + 'c');
if (!chk.checked)
continue;
params[o[a].getAttribute('name')] = o[a].value;
}
// ebi('srch_q').textContent = JSON.stringify(params, null, 4);
var xhr = new XMLHttpRequest();
xhr.open('POST', '/?srch', true);
xhr.setRequestHeader('Content-Type', 'text/plain');
xhr.onreadystatechange = xhr_search_results;
xhr.ts = Date.now();
xhr.send(JSON.stringify(params));
xhr.send(JSON.stringify({ "q": ebi('q_raw').value }));
}
function xhr_search_results() {
@@ -1381,7 +1448,7 @@ var treectl = (function () {
if (hpush)
get_tree('.', xhr.top);
enspin('#files');
enspin(thegrid.en ? '#gfiles' : '#files');
}
function treegrow(e) {
@@ -1461,6 +1528,7 @@ var treectl = (function () {
apply_perms(res.perms);
despin('#files');
despin('#gfiles');
ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : "";
ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : "";
@@ -1995,8 +2063,7 @@ var msel = (function () {
}
function selui() {
clmod(ebi('wtoggle'), 'sel', getsel().length);
if (window['thegrid'])
thegrid.loadsel();
thegrid.loadsel();
}
function seltgl(e) {
ev(e);

View File

@@ -20,13 +20,13 @@
<tbody>
{% for mp in avol %}
{%- if mp in vstate and vstate[mp] %}
<tr><td>/{{ mp }}</td><td><a href="/{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
<tr><td><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a href="{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
{%- endif %}
{% endfor %}
</tbody>
</table>
<div class="btns">
<a href="/{{ avol[0] }}?stack">dump stack</a>
<a href="{{ avol[0] }}?stack">dump stack</a>
</div>
{%- endif %}
@@ -34,7 +34,7 @@
<h1>you can browse these:</h1>
<ul>
{% for mp in rvol %}
<li><a href="/{{ mp }}{{ url_suf }}">/{{ mp }}</a></li>
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
{% endfor %}
</ul>
{%- endif %}
@@ -43,7 +43,7 @@
<h1>you can upload to:</h1>
<ul>
{% for mp in wvol %}
<li><a href="/{{ mp }}{{ url_suf }}">/{{ mp }}</a></li>
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
{% endfor %}
</ul>
{%- endif %}

View File

@@ -17,6 +17,7 @@ function goto_up2k() {
// chrome requires https to use crypto.subtle,
// usually it's undefined but some chromes throw on invoke
var up2k = null;
var sha_js = window.WebAssembly ? 'hw' : 'ac'; // ff53,c57,sa11
try {
var cf = crypto.subtle || crypto.webkitSubtle;
cf.digest('SHA-512', new Uint8Array(1)).then(
@@ -430,13 +431,15 @@ function up2k_init(subtle) {
// upload ui hidden by default, clicking the header shows it
function init_deps() {
if (!subtle && !window.asmCrypto) {
showmodal('<h1>loading sha512.js</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
import_js('/.cpr/deps/sha512.js', unmodal);
var fn = 'sha512.' + sha_js + '.js';
showmodal('<h1>loading ' + fn + '</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
import_js('/.cpr/deps/' + fn, unmodal);
if (is_https)
ebi('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best';
else
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance';
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance <span style="color:#' +
(sha_js == 'ac' ? 'c84">(expecting 20' : '8a5">(but dont worry too much, expect 100') + ' MiB/s)</span>';
}
}
@@ -886,6 +889,10 @@ function up2k_init(subtle) {
return base64;
}
function hex2u8(txt) {
return new Uint8Array(txt.match(/.{2}/g).map(function (b) { return parseInt(b, 16); }));
}
function get_chunksize(filesize) {
var chunksize = 1024 * 1024,
stepsize = 512 * 1024;
@@ -987,10 +994,18 @@ function up2k_init(subtle) {
if (subtle)
subtle.digest('SHA-512', buf).then(hash_done);
else setTimeout(function () {
var hasher = new asmCrypto.Sha512();
hasher.process(new Uint8Array(buf));
hasher.finish();
hash_done(hasher.result);
var u8buf = new Uint8Array(buf);
if (sha_js == 'hw') {
hashwasm.sha512(u8buf).then(function (v) {
hash_done(hex2u8(v))
});
}
else {
var hasher = new asmCrypto.Sha512();
hasher.process(u8buf);
hasher.finish();
hash_done(hasher.result);
}
}, 1);
};

View File

@@ -238,6 +238,10 @@
color: #fff;
font-style: italic;
}
#u2foot span {
color: #999;
font-size: .9em;
}
#u2footfoot {
margin-bottom: -1em;
}

View File

@@ -80,6 +80,13 @@ command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (ti
var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n"));
##
## bash oneliners
# get the size and video-id of all youtube vids in folder, assuming filename ends with -id.ext, and create a copyparty search query
find -maxdepth 1 -printf '%s %p\n' | sort -n | awk '!/-([0-9a-zA-Z_-]{11})\.(mkv|mp4|webm)$/{next} {sub(/\.[^\.]+$/,"");n=length($0);v=substr($0,n-10);print $1, v}' | tee /dev/stderr | awk 'BEGIN {p="("} {printf("%s name like *-%s.* ",p,$2);p="or"} END {print ")\n"}' | cat >&2
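a rough sketch of submitting such a raw query the way the web-ui in this changeset does (json body `{"q": ...}` POSTed to `/?srch`); assumes an anonymous-readable setup, no error handling:

```
# hedged sketch: post a raw query-language string to the search
# endpoint the same way browser.js does in this changeset
import json
import requests

def srch(q, base="http://127.0.0.1:3923"):
    r = requests.post(
        base + "/?srch",
        data=json.dumps({"q": q}),
        headers={"Content-Type": "text/plain"},
    )
    r.raise_for_status()
    return r.json()

# size is in MiB, dates are YYYY-MM-DD, * is a wildcard for "like"
hits = srch("size >= 10 and date >= 2021-06-01 and name like *.mkv")
```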
##
## sqlite3 stuff
@@ -146,6 +153,9 @@ dbg.asyncStore.pendingBreakpoints = {}
# fix firefox phantom breakpoints
about:config >> devtools.debugger.prefs-schema-version = -1
# determine server version
git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > /dev/shm/revs && cat /dev/shm/revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js 2>/dev/null | diff -wNarU0 - <(cat /mnt/Users/ed/Downloads/ref/{util,browser,up2k}.js) | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
##
## http 206

View File

@@ -9,6 +9,12 @@ ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_zopfli=1.0.3
# TODO
# sha512.hw.js https://github.com/Daninet/hash-wasm
# sha512.kc.js https://github.com/chm-diederichs/sha3-wasm
# awk '/HMAC state/{o=1} /var HEAP/{o=0} /function hmac_reset/{o=1} /return \{/{o=0} /var __extends =/{o=1} /var Hash =/{o=0} /hmac_|pbkdf2_/{next} o{next} {gsub(/IllegalStateError/,"Exception")} {sub(/^ +/,"");sub(/^\/\/ .*/,"");sub(/;$/," ;")} 1' <sha512.ac.js.orig >sha512.ac.js; for fn in sha512.ac.js.orig sha512.ac.js; do wc -c <$fn; wc -c <$fn.gz ; for n in {1..9}; do printf '%8d %d bz\n' $(bzip2 -c$n <$fn | wc -c) $n; done; done
# download;
# the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
RUN mkdir -p /z/dist/no-pk \

scripts/install-githooks.sh (new executable file, +12 lines)

View File

@@ -0,0 +1,12 @@
#!/bin/bash
set -ex
[ -e setup.py ] || cd ..
[ -e setup.py ] || {
echo u wot
exit 1
}
cd .git/hooks
rm -f pre-commit
ln -s ../../scripts/run-tests.sh pre-commit

View File

@@ -32,6 +32,10 @@ gtar=$(command -v gtar || command -v gnutar) || true
[ -e /opt/local/bin/bzip2 ] &&
bzip2() { /opt/local/bin/bzip2 "$@"; }
}
gawk=$(command -v gawk || command -v gnuawk || command -v awk)
awk() { $gawk "$@"; }
pybin=$(command -v python3 || command -v python) || {
echo need python
exit 1
@@ -194,17 +198,46 @@ tmv "$f"
# up2k goes from 28k to 22k laff
echo entabbening
find | grep -E '\.(js|css|html)$' | while IFS= read -r f; do
find | grep -E '\.css$' | while IFS= read -r f; do
awk '{
sub(/^[ \t]+/,"");
sub(/[ \t]+$/,"");
$0=gensub(/^([a-z-]+) *: *(.*[^ ]) *;$/,"\\1:\\2;","1");
sub(/ +\{$/,"{");
gsub(/, /,",")
}
!/\}$/ {printf "%s",$0;next}
1
' <$f | gsed 's/;\}$/}/' >t
tmv "$f"
done
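
For readers not fluent in awk, a rough Python restatement of the CSS minifier above, illustrative only and not part of make-sfx.sh:

import re

def minify_css(src):
    out = []
    for ln in src.splitlines():
        ln = ln.strip()                                             # drop leading/trailing whitespace
        ln = re.sub(r'^([a-z-]+) *: *(.*[^ ]) *;$', r'\1:\2;', ln)  # "color : #fff ;" -> "color:#fff;"
        ln = re.sub(r' +\{$', '{', ln)                              # "a {" -> "a{"
        ln = ln.replace(', ', ',')                                  # "a, b" -> "a,b"
        out.append(ln + ('\n' if ln.endswith('}') else ''))         # join each rule onto one line
    return re.sub(r';\}$', '}', ''.join(out), flags=re.M)           # the trailing gsed: ";}" -> "}"
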
find | grep -E '\.(js|html)$' | while IFS= read -r f; do
unexpand -t 4 --first-only <"$f" >t
tmv "$f"
done
gzres() {
command -v pigz &&
pk='pigz -11 -J 34 -I 100' ||
pk='gzip'
echo "$pk"
find | grep -E '\.(js|css)$' | while IFS= read -r f; do
echo -n .
$pk "$f"
done
echo
}
gzres
echo gen tarlist
for d in copyparty dep-j2; do find $d -type f; done |
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
(grep -vE 'gz$' list1; grep -E 'gz$' list1) >list
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1) >list || true
echo creating tar
args=(--owner=1000 --group=1000)
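
The two-stage list above sorts entries by file extension and then moves the precompressed .gz/.br copies to the end of the tar. A Python sketch of roughly the same ordering, illustrative only and assuming the same copyparty/dep-j2 layout:

import os

files = [os.path.join(dp, f)
         for top in ("copyparty", "dep-j2")
         for dp, _, fns in os.walk(top)
         for f in fns]

# the sed pair swaps "path.ext" to "ext path" so sort groups by extension, then swaps back
files.sort(key=lambda p: (p.rsplit(".", 1)[-1], p))

plain = [p for p in files if not p.endswith((".gz", ".br"))]
packed = [p for p in files if p.endswith((".gz", ".br"))]
ordered = plain + packed   # same effect as the grep -vE / grep -E pair writing "list"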

scripts/profile.py (new file, +34 lines)
View File

@@ -0,0 +1,34 @@
#!/usr/bin/env python3
import sys
sys.path.insert(0, ".")
cmd = sys.argv[1]
if cmd == "cpp":
from copyparty.__main__ import main
argv = ["__main__", "-v", "srv::r", "-v", "../../yt:yt:r"]
main(argv=argv)
elif cmd == "test":
from unittest import main
argv = ["__main__", "discover", "-s", "tests"]
main(module=None, argv=argv)
else:
raise Exception()
# import dis; print(dis.dis(main))
# macos:
# option1) python3.9 -m pip install --user -U vmprof==0.4.9
# option2) python3.9 -m pip install --user -U https://github.com/vmprof/vmprof-python/archive/refs/heads/master.zip
#
# python -m vmprof -o prof --lines ./scripts/profile.py test
# linux: ~/.local/bin/vmprofshow prof tree | grep -vF '[1m 0.'
# macos: ~/Library/Python/3.9/bin/vmprofshow prof tree | grep -vF '[1m 0.'
# win: %appdata%\..\Roaming\Python\Python39\Scripts\vmprofshow.exe prof tree

scripts/run-tests.sh (new executable file, +12 lines)
View File

@@ -0,0 +1,12 @@
#!/bin/bash
set -ex
pids=()
for py in python{2,3}; do
$py -m unittest discover -s tests >/dev/null &
pids+=($!)
done
for pid in ${pids[@]}; do
wait $pid
done
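
run-tests.sh fans the suite out to both interpreters and then waits on each pid, so with set -e a failure in either one fails the pre-commit hook. A Python sketch of the same fan-out, illustrative only:

import subprocess as sp

procs = [sp.Popen([py, "-m", "unittest", "discover", "-s", "tests"],
                  stdout=sp.DEVNULL)
         for py in ("python2", "python3")]

rc = max(p.wait() for p in procs)   # nonzero if either suite failed
raise SystemExit(rc)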

View File

@@ -47,7 +47,7 @@ grep -E '/(python|pypy)[0-9\.-]*$' >$dir/pys || true
printf '\033[1;30mlooking for jinja2 in [%s]\033[0m\n' "$_py" >&2
$_py -c 'import jinja2' 2>/dev/null || continue
printf '%s\n' "$_py"
mv $dir/{,x.}jinja2
mv $dir/{,x.}dep-j2
break
done)"

View File

@@ -8,13 +8,13 @@ import time
import shutil
import pprint
import tarfile
import tempfile
import unittest
from argparse import Namespace
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
from tests import util as tu
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
def hdr(query):
@@ -32,6 +32,8 @@ class Cfg(Namespace):
no_zip=False,
no_scandir=False,
no_sendfile=True,
no_rescan=True,
ihead=False,
nih=True,
mtp=[],
mte="a",
@@ -40,13 +42,15 @@ class Cfg(Namespace):
class TestHttpCli(unittest.TestCase):
def test(self):
td = os.path.join(tu.get_ramdisk(), "vfs")
try:
shutil.rmtree(td)
except OSError:
pass
def setUp(self):
self.td = tu.get_ramdisk()
def tearDown(self):
os.chdir(tempfile.gettempdir())
shutil.rmtree(self.td)
def test(self):
td = os.path.join(self.td, "vfs")
os.mkdir(td)
os.chdir(td)
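
The chdir in tearDown above is what makes the cleanup reliable: on Windows the current working directory cannot be removed, and elsewhere you would be left sitting in a deleted cwd. A standalone sketch of the pattern, with a hypothetical class name and tempfile standing in for tu.get_ramdisk():

import os
import shutil
import tempfile
import unittest

class RamdiskCase(unittest.TestCase):      # hypothetical name
    def setUp(self):
        self.td = tempfile.mkdtemp()       # stand-in for tu.get_ramdisk()
        os.chdir(self.td)

    def tearDown(self):
        os.chdir(tempfile.gettempdir())    # step out before deleting the tree we worked in
        shutil.rmtree(self.td)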

View File

@@ -7,13 +7,12 @@ import json
import shutil
import tempfile
import unittest
from textwrap import dedent
from argparse import Namespace
from copyparty.authsrv import AuthSrv
from copyparty import util
from tests import util as tu
from copyparty.authsrv import AuthSrv, VFS
from copyparty import util
class Cfg(Namespace):
@@ -25,6 +24,13 @@ class Cfg(Namespace):
class TestVFS(unittest.TestCase):
def setUp(self):
self.td = tu.get_ramdisk()
def tearDown(self):
os.chdir(tempfile.gettempdir())
shutil.rmtree(self.td)
def dump(self, vfs):
print(json.dumps(vfs, indent=4, sort_keys=True, default=lambda o: o.__dict__))
@@ -41,6 +47,7 @@ class TestVFS(unittest.TestCase):
self.assertEqual(util.undot(query), response)
def ls(self, vfs, vpath, uname):
# type: (VFS, str, str) -> tuple[str, str, str]
"""helper for resolving and listing a folder"""
vn, rem = vfs.get(vpath, uname, True, False)
r1 = vn.ls(rem, uname, False)
@@ -55,12 +62,7 @@ class TestVFS(unittest.TestCase):
pass
def test(self):
td = os.path.join(tu.get_ramdisk(), "vfs")
try:
shutil.rmtree(td)
except OSError:
pass
td = os.path.join(self.td, "vfs")
os.mkdir(td)
os.chdir(td)
@@ -227,7 +229,7 @@ class TestVFS(unittest.TestCase):
self.assertEqual(list(v1), list(v2))
# config file parser
cfg_path = os.path.join(tu.get_ramdisk(), "test.cfg")
cfg_path = os.path.join(self.td, "test.cfg")
with open(cfg_path, "wb") as f:
f.write(
dedent(
@@ -249,7 +251,7 @@ class TestVFS(unittest.TestCase):
n = au.vfs
# root was not defined, so PWD with no access to anyone
self.assertEqual(n.vpath, "")
self.assertEqual(n.realpath, td)
self.assertEqual(n.realpath, None)
self.assertEqual(n.uread, [])
self.assertEqual(n.uwrite, [])
self.assertEqual(len(n.nodes), 1)
@@ -260,6 +262,4 @@ class TestVFS(unittest.TestCase):
self.assertEqual(n.uwrite, ["asd"])
self.assertEqual(len(n.nodes), 0)
os.chdir(tempfile.gettempdir())
shutil.rmtree(td)
os.unlink(cfg_path)

View File

@@ -1,16 +1,36 @@
import os
import sys
import time
import shutil
import jinja2
import tempfile
import platform
import subprocess as sp
from copyparty.util import Unrecv
WINDOWS = platform.system() == "Windows"
ANYWIN = WINDOWS or sys.platform in ["msys"]
MACOS = platform.system() == "Darwin"
J2_ENV = jinja2.Environment(loader=jinja2.BaseLoader)
J2_FILES = J2_ENV.from_string("{{ files|join('\n') }}")
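
J2_FILES simply renders a file list joined by newlines; a quick usage example, illustrative only and not part of tests/util.py:

import jinja2

tpl = jinja2.Environment().from_string("{{ files|join('\n') }}")
print(tpl.render(files=["a.txt", "b.txt"]))   # -> "a.txt\nb.txt", what J2_FILES returns
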
def nah(*a, **ka):
return False
if MACOS:
import posixpath
posixpath.islink = nah
os.path.islink = nah
# 25% faster; until any tests do symlink stuff
from copyparty.util import Unrecv
def runcmd(*argv):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
stdout, stderr = p.communicate()
@@ -28,18 +48,25 @@ def chkcmd(*argv):
def get_ramdisk():
def subdir(top):
ret = os.path.join(top, "cptd-{}".format(os.getpid()))
shutil.rmtree(ret, True)
os.mkdir(ret)
return ret
for vol in ["/dev/shm", "/Volumes/cptd"]: # nosec (singleton test)
if os.path.exists(vol):
return vol
return subdir(vol)
if os.path.exists("/Volumes"):
devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://32768")
# hdiutil eject /Volumes/cptd/
devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://65536")
devname = devname.strip()
print("devname: [{}]".format(devname))
for _ in range(10):
try:
_, _ = chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
return "/Volumes/cptd"
return subdir("/Volumes/cptd")
except Exception as ex:
print(repr(ex))
time.sleep(0.25)
@@ -50,7 +77,7 @@ def get_ramdisk():
try:
os.mkdir(ret)
finally:
return ret
return subdir(ret)
class NullBroker(object):
@@ -89,9 +116,13 @@ class VHttpConn(object):
self.addr = ("127.0.0.1", "42069")
self.args = args
self.auth = auth
self.is_mp = False
self.log_func = log
self.log_src = "a"
self.lf_url = None
self.hsrv = VHttpSrv()
self.nbyte = 0
self.workload = 0
self.ico = None
self.thumbcli = None
self.t0 = time.time()