mirror of https://github.com/9001/copyparty.git (synced 2025-10-24 00:24:04 +00:00)
Compare commits
54 Commits
SHA1:
06c6ddffb6, d29f0c066c, c9e4de3346, ca0b97f72d, b38f20b408, 05b1dbaf56,
b8481e32ba, 9c03c65e07, d8ed006b9b, 63c0623a5e, fd84506db0, d8bcb44e44,
56a26b0916, efcf1d6b90, 9f578bfec6, 1f170d7d28, 5ae14cf9be, aaf9d53be9,
75c73f7ba7, b6dba8beee, 94521cdc1a, 3365b1c355, 6c957c4923, 833997f04c,
68d51e4037, ce274d2011, 280778ed43, 0f558ecbbf, 58f9e05d93, 1ec981aea7,
2a90286a7c, 12d25d09b2, a039fae1a4, 322b9abadc, 0aaf954cea, c2d22aa3d1,
6934c75bba, c58cf78f86, 7f0de790ab, d4bb4e3a73, d25612d038, 116b2351b0,
69b83dfdc4, 3b1839c2ce, 13742ebdf8, 634657bea1, 46e70d50b7, d64e9b85a7,
fb853edbe3, cc076c1be1, 98cc9a6755, 7bd2b9c23a, de724a1ff3, 2163055dae
.vscode/launch.json (vendored): 11 changed lines
@@ -12,12 +12,21 @@
//"-nw",
"-ed",
"-emp",
"-e2d",
"-e2s",
"-a",
"ed:wark",
"-v",
"srv::r:aed"
"srv::r:aed:cnodupe"
]
},
{
"name": "No debug",
"preLaunchTask": "no_dbg",
"type": "python",
//"request": "attach", "port": 42069
// fork: nc -l 42069 </dev/null
},
{
"name": "Run active unit test",
"type": "python",
.vscode/settings.json (vendored): 12 changed lines
@@ -50,11 +50,9 @@
"files.associations": {
"*.makefile": "makefile"
},
"editor.codeActionsOnSaveTimeout": 9001,
"editor.formatOnSaveTimeout": 9001,
//
// things you may wanna edit:
//
"python.pythonPath": "/usr/bin/python3",
//"python.linting.enabled": true,
"python.formatting.blackArgs": [
"-t",
"py27"
],
"python.linting.enabled": true,
}
.vscode/tasks.json (vendored, new file): 15 lines
@@ -0,0 +1,15 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "pre",
"command": "true;rm -rf inc/* inc/.hist/;mkdir -p inc;",
"type": "shell"
},
{
"label": "no_dbg",
"command": "${config:python.pythonPath} -m copyparty -ed -emp -e2d -e2s -a ed:wark -v srv::r:aed:cnodupe ;exit 1",
"type": "shell"
}
]
}
README.md: 12 changed lines
@@ -8,7 +8,7 @@

turn your phone or raspi into a portable file server with resumable uploads/downloads using IE6 or any other browser

* server runs on anything with `py2.7` or `py3.2+`
* server runs on anything with `py2.7` or `py3.3+`
* *resumable* uploads need `firefox 12+` / `chrome 6+` / `safari 6+` / `IE 10+`
* code standard: `black`

@@ -68,17 +68,16 @@ summary: it works! you can use it! (but technically not even close to beta)
# dependencies

* `jinja2`
* pulls in `markupsafe` as of v2.7; use jinja 2.6 on py3.2

optional, enables thumbnails:
optional, will eventually enable thumbnails:
* `Pillow` (requires py2.7 or py3.5+)


# sfx

currently there are two self-contained binaries:
* `copyparty-sfx.sh` for unix (linux and osx) -- smaller, more robust
* `copyparty-sfx.py` for windows (unix too) -- crossplatform, beta
* [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere
* [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos

launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course

@@ -142,10 +141,9 @@ roughly sorted by priority
* terminate client on bad data
* drop onto folders
* `os.copy_file_range` for up2k cloning
* up2k partials ui
* support pillow-simd
* cache sha512 chunks on client
* ~~symlink existing files on upload~~
* ok at runtime, up2k db still not persisted
* comment field
* ~~look into android thumbnail cache file format~~ bad idea
* figure out the deal with pixel3a not being connectable as hotspot

@@ -1067,7 +1067,7 @@ def main():
dbg = null_log

if WINDOWS:
os.system("")
os.system("rem")

for ch in '<>:"\\|?*':
# microsoft maps illegal characters to f0xx

@@ -980,7 +980,7 @@ def main():
dbg = null_log

if WINDOWS:
os.system("")
os.system("rem")

for ch in '<>:"\\|?*':
# microsoft maps illegal characters to f0xx

@@ -10,7 +10,12 @@
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript

### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
disables thumbnails and folder-type detection in windows explorer, makes it way faster (especially for slow/networked locations (such as copyparty-fuse))
* disables thumbnails and folder-type detection in windows explorer
* makes it way faster (especially for slow/networked locations (such as copyparty-fuse))

### [`cfssl.sh`](cfssl.sh)
* creates CA and server certificates using cfssl
* give a 3rd argument to install it to your copyparty config

# OS integration
init-scripts to start copyparty as a service

contrib/cfssl.sh (new executable file): 72 lines
@@ -0,0 +1,72 @@
#!/bin/bash
set -e

# ca-name and server-name
ca_name="$1"
srv_name="$2"

[ -z "$srv_name" ] && {
echo "need arg 1: ca name"
echo "need arg 2: server name"
exit 1
}


gen_ca() {
(tee /dev/stderr <<EOF
{"CN": "$ca_name ca",
"CA": {"expiry":"87600h", "pathlen":0},
"key": {"algo":"rsa", "size":4096},
"names": [{"O":"$ca_name ca"}]}
EOF
)|
cfssl gencert -initca - |
cfssljson -bare ca

mv ca-key.pem ca.key
rm ca.csr
}


gen_srv() {
(tee /dev/stderr <<EOF
{"key": {"algo":"rsa", "size":4096},
"names": [{"O":"$ca_name - $srv_name"}]}
EOF
)|
cfssl gencert -ca ca.pem -ca-key ca.key \
-profile=www -hostname="$srv_name.$ca_name" - |
cfssljson -bare "$srv_name"

mv "$srv_name-key.pem" "$srv_name.key"
rm "$srv_name.csr"
}


# create ca if not exist
[ -e ca.key ] ||
gen_ca

# always create server cert
gen_srv


# dump cert info
show() {
openssl x509 -text -noout -in $1 |
awk '!o; {o=0} /[0-9a-f:]{16}/{o=1}'
}
show ca.pem
show "$srv_name.pem"


# write cert into copyparty config
[ -z "$3" ] || {
mkdir -p ~/.config/copyparty
cat "$srv_name".{key,pem} ca.pem >~/.config/copyparty/cert.pem
}


# rm *.key *.pem
# cfssl print-defaults config
# cfssl print-defaults csr
@@ -8,7 +8,9 @@ __copyright__ = 2019
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import shutil
|
||||
import filecmp
|
||||
@@ -19,7 +21,13 @@ from textwrap import dedent
|
||||
from .__init__ import E, WINDOWS, VT100
|
||||
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
|
||||
from .svchub import SvcHub
|
||||
from .util import py_desc
|
||||
from .util import py_desc, align_tab
|
||||
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
import ssl
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
|
||||
class RiceFormatter(argparse.HelpFormatter):
|
||||
@@ -85,10 +93,77 @@ def ensure_cert():
|
||||
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
|
||||
|
||||
|
||||
def configure_ssl_ver(al):
|
||||
def terse_sslver(txt):
|
||||
txt = txt.lower()
|
||||
for c in ["_", "v", "."]:
|
||||
txt = txt.replace(c, "")
|
||||
|
||||
return txt.replace("tls10", "tls1")
|
||||
|
||||
# oh man i love openssl
|
||||
# check this out
|
||||
# hold my beer
|
||||
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
|
||||
sslver = terse_sslver(al.ssl_ver).split(",")
|
||||
flags = [k for k in ssl.__dict__ if ptn.match(k)]
|
||||
# SSLv2 SSLv3 TLSv1 TLSv1_1 TLSv1_2 TLSv1_3
|
||||
if "help" in sslver:
|
||||
avail = [terse_sslver(x[6:]) for x in flags]
|
||||
avail = " ".join(sorted(avail) + ["all"])
|
||||
print("\navailable ssl/tls versions:\n " + avail)
|
||||
sys.exit(0)
|
||||
|
||||
al.ssl_flags_en = 0
|
||||
al.ssl_flags_de = 0
|
||||
for flag in sorted(flags):
|
||||
ver = terse_sslver(flag[6:])
|
||||
num = getattr(ssl, flag)
|
||||
if ver in sslver:
|
||||
al.ssl_flags_en |= num
|
||||
else:
|
||||
al.ssl_flags_de |= num
|
||||
|
||||
if sslver == ["all"]:
|
||||
x = al.ssl_flags_en
|
||||
al.ssl_flags_en = al.ssl_flags_de
|
||||
al.ssl_flags_de = x
|
||||
|
||||
for k in ["ssl_flags_en", "ssl_flags_de"]:
|
||||
num = getattr(al, k)
|
||||
print("{}: {:8x} ({})".format(k, num, num))
|
||||
|
||||
# think i need that beer now
|
||||
|
||||
|
||||
def configure_ssl_ciphers(al):
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
if al.ssl_ver:
|
||||
ctx.options &= ~al.ssl_flags_en
|
||||
ctx.options |= al.ssl_flags_de
|
||||
|
||||
is_help = al.ciphers == "help"
|
||||
|
||||
if al.ciphers and not is_help:
|
||||
try:
|
||||
ctx.set_ciphers(al.ciphers)
|
||||
except:
|
||||
print("\n\033[1;31mfailed to set ciphers\033[0m\n")
|
||||
|
||||
if not hasattr(ctx, "get_ciphers"):
|
||||
print("cannot read cipher list: openssl or python too old")
|
||||
else:
|
||||
ciphers = [x["description"] for x in ctx.get_ciphers()]
|
||||
print("\n ".join(["\nenabled ciphers:"] + align_tab(ciphers) + [""]))
|
||||
|
||||
if is_help:
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def main():
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
if WINDOWS:
|
||||
os.system("") # enables colors
|
||||
os.system("rem") # enables colors
|
||||
|
||||
desc = py_desc().replace("[", "\033[1;30m[")
|
||||
|
||||
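configure_ssl_ver above turns a comma-separated list like `--ssl-ver tls12,tls13` into two bitmasks of `ssl.OP_NO_*` flags: the requested versions get cleared from the context options, everything else gets set. A standalone sketch of that flag juggling (not the copyparty function itself, just the same idea against a stock `ssl` module):

```python
import re
import ssl

def terse(txt):
    # "TLSv1_2" -> "tls12", "SSLv3" -> "ssl3"
    txt = txt.lower()
    for c in ["_", "v", "."]:
        txt = txt.replace(c, "")
    return txt.replace("tls10", "tls1")

def split_flags(wanted):
    # wanted is e.g. "tls12,tls13"; returns (enable_mask, disable_mask)
    ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
    want = terse(wanted).split(",")
    en = de = 0
    for flag in sorted(k for k in ssl.__dict__ if ptn.match(k)):
        num = int(getattr(ssl, flag))
        if terse(flag[6:]) in want:
            en |= num  # versions the user asked for
        else:
            de |= num  # everything else stays forbidden via OP_NO_*
    return en, de

en, de = split_flags("tls12,tls13")
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
ctx.options &= ~en  # clear the OP_NO_* bits for allowed versions
ctx.options |= de   # set them for everything else
print("en={:08x} de={:08x}".format(en, de))
```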
@@ -96,7 +171,8 @@ def main():
|
||||
print(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc))
|
||||
|
||||
ensure_locale()
|
||||
ensure_cert()
|
||||
if HAVE_SSL:
|
||||
ensure_cert()
|
||||
|
||||
ap = argparse.ArgumentParser(
|
||||
formatter_class=RiceFormatter,
|
||||
@@ -105,29 +181,45 @@ def main():
|
||||
epilog=dedent(
|
||||
"""
|
||||
-a takes username:password,
|
||||
-v takes src:dst:permset:permset:... where "permset" is
|
||||
accesslevel followed by username (no separator)
|
||||
-v takes src:dst:permset:permset:cflag:cflag:...
|
||||
where "permset" is accesslevel followed by username (no separator)
|
||||
and "cflag" is config flags to set on this volume
|
||||
|
||||
list of cflags:
|
||||
cnodupe rejects existing files (instead of symlinking them)
|
||||
|
||||
example:\033[35m
|
||||
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed \033[36m
|
||||
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
|
||||
mount current directory at "/" with
|
||||
* r (read-only) for everyone
|
||||
* a (read+write) for ed
|
||||
mount ../inc at "/dump" with
|
||||
* w (write-only) for everyone
|
||||
* a (read+write) for ed \033[0m
|
||||
* a (read+write) for ed
|
||||
* reject duplicate files \033[0m
|
||||
|
||||
if no accounts or volumes are configured,
|
||||
current folder will be read/write for everyone
|
||||
|
||||
consider the config file for more flexible account/volume management,
|
||||
including dynamic reload at runtime (and being more readable w)
|
||||
|
||||
values for --urlform:
|
||||
"stash" dumps the data to file and returns length + checksum
|
||||
"save,get" dumps to file and returns the page like a GET
|
||||
"print,get" prints the data in the log and returns GET
|
||||
(leave out the ",get" to return an error instead)
|
||||
|
||||
--ciphers help = available ssl/tls ciphers,
|
||||
--ssl-ver help = available ssl/tls versions,
|
||||
default is what python considers safe, usually >= TLS1
|
||||
"""
|
||||
),
|
||||
)
|
||||
# fmt: off
|
||||
ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
|
||||
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind")
|
||||
ap.add_argument("-p", metavar="PORT", type=int, default=3923, help="port to bind")
|
||||
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
|
||||
ap.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
|
||||
ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
|
||||
ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
|
||||
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
|
||||
@@ -135,12 +227,43 @@ def main():
|
||||
ap.add_argument("-q", action="store_true", help="quiet")
|
||||
ap.add_argument("-ed", action="store_true", help="enable ?dots")
|
||||
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
|
||||
ap.add_argument("-e2d", action="store_true", help="enable up2k database")
|
||||
ap.add_argument("-e2s", action="store_true", help="enable up2k db-scanner")
|
||||
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
|
||||
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
|
||||
ap.add_argument("-nih", action="store_true", help="no info hostname")
|
||||
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
|
||||
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
|
||||
ap.add_argument("--urlform", type=str, default="print,get", help="how to handle url-forms")
|
||||
|
||||
ap2 = ap.add_argument_group('SSL/TLS options')
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
|
||||
ap2.add_argument("--ssl-ver", type=str, help="ssl/tls versions to allow")
|
||||
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
|
||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
|
||||
al = ap.parse_args()
|
||||
# fmt: on
|
||||
|
||||
al.i = al.i.split(",")
|
||||
try:
|
||||
if "-" in al.p:
|
||||
lo, hi = [int(x) for x in al.p.split("-")]
|
||||
al.p = list(range(lo, hi + 1))
|
||||
else:
|
||||
al.p = [int(x) for x in al.p.split(",")]
|
||||
except:
|
||||
raise Exception("invalid value for -p")
|
||||
|
||||
if HAVE_SSL:
|
||||
if al.ssl_ver:
|
||||
configure_ssl_ver(al)
|
||||
|
||||
if al.ciphers:
|
||||
configure_ssl_ciphers(al)
|
||||
else:
|
||||
print("\033[33m ssl module does not exist; cannot enable https\033[0m\n")
|
||||
|
||||
SvcHub(al).run()
|
||||
|
||||
|
||||
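The new `-i`/`-p` handling above accepts a comma-separated interface list and either a comma list or a `lo-hi` range of ports, then opens one listening socket per ip/port pair (see the tcpsrv changes further down). A small sketch of just the parsing, under the same accepted syntax:

```python
def parse_bind(ip_arg="0.0.0.0", port_arg="3923"):
    # -i: comma-separated interfaces
    ips = ip_arg.split(",")

    # -p: either "80,443,3923" or a range like "3920-3925"
    try:
        if "-" in port_arg:
            lo, hi = [int(x) for x in port_arg.split("-")]
            ports = list(range(lo, hi + 1))
        else:
            ports = [int(x) for x in port_arg.split(",")]
    except ValueError:
        raise Exception("invalid value for -p")

    return ips, ports

ips, ports = parse_bind("127.0.0.1,192.168.1.5", "3920-3922")
print([(ip, p) for ip in ips for p in ports])
# [('127.0.0.1', 3920), ..., ('192.168.1.5', 3922)]
```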
@@ -1,8 +1,8 @@
|
||||
# coding: utf-8
|
||||
|
||||
VERSION = (0, 6, 3)
|
||||
CODENAME = "CHRISTMAAAAAS"
|
||||
BUILD_DT = (2021, 1, 7)
|
||||
VERSION = (0, 7, 7)
|
||||
CODENAME = "keeping track"
|
||||
BUILD_DT = (2021, 2, 14)
|
||||
|
||||
S_VERSION = ".".join(map(str, VERSION))
|
||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
|
||||
@@ -12,11 +12,12 @@ from .util import undot, Pebkac, fsdec, fsenc
|
||||
class VFS(object):
|
||||
"""single level in the virtual fs"""
|
||||
|
||||
def __init__(self, realpath, vpath, uread=[], uwrite=[]):
|
||||
def __init__(self, realpath, vpath, uread=[], uwrite=[], flags={}):
|
||||
self.realpath = realpath # absolute path on host filesystem
|
||||
self.vpath = vpath # absolute path in the virtual filesystem
|
||||
self.uread = uread # users who can read this
|
||||
self.uwrite = uwrite # users who can write this
|
||||
self.flags = flags # config switches
|
||||
self.nodes = {} # child nodes
|
||||
|
||||
def add(self, src, dst):
|
||||
@@ -36,6 +37,7 @@ class VFS(object):
|
||||
"{}/{}".format(self.vpath, name).lstrip("/"),
|
||||
self.uread,
|
||||
self.uwrite,
|
||||
self.flags,
|
||||
)
|
||||
self.nodes[name] = vn
|
||||
return vn.add(src, dst)
|
||||
@@ -128,11 +130,10 @@ class VFS(object):
|
||||
class AuthSrv(object):
|
||||
"""verifies users against given paths"""
|
||||
|
||||
def __init__(self, args, log_func):
|
||||
self.log_func = log_func
|
||||
def __init__(self, args, log_func, warn_anonwrite=True):
|
||||
self.args = args
|
||||
|
||||
self.warn_anonwrite = True
|
||||
self.log_func = log_func
|
||||
self.warn_anonwrite = warn_anonwrite
|
||||
|
||||
if WINDOWS:
|
||||
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
|
||||
@@ -161,7 +162,7 @@ class AuthSrv(object):
|
||||
|
||||
yield prev, True
|
||||
|
||||
def _parse_config_file(self, fd, user, mread, mwrite, mount):
|
||||
def _parse_config_file(self, fd, user, mread, mwrite, mflags, mount):
|
||||
vol_src = None
|
||||
vol_dst = None
|
||||
for ln in [x.decode("utf-8").strip() for x in fd]:
|
||||
@@ -191,6 +192,7 @@ class AuthSrv(object):
|
||||
mount[vol_dst] = vol_src
|
||||
mread[vol_dst] = []
|
||||
mwrite[vol_dst] = []
|
||||
mflags[vol_dst] = {}
|
||||
continue
|
||||
|
||||
lvl, uname = ln.split(" ")
|
||||
@@ -198,6 +200,9 @@ class AuthSrv(object):
|
||||
mread[vol_dst].append(uname)
|
||||
if lvl in "wa":
|
||||
mwrite[vol_dst].append(uname)
|
||||
if lvl == "c":
|
||||
# config option, currently switches only
|
||||
mflags[vol_dst][uname] = True
|
||||
|
||||
def reload(self):
|
||||
"""
|
||||
@@ -210,6 +215,7 @@ class AuthSrv(object):
|
||||
user = {} # username:password
|
||||
mread = {} # mountpoint:[username]
|
||||
mwrite = {} # mountpoint:[username]
|
||||
mflags = {} # mountpoint:[flag]
|
||||
mount = {} # dst:src (mountpoint:realpath)
|
||||
|
||||
if self.args.a:
|
||||
@@ -232,9 +238,13 @@ class AuthSrv(object):
|
||||
mount[dst] = src
|
||||
mread[dst] = []
|
||||
mwrite[dst] = []
|
||||
mflags[dst] = {}
|
||||
|
||||
perms = perms.split(":")
|
||||
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
|
||||
if lvl == "c":
|
||||
# config option, currently switches only
|
||||
mflags[dst][uname] = True
|
||||
if uname == "":
|
||||
uname = "*"
|
||||
if lvl in "ra":
|
||||
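For reference, the `-v src:dst:permset:...` syntax parsed above: each permset is an access level (`r`, `w`, `a`) followed by a username, an empty username means everyone, and a leading `c` marks a volume config flag such as `cnodupe`. A rough standalone sketch of that decomposition (hypothetical helper; the real code fills the `mread`/`mwrite`/`mflags` dicts keyed by mountpoint):

```python
def parse_volume(v):
    # e.g. "../inc:dump:w:aed:cnodupe"
    src, dst, perms = v.split(":", 2)
    uread, uwrite, flags = [], [], {}
    for perm in perms.split(":"):
        lvl, uname = perm[0], perm[1:]
        if lvl == "c":
            flags[uname] = True  # config switch, e.g. "nodupe"
            continue
        if uname == "":
            uname = "*"  # no username = everyone
        if lvl in "ra":
            uread.append(uname)
        if lvl in "wa":
            uwrite.append(uname)
    return src, dst, uread, uwrite, flags

print(parse_volume("srv::r:aed:cnodupe"))
# ('srv', '', ['*', 'ed'], ['ed'], {'nodupe': True})
```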
@@ -245,14 +255,15 @@ class AuthSrv(object):
|
||||
if self.args.c:
|
||||
for cfg_fn in self.args.c:
|
||||
with open(cfg_fn, "rb") as f:
|
||||
self._parse_config_file(f, user, mread, mwrite, mount)
|
||||
self._parse_config_file(f, user, mread, mwrite, mflags, mount)
|
||||
|
||||
self.all_writable = []
|
||||
if not mount:
|
||||
# -h says our defaults are CWD at root and read/write for everyone
|
||||
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
|
||||
elif "" not in mount:
|
||||
# there's volumes but no root; make root inaccessible
|
||||
vfs = VFS(os.path.abspath("."), "", [], [])
|
||||
vfs = VFS(os.path.abspath("."), "")
|
||||
|
||||
maxdepth = 0
|
||||
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
|
||||
@@ -262,12 +273,18 @@ class AuthSrv(object):
|
||||
|
||||
if dst == "":
|
||||
# rootfs was mapped; fully replaces the default CWD vfs
|
||||
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst])
|
||||
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst], mflags[dst])
|
||||
continue
|
||||
|
||||
v = vfs.add(mount[dst], dst)
|
||||
v.uread = mread[dst]
|
||||
v.uwrite = mwrite[dst]
|
||||
v.flags = mflags[dst]
|
||||
if v.uwrite:
|
||||
self.all_writable.append(v)
|
||||
|
||||
if vfs.uwrite and vfs not in self.all_writable:
|
||||
self.all_writable.append(vfs)
|
||||
|
||||
missing_users = {}
|
||||
for d in [mread, mwrite]:
|
||||
|
||||
@@ -73,7 +73,7 @@ class MpWorker(object):
|
||||
if PY2:
|
||||
sck = pickle.loads(sck) # nosec
|
||||
|
||||
self.log("%s %s" % addr, "-" * 4 + "C-qpop")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-qpop\033[0m" % ("-" * 4,))
|
||||
self.httpsrv.accept(sck, addr)
|
||||
|
||||
with self.mutex:
|
||||
|
||||
@@ -28,7 +28,7 @@ class BrokerThr(object):
|
||||
def put(self, want_retval, dest, *args):
|
||||
if dest == "httpconn":
|
||||
sck, addr = args
|
||||
self.log("%s %s" % addr, "-" * 4 + "C-qpop")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-qpop\033[0m" % ("-" * 4,))
|
||||
self.httpsrv.accept(sck, addr)
|
||||
|
||||
else:
|
||||
|
||||
@@ -28,11 +28,13 @@ class HttpCli(object):
|
||||
self.conn = conn
|
||||
self.s = conn.s
|
||||
self.sr = conn.sr
|
||||
self.ip = conn.addr[0]
|
||||
self.addr = conn.addr
|
||||
self.args = conn.args
|
||||
self.auth = conn.auth
|
||||
self.log_func = conn.log_func
|
||||
self.log_src = conn.log_src
|
||||
self.tls = hasattr(self.s, "cipher")
|
||||
|
||||
self.bufsz = 1024 * 32
|
||||
self.absolute_urls = False
|
||||
@@ -42,7 +44,7 @@ class HttpCli(object):
|
||||
self.log_func(self.log_src, msg)
|
||||
|
||||
def _check_nonfatal(self, ex):
|
||||
return ex.code in [404]
|
||||
return ex.code < 400 or ex.code == 404
|
||||
|
||||
def _assert_safe_rem(self, rem):
|
||||
# sanity check to prevent any disasters
|
||||
@@ -85,7 +87,8 @@ class HttpCli(object):
|
||||
|
||||
v = self.headers.get("x-forwarded-for", None)
|
||||
if v is not None and self.conn.addr[0] in ["127.0.0.1", "::1"]:
|
||||
self.log_src = self.conn.set_rproxy(v.split(",")[0])
|
||||
self.ip = v.split(",")[0]
|
||||
self.log_src = self.conn.set_rproxy(self.ip)
|
||||
|
||||
self.uname = "*"
|
||||
if "cookie" in self.headers:
|
||||
@@ -292,26 +295,51 @@ class HttpCli(object):
|
||||
if "application/octet-stream" in ctype:
|
||||
return self.handle_post_binary()
|
||||
|
||||
raise Pebkac(405, "don't know how to handle {} POST".format(ctype))
|
||||
if "application/x-www-form-urlencoded" in ctype:
|
||||
opt = self.args.urlform
|
||||
if "stash" in opt:
|
||||
return self.handle_stash()
|
||||
|
||||
def handle_stash(self):
|
||||
if "save" in opt:
|
||||
post_sz, _, _, path = self.dump_to_file()
|
||||
self.log("urlform: {} bytes, {}".format(post_sz, path))
|
||||
elif "print" in opt:
|
||||
reader, _ = self.get_body_reader()
|
||||
for buf in reader:
|
||||
buf = buf.decode("utf-8", "replace")
|
||||
self.log("urlform:\n {}\n".format(buf))
|
||||
|
||||
if "get" in opt:
|
||||
return self.handle_get()
|
||||
|
||||
raise Pebkac(405, "POST({}) is disabled".format(ctype))
|
||||
|
||||
raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
|
||||
|
||||
def get_body_reader(self):
|
||||
remains = int(self.headers.get("content-length", None))
|
||||
if remains is None:
|
||||
reader = read_socket_unbounded(self.sr)
|
||||
self.keepalive = False
|
||||
return read_socket_unbounded(self.sr), remains
|
||||
else:
|
||||
reader = read_socket(self.sr, remains)
|
||||
return read_socket(self.sr, remains), remains
|
||||
|
||||
def dump_to_file(self):
|
||||
reader, remains = self.get_body_reader()
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
fdir = os.path.join(vfs.realpath, rem)
|
||||
|
||||
addr = self.conn.addr[0].replace(":", ".")
|
||||
addr = self.ip.replace(":", ".")
|
||||
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
|
||||
path = os.path.join(fdir, fn)
|
||||
|
||||
with open(path, "wb", 512 * 1024) as f:
|
||||
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
|
||||
|
||||
return post_sz, sha_b64, remains, path
|
||||
|
||||
def handle_stash(self):
|
||||
post_sz, sha_b64, remains, path = self.dump_to_file()
|
||||
spd = self._spd(post_sz)
|
||||
self.log("{} wrote {}/{} bytes to {}".format(spd, post_sz, remains, path))
|
||||
self.reply("{}\n{}\n".format(post_sz, sha_b64).encode("utf-8"))
|
||||
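handle_stash above (reached when `--urlform` contains `stash`) dumps an url-encoded POST body to a `put-<timestamp>-<ip>.bin` file in the target volume and replies with the byte count plus a checksum. A hedged client-side sketch using only the stdlib; the address and the `/dump/` write-enabled volume are placeholders taken from the `-v ../inc:dump:w` example in the help text:

```python
from urllib.parse import urlencode
from urllib.request import Request, urlopen

# assumes copyparty on 127.0.0.1:3923 with a writable volume at /dump,
# started with --urlform stash (or stash,get)
body = urlencode({"msg": "hello from urlform"}).encode("utf-8")
req = Request(
    "http://127.0.0.1:3923/dump/",
    data=body,
    headers={"Content-Type": "application/x-www-form-urlencoded"},
)
with urlopen(req) as resp:
    # reply is "<bytes written>\n<checksum>\n" per handle_stash above
    print(resp.read().decode("utf-8"))
```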
@@ -384,9 +412,11 @@ class HttpCli(object):
|
||||
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
|
||||
body["vdir"] = self.vpath
|
||||
body["rdir"] = os.path.join(vfs.realpath, rem)
|
||||
body["addr"] = self.addr[0]
|
||||
body["vtop"] = vfs.vpath
|
||||
body["ptop"] = vfs.realpath
|
||||
body["prel"] = rem
|
||||
body["addr"] = self.ip
|
||||
body["flag"] = vfs.flags
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
|
||||
response = x.get()
|
||||
@@ -408,7 +438,10 @@ class HttpCli(object):
|
||||
except KeyError:
|
||||
raise Pebkac(400, "need hash and wark headers for binary POST")
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", wark, chash)
|
||||
vfs, _ = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
ptop = vfs.realpath
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", ptop, wark, chash)
|
||||
response = x.get()
|
||||
chunksize, cstart, path, lastmod = response
|
||||
|
||||
@@ -453,8 +486,13 @@ class HttpCli(object):
|
||||
|
||||
self.log("clone {} done".format(cstart[0]))
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", wark, chash)
|
||||
num_left = x.get()
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
|
||||
x = x.get()
|
||||
try:
|
||||
num_left, path = x
|
||||
except:
|
||||
self.loud_reply(x, status=500)
|
||||
return False
|
||||
|
||||
if not WINDOWS and num_left == 0:
|
||||
times = (int(time.time()), int(lastmod))
|
||||
@@ -510,10 +548,9 @@ class HttpCli(object):
|
||||
raise Pebkac(500, "mkdir failed, check the logs")
|
||||
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
esc_paths = [quotep(vpath), html_escape(vpath)]
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}">go to /{}</a>'.format(
|
||||
quotep(vpath), html_escape(vpath)
|
||||
),
|
||||
h2='<a href="/{}">go to /{}</a>'.format(*esc_paths),
|
||||
pre="aight",
|
||||
click=True,
|
||||
)
|
||||
@@ -575,7 +612,7 @@ class HttpCli(object):
|
||||
if not os.path.isdir(fsenc(fdir)):
|
||||
raise Pebkac(404, "that folder does not exist")
|
||||
|
||||
suffix = ".{:.6f}-{}".format(time.time(), self.addr[0])
|
||||
suffix = ".{:.6f}-{}".format(time.time(), self.ip)
|
||||
open_args = {"fdir": fdir, "suffix": suffix}
|
||||
else:
|
||||
open_args = {}
|
||||
@@ -637,7 +674,7 @@ class HttpCli(object):
|
||||
"\n".join(
|
||||
unicode(x)
|
||||
for x in [
|
||||
":".join(unicode(x) for x in self.addr),
|
||||
":".join(unicode(x) for x in [self.ip, self.addr[1]]),
|
||||
msg.rstrip(),
|
||||
]
|
||||
)
|
||||
@@ -790,6 +827,8 @@ class HttpCli(object):
|
||||
editions[ext or "plain"] = [fs_path, st.st_size]
|
||||
except:
|
||||
pass
|
||||
if not self.vpath.startswith(".cpr/"):
|
||||
break
|
||||
|
||||
if not editions:
|
||||
raise Pebkac(404)
|
||||
@@ -894,8 +933,11 @@ class HttpCli(object):
|
||||
open_func = open
|
||||
# 512 kB is optimal for huge files, use 64k
|
||||
open_args = [fsenc(fs_path), "rb", 64 * 1024]
|
||||
if hasattr(os, 'sendfile'):
|
||||
use_sendfile = not self.args.no_sendfile
|
||||
use_sendfile = (
|
||||
not self.tls #
|
||||
and not self.args.no_sendfile
|
||||
and hasattr(os, "sendfile")
|
||||
)
|
||||
|
||||
#
|
||||
# send reply
|
||||
@@ -922,7 +964,7 @@ class HttpCli(object):
|
||||
remains = sendfile_kern(lower, upper, f, self.s)
|
||||
else:
|
||||
remains = sendfile_py(lower, upper, f, self.s)
|
||||
|
||||
|
||||
if remains > 0:
|
||||
logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
|
||||
|
||||
@@ -1020,6 +1062,10 @@ class HttpCli(object):
|
||||
if abspath.endswith(".md") and "raw" not in self.uparam:
|
||||
return self.tx_md(abspath)
|
||||
|
||||
bad = "{0}.hist{0}up2k.".format(os.sep)
|
||||
if abspath.endswith(bad + "db") or abspath.endswith(bad + "snap"):
|
||||
raise Pebkac(403)
|
||||
|
||||
return self.tx_file(abspath)
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname)
|
||||
@@ -1045,6 +1091,10 @@ class HttpCli(object):
|
||||
if not self.args.ed or "dots" not in self.uparam:
|
||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||
|
||||
hidden = []
|
||||
if fsroot.endswith(str(os.sep) + ".hist"):
|
||||
hidden = ["up2k.db", "up2k.snap"]
|
||||
|
||||
dirs = []
|
||||
files = []
|
||||
for fn in vfs_ls:
|
||||
@@ -1056,6 +1106,8 @@ class HttpCli(object):
|
||||
|
||||
if fn in vfs_virt:
|
||||
fspath = vfs_virt[fn].realpath
|
||||
elif fn in hidden:
|
||||
continue
|
||||
else:
|
||||
fspath = fsroot + "/" + fn
|
||||
|
||||
|
||||
@@ -3,10 +3,15 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
import ssl
|
||||
import time
|
||||
import socket
|
||||
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
import ssl
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
try:
|
||||
import jinja2
|
||||
except ImportError:
|
||||
@@ -65,6 +70,7 @@ class HttpConn(object):
|
||||
color = 34
|
||||
self.rproxy = ip
|
||||
|
||||
self.ip = ip
|
||||
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
|
||||
return self.log_src
|
||||
|
||||
@@ -74,9 +80,8 @@ class HttpConn(object):
|
||||
def log(self, msg):
|
||||
self.log_func(self.log_src, msg)
|
||||
|
||||
def run(self):
|
||||
def _detect_https(self):
|
||||
method = None
|
||||
self.sr = None
|
||||
if self.cert_path:
|
||||
try:
|
||||
method = self.s.recv(4, socket.MSG_PEEK)
|
||||
@@ -101,16 +106,58 @@ class HttpConn(object):
|
||||
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
||||
return
|
||||
|
||||
if method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]:
|
||||
return method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]
|
||||
|
||||
def run(self):
|
||||
self.sr = None
|
||||
if self.args.https_only:
|
||||
is_https = True
|
||||
elif self.args.http_only or not HAVE_SSL:
|
||||
is_https = False
|
||||
else:
|
||||
is_https = self._detect_https()
|
||||
|
||||
if is_https:
|
||||
if self.sr:
|
||||
self.log("\033[1;31mTODO: cannot do https in jython\033[0m")
|
||||
return
|
||||
|
||||
self.log_src = self.log_src.replace("[36m", "[35m")
|
||||
try:
|
||||
self.s = ssl.wrap_socket(
|
||||
self.s, server_side=True, certfile=self.cert_path
|
||||
)
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
ctx.load_cert_chain(self.cert_path)
|
||||
if self.args.ssl_ver:
|
||||
ctx.options &= ~self.args.ssl_flags_en
|
||||
ctx.options |= self.args.ssl_flags_de
|
||||
# print(repr(ctx.options))
|
||||
|
||||
if self.args.ssl_log:
|
||||
try:
|
||||
ctx.keylog_filename = self.args.ssl_log
|
||||
except:
|
||||
self.log("keylog failed; openssl or python too old")
|
||||
|
||||
if self.args.ciphers:
|
||||
ctx.set_ciphers(self.args.ciphers)
|
||||
|
||||
self.s = ctx.wrap_socket(self.s, server_side=True)
|
||||
msg = [
|
||||
"\033[1;3{:d}m{}".format(c, s)
|
||||
for c, s in zip([0, 5, 0], self.s.cipher())
|
||||
]
|
||||
self.log(" ".join(msg) + "\033[0m")
|
||||
|
||||
if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
|
||||
overlap = [y[::-1] for y in self.s.shared_ciphers()]
|
||||
lines = [str(x) for x in (["TLS cipher overlap:"] + overlap)]
|
||||
self.log("\n".join(lines))
|
||||
for k, v in [
|
||||
["compression", self.s.compression()],
|
||||
["ALPN proto", self.s.selected_alpn_protocol()],
|
||||
["NPN proto", self.s.selected_npn_protocol()],
|
||||
]:
|
||||
self.log("TLS {}: {}".format(k, v or "nah"))
|
||||
|
||||
except Exception as ex:
|
||||
em = str(ex)
|
||||
|
||||
|
||||
@@ -38,7 +38,7 @@ class HttpSrv(object):
|
||||
|
||||
def accept(self, sck, addr):
|
||||
"""takes an incoming tcp connection and creates a thread to handle it"""
|
||||
self.log("%s %s" % addr, "-" * 5 + "C-cthr")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-cthr\033[0m" % ("-" * 5,))
|
||||
thr = threading.Thread(target=self.thr_client, args=(sck, addr))
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
@@ -66,11 +66,11 @@ class HttpSrv(object):
|
||||
thr.start()
|
||||
|
||||
try:
|
||||
self.log("%s %s" % addr, "-" * 6 + "C-crun")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-crun\033[0m" % ("-" * 6,))
|
||||
cli.run()
|
||||
|
||||
finally:
|
||||
self.log("%s %s" % addr, "-" * 7 + "C-done")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-cdone\033[0m" % ("-" * 7,))
|
||||
try:
|
||||
sck.shutdown(socket.SHUT_RDWR)
|
||||
sck.close()
|
||||
@@ -78,7 +78,7 @@ class HttpSrv(object):
|
||||
if not MACOS:
|
||||
self.log(
|
||||
"%s %s" % addr,
|
||||
"shut_rdwr err:\n {}\n {}".format(repr(sck), ex),
|
||||
"\033[1;30mshut({}): {}\033[0m".format(sck.fileno(), ex),
|
||||
)
|
||||
if ex.errno not in [10038, 10054, 107, 57, 9]:
|
||||
# 10038 No longer considered a socket
|
||||
|
||||
@@ -9,6 +9,7 @@ from datetime import datetime, timedelta
|
||||
import calendar
|
||||
|
||||
from .__init__ import PY2, WINDOWS, MACOS, VT100
|
||||
from .authsrv import AuthSrv
|
||||
from .tcpsrv import TcpSrv
|
||||
from .up2k import Up2k
|
||||
from .util import mp
|
||||
@@ -38,6 +39,10 @@ class SvcHub(object):
|
||||
self.tcpsrv = TcpSrv(self)
|
||||
self.up2k = Up2k(self)
|
||||
|
||||
if self.args.e2d and self.args.e2s:
|
||||
auth = AuthSrv(self.args, self.log, False)
|
||||
self.up2k.build_indexes(auth.all_writable)
|
||||
|
||||
# decide which worker impl to use
|
||||
if self.check_mp_enable():
|
||||
from .broker_mp import BrokerMp as Broker
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import re
|
||||
import time
|
||||
import socket
|
||||
import select
|
||||
|
||||
from .util import chkcmd, Counter
|
||||
|
||||
@@ -23,56 +24,73 @@ class TcpSrv(object):
|
||||
|
||||
ip = "127.0.0.1"
|
||||
eps = {ip: "local only"}
|
||||
if self.args.i != ip:
|
||||
eps = self.detect_interfaces(self.args.i) or {self.args.i: "external"}
|
||||
nonlocals = [x for x in self.args.i if x != ip]
|
||||
if nonlocals:
|
||||
eps = self.detect_interfaces(self.args.i)
|
||||
if not eps:
|
||||
for x in nonlocals:
|
||||
eps[x] = "external"
|
||||
|
||||
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
|
||||
self.log(
|
||||
"tcpsrv",
|
||||
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
|
||||
ip, self.args.p, desc
|
||||
),
|
||||
)
|
||||
for port in sorted(self.args.p):
|
||||
self.log(
|
||||
"tcpsrv",
|
||||
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
|
||||
ip, port, desc
|
||||
),
|
||||
)
|
||||
|
||||
self.srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
self.srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
self.srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
self.srv = []
|
||||
for ip in self.args.i:
|
||||
for port in self.args.p:
|
||||
self.srv.append(self._listen(ip, port))
|
||||
|
||||
def _listen(self, ip, port):
|
||||
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
try:
|
||||
self.srv.bind((self.args.i, self.args.p))
|
||||
srv.bind((ip, port))
|
||||
return srv
|
||||
except (OSError, socket.error) as ex:
|
||||
if ex.errno == 98:
|
||||
raise Exception(
|
||||
"\033[1;31mport {} is busy on interface {}\033[0m".format(
|
||||
self.args.p, self.args.i
|
||||
)
|
||||
)
|
||||
|
||||
if ex.errno == 99:
|
||||
raise Exception(
|
||||
"\033[1;31minterface {} does not exist\033[0m".format(self.args.i)
|
||||
)
|
||||
if ex.errno in [98, 48]:
|
||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||
elif ex.errno in [99, 49]:
|
||||
e = "\033[1;31minterface {} does not exist\033[0m".format(ip)
|
||||
else:
|
||||
raise
|
||||
raise Exception(e)
|
||||
|
||||
def run(self):
|
||||
self.srv.listen(self.args.nc)
|
||||
|
||||
self.log("tcpsrv", "listening @ {0}:{1}".format(self.args.i, self.args.p))
|
||||
for srv in self.srv:
|
||||
srv.listen(self.args.nc)
|
||||
ip, port = srv.getsockname()
|
||||
self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port))
|
||||
|
||||
while True:
|
||||
self.log("tcpsrv", "-" * 1 + "C-ncli")
|
||||
self.log("tcpsrv", "\033[1;30m|%sC-ncli\033[0m" % ("-" * 1,))
|
||||
if self.num_clients.v >= self.args.nc:
|
||||
time.sleep(0.1)
|
||||
continue
|
||||
|
||||
self.log("tcpsrv", "-" * 2 + "C-acc1")
|
||||
sck, addr = self.srv.accept()
|
||||
self.log("%s %s" % addr, "-" * 3 + "C-acc2")
|
||||
self.num_clients.add()
|
||||
self.hub.broker.put(False, "httpconn", sck, addr)
|
||||
self.log("tcpsrv", "\033[1;30m|%sC-acc1\033[0m" % ("-" * 2,))
|
||||
ready, _, _ = select.select(self.srv, [], [])
|
||||
for srv in ready:
|
||||
sck, addr = srv.accept()
|
||||
sip, sport = srv.getsockname()
|
||||
self.log(
|
||||
"%s %s" % addr,
|
||||
"\033[1;30m|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
|
||||
"-" * 3, sip, sport % 8, sport
|
||||
),
|
||||
)
|
||||
self.num_clients.add()
|
||||
self.hub.broker.put(False, "httpconn", sck, addr)
|
||||
|
||||
def shutdown(self):
|
||||
self.log("tcpsrv", "ok bye")
|
||||
|
||||
def detect_interfaces(self, listen_ip):
|
||||
def detect_interfaces(self, listen_ips):
|
||||
eps = {}
|
||||
|
||||
# get all ips and their interfaces
|
||||
@@ -86,8 +104,9 @@ class TcpSrv(object):
|
||||
for ln in ip_addr.split("\n"):
|
||||
try:
|
||||
ip, dev = r.match(ln.rstrip()).groups()
|
||||
if listen_ip in ["0.0.0.0", ip]:
|
||||
eps[ip] = dev
|
||||
for lip in listen_ips:
|
||||
if lip in ["0.0.0.0", ip]:
|
||||
eps[ip] = dev
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -114,11 +133,12 @@ class TcpSrv(object):
|
||||
|
||||
s.close()
|
||||
|
||||
if default_route and listen_ip in ["0.0.0.0", default_route]:
|
||||
desc = "\033[32mexternal"
|
||||
try:
|
||||
eps[default_route] += ", " + desc
|
||||
except:
|
||||
eps[default_route] = desc
|
||||
for lip in listen_ips:
|
||||
if default_route and lip in ["0.0.0.0", default_route]:
|
||||
desc = "\033[32mexternal"
|
||||
try:
|
||||
eps[default_route] += ", " + desc
|
||||
except:
|
||||
eps[default_route] = desc
|
||||
|
||||
return eps
|
||||
|
||||
@@ -2,10 +2,14 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
|
||||
import os
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import math
|
||||
import json
|
||||
import gzip
|
||||
import stat
|
||||
import shutil
|
||||
import base64
|
||||
import hashlib
|
||||
@@ -13,7 +17,15 @@ import threading
|
||||
from copy import deepcopy
|
||||
|
||||
from .__init__ import WINDOWS
|
||||
from .util import Pebkac, Queue, fsenc, sanitize_fn, ren_open
|
||||
from .util import Pebkac, Queue, fsdec, fsenc, sanitize_fn, ren_open, atomic_move
|
||||
|
||||
HAVE_SQLITE3 = False
|
||||
try:
|
||||
import sqlite3
|
||||
|
||||
HAVE_SQLITE3 = True
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class Up2k(object):
|
||||
@@ -22,20 +34,21 @@ class Up2k(object):
|
||||
* documentation
|
||||
* registry persistence
|
||||
* ~/.config flatfiles for active jobs
|
||||
* wark->path database for finished uploads
|
||||
"""
|
||||
|
||||
def __init__(self, broker):
|
||||
self.broker = broker
|
||||
self.args = broker.args
|
||||
self.log = broker.log
|
||||
self.persist = self.args.e2d
|
||||
|
||||
# config
|
||||
self.salt = "hunter2" # TODO: config
|
||||
|
||||
# state
|
||||
self.registry = {}
|
||||
self.mutex = threading.Lock()
|
||||
self.registry = {}
|
||||
self.db = {}
|
||||
|
||||
if WINDOWS:
|
||||
# usually fails to set lastmod too quickly
|
||||
@@ -44,53 +57,313 @@ class Up2k(object):
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
if self.persist:
|
||||
thr = threading.Thread(target=self._snapshot)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
# static
|
||||
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
|
||||
|
||||
if self.persist and not HAVE_SQLITE3:
|
||||
m = "could not initialize sqlite3, will use in-memory registry only"
|
||||
self.log("up2k", m)
|
||||
|
||||
def _vis_job_progress(self, job):
|
||||
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
|
||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
return "{:5.1f}% {}".format(perc, path)
|
||||
|
||||
def _vis_reg_progress(self, reg):
|
||||
ret = []
|
||||
for _, job in reg.items():
|
||||
ret.append(self._vis_job_progress(job))
|
||||
|
||||
return ret
|
||||
|
||||
def register_vpath(self, ptop):
|
||||
with self.mutex:
|
||||
if ptop in self.registry:
|
||||
return None
|
||||
|
||||
reg = {}
|
||||
path = os.path.join(ptop, ".hist", "up2k.snap")
|
||||
if self.persist and os.path.exists(path):
|
||||
with gzip.GzipFile(path, "rb") as f:
|
||||
j = f.read().decode("utf-8")
|
||||
|
||||
reg = json.loads(j)
|
||||
for _, job in reg.items():
|
||||
job["poke"] = time.time()
|
||||
|
||||
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
|
||||
m = [m] + self._vis_reg_progress(reg)
|
||||
self.log("up2k", "\n".join(m))
|
||||
|
||||
self.registry[ptop] = reg
|
||||
if not self.persist or not HAVE_SQLITE3:
|
||||
return None
|
||||
|
||||
try:
|
||||
os.mkdir(os.path.join(ptop, ".hist"))
|
||||
except:
|
||||
pass
|
||||
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
if ptop in self.db:
|
||||
# self.db[ptop].close()
|
||||
return None
|
||||
|
||||
try:
|
||||
db = self._open_db(db_path)
|
||||
self.db[ptop] = db
|
||||
return db
|
||||
except Exception as ex:
|
||||
m = "failed to open [{}]: {}".format(ptop, repr(ex))
|
||||
self.log("up2k", m)
|
||||
|
||||
return None
|
||||
|
||||
def build_indexes(self, writeables):
|
||||
tops = [d.realpath for d in writeables]
|
||||
for top in tops:
|
||||
db = self.register_vpath(top)
|
||||
if db:
|
||||
# can be symlink so don't `and d.startswith(top)``
|
||||
excl = set([d for d in tops if d != top])
|
||||
dbw = [db, 0, time.time()]
|
||||
self._build_dir(dbw, top, excl, top)
|
||||
self._drop_lost(db, top)
|
||||
if dbw[1]:
|
||||
self.log("up2k", "commit {} new files".format(dbw[1]))
|
||||
|
||||
db.commit()
|
||||
|
||||
def _build_dir(self, dbw, top, excl, cdir):
|
||||
try:
|
||||
inodes = [fsdec(x) for x in os.listdir(fsenc(cdir))]
|
||||
except Exception as ex:
|
||||
self.log("up2k", "listdir: {} @ [{}]".format(repr(ex), cdir))
|
||||
return
|
||||
|
||||
histdir = os.path.join(top, ".hist")
|
||||
for inode in inodes:
|
||||
abspath = os.path.join(cdir, inode)
|
||||
try:
|
||||
inf = os.stat(fsenc(abspath))
|
||||
except Exception as ex:
|
||||
self.log("up2k", "stat: {} @ [{}]".format(repr(ex), abspath))
|
||||
continue
|
||||
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
if abspath in excl or abspath == histdir:
|
||||
continue
|
||||
# self.log("up2k", " dir: {}".format(abspath))
|
||||
self._build_dir(dbw, top, excl, abspath)
|
||||
else:
|
||||
# self.log("up2k", "file: {}".format(abspath))
|
||||
rp = abspath[len(top) :].replace("\\", "/").strip("/")
|
||||
c = dbw[0].execute("select * from up where rp = ?", (rp,))
|
||||
in_db = list(c.fetchall())
|
||||
if in_db:
|
||||
_, dts, dsz, _ = in_db[0]
|
||||
if len(in_db) > 1:
|
||||
m = "WARN: multiple entries: [{}] => [{}] ({})"
|
||||
self.log("up2k", m.format(top, rp, len(in_db)))
|
||||
dts = -1
|
||||
|
||||
if dts == inf.st_mtime and dsz == inf.st_size:
|
||||
continue
|
||||
|
||||
m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
|
||||
top, rp, dts, inf.st_mtime, dsz, inf.st_size
|
||||
)
|
||||
self.log("up2k", m)
|
||||
self.db_rm(dbw[0], rp)
|
||||
dbw[1] += 1
|
||||
in_db = None
|
||||
|
||||
self.log("up2k", "file: {}".format(abspath))
|
||||
try:
|
||||
hashes = self._hashlist_from_file(abspath)
|
||||
except Exception as ex:
|
||||
self.log("up2k", "hash: {} @ [{}]".format(repr(ex), abspath))
|
||||
continue
|
||||
|
||||
wark = self._wark_from_hashlist(inf.st_size, hashes)
|
||||
self.db_add(dbw[0], wark, rp, inf.st_mtime, inf.st_size)
|
||||
dbw[1] += 1
|
||||
td = time.time() - dbw[2]
|
||||
if dbw[1] > 1024 or td > 60:
|
||||
self.log("up2k", "commit {} new files".format(dbw[1]))
|
||||
dbw[0].commit()
|
||||
dbw[1] = 0
|
||||
dbw[2] = time.time()
|
||||
|
||||
def _drop_lost(self, db, top):
|
||||
rm = []
|
||||
c = db.execute("select * from up")
|
||||
for dwark, dts, dsz, drp in c:
|
||||
abspath = os.path.join(top, drp)
|
||||
try:
|
||||
if not os.path.exists(fsenc(abspath)):
|
||||
rm.append(drp)
|
||||
except Exception as ex:
|
||||
self.log("up2k", "stat-rm: {} @ [{}]".format(repr(ex), abspath))
|
||||
|
||||
if not rm:
|
||||
return
|
||||
|
||||
self.log("up2k", "forgetting {} deleted files".format(len(rm)))
|
||||
for rp in rm:
|
||||
self.db_rm(db, rp)
|
||||
|
||||
def _open_db(self, db_path):
|
||||
conn = sqlite3.connect(db_path, check_same_thread=False)
|
||||
try:
|
||||
c = conn.execute(r"select * from kv where k = 'sver'")
|
||||
rows = c.fetchall()
|
||||
if rows:
|
||||
ver = rows[0][1]
|
||||
else:
|
||||
self.log("up2k", "WARN: no sver in kv, DB corrupt?")
|
||||
ver = "unknown"
|
||||
|
||||
if ver == "1":
|
||||
try:
|
||||
nfiles = next(conn.execute("select count(w) from up"))[0]
|
||||
self.log("up2k", "found DB at {} |{}|".format(db_path, nfiles))
|
||||
return conn
|
||||
except Exception as ex:
|
||||
m = "WARN: could not list files, DB corrupt?\n " + repr(ex)
|
||||
self.log("up2k", m)
|
||||
|
||||
m = "REPLACING unsupported DB (v.{}) at {}".format(ver, db_path)
|
||||
self.log("up2k", m)
|
||||
conn.close()
|
||||
os.unlink(db_path)
|
||||
conn = sqlite3.connect(db_path, check_same_thread=False)
|
||||
except:
|
||||
pass
|
||||
|
||||
# sqlite is variable-width only, no point in using char/nchar/varchar
|
||||
for cmd in [
|
||||
r"create table kv (k text, v text)",
|
||||
r"create table up (w text, mt int, sz int, rp text)",
|
||||
r"insert into kv values ('sver', '1')",
|
||||
r"create index up_w on up(w)",
|
||||
]:
|
||||
conn.execute(cmd)
|
||||
|
||||
conn.commit()
|
||||
self.log("up2k", "created DB at {}".format(db_path))
|
||||
return conn
|
||||
|
||||
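The schema created by _open_db above is deliberately tiny: a `kv` table holding the schema version (`sver`) and an `up` table with one row per indexed file: `w` (wark), `mt` (mtime), `sz` (size), `rp` (path relative to the volume root). A read-only sketch for inspecting such a database; the path is only an example location of a volume's `.hist` folder:

```python
import sqlite3

db = sqlite3.connect("/srv/.hist/up2k.db")  # example path

row = db.execute("select v from kv where k = 'sver'").fetchone()
print("schema version:", row[0] if row else "unknown")

# (wark, mtime, size, volume-relative path)
for w, mt, sz, rp in db.execute("select w, mt, sz, rp from up limit 5"):
    print("{}  {:>12}  {}".format(w, sz, rp))

db.close()
```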
def handle_json(self, cj):
|
||||
self.register_vpath(cj["ptop"])
|
||||
cj["name"] = sanitize_fn(cj["name"])
|
||||
cj["poke"] = time.time()
|
||||
wark = self._get_wark(cj)
|
||||
now = time.time()
|
||||
job = None
|
||||
with self.mutex:
|
||||
# TODO use registry persistence here to symlink any matching wark
|
||||
if wark in self.registry:
|
||||
job = self.registry[wark]
|
||||
if job["rdir"] != cj["rdir"] or job["name"] != cj["name"]:
|
||||
src = os.path.join(job["rdir"], job["name"])
|
||||
dst = os.path.join(cj["rdir"], cj["name"])
|
||||
db = self.db.get(cj["ptop"], None)
|
||||
reg = self.registry[cj["ptop"]]
|
||||
if db:
|
||||
cur = db.execute(r"select * from up where w = ?", (wark,))
|
||||
for _, dtime, dsize, dp_rel in cur:
|
||||
dp_abs = os.path.join(cj["ptop"], dp_rel).replace("\\", "/")
|
||||
# relying on path.exists to return false on broken symlinks
|
||||
if os.path.exists(fsenc(dp_abs)):
|
||||
try:
|
||||
prel, name = dp_rel.rsplit("/", 1)
|
||||
except:
|
||||
prel = ""
|
||||
name = dp_rel
|
||||
|
||||
job = {
|
||||
"name": name,
|
||||
"prel": prel,
|
||||
"vtop": cj["vtop"],
|
||||
"ptop": cj["ptop"],
|
||||
"flag": cj["flag"],
|
||||
"size": dsize,
|
||||
"lmod": dtime,
|
||||
"hash": [],
|
||||
"need": [],
|
||||
}
|
||||
break
|
||||
|
||||
if job and wark in reg:
|
||||
del reg[wark]
|
||||
|
||||
if job or wark in reg:
|
||||
job = job or reg[wark]
|
||||
if job["prel"] == cj["prel"] and job["name"] == cj["name"]:
|
||||
# ensure the files haven't been deleted manually
|
||||
names = [job[x] for x in ["name", "tnam"] if x in job]
|
||||
for fn in names:
|
||||
path = os.path.join(job["ptop"], job["prel"], fn)
|
||||
try:
|
||||
if os.path.getsize(path) > 0:
|
||||
# upload completed or both present
|
||||
break
|
||||
except:
|
||||
# missing; restart
|
||||
job = None
|
||||
break
|
||||
else:
|
||||
# file contents match, but not the path
|
||||
src = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
dst = os.path.join(cj["ptop"], cj["prel"], cj["name"])
|
||||
vsrc = os.path.join(job["vtop"], job["prel"], job["name"])
|
||||
vsrc = vsrc.replace("\\", "/") # just for prints anyways
|
||||
if job["need"]:
|
||||
self.log("up2k", "unfinished:\n {0}\n {1}".format(src, dst))
|
||||
err = "partial upload exists at a different location; please resume uploading here instead:\n{0}{1} ".format(
|
||||
job["vdir"], job["name"]
|
||||
)
|
||||
err = "partial upload exists at a different location; please resume uploading here instead:\n"
|
||||
err += vsrc + " "
|
||||
raise Pebkac(400, err)
|
||||
elif "nodupe" in job["flag"]:
|
||||
self.log("up2k", "dupe-reject:\n {0}\n {1}".format(src, dst))
|
||||
err = "upload rejected, file already exists:\n " + vsrc + " "
|
||||
raise Pebkac(400, err)
|
||||
else:
|
||||
# symlink to the client-provided name,
|
||||
# returning the previous upload info
|
||||
job = deepcopy(job)
|
||||
job["rdir"] = cj["rdir"]
|
||||
job["name"] = self._untaken(cj["rdir"], cj["name"], now, cj["addr"])
|
||||
dst = os.path.join(job["rdir"], job["name"])
|
||||
for k in ["ptop", "vtop", "prel"]:
|
||||
job[k] = cj[k]
|
||||
|
||||
pdir = os.path.join(cj["ptop"], cj["prel"])
|
||||
job["name"] = self._untaken(pdir, cj["name"], now, cj["addr"])
|
||||
dst = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
os.unlink(fsenc(dst)) # TODO ed pls
|
||||
self._symlink(src, dst)
|
||||
else:
|
||||
|
||||
if not job:
|
||||
job = {
|
||||
"wark": wark,
|
||||
"t0": now,
|
||||
"addr": cj["addr"],
|
||||
"vdir": cj["vdir"],
|
||||
"rdir": cj["rdir"],
|
||||
# client-provided, sanitized by _get_wark:
|
||||
"name": cj["name"],
|
||||
"size": cj["size"],
|
||||
"lmod": cj["lmod"],
|
||||
"hash": deepcopy(cj["hash"]),
|
||||
"need": [],
|
||||
}
|
||||
# client-provided, sanitized by _get_wark: name, size, lmod
|
||||
for k in [
|
||||
"addr",
|
||||
"vtop",
|
||||
"ptop",
|
||||
"prel",
|
||||
"flag",
|
||||
"name",
|
||||
"size",
|
||||
"lmod",
|
||||
"poke",
|
||||
]:
|
||||
job[k] = cj[k]
|
||||
|
||||
# one chunk may occur multiple times in a file;
|
||||
# filter to unique values for the list of missing chunks
|
||||
# (preserve order to reduce disk thrashing)
|
||||
job["need"] = []
|
||||
lut = {}
|
||||
for k in cj["hash"]:
|
||||
if k not in lut:
|
||||
@@ -142,37 +415,62 @@ class Up2k(object):
|
||||
self.log("up2k", "cannot symlink; creating copy: " + repr(ex))
|
||||
shutil.copy2(fsenc(src), fsenc(dst))
|
||||
|
||||
def handle_chunk(self, wark, chash):
|
||||
def handle_chunk(self, ptop, wark, chash):
|
||||
with self.mutex:
|
||||
job = self.registry.get(wark)
|
||||
job = self.registry[ptop].get(wark, None)
|
||||
if not job:
|
||||
raise Pebkac(404, "unknown wark")
|
||||
raise Pebkac(400, "unknown wark")
|
||||
|
||||
if chash not in job["need"]:
|
||||
raise Pebkac(200, "already got that but thanks??")
|
||||
raise Pebkac(400, "already got that but thanks??")
|
||||
|
||||
nchunk = [n for n, v in enumerate(job["hash"]) if v == chash]
|
||||
if not nchunk:
|
||||
raise Pebkac(404, "unknown chunk")
|
||||
raise Pebkac(400, "unknown chunk")
|
||||
|
||||
job["poke"] = time.time()
|
||||
|
||||
chunksize = self._get_chunksize(job["size"])
|
||||
ofs = [chunksize * x for x in nchunk]
|
||||
|
||||
path = os.path.join(job["rdir"], job["name"])
|
||||
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
|
||||
|
||||
return [chunksize, ofs, path, job["lmod"]]
|
||||
|
||||
def confirm_chunk(self, wark, chash):
|
||||
def confirm_chunk(self, ptop, wark, chash):
|
||||
with self.mutex:
|
||||
job = self.registry[wark]
|
||||
job["need"].remove(chash)
|
||||
try:
|
||||
job = self.registry[ptop][wark]
|
||||
pdir = os.path.join(job["ptop"], job["prel"])
|
||||
src = os.path.join(pdir, job["tnam"])
|
||||
dst = os.path.join(pdir, job["name"])
|
||||
except Exception as ex:
|
||||
return "confirm_chunk, wark, " + repr(ex)
|
||||
|
||||
try:
|
||||
job["need"].remove(chash)
|
||||
except Exception as ex:
|
||||
return "confirm_chunk, chash, " + repr(ex)
|
||||
|
||||
ret = len(job["need"])
|
||||
if ret > 0:
|
||||
return ret, src
|
||||
|
||||
if WINDOWS and ret == 0:
|
||||
path = os.path.join(job["rdir"], job["name"])
|
||||
self.lastmod_q.put([path, (int(time.time()), int(job["lmod"]))])
|
||||
atomic_move(src, dst)
|
||||
|
||||
return ret
|
||||
if WINDOWS:
|
||||
self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))])
|
||||
|
||||
db = self.db.get(job["ptop"], None)
|
||||
if db:
|
||||
rp = os.path.join(job["prel"], job["name"]).replace("\\", "/")
|
||||
self.db_rm(db, rp)
|
||||
self.db_add(db, job["wark"], rp, job["lmod"], job["size"])
|
||||
db.commit()
|
||||
del self.registry[ptop][wark]
|
||||
# in-memory registry is reserved for unfinished uploads
|
||||
|
||||
return ret, dst
|
||||
|
||||
def _get_chunksize(self, filesize):
|
||||
chunksize = 1024 * 1024
|
||||
@@ -186,6 +484,13 @@ class Up2k(object):
|
||||
chunksize += stepsize
|
||||
stepsize *= mul
|
||||
|
||||
def db_rm(self, db, rp):
|
||||
db.execute("delete from up where rp = ?", (rp,))
|
||||
|
||||
def db_add(self, db, wark, rp, ts, sz):
|
||||
v = (wark, ts, sz, rp)
|
||||
db.execute("insert into up values (?,?,?,?)", v)
|
||||
|
||||
def _get_wark(self, cj):
|
||||
if len(cj["name"]) > 1024 or len(cj["hash"]) > 512 * 1024: # 16TiB
|
||||
raise Pebkac(400, "name or numchunks not according to spec")
|
||||
@@ -202,9 +507,13 @@ class Up2k(object):
|
||||
except:
|
||||
cj["lmod"] = int(time.time())
|
||||
|
||||
# server-reproducible file identifier, independent of name or location
|
||||
ident = [self.salt, str(cj["size"])]
|
||||
ident.extend(cj["hash"])
|
||||
wark = self._wark_from_hashlist(cj["size"], cj["hash"])
|
||||
return wark
|
||||
|
||||
def _wark_from_hashlist(self, filesize, hashes):
|
||||
""" server-reproducible file identifier, independent of name or location """
|
||||
ident = [self.salt, str(filesize)]
|
||||
ident.extend(hashes)
|
||||
ident = "\n".join(ident)
|
||||
|
||||
hasher = hashlib.sha512()
|
||||
@@ -214,11 +523,48 @@ class Up2k(object):
|
||||
wark = base64.urlsafe_b64encode(digest)
|
||||
return wark.decode("utf-8").rstrip("=")
|
||||
|
||||
def _hashlist_from_file(self, path):
|
||||
fsz = os.path.getsize(path)
|
||||
csz = self._get_chunksize(fsz)
|
||||
ret = []
|
||||
last_print = time.time()
|
||||
with open(path, "rb", 512 * 1024) as f:
|
||||
while fsz > 0:
|
||||
now = time.time()
|
||||
td = now - last_print
|
||||
if td >= 0.1:
|
||||
last_print = now
|
||||
msg = " {} MB \r".format(int(fsz / 1024 / 1024))
|
||||
print(msg, end="", file=sys.stderr)
|
||||
|
||||
hashobj = hashlib.sha512()
|
||||
rem = min(csz, fsz)
|
||||
fsz -= rem
|
||||
while rem > 0:
|
||||
buf = f.read(min(rem, 64 * 1024))
|
||||
if not buf:
|
||||
raise Exception("EOF at " + str(f.tell()))
|
||||
|
||||
hashobj.update(buf)
|
||||
rem -= len(buf)
|
||||
|
||||
digest = hashobj.digest()[:32]
|
||||
digest = base64.urlsafe_b64encode(digest)
|
||||
ret.append(digest.decode("utf-8").rstrip("="))
|
||||
|
||||
return ret
|
||||
|
||||
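_hashlist_from_file and _wark_from_hashlist above define the identifier scheme used for dedup and resume: the file is hashed in up2k-sized chunks, each chunk hash being a truncated urlsafe-base64 sha512, and the wark is a sha512 over the salt, the file size and the chunk hashes. A condensed sketch of the same scheme with a fixed chunk size (the real `_get_chunksize` grows with file size, and the exact wark truncation is not visible in this diff, so the `[:32]` on the wark digest is an assumption):

```python
import base64
import hashlib

def b64(digest):
    return base64.urlsafe_b64encode(digest).decode("utf-8").rstrip("=")

def hashlist(path, chunksize=1024 * 1024):
    # per-chunk sha512, truncated to 32 bytes like the server does
    ret = []
    with open(path, "rb") as f:
        while True:
            buf = f.read(chunksize)
            if not buf:
                break
            ret.append(b64(hashlib.sha512(buf).digest()[:32]))
    return ret

def wark(salt, filesize, hashes):
    # assumption: same 32-byte truncation as the chunk hashes
    ident = "\n".join([salt, str(filesize)] + hashes)
    return b64(hashlib.sha512(ident.encode("utf-8")).digest()[:32])
```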
def _new_upload(self, job):
self.registry[job["wark"]] = job
self.registry[job["ptop"]][job["wark"]] = job
pdir = os.path.join(job["ptop"], job["prel"])
job["name"] = self._untaken(pdir, job["name"], job["t0"], job["addr"])
# if len(job["name"].split(".")) > 8:
# raise Exception("aaa")

tnam = job["name"] + ".PARTIAL"
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
with ren_open(job["name"], "wb", fdir=job["rdir"], suffix=suffix) as f:
f, job["name"] = f["orz"]
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
f, job["tnam"] = f["orz"]
f.seek(job["size"] - 1)
f.write(b"e")

@@ -231,7 +577,63 @@ class Up2k(object):
# self.log("lmod", "got {}".format(len(ready)))
time.sleep(5)
for path, times in ready:
self.log("lmod", "setting times {} on {}".format(times, path))
try:
os.utime(fsenc(path), times)
except:
self.log("lmod", "failed to utime ({}, {})".format(path, times))

def _snapshot(self):
persist_interval = 30  # persist unfinished uploads index every 30 sec
discard_interval = 21600  # drop unfinished uploads after 6 hours inactivity
prev = {}
while True:
time.sleep(persist_interval)
with self.mutex:
for k, reg in self.registry.items():
self._snap_reg(prev, k, reg, discard_interval)

def _snap_reg(self, prev, k, reg, discard_interval):
now = time.time()
rm = [x for x in reg.values() if now - x["poke"] > discard_interval]
if rm:
m = "dropping {} abandoned uploads in {}".format(len(rm), k)
vis = [self._vis_job_progress(x) for x in rm]
self.log("up2k", "\n".join([m] + vis))
for job in rm:
del reg[job["wark"]]
try:
# remove the filename reservation
path = os.path.join(job["ptop"], job["prel"], job["name"])
if os.path.getsize(path) == 0:
os.unlink(path)

if len(job["hash"]) == len(job["need"]):
# PARTIAL is empty, delete that too
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
os.unlink(path)
except:
pass

path = os.path.join(k, ".hist", "up2k.snap")
if not reg:
if k not in prev or prev[k] is not None:
prev[k] = None
if os.path.exists(path):
os.unlink(path)
return

newest = max(x["poke"] for _, x in reg.items()) if reg else 0
etag = [len(reg), newest]
if etag == prev.get(k, None):
return

path2 = "{}.{}".format(path, os.getpid())
j = json.dumps(reg, indent=2, sort_keys=True).encode("utf-8")
with gzip.GzipFile(path2, "wb") as f:
f.write(j)

atomic_move(path2, path)

self.log("up2k", "snap: {} |{}|".format(path, len(reg.keys())))
prev[k] = etag
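Since _snap_reg stores the registry as gzip-compressed JSON, the snapshot can be inspected with a few lines of Python; a small sketch (not part of the codebase), assuming the file layout written above:

```python
import gzip
import json


def list_unfinished(snap_path=".hist/up2k.snap"):
    # the snapshot is a gzip-compressed JSON object, one entry per
    # unfinished upload, keyed by wark (see _snap_reg above)
    with gzip.open(snap_path, "rt", encoding="utf-8") as f:
        reg = json.load(f)

    for wark, job in reg.items():
        missing = len(job.get("need", []))  # chunks not yet received
        print("{}  {}  missing chunks: {}".format(wark, job.get("name"), missing))
```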
@@ -108,7 +108,7 @@ def ren_open(fname, *args, **kwargs):
with open(fname, *args, **kwargs) as f:
yield {"orz": [f, fname]}
return


orig_name = fname
bname = fname
ext = ""
@@ -549,6 +549,16 @@ else:
fsdec = w8dec


def atomic_move(src, dst):
if not PY2:
os.replace(src, dst)
else:
if os.path.exists(dst):
os.unlink(dst)

os.rename(src, dst)

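atomic_move is what lets the snapshot writer above publish a complete file in one step: on Python 3, os.replace atomically overwrites the destination, while the Python 2 fallback must unlink first (os.rename refuses to overwrite on Windows), which leaves a brief window with no destination file. A sketch of the intended usage pattern, assuming Python 3:

```python
import os


def save_atomic(path, data):
    # same pattern as _snap_reg: write next to the destination,
    # then swap the finished file into place
    tmp = "{}.{}".format(path, os.getpid())
    with open(tmp, "wb") as f:
        f.write(data)
    os.replace(tmp, path)  # what atomic_move does on py3
```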
def read_socket(sr, total_size):
remains = total_size
while remains > 0:
@@ -622,10 +632,10 @@ def sendfile_kern(lower, upper, f, s):
except Exception as ex:
# print("sendfile: " + repr(ex))
n = 0


if n <= 0:
return upper - ofs


ofs += n
# print("sendfile: ok, sent {} now, {} total, {} remains".format(n, ofs - lower, upper - ofs))

@@ -708,7 +718,26 @@ def py_desc():
)


def align_tab(lines):
rows = []
ncols = 0
for ln in lines:
row = [x for x in ln.split(" ") if x]
ncols = max(ncols, len(row))
rows.append(row)

lens = [0] * ncols
for row in rows:
for n, col in enumerate(row):
lens[n] = max(lens[n], len(col))

return ["".join(x.ljust(y + 2) for x, y in zip(row, lens)) for row in rows]

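align_tab splits each line on spaces, drops empty cells, and left-justifies every column to the widest cell in that column plus two spaces; a quick usage sketch, assuming the function above is in scope:

```python
lines = [
    "name size modified",
    "a.txt 1234 2020-01-01",
    "some-longer-name.bin 7 2020-02-02",
]
for ln in align_tab(lines):
    print(ln)
# each column is padded to its widest cell + 2 spaces,
# so "size" and "modified" line up across all rows
```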
class Pebkac(Exception):
def __init__(self, code, msg=None):
super(Pebkac, self).__init__(msg or HTTPCODE[code])
self.code = code

def __repr__(self):
return "Pebkac({}, {})".format(self.code, repr(self.args))

@@ -8,7 +8,14 @@ function dbg(msg) {

function ev(e) {
e = e || window.event;
e.preventDefault ? e.preventDefault() : (e.returnValue = false);

if (e.preventDefault)
e.preventDefault()

if (e.stopPropagation)
e.stopPropagation();

e.returnValue = false;
return e;
}

@@ -447,7 +454,8 @@ function play(tid, call_depth) {
mp.au.tid = tid;
mp.au.src = url;
mp.au.volume = mp.expvol();
setclass('trk' + tid, 'play act');
var oid = 'trk' + tid;
setclass(oid, 'play act');

try {
if (hack_attempt_play)
@@ -456,7 +464,11 @@ function play(tid, call_depth) {
if (mp.au.paused)
autoplay_blocked();

location.hash = 'trk' + tid;
var o = ebi(oid);
o.setAttribute('id', 'thx_js');
location.hash = oid;
o.setAttribute('id', oid);

pbar.drawbuf();
return true;
}

@@ -89,6 +89,104 @@ catch (ex) {
}


function up2k_flagbus() {
var flag = {
"id": Math.floor(Math.random() * 1024 * 1024 * 1023 * 2),
"ch": new BroadcastChannel("up2k_flagbus"),
"ours": false,
"owner": null,
"wants": null,
"act": false,
"last_tx": ["x", null]
};
var dbg = function (who, msg) {
console.log('flagbus(' + flag.id + '): [' + who + '] ' + msg);
};
flag.ch.onmessage = function (ev) {
var who = ev.data[0],
what = ev.data[1];

if (who == flag.id) {
dbg(who, 'hi me (??)');
return;
}
flag.act = new Date().getTime();
if (what == "want") {
// lowest id wins, don't care if that's us
if (who < flag.id) {
dbg(who, 'wants (ack)');
flag.wants = [who, flag.act];
}
else {
dbg(who, 'wants (ign)');
}
}
else if (what == "have") {
dbg(who, 'have');
flag.owner = [who, flag.act];
}
else if (what == "give") {
if (flag.owner && flag.owner[0] == who) {
flag.owner = null;
dbg(who, 'give (ok)');
}
else {
dbg(who, 'give, INVALID, ' + flag.owner);
}
}
else if (what == "hi") {
dbg(who, 'hi');
flag.ch.postMessage([flag.id, "hey"]);
}
else {
dbg('?', ev.data);
}
};
var tx = function (now, msg) {
var td = now - flag.last_tx[1];
if (td > 500 || flag.last_tx[0] != msg) {
dbg('*', 'tx ' + msg);
flag.ch.postMessage([flag.id, msg]);
flag.last_tx = [msg, now];
}
};
var do_take = function (now) {
//dbg('*', 'do_take');
tx(now, "have");
flag.owner = [flag.id, now];
flag.ours = true;
};
var do_want = function (now) {
//dbg('*', 'do_want');
tx(now, "want");
};
flag.take = function (now) {
if (flag.ours) {
do_take(now);
return;
}
if (flag.owner && now - flag.owner[1] > 5000) {
flag.owner = null;
}
if (flag.wants && now - flag.wants[1] > 5000) {
flag.wants = null;
}
if (!flag.owner && !flag.wants) {
do_take(now);
return;
}
do_want(now);
};
flag.give = function () {
dbg('#', 'put give');
flag.ch.postMessage([flag.id, "give"]);
flag.owner = null;
flag.ours = false;
};
flag.ch.postMessage([flag.id, 'hi']);
return flag;
}

function up2k_init(have_crypto) {
//have_crypto = false;
var need_filereader_cache = undefined;
@@ -201,6 +299,8 @@ function up2k_init(have_crypto) {

var parallel_uploads = cfg_get('nthread');
var multitask = bcfg_get('multitask', true);
var ask_up = bcfg_get('ask_up', true);
var flag_en = bcfg_get('flag_en', false);

var col_hashing = '#00bbff';
var col_hashed = '#004466';
@@ -218,6 +318,10 @@ function up2k_init(have_crypto) {
"hash": [],
"handshake": [],
"upload": []
},
"bytes": {
"hashed": 0,
"uploaded": 0
}
};

@@ -228,6 +332,9 @@ function up2k_init(have_crypto) {
if (!bobslice || !window.FileReader || !window.FileList)
return un2k("this is the basic uploader; up2k needs at least<br />chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1");

var flag = false;
apply_flag_cfg();

function nav() {
ebi('file' + fdom_ctr).click();
}
@@ -262,6 +369,7 @@ function up2k_init(have_crypto) {

more_one_file();
var bad_files = [];
var good_files = [];
for (var a = 0; a < files.length; a++) {
var fobj = files[a];
if (is_itemlist) {
@@ -275,9 +383,32 @@ function up2k_init(have_crypto) {
throw 1;
}
catch (ex) {
bad_files.push([a, fobj.name]);
bad_files.push(fobj.name);
continue;
}
good_files.push(fobj);
}

if (bad_files.length > 0) {
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length);
for (var a = 0; a < bad_files.length; a++)
msg += '-- ' + bad_files[a] + '\n';

if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))
msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';

alert(msg);
}

var msg = ['upload these ' + good_files.length + ' files?'];
for (var a = 0; a < good_files.length; a++)
msg.push(good_files[a].name);

if (ask_up && !confirm(msg.join('\n')))
return;

for (var a = 0; a < good_files.length; a++) {
var fobj = good_files[a];
var now = new Date().getTime();
var lmod = fobj.lastModified || now;
var entry = {
@@ -307,17 +438,6 @@ function up2k_init(have_crypto) {
st.files.push(entry);
st.todo.hash.push(entry);
}

if (bad_files.length > 0) {
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length);
for (var a = 0; a < bad_files.length; a++)
msg += '-- ' + bad_files[a][1] + '\n';

if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))
msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';

alert(msg);
}
}
ebi('u2btn').addEventListener('drop', gotfile, false);

@@ -336,16 +456,20 @@ function up2k_init(have_crypto) {
//

function handshakes_permitted() {
return multitask || (
st.todo.upload.length == 0 &&
st.busy.upload.length == 0);
var lim = multitask ? 1 : 0;
return lim >=
st.todo.upload.length +
st.busy.upload.length;
}

function hashing_permitted() {
return multitask || (
handshakes_permitted() &&
st.todo.handshake.length == 0 &&
st.busy.handshake.length == 0);
if (multitask) {
var ahead = st.bytes.hashed - st.bytes.uploaded;
return ahead < 1024 * 1024 * 128;
}
return handshakes_permitted() && 0 ==
st.todo.handshake.length +
st.busy.handshake.length;
}
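The rewritten helpers above add back-pressure: with multitask enabled, hashing is only permitted while it is less than 128 MiB ahead of what has actually been uploaded (tracked through st.bytes.hashed and st.bytes.uploaded), and otherwise the handshake queues must fully drain first. The same rule as a minimal Python sketch (names are illustrative, not part of the codebase):

```python
HASH_AHEAD_LIMIT = 128 * 1024 * 1024  # bytes


def hashing_permitted(multitask, bytes_hashed, bytes_uploaded,
                      handshakes_ok, handshakes_pending):
    # mirrors the updated hashing_permitted() above
    if multitask:
        return bytes_hashed - bytes_uploaded < HASH_AHEAD_LIMIT
    return handshakes_ok and handshakes_pending == 0
```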
var tasker = (function () {
@@ -357,8 +481,54 @@ function up2k_init(have_crypto) {

mutex = true;
while (true) {
if (false) {
ebi('srv_info').innerHTML =
new Date().getTime() + ", " +
st.todo.hash.length + ", " +
st.todo.handshake.length + ", " +
st.todo.upload.length + ", " +
st.busy.hash.length + ", " +
st.busy.handshake.length + ", " +
st.busy.upload.length;
}

if (flag) {
var need_flag = 0 !=
st.todo.hash.length +
st.todo.handshake.length +
st.todo.upload.length +
st.busy.hash.length +
st.busy.handshake.length +
st.busy.upload.length;

if (need_flag) {
var now = new Date().getTime();
flag.take(now);
if (!flag.ours) {
setTimeout(taskerd, 100);
mutex = false;
return;
}
}
else if (flag.ours) {
flag.give();
}
}

var mou_ikkai = false;

if (st.todo.handshake.length > 0 &&
st.busy.handshake.length == 0 && (
st.todo.handshake[0].t3 || (
handshakes_permitted() &&
st.busy.upload.length < parallel_uploads
)
)
) {
exec_handshake();
mou_ikkai = true;
}

if (handshakes_permitted() &&
st.todo.handshake.length > 0 &&
st.busy.handshake.length == 0 &&
@@ -497,6 +667,8 @@ function up2k_init(have_crypto) {

var t = st.todo.hash.shift();
st.busy.hash.push(t);
st.bytes.hashed += t.size;
t.bytes_uploaded = 0;
t.t1 = new Date().getTime();

var nchunk = 0;
@@ -660,18 +832,25 @@ function up2k_init(have_crypto) {
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);

if (done) {
st.bytes.uploaded += t.size - t.bytes_uploaded;
var spd1 = (t.size / ((t.t2 - t.t1) / 1000.)) / (1024 * 1024.);
var spd2 = (t.size / ((t.t3 - t.t2) / 1000.)) / (1024 * 1024.);
ebi('f{0}p'.format(t.n)).innerHTML = 'hash {0}, up {1} MB/s'.format(
spd1.toFixed(2), spd2.toFixed(2));
}
else t.t3 = undefined;

tasker();
}
else {
var err = "";
var rsp = (xhr.responseText + '');
if (rsp.indexOf('partial upload exists') !== -1) {
err = rsp.slice(5);
if (rsp.indexOf('partial upload exists') !== -1 ||
rsp.indexOf('file already exists') !== -1) {
err = rsp;
var ofs = err.lastIndexOf(' : ');
if (ofs > 0)
err = err.slice(0, ofs);
}
if (err != "") {
ebi('f{0}t'.format(t.n)).innerHTML = "ERROR";
@@ -733,12 +912,14 @@ function up2k_init(have_crypto) {
xhr.onload = function (xev) {
if (xhr.status == 200) {
prog(t.n, npart, col_uploaded);
st.bytes.uploaded += cdr - car;
t.bytes_uploaded += cdr - car;
st.busy.upload.splice(st.busy.upload.indexOf(upt), 1);
t.postlist.splice(t.postlist.indexOf(npart), 1);
if (t.postlist.length == 0) {
t.t3 = new Date().getTime();
ebi('f{0}t'.format(t.n)).innerHTML = 'verifying';
st.todo.handshake.push(t);
st.todo.handshake.unshift(t);
}
tasker();
}
@@ -821,6 +1002,33 @@ function up2k_init(have_crypto) {
bcfg_set('multitask', multitask);
}

function tgl_ask_up() {
ask_up = !ask_up;
bcfg_set('ask_up', ask_up);
}

function tgl_flag_en() {
flag_en = !flag_en;
bcfg_set('flag_en', flag_en);
apply_flag_cfg();
}

function apply_flag_cfg() {
if (flag_en && !flag) {
try {
flag = up2k_flagbus();
}
catch (ex) {
console.log("flag error: " + ex.toString());
tgl_flag_en();
}
}
else if (!flag_en && flag) {
flag.ch.close();
flag = false;
}
}

function nop(ev) {
ev.preventDefault();
this.click();
@@ -837,6 +1045,8 @@ function up2k_init(have_crypto) {

ebi('nthread').addEventListener('input', bumpthread, false);
ebi('multitask').addEventListener('click', tgl_multitask, false);
ebi('ask_up').addEventListener('click', tgl_ask_up, false);
ebi('flag_en').addEventListener('click', tgl_flag_en, false);

var nodes = ebi('u2conf').getElementsByTagName('a');
for (var a = nodes.length - 1; a >= 0; a--)

@@ -194,6 +194,12 @@
#u2conf input+a {
background: #d80;
}
#u2conf input[type="checkbox"]+label {
color: #f5a;
}
#u2conf input[type="checkbox"]:checked+label {
color: #fc5;
}
#u2foot {
color: #fff;
font-style: italic;

@@ -3,7 +3,8 @@
href="#" data-dest="up2k">up2k</a><i></i><a
href="#" data-dest="bup">bup</a><i></i><a
href="#" data-dest="mkdir">mkdir</a><i></i><a
href="#" data-dest="new_md">new.md</a></div>
href="#" data-dest="new_md">new.md</a><i></i><a
href="#" data-dest="msg">msg</a></div>

<div id="op_bup" class="opview opbox act">
<div id="u2err"></div>
@@ -30,6 +31,13 @@
</form>
</div>

<div id="op_msg" class="opview opbox">
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="/{{ vdir }}">
<input type="text" name="msg" size="30">
<input type="submit" value="send">
</form>
</div>

<div id="op_up2k" class="opview">
<form id="u2form" method="post" enctype="multipart/form-data" onsubmit="return false;"></form>

@@ -43,9 +51,17 @@
<input class="txtbox" id="nthread" value="2" />
<a href="#" id="nthread_add">+</a>
</td>
<td rowspan="2">
<td rowspan="2" style="padding-left:1.5em">
<input type="checkbox" id="multitask" />
<label for="multitask">hash while<br />uploading</label>
<label for="multitask">hash<br />while<br />upping</label>
</td>
<td rowspan="2">
<input type="checkbox" id="ask_up" />
<label for="ask_up">ask<br />before<br />start</label>
</td>
<td rowspan="2">
<input type="checkbox" id="flag_en" />
<label for="flag_en">only<br />one tab<br />at once</label>
</td>
</tr>
</table>

@@ -3,6 +3,14 @@ echo not a script
exit 1


##
## delete all partial uploads
## (supports linux/macos, probably windows+msys2)

gzip -d < .hist/up2k.snap | jq -r '.[].tnam' | while IFS= read -r f; do rm -f -- "$f"; done
gzip -d < .hist/up2k.snap | jq -r '.[].name' | while IFS= read -r f; do wc -c -- "$f" | grep -qiE '^[^0-9a-z]*0' && rm -f -- "$f"; done


##
## create a test payload


@@ -3,12 +3,15 @@ set -e
echo

# osx support
command -v gtar >/dev/null &&
command -v gfind >/dev/null && {
tar() { gtar "$@"; }
# port install gnutar findutils gsed coreutils
gtar=$(command -v gtar || command -v gnutar) || true
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
tar() { $gtar "$@"; }
sed() { gsed "$@"; }
find() { gfind "$@"; }
sort() { gsort "$@"; }
command -v grealpath >/dev/null &&
realpath() { grealpath "$@"; }
}

which md5sum 2>/dev/null >/dev/null &&

@@ -18,13 +18,16 @@ echo
# (the fancy markdown editor)


command -v gtar >/dev/null &&
command -v gfind >/dev/null && {
tar() { gtar "$@"; }
# port install gnutar findutils gsed coreutils
gtar=$(command -v gtar || command -v gnutar) || true
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
tar() { $gtar "$@"; }
sed() { gsed "$@"; }
find() { gfind "$@"; }
sort() { gsort "$@"; }
unexpand() { gunexpand "$@"; }
command -v grealpath >/dev/null &&
realpath() { grealpath "$@"; }
}

[ -e copyparty/__main__.py ] || cd ..
@@ -59,28 +62,32 @@ cd sfx
)/pe-copyparty"

echo "repack of files in $old"
cp -pR "$old/"*{jinja2,copyparty} .
mv {x.,}jinja2 2>/dev/null || true
cp -pR "$old/"*{dep-j2,copyparty} .
}

[ $repack ] || {
echo collecting jinja2
f="../build/Jinja2-2.6.tar.gz"
f="../build/Jinja2-2.11.3.tar.gz"
[ -e "$f" ] ||
(url=https://files.pythonhosted.org/packages/25/c8/212b1c2fd6df9eaf536384b6c6619c4e70a3afd2dffdd00e5296ffbae940/Jinja2-2.6.tar.gz;
(url=https://files.pythonhosted.org/packages/4f/e7/65300e6b32e69768ded990494809106f87da1d436418d5f1367ed3966fd7/Jinja2-2.11.3.tar.gz;
wget -O$f "$url" || curl -L "$url" >$f)

tar -zxf $f
mv Jinja2-*/jinja2 .
rm -rf Jinja2-* jinja2/testsuite jinja2/_markupsafe/tests.py jinja2/_stringdefs.py
mv Jinja2-*/src/jinja2 .
rm -rf Jinja2-*

f=jinja2/lexer.py
sed -r '/.*föö.*/ raise SyntaxError/' <$f >t
tmv $f

f=jinja2/_markupsafe/_constants.py
awk '!/: [0-9]+,?$/ || /(amp|gt|lt|quot|apos|nbsp).:/' <$f >t
tmv $f
echo collecting markupsafe
f="../build/MarkupSafe-1.1.1.tar.gz"
[ -e "$f" ] ||
(url=https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz;
wget -O$f "$url" || curl -L "$url" >$f)

tar -zxf $f
mv MarkupSafe-*/src/markupsafe .
rm -rf MarkupSafe-* markupsafe/_speedups.c

mkdir dep-j2/
mv {markupsafe,jinja2} dep-j2/

# msys2 tar is bad, make the best of it
echo collecting source
@@ -162,6 +169,15 @@ done
sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f"
}

find | grep -E '\.py$' |
grep -vE '__version__' |
tr '\n' '\0' |
xargs -0 python ../scripts/uncomment.py

f=dep-j2/jinja2/constants.py
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
tmv "$f"

# up2k goes from 28k to 22k laff
echo entabbening
find | grep -E '\.(js|css|html|py)$' | while IFS= read -r f; do
@@ -174,7 +190,7 @@ args=(--owner=1000 --group=1000)
[ "$OSTYPE" = msys ] &&
args=()

tar -cf tar "${args[@]}" --numeric-owner copyparty jinja2
tar -cf tar "${args[@]}" --numeric-owner copyparty dep-j2

echo compressing tar
# detect best level; bzip2 -7 is usually better than -9

@@ -2,12 +2,16 @@
set -e
echo

command -v gtar >/dev/null &&
command -v gfind >/dev/null && {
tar() { gtar "$@"; }
# osx support
# port install gnutar findutils gsed coreutils
gtar=$(command -v gtar || command -v gnutar) || true
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
tar() { $gtar "$@"; }
sed() { gsed "$@"; }
find() { gfind "$@"; }
sort() { gsort "$@"; }
command -v grealpath >/dev/null &&
realpath() { grealpath "$@"; }
}

which md5sum 2>/dev/null >/dev/null &&

scripts/sfx.py
@@ -2,7 +2,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals

import re, os, sys, time, shutil, signal, tarfile, hashlib, platform, tempfile
import os, sys, time, shutil, signal, tarfile, hashlib, platform, tempfile
import subprocess as sp

"""
@@ -202,93 +202,6 @@ def u8(gen):
yield s


def get_py_win(ret):
tops = []
p = str(os.getenv("LocalAppdata"))
if p:
tops.append(os.path.join(p, "Programs", "Python"))

progfiles = {}
for p in ["ProgramFiles", "ProgramFiles(x86)"]:
p = str(os.getenv(p))
if p:
progfiles[p] = 1
# 32bit apps get x86 for both
if p.endswith(" (x86)"):
progfiles[p[:-6]] = 1

tops += list(progfiles.keys())

for sysroot in [me, sys.executable]:
sysroot = sysroot[:3].upper()
if sysroot[1] == ":" and sysroot not in tops:
tops.append(sysroot)

# $WIRESHARK_SLOGAN
for top in tops:
try:
for name1 in u8(sorted(os.listdir(top), reverse=True)):
if name1.lower().startswith("python"):
path1 = os.path.join(top, name1)
try:
for name2 in u8(os.listdir(path1)):
if name2.lower() == "python.exe":
path2 = os.path.join(path1, name2)
ret[path2.lower()] = path2
except:
pass
except:
pass


def get_py_nix(ret):
ptn = re.compile(r"^(python|pypy)[0-9\.-]*$")
for bindir in os.getenv("PATH").split(":"):
if not bindir:
next

try:
for fn in u8(os.listdir(bindir)):
if ptn.match(fn):
fn = os.path.join(bindir, fn)
ret[fn.lower()] = fn
except:
pass


def read_py(binp):
cmd = [
binp,
"-c",
"import sys; sys.stdout.write(' '.join(str(x) for x in sys.version_info)); import jinja2",
]
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
ver, _ = p.communicate()
ver = ver.decode("utf-8").split(" ")[:3]
ver = [int(x) if x.isdigit() else 0 for x in ver]
return ver, p.returncode == 0


def get_pys():
ver, chk = read_py(sys.executable)
if chk or PY2:
return [[chk, ver, sys.executable]]

hits = {sys.executable.lower(): sys.executable}
if platform.system() == "Windows":
get_py_win(hits)
else:
get_py_nix(hits)

ret = []
for binp in hits.values():
ver, chk = read_py(binp)
ret.append([chk, ver, binp])
msg("\t".join(str(x) for x in ret[-1]))

return ret


def yieldfile(fn):
with open(fn, "rb") as f:
for block in iter(lambda: f.read(64 * 1024), b""):
@@ -440,12 +353,11 @@ def confirm():
pass


def run(tmp, py):
def run(tmp, j2ver):
global cpp

msg("OK")
msg("will use:", py)
msg("bound to:", tmp)
msg("jinja2:", j2ver or "bundled")
msg("sfxdir:", tmp)

# "systemd-tmpfiles-clean.timer"?? HOW do you even come up with this shit
try:
@@ -457,24 +369,20 @@ def run(tmp, py):
except:
pass

fp_py = os.path.join(tmp, "py")
try:
with open(fp_py, "wb") as f:
f.write(py.encode("utf-8") + b"\n")
except:
pass
ld = [tmp, os.path.join(tmp, "dep-j2")]
if j2ver:
del ld[-1]

# avoid loading ./copyparty.py
cmd = [
py,
"-c",
'import sys, runpy; sys.path.insert(0, r"'
+ tmp
+ '"); runpy.run_module("copyparty", run_name="__main__")',
] + list(sys.argv[1:])
cmd = (
"import sys, runpy; "
+ "".join(['sys.path.insert(0, r"' + x + '"); ' for x in ld])
+ 'runpy.run_module("copyparty", run_name="__main__")'
)
cmd = [sys.executable, "-c", cmd] + list(sys.argv[1:])

cmd = [str(x) for x in cmd]
msg("\n", cmd, "\n")
cpp = sp.Popen(str(x) for x in cmd)
cpp = sp.Popen(cmd)
try:
cpp.wait()
except:
@@ -494,7 +402,6 @@ def bye(sig, frame):
def main():
sysver = str(sys.version).replace("\n", "\n" + " " * 18)
pktime = time.strftime("%Y-%m-%d, %H:%M:%S", time.gmtime(STAMP))
os.system("")
msg()
msg(" this is: copyparty", VER)
msg(" packed at:", pktime, "UTC,", STAMP)
@@ -526,33 +433,13 @@ def main():
signal.signal(signal.SIGTERM, bye)

tmp = unpack()
fp_py = os.path.join(tmp, "py")
if os.path.exists(fp_py):
with open(fp_py, "rb") as f:
py = f.read().decode("utf-8").rstrip()

return run(tmp, py)
try:
from jinja2 import __version__ as j2ver
except:
j2ver = None

pys = get_pys()
pys.sort(reverse=True)
j2, ver, py = pys[0]
if j2:
try:
os.rename(os.path.join(tmp, "jinja2"), os.path.join(tmp, "x.jinja2"))
except:
pass

return run(tmp, py)

msg("\n could not find jinja2; will use py2 + the bundled version\n")
for _, ver, py in pys:
if ver > [2, 7] and ver < [3, 0]:
return run(tmp, py)

m = "\033[1;31m\n\n\ncould not find a python with jinja2 installed; please do one of these:\n\n pip install --user jinja2\n\n install python2\n\n\033[0m"
msg(m)
confirm()
sys.exit(1)
return run(tmp, j2ver)


if __name__ == "__main__":
scripts/uncomment.py (new file)
@@ -0,0 +1,77 @@
#!/usr/bin/env python
# coding: utf-8
from __future__ import print_function, unicode_literals

import io
import sys
import tokenize


def uncomment(fpath):
""" modified https://stackoverflow.com/a/62074206 """

with open(fpath, "rb") as f:
orig = f.read().decode("utf-8")

out = ""
for ln in orig.split("\n"):
if not ln.startswith("#"):
break

out += ln + "\n"

io_obj = io.StringIO(orig)
prev_toktype = tokenize.INDENT
last_lineno = -1
last_col = 0
for tok in tokenize.generate_tokens(io_obj.readline):
# print(repr(tok))
token_type = tok[0]
token_string = tok[1]
start_line, start_col = tok[2]
end_line, end_col = tok[3]

if start_line > last_lineno:
last_col = 0

if start_col > last_col:
out += " " * (start_col - last_col)

is_legalese = (
"copyright" in token_string.lower() or "license" in token_string.lower()
)

if token_type == tokenize.STRING:
if (
prev_toktype != tokenize.INDENT
and prev_toktype != tokenize.NEWLINE
and start_col > 0
or is_legalese
):
out += token_string
else:
out += '"a"'
elif token_type != tokenize.COMMENT or is_legalese:
out += token_string

prev_toktype = token_type
last_lineno = end_line
last_col = end_col

# out = "\n".join(x for x in out.splitlines() if x.strip())

with open(fpath, "wb") as f:
f.write(out.encode("utf-8"))


def main():
print("uncommenting", end="")
for f in sys.argv[1:]:
print(".", end="")
uncomment(f)

print("k")


if __name__ == "__main__":
main()
setup.py
@@ -2,10 +2,8 @@
# coding: utf-8
from __future__ import print_function

import io
import os
import sys
from glob import glob
from shutil import rmtree

setuptools_available = True
@@ -49,7 +47,7 @@ with open(here + "/README.md", "rb") as f:
about = {}
if not VERSION:
with open(os.path.join(here, NAME, "__version__.py"), "rb") as f:
exec(f.read().decode("utf-8").split("\n\n", 1)[1], about)
exec (f.read().decode("utf-8").split("\n\n", 1)[1], about)
else:
about["__version__"] = VERSION

@@ -110,13 +108,13 @@ args = {
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Environment :: Console",