Compare commits

...

81 Commits

Author SHA1 Message Date
ed 9c03c65e07 v0.7.6 2021-02-12 20:53:29 +01:00
ed d8ed006b9b up2k: 128 MiB runahead 2021-02-12 20:41:42 +01:00
ed 63c0623a5e vscode: windows support 2021-02-12 19:47:18 +01:00
ed fd84506db0 don't list up2k db in browser 2021-02-12 19:25:57 +01:00
ed d8bcb44e44 vscode: no-debug launcher 2021-02-12 19:25:01 +01:00
ed 56a26b0916 up2k: print final commit too 2021-02-12 17:10:08 +01:00
ed efcf1d6b90 add cfssl.sh 2021-02-12 07:30:20 +00:00
ed 9f578bfec6 v0.7.5 2021-02-12 07:06:38 +00:00
ed 1f170d7d28 up2k scanner messages less useless 2021-02-12 07:04:35 +00:00
ed 5ae14cf9be up2k scanner more better 2021-02-12 01:07:55 +00:00
ed aaf9d53be9 more ssl options 2021-02-12 00:31:28 +00:00
ed 75c73f7ba7 add --http-only (might as well) 2021-02-11 22:54:40 +00:00
ed b6dba8beee imagine going plaintext in the middle of a tls reply 2021-02-11 22:50:59 +00:00
ed 94521cdc1a add --https-only 2021-02-11 22:48:10 +00:00
ed 3365b1c355 add --ssl-ver (ssl/tls versions to allow) 2021-02-11 21:24:17 +00:00
ed 6c957c4923 v0.7.4 2021-02-04 01:01:42 +01:00
ed 833997f04c shrink sfx.py from 515k to 472k 2021-02-04 01:01:11 +01:00
ed 68d51e4037 rem 2021-02-04 01:00:41 +01:00
ed ce274d2011 handle url-encoded posts 2021-02-03 23:18:11 +01:00
ed 280778ed43 catch macos socket errors 2021-02-03 22:32:16 +01:00
ed 0f558ecbbf upgrade bundled jinja2 2021-02-03 22:32:01 +01:00
ed 58f9e05d93 v0.7.3 2021-02-03 00:50:51 +01:00
ed 1ec981aea7 bind multiple ip/ports 2021-02-03 00:49:51 +01:00
ed 2a90286a7c dim the socket debug msgs 2021-02-03 00:25:13 +01:00
ed 12d25d09b2 limit gz/br unpacker to embedded resources 2021-02-03 00:19:14 +01:00
ed a039fae1a4 remove extra anon-rw warning 2021-02-03 00:17:12 +01:00
ed 322b9abadc v0.7.2 2021-01-29 00:52:41 +01:00
ed 0aaf954cea up2k: increase purge timeout 2021-01-29 00:52:22 +01:00
ed c2d22aa3d1 up2k: make confirmation optional 2021-01-29 00:49:35 +01:00
ed 6934c75bba nice 2021-01-29 00:43:57 +01:00
ed c58cf78f86 yabe 2021-01-24 16:14:01 +01:00
ed 7f0de790ab more macports compat 2021-01-23 21:19:29 +01:00
ed d4bb4e3a73 v0.7.1 2021-01-23 19:55:35 +01:00
ed d25612d038 make-sfx: support macports 2021-01-23 19:55:24 +01:00
ed 116b2351b0 mention howto purge partial uploads 2021-01-23 19:25:25 +01:00
ed 69b83dfdc4 up2k: limit runahead in client 2021-01-23 19:05:45 +01:00
ed 3b1839c2ce up2k: ask before starting the upload 2021-01-23 18:51:08 +01:00
ed 13742ebdf8 verify that PARTIALs exist after a restart 2021-01-23 18:49:43 +01:00
ed 634657bea1 up2k: discard empty PARTIALs 2021-01-23 18:10:11 +01:00
ed 46e70d50b7 v0.7.0 2021-01-10 17:49:56 +01:00
ed d64e9b85a7 prefer sqlite over registry snaps 2021-01-10 17:47:27 +01:00
ed fb853edbe3 prevent index loss on mid-write crash 2021-01-10 17:16:55 +01:00
ed cc076c1be1 persist/timeout incomplete uploads too 2021-01-10 16:47:35 +01:00
ed 98cc9a6755 mojibake support + exception handling 2021-01-10 09:48:26 +01:00
ed 7bd2b9c23a sqlite3 as up2k db + build index on boot + rproxy ip fix 2021-01-10 09:27:11 +01:00
ed de724a1ff3 up2k: add volume flag to reject existing files 2021-01-09 15:20:02 +01:00
ed 2163055dae media-player: play links don't scroll on click 2021-01-09 14:40:56 +01:00
ed 93ed0fc10b v0.6.3 2021-01-07 01:09:32 +01:00
ed 0d98cefd40 fix dumb 2021-01-07 01:06:31 +01:00
ed d58988a033 use sendfile when possible 2021-01-07 00:50:42 +01:00
ed 2acfab1e3f cleanup 2021-01-06 22:54:54 +01:00
ed b915dfe9a6 nagle adds ~.2sec delay on last packet 2021-01-06 21:08:52 +00:00
ed 25bd5a823e fuse-client: add timestamps to logger 2021-01-06 17:40:42 +01:00
ed 1c35de4716 fuse-client: cache tweaks 2021-01-06 17:22:07 +01:00
ed 4c00435a0a fuse: add windows-explorer settings 2021-01-06 17:18:37 +01:00
ed 844e3079a8 saved for posterity 2021-01-06 17:13:24 +01:00
ed 4778cb5b2c readme: add quickstart 2021-01-02 22:57:48 +01:00
ed ec5d60b919 fuse-client: fix directory parser 2021-01-01 21:54:56 +01:00
ed e1f4b960e8 oh no 2020-12-20 02:33:37 +01:00
ed 669e46da54 update TODOs 2020-12-14 09:19:43 +01:00
ed ba94cc5df7 v0.6.2 2020-12-14 04:28:21 +01:00
ed d08245c3df v0.6.1 2020-12-14 03:51:24 +01:00
ed 5c18d12cbf self-upgrading upgrader... getting too meta 2020-12-14 03:45:59 +01:00
ed 580a42dec7 sfx-repack: support wget 2020-12-14 02:59:15 +01:00
ed 29286e159b up2k-client: ignore rejected dupes 2020-12-12 00:55:42 +01:00
ed 19bcf90e9f support uploads with huge filenames 2020-12-12 00:35:54 +01:00
ed dae9c00742 always display world-readable subvolumes 2020-12-04 23:28:18 +01:00
ed 35324ceb7c tests: support windows 2020-12-04 23:26:46 +01:00
ed 5aadd47199 dodge python-bug #7980 2020-12-01 23:20:44 +01:00
ed 7d9057cc62 v0.6.0 2020-12-01 02:58:11 +01:00
ed c4b322b883 this commit sponsored by eslint 2020-12-01 02:25:46 +01:00
ed 19b09c898a fix sfx repack whoops 2020-11-30 03:27:27 +01:00
ed eafe2098b6 v0.5.7 2020-11-30 03:01:14 +01:00
ed 2bc6a20d71 md: poll server for changes 2020-11-30 03:00:44 +01:00
ed 8b502a7235 v0.5.6 2020-11-29 19:49:16 +01:00
ed 37567844af md: add render2 plugin func 2020-11-29 19:34:08 +01:00
ed 2f6c4e0e34 refactoring 2020-11-29 19:32:22 +01:00
ed 1c7cc4cb2b ignore border when sizing table 2020-11-29 18:48:55 +01:00
ed f83db3648e git tag as sfx version 2020-11-28 20:02:20 +01:00
ed b164aa00d4 md: fix eof scroll glitch 2020-11-27 21:25:52 +01:00
ed a2d866d0c2 show plugin errors 2020-11-27 21:10:47 +01:00
49 changed files with 3194 additions and 767 deletions

12
.eslintrc.json Normal file

@@ -0,0 +1,12 @@
{
"env": {
"browser": true,
"es2021": true
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaVersion": 12
},
"rules": {
}
}

2
.gitattributes vendored

@@ -1,4 +1,6 @@
* text eol=lf * text eol=lf
*.reg text eol=crlf
*.png binary *.png binary
*.gif binary *.gif binary

11
.vscode/launch.json vendored

@@ -12,12 +12,21 @@
//"-nw", //"-nw",
"-ed", "-ed",
"-emp", "-emp",
"-e2d",
"-e2s",
"-a", "-a",
"ed:wark", "ed:wark",
"-v", "-v",
"srv::r:aed" "srv::r:aed:cnodupe"
] ]
}, },
{
"name": "No debug",
"preLaunchTask": "no_dbg",
"type": "python",
//"request": "attach", "port": 42069
// fork: nc -l 42069 </dev/null
},
{ {
"name": "Run active unit test", "name": "Run active unit test",
"type": "python", "type": "python",

12
.vscode/settings.json vendored

@@ -50,11 +50,9 @@
"files.associations": { "files.associations": {
"*.makefile": "makefile" "*.makefile": "makefile"
}, },
"editor.codeActionsOnSaveTimeout": 9001, "python.formatting.blackArgs": [
"editor.formatOnSaveTimeout": 9001, "-t",
// "py27"
// things you may wanna edit: ],
// "python.linting.enabled": true,
"python.pythonPath": "/usr/bin/python3",
//"python.linting.enabled": true,
} }

15
.vscode/tasks.json vendored Normal file

@@ -0,0 +1,15 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "pre",
"command": "true;rm -rf inc/* inc/.hist/;mkdir -p inc;",
"type": "shell"
},
{
"label": "no_dbg",
"command": "${config:python.pythonPath} -m copyparty -ed -emp -e2d -e2s -a ed:wark -v srv::r:aed:cnodupe ;exit 1",
"type": "shell"
}
]
}

View File

@@ -8,11 +8,22 @@
turn your phone or raspi into a portable file server with resumable uploads/downloads using IE6 or any other browser turn your phone or raspi into a portable file server with resumable uploads/downloads using IE6 or any other browser
* server runs on anything with `py2.7` or `py3.2+` * server runs on anything with `py2.7` or `py3.3+`
* *resumable* uploads need `firefox 12+` / `chrome 6+` / `safari 6+` / `IE 10+` * *resumable* uploads need `firefox 12+` / `chrome 6+` / `safari 6+` / `IE 10+`
* code standard: `black` * code standard: `black`
## quickstart
download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!
running the sfx without arguments (for example doubleclicking it on Windows) will let anyone access the current folder; see `-h` for help if you want accounts and volumes etc
you may also want these, especially on servers:
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for legit https)
## notes ## notes
* iPhone/iPad: use Firefox to download files * iPhone/iPad: use Firefox to download files
@@ -57,17 +68,16 @@ summary: it works! you can use it! (but technically not even close to beta)
# dependencies # dependencies
* `jinja2` * `jinja2`
* pulls in `markupsafe` as of v2.7; use jinja 2.6 on py3.2
optional, enables thumbnails: optional, will eventually enable thumbnails:
* `Pillow` (requires py2.7 or py3.5+) * `Pillow` (requires py2.7 or py3.5+)
# sfx # sfx
currently there are two self-contained binaries: currently there are two self-contained binaries:
* `copyparty-sfx.sh` for unix (linux and osx) -- smaller, more robust * [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere
* `copyparty-sfx.py` for windows (unix too) -- crossplatform, beta * [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos
launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course
@@ -126,13 +136,15 @@ in the `scripts` folder:
roughly sorted by priority roughly sorted by priority
* up2k handle filename too long * reduce up2k roundtrips
* up2k fails on empty files? alert then stuck * start from a chunk index and just go
* terminate client on bad data
* drop onto folders * drop onto folders
* look into android thumbnail cache file format * `os.copy_file_range` for up2k cloning
* up2k partials ui
* support pillow-simd * support pillow-simd
* cache sha512 chunks on client * cache sha512 chunks on client
* symlink existing files on upload
* comment field * comment field
* ~~look into android thumbnail cache file format~~ bad idea
* figure out the deal with pixel3a not being connectable as hotspot * figure out the deal with pixel3a not being connectable as hotspot
* pixel3a having unpredictable 3sec latency in general :|||| * pixel3a having unpredictable 3sec latency in general :||||

View File

@@ -34,3 +34,8 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
* does the same thing except more correct, `samba` approves * does the same thing except more correct, `samba` approves
* **supports Linux** -- expect `18 MiB/s` (wait what) * **supports Linux** -- expect `18 MiB/s` (wait what)
* **supports Macos** -- probably * **supports Macos** -- probably
# copyparty-fuse-streaming.py
* pretend this doesn't exist

1100
bin/copyparty-fuse-streaming.py Executable file

File diff suppressed because it is too large

View File

@@ -12,7 +12,7 @@ __url__ = "https://github.com/9001/copyparty/"
mount a copyparty server (local or remote) as a filesystem mount a copyparty server (local or remote) as a filesystem
usage: usage:
python copyparty-fuse.py ./music http://192.168.1.69:3923/ python copyparty-fuse.py http://192.168.1.69:3923/ ./music
dependencies: dependencies:
python3 -m pip install --user fusepy python3 -m pip install --user fusepy
@@ -20,6 +20,10 @@ dependencies:
+ on Macos: https://osxfuse.github.io/ + on Macos: https://osxfuse.github.io/
+ on Windows: https://github.com/billziss-gh/winfsp/releases/latest + on Windows: https://github.com/billziss-gh/winfsp/releases/latest
note:
you probably want to run this on windows clients:
https://github.com/9001/copyparty/blob/master/contrib/explorer-nothumbs-nofoldertypes.reg
get server cert: get server cert:
awk '/-BEGIN CERTIFICATE-/ {a=1} a; /-END CERTIFICATE-/{exit}' <(openssl s_client -connect 127.0.0.1:3923 </dev/null 2>/dev/null) >cert.pem awk '/-BEGIN CERTIFICATE-/ {a=1} a; /-END CERTIFICATE-/{exit}' <(openssl s_client -connect 127.0.0.1:3923 </dev/null 2>/dev/null) >cert.pem
""" """
@@ -100,7 +104,7 @@ def rice_tid():
def fancy_log(msg): def fancy_log(msg):
print("{} {}\n".format(rice_tid(), msg), end="") print("{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg), end="")
def null_log(msg): def null_log(msg):
@@ -159,7 +163,7 @@ class RecentLog(object):
thr.start() thr.start()
def put(self, msg): def put(self, msg):
msg = "{} {}\n".format(rice_tid(), msg) msg = "{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg)
if self.f: if self.f:
fmsg = " ".join([datetime.utcnow().strftime("%H%M%S.%f"), str(msg)]) fmsg = " ".join([datetime.utcnow().strftime("%H%M%S.%f"), str(msg)])
self.f.write(fmsg.encode("utf-8")) self.f.write(fmsg.encode("utf-8"))
@@ -367,7 +371,7 @@ class Gateway(object):
ret = [] ret = []
remainder = b"" remainder = b""
ptn = re.compile( ptn = re.compile(
r'^<tr><td>(-|DIR)</td><td><a[^>]* href="([^"]+)"[^>]*>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$' r'^<tr><td>(-|DIR|<a [^<]+</a>)</td><td><a[^>]* href="([^"]+)"[^>]*>([^<]+)</a></td><td>([^<]+)</td><td>[^<]+</td><td>([^<]+)</td></tr>$'
) )
while True: while True:
@@ -405,7 +409,7 @@ class Gateway(object):
info("bad HTML or OS [{}] [{}]".format(fdate, fsize)) info("bad HTML or OS [{}] [{}]".format(fdate, fsize))
# python cannot strptime(1959-01-01) on windows # python cannot strptime(1959-01-01) on windows
if ftype == "-": if ftype != "DIR":
ret.append([fname, self.stat_file(ts, sz), 0]) ret.append([fname, self.stat_file(ts, sz), 0])
else: else:
ret.append([fname, self.stat_dir(ts, sz), 0]) ret.append([fname, self.stat_dir(ts, sz), 0])
@@ -658,8 +662,18 @@ class CPPF(Operations):
else: else:
if get2 - get1 <= 1024 * 1024: if get2 - get1 <= 1024 * 1024:
h_ofs = get1 - 256 * 1024 # unless the request is for the last n bytes of the file,
h_end = get2 + 1024 * 1024 # grow the start to cache some stuff around the range
if get2 < file_sz - 1:
h_ofs = get1 - 1024 * 256
else:
h_ofs = get1 - 1024 * 32
# likewise grow the end unless start is 0
if get1 > 0:
h_end = get2 + 1024 * 1024
else:
h_end = get2 + 1024 * 64
else: else:
# big enough, doesn't need pads # big enough, doesn't need pads
h_ofs = get1 h_ofs = get1
@@ -705,6 +719,7 @@ class CPPF(Operations):
self.dircache.append(cn) self.dircache.append(cn)
self.clean_dircache() self.clean_dircache()
# import pprint; pprint.pprint(ret)
return ret return ret
def readdir(self, path, fh=None): def readdir(self, path, fh=None):
@@ -802,7 +817,11 @@ class CPPF(Operations):
# dbg("=" + repr(cache_stat)) # dbg("=" + repr(cache_stat))
return cache_stat return cache_stat
info("=ENOENT ({})".format(hexler(path))) fun = info
if MACOS and path.split('/')[-1].startswith('._'):
fun = dbg
fun("=ENOENT ({})".format(hexler(path)))
raise FuseOSError(errno.ENOENT) raise FuseOSError(errno.ENOENT)
access = None access = None
@@ -906,6 +925,7 @@ class TheArgparseFormatter(
def main(): def main():
global info, log, dbg global info, log, dbg
time.strptime("19970815", "%Y%m%d") # python#7980
# filecache helps for reads that are ~64k or smaller; # filecache helps for reads that are ~64k or smaller;
# linux generally does 128k so the cache is a slowdown, # linux generally does 128k so the cache is a slowdown,
@@ -960,7 +980,7 @@ def main():
dbg = null_log dbg = null_log
if WINDOWS: if WINDOWS:
os.system("") os.system("rem")
for ch in '<>:"\\|?*': for ch in '<>:"\\|?*':
# microsoft maps illegal characters to f0xx # microsoft maps illegal characters to f0xx
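The parser change above widens the directory-listing regex so it tolerates an extra column and an anchor-wrapped type cell. A minimal sketch of how the new pattern captures a row; the sample HTML and the names of the trailing columns are assumptions for illustration, not taken from a real copyparty response:

```python
import re

# same pattern as the updated Gateway parser above
ptn = re.compile(
    r'^<tr><td>(-|DIR|<a [^<]+</a>)</td><td><a[^>]* href="([^"]+)"[^>]*>([^<]+)</a>'
    r'</td><td>([^<]+)</td><td>[^<]+</td><td>([^<]+)</td></tr>$'
)

# hypothetical listing row, not captured from a real server
row = ('<tr><td>DIR</td><td><a href="music/">music</a></td>'
       '<td>-</td><td>-</td><td>2021-02-12 20:53</td></tr>')

m = ptn.match(row)
ftype, href, label, col_a, col_b = m.groups()
print(ftype, href, label)  # DIR music/ music
# ftype != "DIR" now decides file vs directory, matching the change above;
# col_a / col_b hold whatever the remaining listing columns contain
```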

View File

@@ -567,6 +567,8 @@ class CPPF(Fuse):
def main(): def main():
time.strptime("19970815", "%Y%m%d") # python#7980
server = CPPF() server = CPPF()
server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None) server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
server.parse(values=server, errex=1) server.parse(values=server, errex=1)

View File

@@ -9,6 +9,14 @@
* assumes the webserver and copyparty is running on the same server/IP * assumes the webserver and copyparty is running on the same server/IP
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript * modify `10.13.1.1` as necessary if you wish to support browsers without javascript
### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
* disables thumbnails and folder-type detection in windows explorer
* makes it way faster (especially for slow/networked locations (such as copyparty-fuse))
### [`cfssl.sh`](cfssl.sh)
* creates CA and server certificates using cfssl
* give a 3rd argument to install it to your copyparty config
# OS integration # OS integration
init-scripts to start copyparty as a service init-scripts to start copyparty as a service
* [`systemd/copyparty.service`](systemd/copyparty.service) * [`systemd/copyparty.service`](systemd/copyparty.service)

72
contrib/cfssl.sh Executable file

@@ -0,0 +1,72 @@
#!/bin/bash
set -e
# ca-name and server-name
ca_name="$1"
srv_name="$2"
[ -z "$srv_name" ] && {
echo "need arg 1: ca name"
echo "need arg 2: server name"
exit 1
}
gen_ca() {
(tee /dev/stderr <<EOF
{"CN": "$ca_name ca",
"CA": {"expiry":"87600h", "pathlen":0},
"key": {"algo":"rsa", "size":4096},
"names": [{"O":"$ca_name ca"}]}
EOF
)|
cfssl gencert -initca - |
cfssljson -bare ca
mv ca-key.pem ca.key
rm ca.csr
}
gen_srv() {
(tee /dev/stderr <<EOF
{"key": {"algo":"rsa", "size":4096},
"names": [{"O":"$ca_name - $srv_name"}]}
EOF
)|
cfssl gencert -ca ca.pem -ca-key ca.key \
-profile=www -hostname="$srv_name.$ca_name" - |
cfssljson -bare "$srv_name"
mv "$srv_name-key.pem" "$srv_name.key"
rm "$srv_name.csr"
}
# create ca if not exist
[ -e ca.key ] ||
gen_ca
# always create server cert
gen_srv
# dump cert info
show() {
openssl x509 -text -noout -in $1 |
awk '!o; {o=0} /[0-9a-f:]{16}/{o=1}'
}
show ca.pem
show "$srv_name.pem"
# write cert into copyparty config
[ -z "$3" ] || {
mkdir -p ~/.config/copyparty
cat "$srv_name".{key,pem} ca.pem >~/.config/copyparty/cert.pem
}
# rm *.key *.pem
# cfssl print-defaults config
# cfssl print-defaults csr
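When the optional third argument is given, the script concatenates the server key, server cert and CA cert into `~/.config/copyparty/cert.pem`. A small sanity-check sketch (the path is the script's assumed install target) showing that such a bundle loads the same way the TLS setup further down loads its cert chain:

```python
import os
import ssl

# assumed install path from cfssl.sh's optional third argument
path = os.path.expanduser("~/.config/copyparty/cert.pem")

# key + server cert + ca cert concatenated in one PEM file loads fine
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
ctx.load_cert_chain(path)
print("cert bundle loaded")
```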

View File

@@ -0,0 +1,31 @@
Windows Registry Editor Version 5.00
; this will do 3 things, all optional:
; 1) disable thumbnails
; 2) delete all existing folder type settings/detections
; 3) disable folder type detection (force default columns)
;
; this makes the file explorer way faster,
; especially on slow/networked locations
; =====================================================================
; 1) disable thumbnails
[HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Explorer\Advanced]
"IconsOnly"=dword:00000001
; =====================================================================
; 2) delete all existing folder type settings/detections
[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags]
[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\BagMRU]
; =====================================================================
; 3) disable folder type detection
[HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags\AllFolders\Shell]
"FolderType"="NotSpecified"

View File

@@ -8,7 +8,10 @@ __copyright__ = 2019
__license__ = "MIT" __license__ = "MIT"
__url__ = "https://github.com/9001/copyparty/" __url__ = "https://github.com/9001/copyparty/"
import re
import os import os
import sys
import time
import shutil import shutil
import filecmp import filecmp
import locale import locale
@@ -18,7 +21,13 @@ from textwrap import dedent
from .__init__ import E, WINDOWS, VT100 from .__init__ import E, WINDOWS, VT100
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
from .svchub import SvcHub from .svchub import SvcHub
from .util import py_desc from .util import py_desc, align_tab
HAVE_SSL = True
try:
import ssl
except:
HAVE_SSL = False
class RiceFormatter(argparse.HelpFormatter): class RiceFormatter(argparse.HelpFormatter):
@@ -84,9 +93,77 @@ def ensure_cert():
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout # printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
def configure_ssl_ver(al):
def terse_sslver(txt):
txt = txt.lower()
for c in ["_", "v", "."]:
txt = txt.replace(c, "")
return txt.replace("tls10", "tls1")
# oh man i love openssl
# check this out
# hold my beer
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
sslver = terse_sslver(al.ssl_ver).split(",")
flags = [k for k in ssl.__dict__ if ptn.match(k)]
# SSLv2 SSLv3 TLSv1 TLSv1_1 TLSv1_2 TLSv1_3
if "help" in sslver:
avail = [terse_sslver(x[6:]) for x in flags]
avail = " ".join(sorted(avail) + ["all"])
print("\navailable ssl/tls versions:\n " + avail)
sys.exit(0)
al.ssl_flags_en = 0
al.ssl_flags_de = 0
for flag in sorted(flags):
ver = terse_sslver(flag[6:])
num = getattr(ssl, flag)
if ver in sslver:
al.ssl_flags_en |= num
else:
al.ssl_flags_de |= num
if sslver == ["all"]:
x = al.ssl_flags_en
al.ssl_flags_en = al.ssl_flags_de
al.ssl_flags_de = x
for k in ["ssl_flags_en", "ssl_flags_de"]:
num = getattr(al, k)
print("{}: {:8x} ({})".format(k, num, num))
# think i need that beer now
def configure_ssl_ciphers(al):
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
if al.ssl_ver:
ctx.options &= ~al.ssl_flags_en
ctx.options |= al.ssl_flags_de
is_help = al.ciphers == "help"
if al.ciphers and not is_help:
try:
ctx.set_ciphers(al.ciphers)
except:
print("\n\033[1;31mfailed to set ciphers\033[0m\n")
if not hasattr(ctx, "get_ciphers"):
print("cannot read cipher list: openssl or python too old")
else:
ciphers = [x["description"] for x in ctx.get_ciphers()]
print("\n ".join(["\nenabled ciphers:"] + align_tab(ciphers) + [""]))
if is_help:
sys.exit(0)
def main(): def main():
time.strptime("19970815", "%Y%m%d") # python#7980
if WINDOWS: if WINDOWS:
os.system("") # enables colors os.system("rem") # enables colors
desc = py_desc().replace("[", "\033[1;30m[") desc = py_desc().replace("[", "\033[1;30m[")
@@ -94,7 +171,8 @@ def main():
print(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc)) print(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc))
ensure_locale() ensure_locale()
ensure_cert() if HAVE_SSL:
ensure_cert()
ap = argparse.ArgumentParser( ap = argparse.ArgumentParser(
formatter_class=RiceFormatter, formatter_class=RiceFormatter,
@@ -103,44 +181,89 @@ def main():
epilog=dedent( epilog=dedent(
""" """
-a takes username:password, -a takes username:password,
-v takes src:dst:permset:permset:... where "permset" is -v takes src:dst:permset:permset:cflag:cflag:...
accesslevel followed by username (no separator) where "permset" is accesslevel followed by username (no separator)
and "cflag" is config flags to set on this volume
list of cflags:
cnodupe rejects existing files (instead of symlinking them)
example:\033[35m example:\033[35m
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed \033[36m -a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
mount current directory at "/" with mount current directory at "/" with
* r (read-only) for everyone * r (read-only) for everyone
* a (read+write) for ed * a (read+write) for ed
mount ../inc at "/dump" with mount ../inc at "/dump" with
* w (write-only) for everyone * w (write-only) for everyone
* a (read+write) for ed \033[0m * a (read+write) for ed
* reject duplicate files \033[0m
if no accounts or volumes are configured, if no accounts or volumes are configured,
current folder will be read/write for everyone current folder will be read/write for everyone
consider the config file for more flexible account/volume management, consider the config file for more flexible account/volume management,
including dynamic reload at runtime (and being more readable w) including dynamic reload at runtime (and being more readable w)
values for --urlform:
"stash" dumps the data to file and returns length + checksum
"save,get" dumps to file and returns the page like a GET
"print,get" prints the data in the log and returns GET
(leave out the ",get" to return an error instead)
--ciphers help = available ssl/tls ciphers,
--ssl-ver help = available ssl/tls versions,
default is what python considers safe, usually >= TLS1
""" """
), ),
) )
ap.add_argument( # fmt: off
"-c", metavar="PATH", type=str, action="append", help="add config file" ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
) ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind") ap.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
ap.add_argument("-p", metavar="PORT", type=int, default=3923, help="port to bind")
ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients") ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
ap.add_argument( ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
"-j", metavar="CORES", type=int, default=1, help="max num cpu cores"
)
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account") ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume") ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
ap.add_argument("-q", action="store_true", help="quiet") ap.add_argument("-q", action="store_true", help="quiet")
ap.add_argument("-ed", action="store_true", help="enable ?dots") ap.add_argument("-ed", action="store_true", help="enable ?dots")
ap.add_argument("-emp", action="store_true", help="enable markdown plugins") ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap.add_argument("-e2d", action="store_true", help="enable up2k database")
ap.add_argument("-e2s", action="store_true", help="enable up2k db-scanner")
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)") ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap.add_argument("-nih", action="store_true", help="no info hostname") ap.add_argument("-nih", action="store_true", help="no info hostname")
ap.add_argument("-nid", action="store_true", help="no info disk-usage") ap.add_argument("-nid", action="store_true", help="no info disk-usage")
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
ap.add_argument("--urlform", type=str, default="print,get", help="how to handle url-forms")
ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
ap2.add_argument("--ssl-ver", type=str, help="ssl/tls versions to allow")
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
al = ap.parse_args() al = ap.parse_args()
# fmt: on
al.i = al.i.split(",")
try:
if "-" in al.p:
lo, hi = [int(x) for x in al.p.split("-")]
al.p = list(range(lo, hi + 1))
else:
al.p = [int(x) for x in al.p.split(",")]
except:
raise Exception("invalid value for -p")
if HAVE_SSL:
if al.ssl_ver:
configure_ssl_ver(al)
if al.ciphers:
configure_ssl_ciphers(al)
else:
print("\033[33m ssl module does not exist; cannot enable https\033[0m\n")
SvcHub(al).run() SvcHub(al).run()
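The new `--urlform` switch (described in the epilog above) decides what happens to `application/x-www-form-urlencoded` POSTs. A rough client-side sketch of exercising it; the form field is made up and the address assumes the default port, and with the default `print,get` the form body ends up in the server log while the reply is the same page a GET would return:

```python
from urllib import parse, request

# made-up form field; address assumes the default -p 3923
data = parse.urlencode({"msg": "hello from a plain html form"}).encode()
req = request.Request(
    "http://127.0.0.1:3923/",
    data=data,
    headers={"Content-Type": "application/x-www-form-urlencoded"},
)
with request.urlopen(req) as r:
    page = r.read()  # directory listing, as if this had been a GET
```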

View File

@@ -1,8 +1,8 @@
# coding: utf-8 # coding: utf-8
VERSION = (0, 5, 5) VERSION = (0, 7, 6)
CODENAME = "fuse jelly" CODENAME = "keeping track"
BUILD_DT = (2020, 11, 27) BUILD_DT = (2021, 2, 12)
S_VERSION = ".".join(map(str, VERSION)) S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT) S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -12,11 +12,12 @@ from .util import undot, Pebkac, fsdec, fsenc
class VFS(object): class VFS(object):
"""single level in the virtual fs""" """single level in the virtual fs"""
def __init__(self, realpath, vpath, uread=[], uwrite=[]): def __init__(self, realpath, vpath, uread=[], uwrite=[], flags={}):
self.realpath = realpath # absolute path on host filesystem self.realpath = realpath # absolute path on host filesystem
self.vpath = vpath # absolute path in the virtual filesystem self.vpath = vpath # absolute path in the virtual filesystem
self.uread = uread # users who can read this self.uread = uread # users who can read this
self.uwrite = uwrite # users who can write this self.uwrite = uwrite # users who can write this
self.flags = flags # config switches
self.nodes = {} # child nodes self.nodes = {} # child nodes
def add(self, src, dst): def add(self, src, dst):
@@ -36,6 +37,7 @@ class VFS(object):
"{}/{}".format(self.vpath, name).lstrip("/"), "{}/{}".format(self.vpath, name).lstrip("/"),
self.uread, self.uread,
self.uwrite, self.uwrite,
self.flags,
) )
self.nodes[name] = vn self.nodes[name] = vn
return vn.add(src, dst) return vn.add(src, dst)
@@ -104,7 +106,7 @@ class VFS(object):
real.sort() real.sort()
if not rem: if not rem:
for name, vn2 in sorted(self.nodes.items()): for name, vn2 in sorted(self.nodes.items()):
if uname in vn2.uread: if uname in vn2.uread or "*" in vn2.uread:
virt_vis[name] = vn2 virt_vis[name] = vn2
# no vfs nodes in the list of real inodes # no vfs nodes in the list of real inodes
@@ -128,11 +130,10 @@ class VFS(object):
class AuthSrv(object): class AuthSrv(object):
"""verifies users against given paths""" """verifies users against given paths"""
def __init__(self, args, log_func): def __init__(self, args, log_func, warn_anonwrite=True):
self.log_func = log_func
self.args = args self.args = args
self.log_func = log_func
self.warn_anonwrite = True self.warn_anonwrite = warn_anonwrite
if WINDOWS: if WINDOWS:
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$") self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
@@ -161,7 +162,7 @@ class AuthSrv(object):
yield prev, True yield prev, True
def _parse_config_file(self, fd, user, mread, mwrite, mount): def _parse_config_file(self, fd, user, mread, mwrite, mflags, mount):
vol_src = None vol_src = None
vol_dst = None vol_dst = None
for ln in [x.decode("utf-8").strip() for x in fd]: for ln in [x.decode("utf-8").strip() for x in fd]:
@@ -191,6 +192,7 @@ class AuthSrv(object):
mount[vol_dst] = vol_src mount[vol_dst] = vol_src
mread[vol_dst] = [] mread[vol_dst] = []
mwrite[vol_dst] = [] mwrite[vol_dst] = []
mflags[vol_dst] = {}
continue continue
lvl, uname = ln.split(" ") lvl, uname = ln.split(" ")
@@ -198,6 +200,9 @@ class AuthSrv(object):
mread[vol_dst].append(uname) mread[vol_dst].append(uname)
if lvl in "wa": if lvl in "wa":
mwrite[vol_dst].append(uname) mwrite[vol_dst].append(uname)
if lvl == "c":
# config option, currently switches only
mflags[vol_dst][uname] = True
def reload(self): def reload(self):
""" """
@@ -210,6 +215,7 @@ class AuthSrv(object):
user = {} # username:password user = {} # username:password
mread = {} # mountpoint:[username] mread = {} # mountpoint:[username]
mwrite = {} # mountpoint:[username] mwrite = {} # mountpoint:[username]
mflags = {} # mountpoint:[flag]
mount = {} # dst:src (mountpoint:realpath) mount = {} # dst:src (mountpoint:realpath)
if self.args.a: if self.args.a:
@@ -232,9 +238,13 @@ class AuthSrv(object):
mount[dst] = src mount[dst] = src
mread[dst] = [] mread[dst] = []
mwrite[dst] = [] mwrite[dst] = []
mflags[dst] = {}
perms = perms.split(":") perms = perms.split(":")
for (lvl, uname) in [[x[0], x[1:]] for x in perms]: for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
if lvl == "c":
# config option, currently switches only
mflags[dst][uname] = True
if uname == "": if uname == "":
uname = "*" uname = "*"
if lvl in "ra": if lvl in "ra":
@@ -245,14 +255,15 @@ class AuthSrv(object):
if self.args.c: if self.args.c:
for cfg_fn in self.args.c: for cfg_fn in self.args.c:
with open(cfg_fn, "rb") as f: with open(cfg_fn, "rb") as f:
self._parse_config_file(f, user, mread, mwrite, mount) self._parse_config_file(f, user, mread, mwrite, mflags, mount)
self.all_writable = []
if not mount: if not mount:
# -h says our defaults are CWD at root and read/write for everyone # -h says our defaults are CWD at root and read/write for everyone
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"]) vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
elif "" not in mount: elif "" not in mount:
# there's volumes but no root; make root inaccessible # there's volumes but no root; make root inaccessible
vfs = VFS(os.path.abspath("."), "", [], []) vfs = VFS(os.path.abspath("."), "")
maxdepth = 0 maxdepth = 0
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))): for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
@@ -262,12 +273,18 @@ class AuthSrv(object):
if dst == "": if dst == "":
# rootfs was mapped; fully replaces the default CWD vfs # rootfs was mapped; fully replaces the default CWD vfs
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst]) vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst], mflags[dst])
continue continue
v = vfs.add(mount[dst], dst) v = vfs.add(mount[dst], dst)
v.uread = mread[dst] v.uread = mread[dst]
v.uwrite = mwrite[dst] v.uwrite = mwrite[dst]
v.flags = mflags[dst]
if v.uwrite:
self.all_writable.append(v)
if vfs.uwrite and vfs not in self.all_writable:
self.all_writable.append(vfs)
missing_users = {} missing_users = {}
for d in [mread, mwrite]: for d in [mread, mwrite]:
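To tie the epilog syntax together with the parsing above: a standalone sketch (the function and variable names are illustrative, not copyparty's) of how a `-v` spec such as `srv::r:aed:cnodupe` splits into source, mountpoint, readers, writers and config flags:

```python
# minimal sketch of the -v decomposition; "srv::r:aed:cnodupe" is the spec
# from the launch config above, everything else is illustrative only
def parse_vol(spec):
    src, dst, perms = spec.split(":", 2)
    uread, uwrite, flags = [], [], {}
    for p in perms.split(":"):
        lvl, uname = p[0], p[1:] or "*"
        if lvl == "c":          # config flag, e.g. "nodupe"
            flags[uname] = True
            continue
        if lvl in "ra":
            uread.append(uname)
        if lvl in "wa":
            uwrite.append(uname)
    return src, dst, uread, uwrite, flags

print(parse_vol("srv::r:aed:cnodupe"))
# ('srv', '', ['*', 'ed'], ['ed'], {'nodupe': True})
```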

View File

@@ -73,7 +73,7 @@ class MpWorker(object):
if PY2: if PY2:
sck = pickle.loads(sck) # nosec sck = pickle.loads(sck) # nosec
self.log("%s %s" % addr, "-" * 4 + "C-qpop") self.log("%s %s" % addr, "\033[1;30m|%sC-qpop\033[0m" % ("-" * 4,))
self.httpsrv.accept(sck, addr) self.httpsrv.accept(sck, addr)
with self.mutex: with self.mutex:

View File

@@ -28,7 +28,7 @@ class BrokerThr(object):
def put(self, want_retval, dest, *args): def put(self, want_retval, dest, *args):
if dest == "httpconn": if dest == "httpconn":
sck, addr = args sck, addr = args
self.log("%s %s" % addr, "-" * 4 + "C-qpop") self.log("%s %s" % addr, "\033[1;30m|%sC-qpop\033[0m" % ("-" * 4,))
self.httpsrv.accept(sck, addr) self.httpsrv.accept(sck, addr)
else: else:

View File

@@ -28,6 +28,7 @@ class HttpCli(object):
self.conn = conn self.conn = conn
self.s = conn.s self.s = conn.s
self.sr = conn.sr self.sr = conn.sr
self.ip = conn.addr[0]
self.addr = conn.addr self.addr = conn.addr
self.args = conn.args self.args = conn.args
self.auth = conn.auth self.auth = conn.auth
@@ -42,7 +43,7 @@ class HttpCli(object):
self.log_func(self.log_src, msg) self.log_func(self.log_src, msg)
def _check_nonfatal(self, ex): def _check_nonfatal(self, ex):
return ex.code in [404] return ex.code < 400 or ex.code == 404
def _assert_safe_rem(self, rem): def _assert_safe_rem(self, rem):
# sanity check to prevent any disasters # sanity check to prevent any disasters
@@ -85,7 +86,8 @@ class HttpCli(object):
v = self.headers.get("x-forwarded-for", None) v = self.headers.get("x-forwarded-for", None)
if v is not None and self.conn.addr[0] in ["127.0.0.1", "::1"]: if v is not None and self.conn.addr[0] in ["127.0.0.1", "::1"]:
self.log_src = self.conn.set_rproxy(v.split(",")[0]) self.ip = v.split(",")[0]
self.log_src = self.conn.set_rproxy(self.ip)
self.uname = "*" self.uname = "*"
if "cookie" in self.headers: if "cookie" in self.headers:
@@ -132,6 +134,16 @@ class HttpCli(object):
uparam["raw"] = True uparam["raw"] = True
uparam["dots"] = True uparam["dots"] = True
if hasattr(self.s, "cipher"):
self.ssl_suf = "".join(
[
" \033[3{}m{}".format(c, s)
for c, s in zip([6, 3, 6], self.s.cipher())
]
)
else:
self.ssl_suf = ""
try: try:
if self.mode in ["GET", "HEAD"]: if self.mode in ["GET", "HEAD"]:
return self.handle_get() and self.keepalive return self.handle_get() and self.keepalive
@@ -209,7 +221,7 @@ class HttpCli(object):
logmsg += " [\033[36m" + rval + "\033[0m]" logmsg += " [\033[36m" + rval + "\033[0m]"
self.log(logmsg) self.log(logmsg + self.ssl_suf)
# "embedded" resources # "embedded" resources
if self.vpath.startswith(".cpr"): if self.vpath.startswith(".cpr"):
@@ -243,7 +255,7 @@ class HttpCli(object):
return self.tx_browser() return self.tx_browser()
def handle_options(self): def handle_options(self):
self.log("OPTIONS " + self.req) self.log("OPTIONS " + self.req + self.ssl_suf)
self.send_headers( self.send_headers(
None, None,
204, 204,
@@ -256,7 +268,7 @@ class HttpCli(object):
return True return True
def handle_put(self): def handle_put(self):
self.log("PUT " + self.req) self.log("PUT " + self.req + self.ssl_suf)
if self.headers.get("expect", "").lower() == "100-continue": if self.headers.get("expect", "").lower() == "100-continue":
try: try:
@@ -267,7 +279,7 @@ class HttpCli(object):
return self.handle_stash() return self.handle_stash()
def handle_post(self): def handle_post(self):
self.log("POST " + self.req) self.log("POST " + self.req + self.ssl_suf)
if self.headers.get("expect", "").lower() == "100-continue": if self.headers.get("expect", "").lower() == "100-continue":
try: try:
@@ -292,26 +304,51 @@ class HttpCli(object):
if "application/octet-stream" in ctype: if "application/octet-stream" in ctype:
return self.handle_post_binary() return self.handle_post_binary()
raise Pebkac(405, "don't know how to handle {} POST".format(ctype)) if "application/x-www-form-urlencoded" in ctype:
opt = self.args.urlform
if "stash" in opt:
return self.handle_stash()
def handle_stash(self): if "save" in opt:
post_sz, _, _, path = self.dump_to_file()
self.log("urlform: {} bytes, {}".format(post_sz, path))
elif "print" in opt:
reader, _ = self.get_body_reader()
for buf in reader:
buf = buf.decode("utf-8", "replace")
self.log("urlform:\n {}\n".format(buf))
if "get" in opt:
return self.handle_get()
raise Pebkac(405, "POST({}) is disabled".format(ctype))
raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
def get_body_reader(self):
remains = int(self.headers.get("content-length", None)) remains = int(self.headers.get("content-length", None))
if remains is None: if remains is None:
reader = read_socket_unbounded(self.sr)
self.keepalive = False self.keepalive = False
return read_socket_unbounded(self.sr), remains
else: else:
reader = read_socket(self.sr, remains) return read_socket(self.sr, remains), remains
def dump_to_file(self):
reader, remains = self.get_body_reader()
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True) vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
fdir = os.path.join(vfs.realpath, rem) fdir = os.path.join(vfs.realpath, rem)
addr = self.conn.addr[0].replace(":", ".") addr = self.ip.replace(":", ".")
fn = "put-{:.6f}-{}.bin".format(time.time(), addr) fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
path = os.path.join(fdir, fn) path = os.path.join(fdir, fn)
with open(path, "wb", 512 * 1024) as f: with open(path, "wb", 512 * 1024) as f:
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f) post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
return post_sz, sha_b64, remains, path
def handle_stash(self):
post_sz, sha_b64, remains, path = self.dump_to_file()
spd = self._spd(post_sz) spd = self._spd(post_sz)
self.log("{} wrote {}/{} bytes to {}".format(spd, post_sz, remains, path)) self.log("{} wrote {}/{} bytes to {}".format(spd, post_sz, remains, path))
self.reply("{}\n{}\n".format(post_sz, sha_b64).encode("utf-8")) self.reply("{}\n{}\n".format(post_sz, sha_b64).encode("utf-8"))
@@ -384,9 +421,11 @@ class HttpCli(object):
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True) vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
body["vdir"] = self.vpath body["vtop"] = vfs.vpath
body["rdir"] = os.path.join(vfs.realpath, rem) body["ptop"] = vfs.realpath
body["addr"] = self.addr[0] body["prel"] = rem
body["addr"] = self.ip
body["flag"] = vfs.flags
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body) x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
response = x.get() response = x.get()
@@ -408,7 +447,10 @@ class HttpCli(object):
except KeyError: except KeyError:
raise Pebkac(400, "need hash and wark headers for binary POST") raise Pebkac(400, "need hash and wark headers for binary POST")
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", wark, chash) vfs, _ = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
ptop = vfs.realpath
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", ptop, wark, chash)
response = x.get() response = x.get()
chunksize, cstart, path, lastmod = response chunksize, cstart, path, lastmod = response
@@ -453,8 +495,8 @@ class HttpCli(object):
self.log("clone {} done".format(cstart[0])) self.log("clone {} done".format(cstart[0]))
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", wark, chash) x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
num_left = x.get() num_left, path = x.get()
if not WINDOWS and num_left == 0: if not WINDOWS and num_left == 0:
times = (int(time.time()), int(lastmod)) times = (int(time.time()), int(lastmod))
@@ -510,10 +552,9 @@ class HttpCli(object):
raise Pebkac(500, "mkdir failed, check the logs") raise Pebkac(500, "mkdir failed, check the logs")
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/") vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
esc_paths = [quotep(vpath), html_escape(vpath)]
html = self.conn.tpl_msg.render( html = self.conn.tpl_msg.render(
h2='<a href="/{}">go to /{}</a>'.format( h2='<a href="/{}">go to /{}</a>'.format(*esc_paths),
quotep(vpath), html_escape(vpath)
),
pre="aight", pre="aight",
click=True, click=True,
) )
@@ -568,24 +609,24 @@ class HttpCli(object):
self.log("discarding incoming file without filename") self.log("discarding incoming file without filename")
# fallthrough # fallthrough
fn = os.devnull
if p_file and not nullwrite: if p_file and not nullwrite:
fdir = os.path.join(vfs.realpath, rem) fdir = os.path.join(vfs.realpath, rem)
fn = os.path.join(fdir, sanitize_fn(p_file)) fname = sanitize_fn(p_file)
if not os.path.isdir(fsenc(fdir)): if not os.path.isdir(fsenc(fdir)):
raise Pebkac(404, "that folder does not exist") raise Pebkac(404, "that folder does not exist")
# TODO broker which avoid this race and suffix = ".{:.6f}-{}".format(time.time(), self.ip)
# provides a new filename if taken (same as up2k) open_args = {"fdir": fdir, "suffix": suffix}
if os.path.exists(fsenc(fn)): else:
fn += ".{:.6f}-{}".format(time.time(), self.addr[0]) open_args = {}
# using current-time instead of t0 cause clients fname = os.devnull
# may reuse a name for multiple files in one post fdir = ""
try: try:
with open(fsenc(fn), "wb") as f: with ren_open(fname, "wb", 512 * 1024, **open_args) as f:
self.log("writing to {0}".format(fn)) f, fname = f["orz"]
self.log("writing to {}/{}".format(fdir, fname))
sz, sha512_hex, _ = hashcopy(self.conn, p_data, f) sz, sha512_hex, _ = hashcopy(self.conn, p_data, f)
if sz == 0: if sz == 0:
raise Pebkac(400, "empty files in post") raise Pebkac(400, "empty files in post")
@@ -594,8 +635,14 @@ class HttpCli(object):
self.conn.nbyte += sz self.conn.nbyte += sz
except Pebkac: except Pebkac:
if fn != os.devnull: if fname != os.devnull:
os.rename(fsenc(fn), fsenc(fn + ".PARTIAL")) fp = os.path.join(fdir, fname)
suffix = ".PARTIAL"
try:
os.rename(fsenc(fp), fsenc(fp + suffix))
except:
fp = fp[: -len(suffix)]
os.rename(fsenc(fp), fsenc(fp + suffix))
raise raise
@@ -631,7 +678,7 @@ class HttpCli(object):
"\n".join( "\n".join(
unicode(x) unicode(x)
for x in [ for x in [
":".join(unicode(x) for x in self.addr), ":".join(unicode(x) for x in [self.ip, self.addr[1]]),
msg.rstrip(), msg.rstrip(),
] ]
) )
@@ -680,7 +727,7 @@ class HttpCli(object):
return True return True
fp = os.path.join(vfs.realpath, rem) fp = os.path.join(vfs.realpath, rem)
srv_lastmod = -1 srv_lastmod = srv_lastmod3 = -1
try: try:
st = os.stat(fsenc(fp)) st = os.stat(fsenc(fp))
srv_lastmod = st.st_mtime srv_lastmod = st.st_mtime
@@ -731,7 +778,7 @@ class HttpCli(object):
if p_field != "body": if p_field != "body":
raise Pebkac(400, "expected body, got {}".format(p_field)) raise Pebkac(400, "expected body, got {}".format(p_field))
with open(fp, "wb") as f: with open(fp, "wb", 512 * 1024) as f:
sz, sha512, _ = hashcopy(self.conn, p_data, f) sz, sha512, _ = hashcopy(self.conn, p_data, f)
new_lastmod = os.stat(fsenc(fp)).st_mtime new_lastmod = os.stat(fsenc(fp)).st_mtime
@@ -756,9 +803,12 @@ class HttpCli(object):
cli_dt = time.strptime(cli_lastmod, "%a, %d %b %Y %H:%M:%S GMT") cli_dt = time.strptime(cli_lastmod, "%a, %d %b %Y %H:%M:%S GMT")
cli_ts = calendar.timegm(cli_dt) cli_ts = calendar.timegm(cli_dt)
return file_lastmod, int(file_ts) > int(cli_ts) return file_lastmod, int(file_ts) > int(cli_ts)
except: except Exception as ex:
self.log("bad lastmod format: {}".format(cli_lastmod)) self.log(
self.log(" expected format: {}".format(file_lastmod)) "lastmod {}\nremote: [{}]\n local: [{}]".format(
repr(ex), cli_lastmod, file_lastmod
)
)
return file_lastmod, file_lastmod != cli_lastmod return file_lastmod, file_lastmod != cli_lastmod
return file_lastmod, True return file_lastmod, True
@@ -781,6 +831,8 @@ class HttpCli(object):
editions[ext or "plain"] = [fs_path, st.st_size] editions[ext or "plain"] = [fs_path, st.st_size]
except: except:
pass pass
if not self.vpath.startswith(".cpr/"):
break
if not editions: if not editions:
raise Pebkac(404) raise Pebkac(404)
@@ -875,6 +927,7 @@ class HttpCli(object):
logtail += " [\033[36m{}-{}\033[0m]".format(lower, upper) logtail += " [\033[36m{}-{}\033[0m]".format(lower, upper)
use_sendfile = False
if decompress: if decompress:
open_func = gzip.open open_func = gzip.open
open_args = [fsenc(fs_path), "rb"] open_args = [fsenc(fs_path), "rb"]
@@ -884,6 +937,11 @@ class HttpCli(object):
open_func = open open_func = open
# 512 kB is optimal for huge files, use 64k # 512 kB is optimal for huge files, use 64k
open_args = [fsenc(fs_path), "rb", 64 * 1024] open_args = [fsenc(fs_path), "rb", 64 * 1024]
use_sendfile = (
not self.ssl_suf
and not self.args.no_sendfile
and hasattr(os, "sendfile")
)
# #
# send reply # send reply
@@ -906,24 +964,13 @@ class HttpCli(object):
ret = True ret = True
with open_func(*open_args) as f: with open_func(*open_args) as f:
remains = upper - lower if use_sendfile:
f.seek(lower) remains = sendfile_kern(lower, upper, f, self.s)
while remains > 0: else:
# time.sleep(0.01) remains = sendfile_py(lower, upper, f, self.s)
buf = f.read(4096)
if not buf:
break
if remains < len(buf): if remains > 0:
buf = buf[:remains] logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
try:
self.s.sendall(buf)
remains -= len(buf)
except:
logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
ret = False
break
spd = self._spd((upper - lower) - remains) spd = self._spd((upper - lower) - remains)
self.log("{}, {}".format(logmsg, spd)) self.log("{}, {}".format(logmsg, spd))
@@ -964,6 +1011,7 @@ class HttpCli(object):
"title": html_escape(self.vpath), "title": html_escape(self.vpath),
"lastmod": int(ts_md * 1000), "lastmod": int(ts_md * 1000),
"md_plug": "true" if self.args.emp else "false", "md_plug": "true" if self.args.emp else "false",
"md_chk_rate": self.args.mcr,
"md": "", "md": "",
} }
sz_html = len(template.render(**targs).encode("utf-8")) sz_html = len(template.render(**targs).encode("utf-8"))
@@ -1018,6 +1066,10 @@ class HttpCli(object):
if abspath.endswith(".md") and "raw" not in self.uparam: if abspath.endswith(".md") and "raw" not in self.uparam:
return self.tx_md(abspath) return self.tx_md(abspath)
bad = "{0}.hist{0}up2k.".format(os.sep)
if abspath.endswith(bad + "db") or abspath.endswith(bad + "snap"):
raise Pebkac(403)
return self.tx_file(abspath) return self.tx_file(abspath)
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname) fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname)
@@ -1043,6 +1095,10 @@ class HttpCli(object):
if not self.args.ed or "dots" not in self.uparam: if not self.args.ed or "dots" not in self.uparam:
vfs_ls = exclude_dotfiles(vfs_ls) vfs_ls = exclude_dotfiles(vfs_ls)
hidden = []
if fsroot.endswith(str(os.sep) + ".hist"):
hidden = ["up2k.db", "up2k.snap"]
dirs = [] dirs = []
files = [] files = []
for fn in vfs_ls: for fn in vfs_ls:
@@ -1054,6 +1110,8 @@ class HttpCli(object):
if fn in vfs_virt: if fn in vfs_virt:
fspath = vfs_virt[fn].realpath fspath = vfs_virt[fn].realpath
elif fn in hidden:
continue
else: else:
fspath = fsroot + "/" + fn fspath = fsroot + "/" + fn
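`tx_file` now hands the transfer to either `sendfile_kern` or `sendfile_py` from `util.py`, which this diff does not show. A rough sketch of what the two paths amount to; the signatures and error handling are assumptions rather than copies of the real helpers:

```python
import os

def sendfile_py(lower, upper, f, sck):
    """userspace fallback: read + sendall in small chunks"""
    remains = upper - lower
    f.seek(lower)
    while remains > 0:
        buf = f.read(min(4096, remains))
        if not buf:
            break
        try:
            sck.sendall(buf)
            remains -= len(buf)
        except OSError:
            break
    return remains  # 0 on success, >0 if the client went away

def sendfile_kern(lower, upper, f, sck):
    """zero-copy path: let the kernel copy file -> socket (plaintext only)"""
    remains = upper - lower
    ofs = lower
    while remains > 0:
        try:
            n = os.sendfile(sck.fileno(), f.fileno(), ofs, remains)
        except OSError:
            break
        if n <= 0:
            break
        ofs += n
        remains -= n
    return remains
```

The `use_sendfile` guard above only takes the kernel path for plaintext connections without `--no-sendfile`, since `os.sendfile` bypasses the TLS layer.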

View File

@@ -3,10 +3,15 @@ from __future__ import print_function, unicode_literals
import os import os
import sys import sys
import ssl
import time import time
import socket import socket
HAVE_SSL = True
try:
import ssl
except:
HAVE_SSL = False
try: try:
import jinja2 import jinja2
except ImportError: except ImportError:
@@ -65,6 +70,7 @@ class HttpConn(object):
color = 34 color = 34
self.rproxy = ip self.rproxy = ip
self.ip = ip
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26) self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
return self.log_src return self.log_src
@@ -74,9 +80,8 @@ class HttpConn(object):
def log(self, msg): def log(self, msg):
self.log_func(self.log_src, msg) self.log_func(self.log_src, msg)
def run(self): def _detect_https(self):
method = None method = None
self.sr = None
if self.cert_path: if self.cert_path:
try: try:
method = self.s.recv(4, socket.MSG_PEEK) method = self.s.recv(4, socket.MSG_PEEK)
@@ -101,16 +106,52 @@ class HttpConn(object):
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8")) self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
return return
if method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]: return method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]
def run(self):
self.sr = None
if self.args.https_only:
is_https = True
elif self.args.http_only or not HAVE_SSL:
is_https = False
else:
is_https = self._detect_https()
if is_https:
if self.sr: if self.sr:
self.log("\033[1;31mTODO: cannot do https in jython\033[0m") self.log("\033[1;31mTODO: cannot do https in jython\033[0m")
return return
self.log_src = self.log_src.replace("[36m", "[35m") self.log_src = self.log_src.replace("[36m", "[35m")
try: try:
self.s = ssl.wrap_socket( ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
self.s, server_side=True, certfile=self.cert_path ctx.load_cert_chain(self.cert_path)
) if self.args.ssl_ver:
ctx.options &= ~self.args.ssl_flags_en
ctx.options |= self.args.ssl_flags_de
# print(repr(ctx.options))
if self.args.ssl_log:
try:
ctx.keylog_filename = self.args.ssl_log
except:
self.log("keylog failed; openssl or python too old")
if self.args.ciphers:
ctx.set_ciphers(self.args.ciphers)
self.s = ctx.wrap_socket(self.s, server_side=True)
if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
overlap = [y[::-1] for y in self.s.shared_ciphers()]
lines = [str(x) for x in (["TLS cipher overlap:"] + overlap)]
self.log("\n".join(lines))
for k, v in [
["compression", self.s.compression()],
["ALPN proto", self.s.selected_alpn_protocol()],
["NPN proto", self.s.selected_npn_protocol()],
]:
self.log("TLS {}: {}".format(k, v or "nah"))
except Exception as ex: except Exception as ex:
em = str(ex) em = str(ex)

View File

@@ -38,7 +38,7 @@ class HttpSrv(object):
def accept(self, sck, addr): def accept(self, sck, addr):
"""takes an incoming tcp connection and creates a thread to handle it""" """takes an incoming tcp connection and creates a thread to handle it"""
self.log("%s %s" % addr, "-" * 5 + "C-cthr") self.log("%s %s" % addr, "\033[1;30m|%sC-cthr\033[0m" % ("-" * 5,))
thr = threading.Thread(target=self.thr_client, args=(sck, addr)) thr = threading.Thread(target=self.thr_client, args=(sck, addr))
thr.daemon = True thr.daemon = True
thr.start() thr.start()
@@ -66,11 +66,11 @@ class HttpSrv(object):
thr.start() thr.start()
try: try:
self.log("%s %s" % addr, "-" * 6 + "C-crun") self.log("%s %s" % addr, "\033[1;30m|%sC-crun\033[0m" % ("-" * 6,))
cli.run() cli.run()
finally: finally:
self.log("%s %s" % addr, "-" * 7 + "C-done") self.log("%s %s" % addr, "\033[1;30m|%sC-cdone\033[0m" % ("-" * 7,))
try: try:
sck.shutdown(socket.SHUT_RDWR) sck.shutdown(socket.SHUT_RDWR)
sck.close() sck.close()

View File

@@ -9,6 +9,7 @@ from datetime import datetime, timedelta
import calendar import calendar
from .__init__ import PY2, WINDOWS, MACOS, VT100 from .__init__ import PY2, WINDOWS, MACOS, VT100
from .authsrv import AuthSrv
from .tcpsrv import TcpSrv from .tcpsrv import TcpSrv
from .up2k import Up2k from .up2k import Up2k
from .util import mp from .util import mp
@@ -38,6 +39,10 @@ class SvcHub(object):
self.tcpsrv = TcpSrv(self) self.tcpsrv = TcpSrv(self)
self.up2k = Up2k(self) self.up2k = Up2k(self)
if self.args.e2d and self.args.e2s:
auth = AuthSrv(self.args, self.log, False)
self.up2k.build_indexes(auth.all_writable)
# decide which worker impl to use # decide which worker impl to use
if self.check_mp_enable(): if self.check_mp_enable():
from .broker_mp import BrokerMp as Broker from .broker_mp import BrokerMp as Broker

View File

@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import re import re
import time import time
import socket import socket
import select
from .util import chkcmd, Counter from .util import chkcmd, Counter
@@ -23,55 +24,73 @@ class TcpSrv(object):
ip = "127.0.0.1" ip = "127.0.0.1"
eps = {ip: "local only"} eps = {ip: "local only"}
if self.args.i != ip: nonlocals = [x for x in self.args.i if x != ip]
eps = self.detect_interfaces(self.args.i) or {self.args.i: "external"} if nonlocals:
eps = self.detect_interfaces(self.args.i)
if not eps:
for x in nonlocals:
eps[x] = "external"
for ip, desc in sorted(eps.items(), key=lambda x: x[1]): for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
self.log( for port in sorted(self.args.p):
"tcpsrv", self.log(
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format( "tcpsrv",
ip, self.args.p, desc "available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
), ip, port, desc
) ),
)
self.srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.srv = []
self.srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) for ip in self.args.i:
for port in self.args.p:
self.srv.append(self._listen(ip, port))
def _listen(self, ip, port):
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
try: try:
self.srv.bind((self.args.i, self.args.p)) srv.bind((ip, port))
return srv
except (OSError, socket.error) as ex: except (OSError, socket.error) as ex:
if ex.errno == 98: if ex.errno in [98, 48]:
raise Exception( e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
"\033[1;31mport {} is busy on interface {}\033[0m".format( elif ex.errno in [99, 49]:
self.args.p, self.args.i e = "\033[1;31minterface {} does not exist\033[0m".format(ip)
) else:
) raise
raise Exception(e)
if ex.errno == 99:
raise Exception(
"\033[1;31minterface {} does not exist\033[0m".format(self.args.i)
)
def run(self): def run(self):
self.srv.listen(self.args.nc) for srv in self.srv:
srv.listen(self.args.nc)
self.log("tcpsrv", "listening @ {0}:{1}".format(self.args.i, self.args.p)) ip, port = srv.getsockname()
self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port))
while True: while True:
self.log("tcpsrv", "-" * 1 + "C-ncli") self.log("tcpsrv", "\033[1;30m|%sC-ncli\033[0m" % ("-" * 1,))
if self.num_clients.v >= self.args.nc: if self.num_clients.v >= self.args.nc:
time.sleep(0.1) time.sleep(0.1)
continue continue
self.log("tcpsrv", "-" * 2 + "C-acc1") self.log("tcpsrv", "\033[1;30m|%sC-acc1\033[0m" % ("-" * 2,))
sck, addr = self.srv.accept() ready, _, _ = select.select(self.srv, [], [])
self.log("%s %s" % addr, "-" * 3 + "C-acc2") for srv in ready:
self.num_clients.add() sck, addr = srv.accept()
self.hub.broker.put(False, "httpconn", sck, addr) sip, sport = srv.getsockname()
self.log(
"%s %s" % addr,
"\033[1;30m|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
"-" * 3, sip, sport % 8, sport
),
)
self.num_clients.add()
self.hub.broker.put(False, "httpconn", sck, addr)
def shutdown(self): def shutdown(self):
self.log("tcpsrv", "ok bye") self.log("tcpsrv", "ok bye")
def detect_interfaces(self, listen_ip): def detect_interfaces(self, listen_ips):
eps = {} eps = {}
# get all ips and their interfaces # get all ips and their interfaces
@@ -85,8 +104,9 @@ class TcpSrv(object):
for ln in ip_addr.split("\n"): for ln in ip_addr.split("\n"):
try: try:
ip, dev = r.match(ln.rstrip()).groups() ip, dev = r.match(ln.rstrip()).groups()
if listen_ip in ["0.0.0.0", ip]: for lip in listen_ips:
eps[ip] = dev if lip in ["0.0.0.0", ip]:
eps[ip] = dev
except: except:
pass pass
@@ -113,11 +133,12 @@ class TcpSrv(object):
s.close() s.close()
if default_route and listen_ip in ["0.0.0.0", default_route]: for lip in listen_ips:
desc = "\033[32mexternal" if default_route and lip in ["0.0.0.0", default_route]:
try: desc = "\033[32mexternal"
eps[default_route] += ", " + desc try:
except: eps[default_route] += ", " + desc
eps[default_route] = desc except:
eps[default_route] = desc
return eps return eps


@@ -6,6 +6,9 @@ import os
import re import re
import time import time
import math import math
import json
import gzip
import stat
import shutil import shutil
import base64 import base64
import hashlib import hashlib
@@ -13,7 +16,15 @@ import threading
from copy import deepcopy from copy import deepcopy
from .__init__ import WINDOWS from .__init__ import WINDOWS
from .util import Pebkac, Queue, fsenc, sanitize_fn from .util import Pebkac, Queue, fsdec, fsenc, sanitize_fn, ren_open, atomic_move
HAVE_SQLITE3 = False
try:
import sqlite3
HAVE_SQLITE3 = True
except:
pass
class Up2k(object): class Up2k(object):
@@ -22,20 +33,21 @@ class Up2k(object):
* documentation * documentation
* registry persistence * registry persistence
* ~/.config flatfiles for active jobs * ~/.config flatfiles for active jobs
* wark->path database for finished uploads
""" """
def __init__(self, broker): def __init__(self, broker):
self.broker = broker self.broker = broker
self.args = broker.args self.args = broker.args
self.log = broker.log self.log = broker.log
self.persist = self.args.e2d
# config # config
self.salt = "hunter2" # TODO: config self.salt = "hunter2" # TODO: config
# state # state
self.registry = {}
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.registry = {}
self.db = {}
if WINDOWS: if WINDOWS:
# usually fails to set lastmod too quickly # usually fails to set lastmod too quickly
@@ -44,54 +56,313 @@ class Up2k(object):
thr.daemon = True thr.daemon = True
thr.start() thr.start()
if self.persist:
thr = threading.Thread(target=self._snapshot)
thr.daemon = True
thr.start()
# static # static
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$") self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
if self.persist and not HAVE_SQLITE3:
m = "could not initialize sqlite3, will use in-memory registry only"
self.log("up2k", m)
def _vis_job_progress(self, job):
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
path = os.path.join(job["ptop"], job["prel"], job["name"])
return "{:5.1f}% {}".format(perc, path)
def _vis_reg_progress(self, reg):
ret = []
for _, job in reg.items():
ret.append(self._vis_job_progress(job))
return ret
def register_vpath(self, ptop):
with self.mutex:
if ptop in self.registry:
return None
reg = {}
path = os.path.join(ptop, ".hist", "up2k.snap")
if self.persist and os.path.exists(path):
with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8")
reg = json.loads(j)
for _, job in reg.items():
job["poke"] = time.time()
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
m = [m] + self._vis_reg_progress(reg)
self.log("up2k", "\n".join(m))
self.registry[ptop] = reg
if not self.persist or not HAVE_SQLITE3:
return None
try:
os.mkdir(os.path.join(ptop, ".hist"))
except:
pass
db_path = os.path.join(ptop, ".hist", "up2k.db")
if ptop in self.db:
# self.db[ptop].close()
return None
try:
db = self._open_db(db_path)
self.db[ptop] = db
return db
except Exception as ex:
m = "failed to open [{}]: {}".format(ptop, repr(ex))
self.log("up2k", m)
return None
def build_indexes(self, writeables):
tops = [d.realpath for d in writeables]
for top in tops:
db = self.register_vpath(top)
if db:
# can be symlink so don't `and d.startswith(top)`
excl = set([d for d in tops if d != top])
dbw = [db, 0, time.time()]
self._build_dir(dbw, top, excl, top)
self._drop_lost(db, top)
if dbw[1]:
self.log("up2k", "commit {} new files".format(dbw[1]))
db.commit()
def _build_dir(self, dbw, top, excl, cdir):
try:
inodes = [fsdec(x) for x in os.listdir(fsenc(cdir))]
except Exception as ex:
self.log("up2k", "listdir: {} @ [{}]".format(repr(ex), cdir))
return
histdir = os.path.join(top, ".hist")
for inode in inodes:
abspath = os.path.join(cdir, inode)
try:
inf = os.stat(fsenc(abspath))
except Exception as ex:
self.log("up2k", "stat: {} @ [{}]".format(repr(ex), abspath))
continue
if stat.S_ISDIR(inf.st_mode):
if abspath in excl or abspath == histdir:
continue
# self.log("up2k", " dir: {}".format(abspath))
self._build_dir(dbw, top, excl, abspath)
else:
# self.log("up2k", "file: {}".format(abspath))
rp = abspath[len(top) :].replace("\\", "/").strip("/")
c = dbw[0].execute("select * from up where rp = ?", (rp,))
in_db = list(c.fetchall())
if in_db:
_, dts, dsz, _ = in_db[0]
if len(in_db) > 1:
m = "WARN: multiple entries: [{}] => [{}] ({})"
self.log("up2k", m.format(top, rp, len(in_db)))
dts = -1
if dts == inf.st_mtime and dsz == inf.st_size:
continue
m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
top, rp, dts, inf.st_mtime, dsz, inf.st_size
)
self.log("up2k", m)
self.db_rm(dbw[0], rp)
dbw[1] += 1
in_db = None
self.log("up2k", "file: {}".format(abspath))
try:
hashes = self._hashlist_from_file(abspath)
except Exception as ex:
self.log("up2k", "hash: {} @ [{}]".format(repr(ex), abspath))
continue
wark = self._wark_from_hashlist(inf.st_size, hashes)
self.db_add(dbw[0], wark, rp, inf.st_mtime, inf.st_size)
dbw[1] += 1
td = time.time() - dbw[2]
if dbw[1] > 1024 or td > 60:
self.log("up2k", "commit {} new files".format(dbw[1]))
dbw[0].commit()
dbw[1] = 0
dbw[2] = time.time()
def _drop_lost(self, db, top):
rm = []
c = db.execute("select * from up")
for dwark, dts, dsz, drp in c:
abspath = os.path.join(top, drp)
try:
if not os.path.exists(fsenc(abspath)):
rm.append(drp)
except Exception as ex:
self.log("up2k", "stat-rm: {} @ [{}]".format(repr(ex), abspath))
if not rm:
return
self.log("up2k", "forgetting {} deleted files".format(len(rm)))
for rp in rm:
self.db_rm(db, rp)
def _open_db(self, db_path):
conn = sqlite3.connect(db_path, check_same_thread=False)
try:
c = conn.execute(r"select * from kv where k = 'sver'")
rows = c.fetchall()
if rows:
ver = rows[0][1]
else:
self.log("up2k", "WARN: no sver in kv, DB corrupt?")
ver = "unknown"
if ver == "1":
try:
nfiles = next(conn.execute("select count(w) from up"))[0]
self.log("up2k", "found DB at {} |{}|".format(db_path, nfiles))
return conn
except Exception as ex:
m = "WARN: could not list files, DB corrupt?\n " + repr(ex)
self.log("up2k", m)
m = "REPLACING unsupported DB (v.{}) at {}".format(ver, db_path)
self.log("up2k", m)
conn.close()
os.unlink(db_path)
conn = sqlite3.connect(db_path, check_same_thread=False)
except:
pass
# sqlite is variable-width only, no point in using char/nchar/varchar
for cmd in [
r"create table kv (k text, v text)",
r"create table up (w text, mt int, sz int, rp text)",
r"insert into kv values ('sver', '1')",
r"create index up_w on up(w)",
]:
conn.execute(cmd)
conn.commit()
self.log("up2k", "created DB at {}".format(db_path))
return conn
def handle_json(self, cj): def handle_json(self, cj):
self.register_vpath(cj["ptop"])
cj["name"] = sanitize_fn(cj["name"]) cj["name"] = sanitize_fn(cj["name"])
cj["poke"] = time.time()
wark = self._get_wark(cj) wark = self._get_wark(cj)
now = time.time() now = time.time()
job = None
with self.mutex: with self.mutex:
# TODO use registry persistence here to symlink any matching wark db = self.db.get(cj["ptop"], None)
if wark in self.registry: reg = self.registry[cj["ptop"]]
job = self.registry[wark] if db:
if job["rdir"] != cj["rdir"] or job["name"] != cj["name"]: cur = db.execute(r"select * from up where w = ?", (wark,))
src = os.path.join(job["rdir"], job["name"]) for _, dtime, dsize, dp_rel in cur:
dst = os.path.join(cj["rdir"], cj["name"]) dp_abs = os.path.join(cj["ptop"], dp_rel).replace("\\", "/")
# relying on path.exists to return false on broken symlinks
if os.path.exists(fsenc(dp_abs)):
try:
prel, name = dp_rel.rsplit("/", 1)
except:
prel = ""
name = dp_rel
job = {
"name": name,
"prel": prel,
"vtop": cj["vtop"],
"ptop": cj["ptop"],
"flag": cj["flag"],
"size": dsize,
"lmod": dtime,
"hash": [],
"need": [],
}
break
if job and wark in reg:
del reg[wark]
if job or wark in reg:
job = job or reg[wark]
if job["prel"] == cj["prel"] and job["name"] == cj["name"]:
# ensure the files haven't been deleted manually
names = [job[x] for x in ["name", "tnam"] if x in job]
for fn in names:
path = os.path.join(job["ptop"], job["prel"], fn)
try:
if os.path.getsize(path) > 0:
# upload completed or both present
break
except:
# missing; restart
job = None
break
else:
# file contents match, but not the path
src = os.path.join(job["ptop"], job["prel"], job["name"])
dst = os.path.join(cj["ptop"], cj["prel"], cj["name"])
vsrc = os.path.join(job["vtop"], job["prel"], job["name"])
vsrc = vsrc.replace("\\", "/") # just for prints anyways
if job["need"]: if job["need"]:
self.log("up2k", "unfinished:\n {0}\n {1}".format(src, dst)) self.log("up2k", "unfinished:\n {0}\n {1}".format(src, dst))
err = "partial upload exists at a different location; please resume uploading here instead:\n{0}{1} ".format( err = "partial upload exists at a different location; please resume uploading here instead:\n"
job["vdir"], job["name"] err += vsrc + " "
) raise Pebkac(400, err)
elif "nodupe" in job["flag"]:
self.log("up2k", "dupe-reject:\n {0}\n {1}".format(src, dst))
err = "upload rejected, file already exists:\n " + vsrc + " "
raise Pebkac(400, err) raise Pebkac(400, err)
else: else:
# symlink to the client-provided name, # symlink to the client-provided name,
# returning the previous upload info # returning the previous upload info
job = deepcopy(job) job = deepcopy(job)
suffix = self._suffix(dst, now, job["addr"]) for k in ["ptop", "vtop", "prel"]:
job["name"] = cj["name"] + suffix job[k] = cj[k]
self._symlink(src, dst + suffix)
else: pdir = os.path.join(cj["ptop"], cj["prel"])
job["name"] = self._untaken(pdir, cj["name"], now, cj["addr"])
dst = os.path.join(job["ptop"], job["prel"], job["name"])
os.unlink(fsenc(dst)) # TODO ed pls
self._symlink(src, dst)
if not job:
job = { job = {
"wark": wark, "wark": wark,
"t0": now, "t0": now,
"addr": cj["addr"],
"vdir": cj["vdir"],
"rdir": cj["rdir"],
# client-provided, sanitized by _get_wark:
"name": cj["name"],
"size": cj["size"],
"lmod": cj["lmod"],
"hash": deepcopy(cj["hash"]), "hash": deepcopy(cj["hash"]),
"need": [],
} }
# client-provided, sanitized by _get_wark: name, size, lmod
path = os.path.join(job["rdir"], job["name"]) for k in [
job["name"] += self._suffix(path, now, cj["addr"]) "addr",
"vtop",
"ptop",
"prel",
"flag",
"name",
"size",
"lmod",
"poke",
]:
job[k] = cj[k]
# one chunk may occur multiple times in a file; # one chunk may occur multiple times in a file;
# filter to unique values for the list of missing chunks # filter to unique values for the list of missing chunks
# (preserve order to reduce disk thrashing) # (preserve order to reduce disk thrashing)
job["need"] = []
lut = {} lut = {}
for k in cj["hash"]: for k in cj["hash"]:
if k not in lut: if k not in lut:
@@ -108,13 +379,12 @@ class Up2k(object):
"wark": wark, "wark": wark,
} }
def _suffix(self, fpath, ts, ip): def _untaken(self, fdir, fname, ts, ip):
# TODO broker which avoid this race and # TODO broker which avoid this race and
# provides a new filename if taken (same as bup) # provides a new filename if taken (same as bup)
if not os.path.exists(fsenc(fpath)): suffix = ".{:.6f}-{}".format(ts, ip)
return "" with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
return f["orz"][1]
return ".{:.6f}-{}".format(ts, ip)
def _symlink(self, src, dst): def _symlink(self, src, dst):
# TODO store this in linktab so we never delete src if there are links to it # TODO store this in linktab so we never delete src if there are links to it
@@ -141,40 +411,58 @@ class Up2k(object):
lsrc = "../" * (len(lsrc) - 1) + "/".join(lsrc) lsrc = "../" * (len(lsrc) - 1) + "/".join(lsrc)
os.symlink(fsenc(lsrc), fsenc(ldst)) os.symlink(fsenc(lsrc), fsenc(ldst))
except (AttributeError, OSError) as ex: except (AttributeError, OSError) as ex:
self.log("up2k", "cannot symlink; creating copy") self.log("up2k", "cannot symlink; creating copy: " + repr(ex))
shutil.copy2(fsenc(src), fsenc(dst)) shutil.copy2(fsenc(src), fsenc(dst))
def handle_chunk(self, wark, chash): def handle_chunk(self, ptop, wark, chash):
with self.mutex: with self.mutex:
job = self.registry.get(wark) job = self.registry[ptop].get(wark, None)
if not job: if not job:
raise Pebkac(404, "unknown wark") raise Pebkac(400, "unknown wark")
if chash not in job["need"]: if chash not in job["need"]:
raise Pebkac(200, "already got that but thanks??") raise Pebkac(200, "already got that but thanks??")
nchunk = [n for n, v in enumerate(job["hash"]) if v == chash] nchunk = [n for n, v in enumerate(job["hash"]) if v == chash]
if not nchunk: if not nchunk:
raise Pebkac(404, "unknown chunk") raise Pebkac(400, "unknown chunk")
job["poke"] = time.time()
chunksize = self._get_chunksize(job["size"]) chunksize = self._get_chunksize(job["size"])
ofs = [chunksize * x for x in nchunk] ofs = [chunksize * x for x in nchunk]
path = os.path.join(job["rdir"], job["name"]) path = os.path.join(job["ptop"], job["prel"], job["tnam"])
return [chunksize, ofs, path, job["lmod"]] return [chunksize, ofs, path, job["lmod"]]
def confirm_chunk(self, wark, chash): def confirm_chunk(self, ptop, wark, chash):
with self.mutex: with self.mutex:
job = self.registry[wark] job = self.registry[ptop][wark]
pdir = os.path.join(job["ptop"], job["prel"])
src = os.path.join(pdir, job["tnam"])
dst = os.path.join(pdir, job["name"])
job["need"].remove(chash) job["need"].remove(chash)
ret = len(job["need"]) ret = len(job["need"])
if ret > 0:
return ret, src
if WINDOWS and ret == 0: atomic_move(src, dst)
path = os.path.join(job["rdir"], job["name"])
self.lastmod_q.put([path, (int(time.time()), int(job["lmod"]))])
return ret if WINDOWS:
self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))])
db = self.db.get(job["ptop"], None)
if db:
rp = os.path.join(job["prel"], job["name"]).replace("\\", "/")
self.db_rm(db, rp)
self.db_add(db, job["wark"], rp, job["lmod"], job["size"])
db.commit()
del self.registry[ptop][wark]
# in-memory registry is reserved for unfinished uploads
return ret, dst
def _get_chunksize(self, filesize): def _get_chunksize(self, filesize):
chunksize = 1024 * 1024 chunksize = 1024 * 1024
@@ -188,6 +476,13 @@ class Up2k(object):
chunksize += stepsize chunksize += stepsize
stepsize *= mul stepsize *= mul
def db_rm(self, db, rp):
db.execute("delete from up where rp = ?", (rp,))
def db_add(self, db, wark, rp, ts, sz):
v = (wark, ts, sz, rp)
db.execute("insert into up values (?,?,?,?)", v)
def _get_wark(self, cj): def _get_wark(self, cj):
if len(cj["name"]) > 1024 or len(cj["hash"]) > 512 * 1024: # 16TiB if len(cj["name"]) > 1024 or len(cj["hash"]) > 512 * 1024: # 16TiB
raise Pebkac(400, "name or numchunks not according to spec") raise Pebkac(400, "name or numchunks not according to spec")
@@ -204,9 +499,13 @@ class Up2k(object):
except: except:
cj["lmod"] = int(time.time()) cj["lmod"] = int(time.time())
# server-reproducible file identifier, independent of name or location wark = self._wark_from_hashlist(cj["size"], cj["hash"])
ident = [self.salt, str(cj["size"])] return wark
ident.extend(cj["hash"])
def _wark_from_hashlist(self, filesize, hashes):
""" server-reproducible file identifier, independent of name or location """
ident = [self.salt, str(filesize)]
ident.extend(hashes)
ident = "\n".join(ident) ident = "\n".join(ident)
hasher = hashlib.sha512() hasher = hashlib.sha512()
@@ -216,10 +515,47 @@ class Up2k(object):
wark = base64.urlsafe_b64encode(digest) wark = base64.urlsafe_b64encode(digest)
return wark.decode("utf-8").rstrip("=") return wark.decode("utf-8").rstrip("=")
def _hashlist_from_file(self, path):
fsz = os.path.getsize(path)
csz = self._get_chunksize(fsz)
ret = []
last_print = time.time()
with open(path, "rb", 512 * 1024) as f:
while fsz > 0:
now = time.time()
td = now - last_print
if td >= 0.3:
last_print = now
print(" {} \n\033[A".format(fsz), end="")
hashobj = hashlib.sha512()
rem = min(csz, fsz)
fsz -= rem
while rem > 0:
buf = f.read(min(rem, 64 * 1024))
if not buf:
raise Exception("EOF at " + str(f.tell()))
hashobj.update(buf)
rem -= len(buf)
digest = hashobj.digest()[:32]
digest = base64.urlsafe_b64encode(digest)
ret.append(digest.decode("utf-8").rstrip("="))
return ret
def _new_upload(self, job): def _new_upload(self, job):
self.registry[job["wark"]] = job self.registry[job["ptop"]][job["wark"]] = job
path = os.path.join(job["rdir"], job["name"]) pdir = os.path.join(job["ptop"], job["prel"])
with open(fsenc(path), "wb") as f: job["name"] = self._untaken(pdir, job["name"], job["t0"], job["addr"])
# if len(job["name"].split(".")) > 8:
# raise Exception("aaa")
tnam = job["name"] + ".PARTIAL"
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
f, job["tnam"] = f["orz"]
f.seek(job["size"] - 1) f.seek(job["size"] - 1)
f.write(b"e") f.write(b"e")
@@ -236,3 +572,58 @@ class Up2k(object):
os.utime(fsenc(path), times) os.utime(fsenc(path), times)
except: except:
self.log("lmod", "failed to utime ({}, {})".format(path, times)) self.log("lmod", "failed to utime ({}, {})".format(path, times))
def _snapshot(self):
persist_interval = 30 # persist unfinished uploads index every 30 sec
discard_interval = 21600 # drop unfinished uploads after 6 hours inactivity
prev = {}
while True:
time.sleep(persist_interval)
with self.mutex:
for k, reg in self.registry.items():
self._snap_reg(prev, k, reg, discard_interval)
def _snap_reg(self, prev, k, reg, discard_interval):
now = time.time()
rm = [x for x in reg.values() if now - x["poke"] > discard_interval]
if rm:
m = "dropping {} abandoned uploads in {}".format(len(rm), k)
vis = [self._vis_job_progress(x) for x in rm]
self.log("up2k", "\n".join([m] + vis))
for job in rm:
del reg[job["wark"]]
try:
# remove the filename reservation
path = os.path.join(job["ptop"], job["prel"], job["name"])
if os.path.getsize(path) == 0:
os.unlink(path)
if len(job["hash"]) == len(job["need"]):
# PARTIAL is empty, delete that too
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
os.unlink(path)
except:
pass
path = os.path.join(k, ".hist", "up2k.snap")
if not reg:
if k not in prev or prev[k] is not None:
prev[k] = None
if os.path.exists(path):
os.unlink(path)
return
newest = max(x["poke"] for _, x in reg.items()) if reg else 0
etag = [len(reg), newest]
if etag == prev.get(k, None):
return
path2 = "{}.{}".format(path, os.getpid())
j = json.dumps(reg, indent=2, sort_keys=True).encode("utf-8")
with gzip.GzipFile(path2, "wb") as f:
f.write(j)
atomic_move(path2, path)
self.log("up2k", "snap: {} |{}|".format(path, len(reg.keys())))
prev[k] = etag


@@ -2,14 +2,17 @@
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import re import re
import os
import sys import sys
import time import time
import base64 import base64
import select
import struct import struct
import hashlib import hashlib
import platform import platform
import threading import threading
import mimetypes import mimetypes
import contextlib
import subprocess as sp # nosec import subprocess as sp # nosec
from .__init__ import PY2, WINDOWS from .__init__ import PY2, WINDOWS
@@ -96,6 +99,80 @@ class Unrecv(object):
self.buf = buf + self.buf self.buf = buf + self.buf
@contextlib.contextmanager
def ren_open(fname, *args, **kwargs):
fdir = kwargs.pop("fdir", None)
suffix = kwargs.pop("suffix", None)
if fname == os.devnull:
with open(fname, *args, **kwargs) as f:
yield {"orz": [f, fname]}
return
orig_name = fname
bname = fname
ext = ""
while True:
ofs = bname.rfind(".")
if ofs < 0 or ofs < len(bname) - 7:
# doesn't look like an extension anymore
break
ext = bname[ofs:] + ext
bname = bname[:ofs]
b64 = ""
while True:
try:
if fdir:
fpath = os.path.join(fdir, fname)
else:
fpath = fname
if suffix and os.path.exists(fpath):
fpath += suffix
fname += suffix
ext += suffix
with open(fsenc(fpath), *args, **kwargs) as f:
if b64:
fp2 = "fn-trunc.{}.txt".format(b64)
fp2 = os.path.join(fdir, fp2)
with open(fsenc(fp2), "wb") as f2:
f2.write(orig_name.encode("utf-8"))
yield {"orz": [f, fname]}
return
except OSError as ex_:
ex = ex_
if ex.errno != 36:
raise
if not b64:
b64 = (bname + ext).encode("utf-8", "replace")
b64 = hashlib.sha512(b64).digest()[:12]
b64 = base64.urlsafe_b64encode(b64).decode("utf-8").rstrip("=")
badlen = len(fname)
while len(fname) >= badlen:
if len(bname) < 8:
raise ex
if len(bname) > len(ext):
# drop the last letter of the filename
bname = bname[:-1]
else:
try:
# drop the leftmost sub-extension
_, ext = ext.split(".", 1)
except:
# okay do the first letter then
ext = "." + ext[2:]
fname = "{}~{}{}".format(bname, b64, ext)
class MultipartParser(object): class MultipartParser(object):
def __init__(self, log_func, sr, http_headers): def __init__(self, log_func, sr, http_headers):
self.sr = sr self.sr = sr
@@ -472,6 +549,16 @@ else:
fsdec = w8dec fsdec = w8dec
def atomic_move(src, dst):
if not PY2:
os.replace(src, dst)
else:
if os.path.exists(dst):
os.unlink(dst)
os.rename(src, dst)
def read_socket(sr, total_size): def read_socket(sr, total_size):
remains = total_size remains = total_size
while remains > 0: while remains > 0:
@@ -515,6 +602,46 @@ def hashcopy(actor, fin, fout):
return tlen, hashobj.hexdigest(), digest_b64 return tlen, hashobj.hexdigest(), digest_b64
def sendfile_py(lower, upper, f, s):
remains = upper - lower
f.seek(lower)
while remains > 0:
# time.sleep(0.01)
buf = f.read(min(4096, remains))
if not buf:
return remains
try:
s.sendall(buf)
remains -= len(buf)
except:
return remains
return 0
def sendfile_kern(lower, upper, f, s):
out_fd = s.fileno()
in_fd = f.fileno()
ofs = lower
while ofs < upper:
try:
req = min(2 ** 30, upper - ofs)
select.select([], [out_fd], [], 10)
n = os.sendfile(out_fd, in_fd, ofs, req)
except Exception as ex:
# print("sendfile: " + repr(ex))
n = 0
if n <= 0:
return upper - ofs
ofs += n
# print("sendfile: ok, sent {} now, {} total, {} remains".format(n, ofs - lower, upper - ofs))
return 0
def unescape_cookie(orig): def unescape_cookie(orig):
# mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn # mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn
ret = "" ret = ""
@@ -591,7 +718,26 @@ def py_desc():
) )
def align_tab(lines):
rows = []
ncols = 0
for ln in lines:
row = [x for x in ln.split(" ") if x]
ncols = max(ncols, len(row))
rows.append(row)
lens = [0] * ncols
for row in rows:
for n, col in enumerate(row):
lens[n] = max(lens[n], len(col))
return ["".join(x.ljust(y + 2) for x, y in zip(row, lens)) for row in rows]
class Pebkac(Exception): class Pebkac(Exception):
def __init__(self, code, msg=None): def __init__(self, code, msg=None):
super(Pebkac, self).__init__(msg or HTTPCODE[code]) super(Pebkac, self).__init__(msg or HTTPCODE[code])
self.code = code self.code = code
def __repr__(self):
return "Pebkac({}, {})".format(self.code, repr(self.args))

copyparty/web/Makefile Normal file

@@ -0,0 +1,12 @@
# run me to zopfli all the static files
# which should help on really slow connections
# but then why are you using copyparty in the first place
pk: $(addsuffix .gz, $(wildcard *.js *.css))
un: $(addsuffix .un, $(wildcard *.gz))
%.gz: %
pigz -11 -J 34 -I 5730 $<
%.un: %
pigz -d $<
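
A quick usage sketch (an assumption on my part: GNU make plus a pigz build new enough to support zopfli via -11, as the comments above imply):

    make pk   # gzip-compress every *.js and *.css in this directory
    make un   # decompress the .gz files again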


@@ -68,6 +68,8 @@
</div> </div>
</div> </div>
<script src="/.cpr/util.js{{ ts }}"></script>
{%- if can_read %} {%- if can_read %}
<script src="/.cpr/browser.js{{ ts }}"></script> <script src="/.cpr/browser.js{{ ts }}"></script>
{%- endif %} {%- endif %}


@@ -1,117 +1,25 @@
"use strict"; "use strict";
// error handler for mobile devices window.onerror = vis_exh;
function hcroak(msg) {
document.body.innerHTML = msg;
window.onerror = undefined;
throw 'fatal_err';
}
function croak(msg) {
document.body.textContent = msg;
window.onerror = undefined;
throw msg;
}
function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) {
return {
'&': '&amp;',
'"': '&quot;',
'<': '&lt;',
'>': '&gt;'
}[c];
});
}
window.onerror = function (msg, url, lineNo, columnNo, error) {
window.onerror = undefined;
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
if (error) {
var find = ['desc', 'stack', 'trace'];
for (var a = 0; a < find.length; a++)
if (String(error[find[a]]) !== 'undefined')
html.push('<h2>' + find[a] + '</h2>' +
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
}
document.body.style.fontSize = '0.8em';
document.body.style.padding = '0 1em 1em 1em';
hcroak(html.join('\n'));
};
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
if (!String.prototype.endsWith) {
String.prototype.endsWith = function (search, this_len) {
if (this_len === undefined || this_len > this.length) {
this_len = this.length;
}
return this.substring(this_len - search.length, this_len) === search;
};
}
// https://stackoverflow.com/a/950146
function import_js(url, cb) {
var head = document.head || document.getElementsByTagName('head')[0];
var script = document.createElement('script');
script.type = 'text/javascript';
script.src = url;
script.onreadystatechange = cb;
script.onload = cb;
head.appendChild(script);
}
function o(id) {
return document.getElementById(id);
}
function dbg(msg) { function dbg(msg) {
o('path').innerHTML = msg; ebi('path').innerHTML = msg;
} }
function ev(e) { function ev(e) {
e = e || window.event; e = e || window.event;
e.preventDefault ? e.preventDefault() : (e.returnValue = false);
if (e.preventDefault)
e.preventDefault()
if (e.stopPropagation)
e.stopPropagation();
e.returnValue = false;
return e; return e;
} }
makeSortable(ebi('files'));
function sortTable(table, col) {
var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows
th = table.tHead.rows[0].cells,
tr = Array.prototype.slice.call(tb.rows, 0),
i, reverse = th[col].className == 'sort1' ? -1 : 1;
for (var a = 0, thl = th.length; a < thl; a++)
th[a].className = '';
th[col].className = 'sort' + reverse;
var stype = th[col].getAttribute('sort');
tr = tr.sort(function (a, b) {
var v1 = a.cells[col].textContent.trim();
var v2 = b.cells[col].textContent.trim();
if (stype == 'int') {
v1 = parseInt(v1.replace(/,/g, ''));
v2 = parseInt(v2.replace(/,/g, ''));
return reverse * (v1 - v2);
}
return reverse * (v1.localeCompare(v2));
});
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]);
}
function makeSortable(table) {
var th = table.tHead, i;
th && (th = th.rows[0]) && (th = th.cells);
if (th) i = th.length;
else return; // if no `<thead>` then do nothing
while (--i >= 0) (function (i) {
th[i].onclick = function () {
sortTable(table, i);
};
}(i));
}
makeSortable(o('files'));
// extract songs + add play column // extract songs + add play column
@@ -124,9 +32,9 @@ var mp = (function () {
'tracks': tracks, 'tracks': tracks,
'cover_url': '' 'cover_url': ''
}; };
var re_audio = new RegExp('\.(opus|ogg|m4a|aac|mp3|wav|flac)$', 'i'); var re_audio = /\.(opus|ogg|m4a|aac|mp3|wav|flac)$/i;
var trs = document.getElementById('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr'); var trs = ebi('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
for (var a = 0, aa = trs.length; a < aa; a++) { for (var a = 0, aa = trs.length; a < aa; a++) {
var tds = trs[a].getElementsByTagName('td'); var tds = trs[a].getElementsByTagName('td');
var link = tds[1].getElementsByTagName('a')[0]; var link = tds[1].getElementsByTagName('a')[0];
@@ -142,7 +50,7 @@ var mp = (function () {
} }
for (var a = 0, aa = tracks.length; a < aa; a++) for (var a = 0, aa = tracks.length; a < aa; a++)
o('trk' + a).onclick = ev_play; ebi('trk' + a).onclick = ev_play;
ret.vol = localStorage.getItem('vol'); ret.vol = localStorage.getItem('vol');
if (ret.vol !== null) if (ret.vol !== null)
@@ -169,8 +77,8 @@ var mp = (function () {
// toggle player widget // toggle player widget
var widget = (function () { var widget = (function () {
var ret = {}; var ret = {};
var widget = document.getElementById('widget'); var widget = ebi('widget');
var wtoggle = document.getElementById('wtoggle'); var wtoggle = ebi('wtoggle');
var touchmode = false; var touchmode = false;
var side_open = false; var side_open = false;
var was_paused = true; var was_paused = true;
@@ -199,7 +107,7 @@ var widget = (function () {
ret.paused = function (paused) { ret.paused = function (paused) {
if (was_paused != paused) { if (was_paused != paused) {
was_paused = paused; was_paused = paused;
o('bplay').innerHTML = paused ? '▶' : '⏸'; ebi('bplay').innerHTML = paused ? '▶' : '⏸';
} }
}; };
var click_handler = function (e) { var click_handler = function (e) {
@@ -223,8 +131,8 @@ var widget = (function () {
// buffer/position bar // buffer/position bar
var pbar = (function () { var pbar = (function () {
var r = {}; var r = {};
r.bcan = o('barbuf'); r.bcan = ebi('barbuf');
r.pcan = o('barpos'); r.pcan = ebi('barpos');
r.bctx = r.bcan.getContext('2d'); r.bctx = r.bcan.getContext('2d');
r.pctx = r.pcan.getContext('2d'); r.pctx = r.pcan.getContext('2d');
@@ -289,7 +197,7 @@ var pbar = (function () {
// volume bar // volume bar
var vbar = (function () { var vbar = (function () {
var r = {}; var r = {};
r.can = o('pvol'); r.can = ebi('pvol');
r.ctx = r.can.getContext('2d'); r.ctx = r.can.getContext('2d');
var bctx = r.ctx; var bctx = r.ctx;
@@ -386,7 +294,7 @@ var vbar = (function () {
else else
play(0); play(0);
}; };
o('bplay').onclick = function (e) { ebi('bplay').onclick = function (e) {
ev(e); ev(e);
if (mp.au) { if (mp.au) {
if (mp.au.paused) if (mp.au.paused)
@@ -397,15 +305,15 @@ var vbar = (function () {
else else
play(0); play(0);
}; };
o('bprev').onclick = function (e) { ebi('bprev').onclick = function (e) {
ev(e); ev(e);
bskip(-1); bskip(-1);
}; };
o('bnext').onclick = function (e) { ebi('bnext').onclick = function (e) {
ev(e); ev(e);
bskip(1); bskip(1);
}; };
o('barpos').onclick = function (e) { ebi('barpos').onclick = function (e) {
if (!mp.au) { if (!mp.au) {
//dbg((new Date()).getTime()); //dbg((new Date()).getTime());
return play(0); return play(0);
@@ -414,8 +322,12 @@ var vbar = (function () {
var rect = pbar.pcan.getBoundingClientRect(); var rect = pbar.pcan.getBoundingClientRect();
var x = e.clientX - rect.left; var x = e.clientX - rect.left;
var mul = x * 1.0 / rect.width; var mul = x * 1.0 / rect.width;
var seek = mp.au.duration * mul;
console.log('seek: ' + seek);
if (!isFinite(seek))
return;
mp.au.currentTime = mp.au.duration * mul; mp.au.currentTime = seek;
if (mp.au === mp.au_native) if (mp.au === mp.au_native)
// hack: ogv.js breaks on .play() during playback // hack: ogv.js breaks on .play() during playback
@@ -471,7 +383,7 @@ function ev_play(e) {
function setclass(id, clas) { function setclass(id, clas) {
o(id).setAttribute('class', clas); ebi(id).setAttribute('class', clas);
} }
@@ -542,7 +454,8 @@ function play(tid, call_depth) {
mp.au.tid = tid; mp.au.tid = tid;
mp.au.src = url; mp.au.src = url;
mp.au.volume = mp.expvol(); mp.au.volume = mp.expvol();
setclass('trk' + tid, 'play act'); var oid = 'trk' + tid;
setclass(oid, 'play act');
try { try {
if (hack_attempt_play) if (hack_attempt_play)
@@ -551,7 +464,11 @@ function play(tid, call_depth) {
if (mp.au.paused) if (mp.au.paused)
autoplay_blocked(); autoplay_blocked();
location.hash = 'trk' + tid; var o = ebi(oid);
o.setAttribute('id', 'thx_js');
location.hash = oid;
o.setAttribute('id', oid);
pbar.drawbuf(); pbar.drawbuf();
return true; return true;
} }
@@ -567,7 +484,6 @@ function play(tid, call_depth) {
function evau_error(e) { function evau_error(e) {
var err = ''; var err = '';
var eplaya = (e && e.target) || (window.event && window.event.srcElement); var eplaya = (e && e.target) || (window.event && window.event.srcElement);
var url = eplaya.src;
switch (eplaya.error.code) { switch (eplaya.error.code) {
case eplaya.error.MEDIA_ERR_ABORTED: case eplaya.error.MEDIA_ERR_ABORTED:
@@ -608,20 +524,20 @@ function show_modal(html) {
// hide fullscreen message // hide fullscreen message
function unblocked() { function unblocked() {
var dom = o('blocked'); var dom = ebi('blocked');
if (dom) if (dom)
dom.parentNode.removeChild(dom); dom.parentNode.removeChild(dom);
} }
// show ui to manually start playback of a linked song // show ui to manually start playback of a linked song
function autoplay_blocked(tid) { function autoplay_blocked() {
show_modal( show_modal(
'<div id="blk_play"><a href="#" id="blk_go"></a></div>' + '<div id="blk_play"><a href="#" id="blk_go"></a></div>' +
'<div id="blk_abrt"><a href="#" id="blk_na">Cancel<br />(show file list)</a></div>'); '<div id="blk_abrt"><a href="#" id="blk_na">Cancel<br />(show file list)</a></div>');
var go = o('blk_go'); var go = ebi('blk_go');
var na = o('blk_na'); var na = ebi('blk_na');
var fn = mp.tracks[mp.au.tid].split(/\//).pop(); var fn = mp.tracks[mp.au.tid].split(/\//).pop();
fn = decodeURIComponent(fn.replace(/\+/g, ' ')); fn = decodeURIComponent(fn.replace(/\+/g, ' '));


@@ -126,7 +126,8 @@ write markdown (most html is 🙆 too)
var last_modified = {{ lastmod }}; var last_modified = {{ lastmod }};
var md_opt = { var md_opt = {
link_md_as_html: false, link_md_as_html: false,
allow_plugins: {{ md_plug }} allow_plugins: {{ md_plug }},
modpoll_freq: {{ md_chk_rate }}
}; };
(function () { (function () {
@@ -144,17 +145,11 @@ var md_opt = {
toggle(); toggle();
})(); })();
if (!String.startsWith) {
String.prototype.startsWith = function(s, i) {
i = i>0 ? i|0 : 0;
return this.substring(i, i + s.length) === s;
};
}
</script> </script>
<script src="/.cpr/util.js"></script>
<script src="/.cpr/deps/marked.full.js"></script> <script src="/.cpr/deps/marked.full.js"></script>
<script src="/.cpr/md.js"></script> <script src="/.cpr/md.js"></script>
{%- if edit %} {%- if edit %}
<script src="/.cpr/md2.js"></script> <script src="/.cpr/md2.js"></script>
{%- endif %} {%- endif %}
</body></html> </body></html>


@@ -1,10 +1,12 @@
var dom_toc = document.getElementById('toc'); "use strict";
var dom_wrap = document.getElementById('mw');
var dom_hbar = document.getElementById('mh'); var dom_toc = ebi('toc');
var dom_nav = document.getElementById('mn'); var dom_wrap = ebi('mw');
var dom_pre = document.getElementById('mp'); var dom_hbar = ebi('mh');
var dom_src = document.getElementById('mt'); var dom_nav = ebi('mn');
var dom_navtgl = document.getElementById('navtoggle'); var dom_pre = ebi('mp');
var dom_src = ebi('mt');
var dom_navtgl = ebi('navtoggle');
// chrome 49 needs this // chrome 49 needs this
@@ -34,7 +36,7 @@ function cls(dom, name, add) {
} }
function static(obj) { function statify(obj) {
return JSON.parse(JSON.stringify(obj)); return JSON.parse(JSON.stringify(obj));
} }
@@ -158,6 +160,46 @@ function copydom(src, dst, lv) {
} }
function md_plug_err(ex, js) {
var errbox = ebi('md_errbox');
if (errbox)
errbox.parentNode.removeChild(errbox);
if (!ex)
return;
var msg = (ex + '').split('\n')[0];
var ln = ex.lineNumber;
var o = null;
if (ln) {
msg = "Line " + ln + ", " + msg;
var lns = js.split('\n');
if (ln < lns.length) {
o = document.createElement('span');
o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block';
o.textContent = lns[ln - 1];
}
}
errbox = document.createElement('div');
errbox.setAttribute('id', 'md_errbox');
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
errbox.textContent = msg;
errbox.onclick = function () {
alert('' + ex.stack);
};
if (o) {
errbox.appendChild(o);
errbox.style.padding = '.25em .5em';
}
dom_nav.appendChild(errbox);
try {
console.trace();
}
catch (ex2) { }
}
function load_plug(md_text, plug_type) { function load_plug(md_text, plug_type) {
if (!md_opt.allow_plugins) if (!md_opt.allow_plugins)
return md_text; return md_text;
@@ -177,7 +219,14 @@ function load_plug(md_text, plug_type) {
var old_plug = md_plug[plug_type]; var old_plug = md_plug[plug_type];
if (!old_plug || old_plug[1] != js) { if (!old_plug || old_plug[1] != js) {
js = 'const x = { ' + js + ' }; x;'; js = 'const x = { ' + js + ' }; x;';
var x = eval(js); try {
var x = eval(js);
}
catch (ex) {
md_plug[plug_type] = null;
md_plug_err(ex, js);
return md;
}
if (x['ctor']) { if (x['ctor']) {
x['ctor'](); x['ctor']();
delete x['ctor']; delete x['ctor'];
@@ -191,20 +240,30 @@ function load_plug(md_text, plug_type) {
function convert_markdown(md_text, dest_dom) { function convert_markdown(md_text, dest_dom) {
md_text = md_text.replace(/\r/g, ''); md_text = md_text.replace(/\r/g, '');
md_plug_err(null);
md_text = load_plug(md_text, 'pre'); md_text = load_plug(md_text, 'pre');
md_text = load_plug(md_text, 'post'); md_text = load_plug(md_text, 'post');
marked.setOptions({ var marked_opts = {
//headerPrefix: 'h-', //headerPrefix: 'h-',
breaks: true, breaks: true,
gfm: true gfm: true
}); };
if (md_plug['pre']) { var ext = md_plug['pre'];
marked.use(md_plug['pre'][0]); if (ext)
Object.assign(marked_opts, ext[0]);
try {
var md_html = marked(md_text, marked_opts);
} }
catch (ex) {
if (ext)
md_plug_err(ex, ext[1]);
var md_html = marked(md_text); throw ex;
}
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body; var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
var nodes = md_dom.getElementsByTagName('a'); var nodes = md_dom.getElementsByTagName('a');
@@ -240,7 +299,7 @@ function convert_markdown(md_text, dest_dom) {
} }
// separate <code> for each line in <pre> // separate <code> for each line in <pre>
var nodes = md_dom.getElementsByTagName('pre'); nodes = md_dom.getElementsByTagName('pre');
for (var a = nodes.length - 1; a >= 0; a--) { for (var a = nodes.length - 1; a >= 0; a--) {
var el = nodes[a]; var el = nodes[a];
@@ -286,15 +345,29 @@ function convert_markdown(md_text, dest_dom) {
el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>'; el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>';
} }
if (md_plug['post']) ext = md_plug['post'];
md_plug['post'][0].render(md_dom); if (ext && ext[0].render)
try {
ext[0].render(md_dom);
}
catch (ex) {
md_plug_err(ex, ext[1]);
}
copydom(md_dom, dest_dom, 0); copydom(md_dom, dest_dom, 0);
if (ext && ext[0].render2)
try {
ext[0].render2(dest_dom);
}
catch (ex) {
md_plug_err(ex, ext[1]);
}
} }
function init_toc() { function init_toc() {
var loader = document.getElementById('ml'); var loader = ebi('ml');
loader.parentNode.removeChild(loader); loader.parentNode.removeChild(loader);
var anchors = []; // list of toc entries, complex objects var anchors = []; // list of toc entries, complex objects


@@ -77,32 +77,52 @@ html.dark #mt {
background: #f97; background: #f97;
border-radius: .15em; border-radius: .15em;
} }
html.dark #save.force-save {
color: #fca;
background: #720;
}
#save.disabled { #save.disabled {
opacity: .4; opacity: .4;
} }
#helpbox,
#toast {
background: #f7f7f7;
border-radius: .4em;
z-index: 9001;
}
#helpbox { #helpbox {
display: none; display: none;
position: fixed; position: fixed;
background: #f7f7f7;
box-shadow: 0 .5em 2em #777;
border-radius: .4em;
padding: 2em; padding: 2em;
top: 4em; top: 4em;
overflow-y: auto; overflow-y: auto;
box-shadow: 0 .5em 2em #777;
height: calc(100% - 12em); height: calc(100% - 12em);
left: calc(50% - 15em); left: calc(50% - 15em);
right: 0; right: 0;
width: 30em; width: 30em;
z-index: 9001;
} }
#helpclose { #helpclose {
display: block; display: block;
} }
html.dark #helpbox { html.dark #helpbox {
background: #222;
box-shadow: 0 .5em 2em #444; box-shadow: 0 .5em 2em #444;
}
html.dark #helpbox,
html.dark #toast {
background: #222;
border: 1px solid #079; border: 1px solid #079;
border-width: 1px 0; border-width: 1px 0;
} }
#toast {
font-weight: bold;
text-align: center;
padding: .6em 0;
position: fixed;
z-index: 9001;
top: 30%;
transition: opacity 0.2s ease-in-out;
opacity: 1;
}
# mt {opacity: .5;top:1px} # mt {opacity: .5;top:1px}


@@ -1,3 +1,6 @@
"use strict";
// server state // server state
var server_md = dom_src.value; var server_md = dom_src.value;
@@ -8,15 +11,15 @@ var js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
// dom nodes // dom nodes
var dom_swrap = document.getElementById('mtw'); var dom_swrap = ebi('mtw');
var dom_sbs = document.getElementById('sbs'); var dom_sbs = ebi('sbs');
var dom_nsbs = document.getElementById('nsbs'); var dom_nsbs = ebi('nsbs');
var dom_tbox = document.getElementById('toolsbox'); var dom_tbox = ebi('toolsbox');
var dom_ref = (function () { var dom_ref = (function () {
var d = document.createElement('div'); var d = document.createElement('div');
d.setAttribute('id', 'mtr'); d.setAttribute('id', 'mtr');
dom_swrap.appendChild(d); dom_swrap.appendChild(d);
d = document.getElementById('mtr'); d = ebi('mtr');
// hide behind the textarea (offsetTop is not computed if display:none) // hide behind the textarea (offsetTop is not computed if display:none)
dom_src.style.zIndex = '4'; dom_src.style.zIndex = '4';
d.style.zIndex = '3'; d.style.zIndex = '3';
@@ -105,7 +108,7 @@ var draw_md = (function () {
map_src = genmap(dom_ref, map_src); map_src = genmap(dom_ref, map_src);
map_pre = genmap(dom_pre, map_pre); map_pre = genmap(dom_pre, map_pre);
cls(document.getElementById('save'), 'disabled', src == server_md); cls(ebi('save'), 'disabled', src == server_md);
var t1 = new Date().getTime(); var t1 = new Date().getTime();
delay = t1 - t0 > 100 ? 25 : 1; delay = t1 - t0 > 100 ? 25 : 1;
@@ -141,7 +144,7 @@ redraw = (function () {
onresize(); onresize();
} }
function modetoggle() { function modetoggle() {
mode = dom_nsbs.innerHTML; var mode = dom_nsbs.innerHTML;
dom_nsbs.innerHTML = mode == 'editor' ? 'preview' : 'editor'; dom_nsbs.innerHTML = mode == 'editor' ? 'preview' : 'editor';
mode += ' single'; mode += ' single';
dom_wrap.setAttribute('class', mode); dom_wrap.setAttribute('class', mode);
@@ -177,7 +180,7 @@ redraw = (function () {
y += src.clientHeight / 2; y += src.clientHeight / 2;
var sy1 = -1, sy2 = -1, dy1 = -1, dy2 = -1; var sy1 = -1, sy2 = -1, dy1 = -1, dy2 = -1;
for (var a = 1; a < nlines + 1; a++) { for (var a = 1; a < nlines + 1; a++) {
if (srcmap[a] === null || dstmap[a] === null) if (srcmap[a] == null || dstmap[a] == null)
continue; continue;
if (srcmap[a] > y) { if (srcmap[a] > y) {
@@ -220,14 +223,108 @@ redraw = (function () {
})(); })();
// modification checker
function Modpoll() {
this.skip_one = true;
this.disabled = false;
this.periodic = function () {
var that = this;
setTimeout(function () {
that.periodic();
}, 1000 * md_opt.modpoll_freq);
var skip = null;
if (ebi('toast'))
skip = 'toast';
else if (this.skip_one)
skip = 'saved';
else if (this.disabled)
skip = 'disabled';
if (skip) {
console.log('modpoll skip, ' + skip);
this.skip_one = false;
return;
}
console.log('modpoll...');
var url = (document.location + '').split('?')[0] + '?raw&_=' + new Date().getTime();
var xhr = new XMLHttpRequest();
xhr.modpoll = this;
xhr.open('GET', url, true);
xhr.responseType = 'text';
xhr.onreadystatechange = this.cb;
xhr.send();
}
this.cb = function () {
if (this.modpoll.disabled || this.modpoll.skip_one) {
console.log('modpoll abort');
return;
}
if (this.readyState != XMLHttpRequest.DONE)
return;
if (this.status !== 200) {
console.log('modpoll err ' + this.status + ": " + this.responseText);
return;
}
if (!this.responseText)
return;
var server_ref = server_md.replace(/\r/g, '');
var server_now = this.responseText.replace(/\r/g, '');
if (server_ref != server_now) {
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
this.modpoll.disabled = true;
var msg = [
"The document has changed on the server.<br />" +
"The changes will NOT be loaded into your editor automatically.",
"Press F5 or CTRL-R to refresh the page,<br />" +
"replacing your document with the server copy.",
"You can click this message to ignore and contnue."
];
return toast(false, "box-shadow:0 1em 2em rgba(64,64,64,0.8);font-weight:normal",
36, "<p>" + msg.join('</p>\n<p>') + '</p>');
}
console.log('modpoll eq');
}
if (md_opt.modpoll_freq > 0)
this.periodic();
return this;
}
var modpoll = new Modpoll();
window.onbeforeunload = function (e) {
if ((ebi("save").getAttribute('class') + '').indexOf('disabled') >= 0)
return; //nice (todo)
e.preventDefault(); //ff
e.returnValue = ''; //chrome
};
// save handler // save handler
function save(e) { function save(e) {
if (e) e.preventDefault(); if (e) e.preventDefault();
var save_btn = document.getElementById("save"), var save_btn = ebi("save"),
save_cls = save_btn.getAttribute('class') + ''; save_cls = save_btn.getAttribute('class') + '';
if (save_cls.indexOf('disabled') >= 0) { if (save_cls.indexOf('disabled') >= 0) {
toast('font-size:2em;color:#fc6;width:9em;', 'no changes'); toast(true, ";font-size:2em;color:#c90", 9, "no changes");
return; return;
} }
@@ -251,6 +348,8 @@ function save(e) {
xhr.onreadystatechange = save_cb; xhr.onreadystatechange = save_cb;
xhr.btn = save_btn; xhr.btn = save_btn;
xhr.txt = txt; xhr.txt = txt;
modpoll.skip_one = true; // skip one iteration while we save
xhr.send(fd); xhr.send(fd);
} }
@@ -344,23 +443,44 @@ function savechk_cb() {
last_modified = this.lastmod; last_modified = this.lastmod;
server_md = this.txt; server_md = this.txt;
draw_md(); draw_md();
toast('font-size:6em;font-family:serif;color:#cf6;width:4em;', toast(true, ";font-size:6em;font-family:serif;color:#9b4", 4,
'OK✔<span style="font-size:.2em;color:#999;position:absolute">' + this.ntry + '</span>'); 'OK✔<span style="font-size:.2em;color:#999;position:absolute">' + this.ntry + '</span>');
modpoll.disabled = false;
} }
function toast(style, msg) { function toast(autoclose, style, width, msg) {
var ok = document.createElement('div'); var ok = ebi("toast");
style += 'font-weight:bold;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1'; if (ok)
ok.parentNode.removeChild(ok);
style = "width:" + width + "em;left:calc(50% - " + (width / 2) + "em);" + style;
ok = document.createElement('div');
ok.setAttribute('id', 'toast');
ok.setAttribute('style', style); ok.setAttribute('style', style);
ok.innerHTML = msg; ok.innerHTML = msg;
var parent = document.getElementById('m'); var parent = ebi('m');
document.documentElement.appendChild(ok); document.documentElement.appendChild(ok);
setTimeout(function () {
ok.style.opacity = 0; var hide = function (delay) {
}, 500); delay = delay || 0;
setTimeout(function () {
ok.parentNode.removeChild(ok); setTimeout(function () {
}, 750); ok.style.opacity = 0;
}, delay);
setTimeout(function () {
if (ok.parentNode)
ok.parentNode.removeChild(ok);
}, delay + 250);
}
ok.onclick = function () {
hide(0);
};
if (autoclose)
hide(500);
} }
@@ -540,6 +660,10 @@ function md_backspace() {
if (/^\s*$/.test(left)) if (/^\s*$/.test(left))
return true; return true;
// same if selection
if (o0 != dom_src.selectionEnd)
return true;
// same if line is all-whitespace or non-markup // same if line is all-whitespace or non-markup
var v = m[0].replace(/[^ ]/g, " "); var v = m[0].replace(/[^ ]/g, " ");
if (v === m[0] || v.length !== left.length) if (v === m[0] || v.length !== left.length)
@@ -623,7 +747,8 @@ function fmt_table(e) {
lpipe = tab[1].indexOf('|') < tab[1].indexOf('-'), lpipe = tab[1].indexOf('|') < tab[1].indexOf('-'),
rpipe = tab[1].lastIndexOf('|') > tab[1].lastIndexOf('-'), rpipe = tab[1].lastIndexOf('|') > tab[1].lastIndexOf('-'),
re_lpipe = lpipe ? /^\s*\|\s*/ : /^\s*/, re_lpipe = lpipe ? /^\s*\|\s*/ : /^\s*/,
re_rpipe = rpipe ? /\s*\|\s*$/ : /\s*$/; re_rpipe = rpipe ? /\s*\|\s*$/ : /\s*$/,
ncols;
// the second row defines the table, // the second row defines the table,
// need to process that first // need to process that first
@@ -680,7 +805,8 @@ function fmt_table(e) {
for (var col = 0; col < ncols; col++) { for (var col = 0; col < ncols; col++) {
var max = 0; var max = 0;
for (var row = 0; row < tab.length; row++) for (var row = 0; row < tab.length; row++)
max = Math.max(max, tab[row][col].length); if (row != 1)
max = Math.max(max, tab[row][col].length);
var s = ''; var s = '';
for (var n = 0; n < max; n++) for (var n = 0; n < max; n++)
@@ -747,9 +873,8 @@ function mark_uni(e) {
dom_tbox.setAttribute('class', ''); dom_tbox.setAttribute('class', '');
var txt = dom_src.value, var txt = dom_src.value,
ptn = new RegExp('([^' + js_uni_whitelist + ']+)', 'g'); ptn = new RegExp('([^' + js_uni_whitelist + ']+)', 'g'),
mod = txt.replace(/\r/g, "").replace(ptn, "\u2588\u2770$1\u2771");
mod = txt.replace(/\r/g, "").replace(ptn, "\u2588\u2770$1\u2771");
if (txt == mod) { if (txt == mod) {
alert('no results; no modifications were made'); alert('no results; no modifications were made');
@@ -785,7 +910,12 @@ function iter_uni(e) {
// configure whitelist // configure whitelist
function cfg_uni(e) { function cfg_uni(e) {
if (e) e.preventDefault(); if (e) e.preventDefault();
esc_uni_whitelist = prompt("unicode whitelist", esc_uni_whitelist);
var reply = prompt("unicode whitelist", esc_uni_whitelist);
if (reply === null)
return;
esc_uni_whitelist = reply;
js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\''); js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
} }
@@ -802,7 +932,7 @@ function cfg_uni(e) {
return false; return false;
} }
if (ev.code == "Escape" || kc == 27) { if (ev.code == "Escape" || kc == 27) {
var d = document.getElementById('helpclose'); var d = ebi('helpclose');
if (d) if (d)
d.click(); d.click();
} }
@@ -859,22 +989,22 @@ function cfg_uni(e) {
} }
} }
document.onkeydown = keydown; document.onkeydown = keydown;
document.getElementById('save').onclick = save; ebi('save').onclick = save;
})(); })();
document.getElementById('tools').onclick = function (e) { ebi('tools').onclick = function (e) {
if (e) e.preventDefault(); if (e) e.preventDefault();
var is_open = dom_tbox.getAttribute('class') != 'open'; var is_open = dom_tbox.getAttribute('class') != 'open';
dom_tbox.setAttribute('class', is_open ? 'open' : ''); dom_tbox.setAttribute('class', is_open ? 'open' : '');
}; };
document.getElementById('help').onclick = function (e) { ebi('help').onclick = function (e) {
if (e) e.preventDefault(); if (e) e.preventDefault();
dom_tbox.setAttribute('class', ''); dom_tbox.setAttribute('class', '');
var dom = document.getElementById('helpbox'); var dom = ebi('helpbox');
var dtxt = dom.getElementsByTagName('textarea'); var dtxt = dom.getElementsByTagName('textarea');
if (dtxt.length > 0) { if (dtxt.length > 0) {
convert_markdown(dtxt[0].value, dom); convert_markdown(dtxt[0].value, dom);
@@ -882,16 +1012,16 @@ document.getElementById('help').onclick = function (e) {
} }
dom.style.display = 'block'; dom.style.display = 'block';
document.getElementById('helpclose').onclick = function () { ebi('helpclose').onclick = function () {
dom.style.display = 'none'; dom.style.display = 'none';
}; };
}; };
document.getElementById('fmt_table').onclick = fmt_table; ebi('fmt_table').onclick = fmt_table;
document.getElementById('mark_uni').onclick = mark_uni; ebi('mark_uni').onclick = mark_uni;
document.getElementById('iter_uni').onclick = iter_uni; ebi('iter_uni').onclick = iter_uni;
document.getElementById('cfg_uni').onclick = cfg_uni; ebi('cfg_uni').onclick = cfg_uni;
// blame steen // blame steen
@@ -999,13 +1129,12 @@ action_stack = (function () {
ref = newtxt; ref = newtxt;
dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length); dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length);
if (hist.un.length > 0) if (hist.un.length > 0)
dbg(static(hist.un.slice(-1)[0])); dbg(statify(hist.un.slice(-1)[0]));
if (hist.re.length > 0) if (hist.re.length > 0)
dbg(static(hist.re.slice(-1)[0])); dbg(statify(hist.re.slice(-1)[0]));
} }
return { return {
push: push,
undo: undo, undo: undo,
redo: redo, redo: redo,
push: schedule_push, push: schedule_push,
@@ -1015,7 +1144,7 @@ action_stack = (function () {
})(); })();
/* /*
document.getElementById('help').onclick = function () { ebi('help').onclick = function () {
var c1 = getComputedStyle(dom_src).cssText.split(';'); var c1 = getComputedStyle(dom_src).cssText.split(';');
var c2 = getComputedStyle(dom_ref).cssText.split(';'); var c2 = getComputedStyle(dom_ref).cssText.split(';');
var max = Math.min(c1.length, c2.length); var max = Math.min(c1.length, c2.length);


@@ -25,7 +25,8 @@
var last_modified = {{ lastmod }}; var last_modified = {{ lastmod }};
var md_opt = { var md_opt = {
link_md_as_html: false, link_md_as_html: false,
allow_plugins: {{ md_plug }} allow_plugins: {{ md_plug }},
modpoll_freq: {{ md_chk_rate }}
}; };
var lightswitch = (function () { var lightswitch = (function () {
@@ -42,6 +43,7 @@ var lightswitch = (function () {
})(); })();
</script> </script>
<script src="/.cpr/util.js"></script>
<script src="/.cpr/deps/easymde.js"></script> <script src="/.cpr/deps/easymde.js"></script>
<script src="/.cpr/mde.js"></script> <script src="/.cpr/mde.js"></script>
</body></html> </body></html>


@@ -1,7 +1,9 @@
var dom_wrap = document.getElementById('mw'); "use strict";
var dom_nav = document.getElementById('mn');
var dom_doc = document.getElementById('m'); var dom_wrap = ebi('mw');
var dom_md = document.getElementById('mt'); var dom_nav = ebi('mn');
var dom_doc = ebi('m');
var dom_md = ebi('mt');
(function () { (function () {
var n = document.location + ''; var n = document.location + '';
@@ -63,7 +65,7 @@ var mde = (function () {
mde.codemirror.on("change", function () { mde.codemirror.on("change", function () {
md_changed(mde); md_changed(mde);
}); });
var loader = document.getElementById('ml'); var loader = ebi('ml');
loader.parentNode.removeChild(loader); loader.parentNode.removeChild(loader);
return mde; return mde;
})(); })();
@@ -213,7 +215,7 @@ function save_chk() {
var ok = document.createElement('div'); var ok = document.createElement('div');
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1'); ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
ok.innerHTML = 'OK✔'; ok.innerHTML = 'OK✔';
var parent = document.getElementById('m'); var parent = ebi('m');
document.documentElement.appendChild(ok); document.documentElement.appendChild(ok);
setTimeout(function () { setTimeout(function () {
ok.style.opacity = 0; ok.style.opacity = 0;


@@ -1,61 +1,6 @@
"use strict"; "use strict";
// error handler for mobile devices window.onerror = vis_exh;
function hcroak(msg) {
document.body.innerHTML = msg;
window.onerror = undefined;
throw 'fatal_err';
}
function croak(msg) {
document.body.textContent = msg;
window.onerror = undefined;
throw msg;
}
function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) {
return {
'&': '&amp;',
'"': '&quot;',
'<': '&lt;',
'>': '&gt;'
}[c];
});
}
window.onerror = function (msg, url, lineNo, columnNo, error) {
window.onerror = undefined;
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
if (error) {
var find = ['desc', 'stack', 'trace'];
for (var a = 0; a < find.length; a++)
if (String(error[find[a]]) !== 'undefined')
html.push('<h2>' + find[a] + '</h2>' +
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
}
document.body.style.fontSize = '0.8em';
document.body.style.padding = '0 1em 1em 1em';
hcroak(html.join('\n'));
};
// https://stackoverflow.com/a/950146
function import_js(url, cb) {
var head = document.head || document.getElementsByTagName('head')[0];
var script = document.createElement('script');
script.type = 'text/javascript';
script.src = url;
script.onreadystatechange = cb;
script.onload = cb;
head.appendChild(script);
}
function o(id) {
return document.getElementById(id);
}
(function () { (function () {
@@ -88,12 +33,12 @@ function goto(dest) {
for (var a = obj.length - 1; a >= 0; a--) for (var a = obj.length - 1; a >= 0; a--)
obj[a].classList.remove('act'); obj[a].classList.remove('act');
var obj = document.querySelectorAll('#ops>a'); obj = document.querySelectorAll('#ops>a');
for (var a = obj.length - 1; a >= 0; a--) for (var a = obj.length - 1; a >= 0; a--)
obj[a].classList.remove('act'); obj[a].classList.remove('act');
if (dest) { if (dest) {
document.getElementById('op_' + dest).classList.add('act'); ebi('op_' + dest).classList.add('act');
document.querySelector('#ops>a[data-dest=' + dest + ']').classList.add('act'); document.querySelector('#ops>a[data-dest=' + dest + ']').classList.add('act');
var fn = window['goto_' + dest]; var fn = window['goto_' + dest];
@@ -121,7 +66,7 @@ function goto_up2k() {
if (op !== null && op !== '.') if (op !== null && op !== '.')
goto(op); goto(op);
} }
document.getElementById('ops').style.display = 'block'; ebi('ops').style.display = 'block';
})(); })();
@@ -150,21 +95,21 @@ function up2k_init(have_crypto) {
// show modal message // show modal message
function showmodal(msg) { function showmodal(msg) {
o('u2notbtn').innerHTML = msg; ebi('u2notbtn').innerHTML = msg;
o('u2btn').style.display = 'none'; ebi('u2btn').style.display = 'none';
o('u2notbtn').style.display = 'block'; ebi('u2notbtn').style.display = 'block';
o('u2conf').style.opacity = '0.5'; ebi('u2conf').style.opacity = '0.5';
} }
// hide modal message // hide modal message
function unmodal() { function unmodal() {
o('u2notbtn').style.display = 'none'; ebi('u2notbtn').style.display = 'none';
o('u2btn').style.display = 'block'; ebi('u2btn').style.display = 'block';
o('u2conf').style.opacity = '1'; ebi('u2conf').style.opacity = '1';
o('u2notbtn').innerHTML = ''; ebi('u2notbtn').innerHTML = '';
} }
var post_url = o('op_bup').getElementsByTagName('form')[0].getAttribute('action'); var post_url = ebi('op_bup').getElementsByTagName('form')[0].getAttribute('action');
if (post_url && post_url.charAt(post_url.length - 1) !== '/') if (post_url && post_url.charAt(post_url.length - 1) !== '/')
post_url += '/'; post_url += '/';
@@ -181,25 +126,25 @@ function up2k_init(have_crypto) {
import_js('/.cpr/deps/sha512.js', unmodal); import_js('/.cpr/deps/sha512.js', unmodal);
if (is_https) if (is_https)
o('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best'; ebi('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best';
else else
o('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance'; ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance';
} }
}; }
// show uploader if the user only has write-access // show uploader if the user only has write-access
if (!o('files')) if (!ebi('files'))
goto('up2k'); goto('up2k');
// shows or clears an error message in the basic uploader ui // shows or clears an error message in the basic uploader ui
function setmsg(msg) { function setmsg(msg) {
if (msg !== undefined) { if (msg !== undefined) {
o('u2err').setAttribute('class', 'err'); ebi('u2err').setAttribute('class', 'err');
o('u2err').innerHTML = msg; ebi('u2err').innerHTML = msg;
} }
else { else {
o('u2err').setAttribute('class', ''); ebi('u2err').setAttribute('class', '');
o('u2err').innerHTML = ''; ebi('u2err').innerHTML = '';
} }
} }
@@ -210,7 +155,7 @@ function up2k_init(have_crypto) {
} }
// handle user intent to use the basic uploader instead // handle user intent to use the basic uploader instead
o('u2nope').onclick = function (e) { ebi('u2nope').onclick = function (e) {
e.preventDefault(); e.preventDefault();
setmsg(''); setmsg('');
goto('bup'); goto('bup');
@@ -229,9 +174,9 @@ function up2k_init(have_crypto) {
function cfg_get(name) { function cfg_get(name) {
var val = localStorage.getItem(name); var val = localStorage.getItem(name);
if (val === null) if (val === null)
return parseInt(o(name).value); return parseInt(ebi(name).value);
o(name).value = val; ebi(name).value = val;
return val; return val;
} }
@@ -242,7 +187,7 @@ function up2k_init(have_crypto) {
else else
val = (val == '1'); val = (val == '1');
o(name).checked = val; ebi(name).checked = val;
return val; return val;
} }
@@ -250,12 +195,13 @@ function up2k_init(have_crypto) {
localStorage.setItem( localStorage.setItem(
name, val ? '1' : '0'); name, val ? '1' : '0');
o(name).checked = val; ebi(name).checked = val;
return val; return val;
} }
var parallel_uploads = cfg_get('nthread'); var parallel_uploads = cfg_get('nthread');
var multitask = bcfg_get('multitask', true); var multitask = bcfg_get('multitask', true);
var ask_up = bcfg_get('ask_up', true);
var col_hashing = '#00bbff'; var col_hashing = '#00bbff';
var col_hashed = '#004466'; var col_hashed = '#004466';
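cfg_get(), bcfg_get() and bcfg_set() above persist the uploader settings (thread count, the multitask switch, and the new ask_up confirmation toggle) in localStorage, falling back to the form's default when nothing has been stored yet. A rough Python analogue of the same read-or-default / write-back pattern, using a JSON file as a stand-in for localStorage (the file name is made up for this sketch):

```
import json, os

STORE = "up2k-settings.json"  # stand-in for the browser's localStorage

def _load():
    if not os.path.exists(STORE):
        return {}
    with open(STORE) as f:
        return json.load(f)

def _save(cfg):
    with open(STORE, "w") as f:
        json.dump(cfg, f)

def cfg_get(name, default):
    """numeric setting: the stored value wins, otherwise the ui default"""
    val = _load().get(name)
    return int(val) if val is not None else int(default)

def bcfg_get(name, default):
    """boolean setting, stored as '1'/'0' like the js version"""
    val = _load().get(name)
    return default if val is None else val == "1"

def bcfg_set(name, val):
    cfg = _load()
    cfg[name] = "1" if val else "0"
    _save(cfg)
    return val

parallel_uploads = cfg_get("nthread", 2)
multitask = bcfg_get("multitask", True)
ask_up = bcfg_get("ask_up", True)  # the new confirmation toggle
```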
@@ -273,6 +219,10 @@ function up2k_init(have_crypto) {
"hash": [], "hash": [],
"handshake": [], "handshake": [],
"upload": [] "upload": []
},
"bytes": {
"hashed": 0,
"uploaded": 0
} }
}; };
@@ -284,9 +234,9 @@ function up2k_init(have_crypto) {
return un2k("this is the basic uploader; up2k needs at least<br />chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1"); return un2k("this is the basic uploader; up2k needs at least<br />chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1");
function nav() { function nav() {
o('file' + fdom_ctr).click(); ebi('file' + fdom_ctr).click();
} }
o('u2btn').addEventListener('click', nav, false); ebi('u2btn').addEventListener('click', nav, false);
function ondrag(ev) { function ondrag(ev) {
ev.stopPropagation(); ev.stopPropagation();
@@ -294,8 +244,8 @@ function up2k_init(have_crypto) {
ev.dataTransfer.dropEffect = 'copy'; ev.dataTransfer.dropEffect = 'copy';
ev.dataTransfer.effectAllowed = 'copy'; ev.dataTransfer.effectAllowed = 'copy';
} }
o('u2btn').addEventListener('dragover', ondrag, false); ebi('u2btn').addEventListener('dragover', ondrag, false);
o('u2btn').addEventListener('dragenter', ondrag, false); ebi('u2btn').addEventListener('dragenter', ondrag, false);
function gotfile(ev) { function gotfile(ev) {
ev.stopPropagation(); ev.stopPropagation();
@@ -317,6 +267,7 @@ function up2k_init(have_crypto) {
more_one_file(); more_one_file();
var bad_files = []; var bad_files = [];
var good_files = [];
for (var a = 0; a < files.length; a++) { for (var a = 0; a < files.length; a++) {
var fobj = files[a]; var fobj = files[a];
if (is_itemlist) { if (is_itemlist) {
@@ -330,9 +281,32 @@ function up2k_init(have_crypto) {
throw 1; throw 1;
} }
catch (ex) { catch (ex) {
bad_files.push([a, fobj.name]); bad_files.push(fobj.name);
continue; continue;
} }
good_files.push(fobj);
}
if (bad_files.length > 0) {
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length);
for (var a = 0; a < bad_files.length; a++)
msg += '-- ' + bad_files[a] + '\n';
if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))
msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';
alert(msg);
}
var msg = ['upload these ' + good_files.length + ' files?'];
for (var a = 0; a < good_files.length; a++)
msg.push(good_files[a].name);
if (ask_up && !confirm(msg.join('\n')))
return;
for (var a = 0; a < good_files.length; a++) {
var fobj = good_files[a];
var now = new Date().getTime(); var now = new Date().getTime();
var lmod = fobj.lastModified || now; var lmod = fobj.lastModified || now;
var entry = { var entry = {
@@ -357,31 +331,20 @@ function up2k_init(have_crypto) {
var tr = document.createElement('tr'); var tr = document.createElement('tr');
tr.innerHTML = '<td id="f{0}n"></td><td id="f{0}t">hashing</td><td id="f{0}p" class="prog"></td>'.format(st.files.length); tr.innerHTML = '<td id="f{0}n"></td><td id="f{0}t">hashing</td><td id="f{0}p" class="prog"></td>'.format(st.files.length);
tr.getElementsByTagName('td')[0].textContent = entry.name; tr.getElementsByTagName('td')[0].textContent = entry.name;
o('u2tab').appendChild(tr); ebi('u2tab').appendChild(tr);
st.files.push(entry); st.files.push(entry);
st.todo.hash.push(entry); st.todo.hash.push(entry);
} }
if (bad_files.length > 0) {
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length);
for (var a = 0; a < bad_files.length; a++)
msg += '-- ' + bad_files[a][1] + '\n';
if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))
msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';
alert(msg);
}
} }
o('u2btn').addEventListener('drop', gotfile, false); ebi('u2btn').addEventListener('drop', gotfile, false);
function more_one_file() { function more_one_file() {
fdom_ctr++; fdom_ctr++;
var elm = document.createElement('div') var elm = document.createElement('div')
elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" />'.format(fdom_ctr); elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" />'.format(fdom_ctr);
o('u2form').appendChild(elm); ebi('u2form').appendChild(elm);
o('file' + fdom_ctr).addEventListener('change', gotfile, false); ebi('file' + fdom_ctr).addEventListener('change', gotfile, false);
} }
more_one_file(); more_one_file();
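The reworked gotfile() above now splits the selection into empty and usable files before anything is queued: the empty ones are reported (with the Firefox-Android single-file caveat), and when the new ask_up toggle is on, the user must confirm the remaining list first. A minimal sketch of that filter-then-confirm flow in Python (the paths and the prompt are stand-ins):

```
import os

def pick_files(paths, ask_up=True):
    """drop empty files, report them, then ask before uploading the rest"""
    bad = [p for p in paths if os.path.getsize(p) == 0]
    good = [p for p in paths if os.path.getsize(p) > 0]

    if bad:
        print("These {0} files (of {1} total) were skipped because they are empty:".format(
            len(bad), len(paths)))
        for p in bad:
            print("--", p)

    if ask_up and good:
        print("upload these {0} files?".format(len(good)))
        for p in good:
            print(" ", p)
        if input("[y/N] ").strip().lower() != "y":
            return []

    return good
```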
@@ -391,16 +354,20 @@ function up2k_init(have_crypto) {
// //
function handshakes_permitted() { function handshakes_permitted() {
return multitask || ( var lim = multitask ? 1 : 0;
st.todo.upload.length == 0 && return lim >=
st.busy.upload.length == 0); st.todo.upload.length +
st.busy.upload.length;
} }
function hashing_permitted() { function hashing_permitted() {
return multitask || ( if (multitask) {
handshakes_permitted() && var ahead = st.bytes.hashed - st.bytes.uploaded;
st.todo.handshake.length == 0 && return ahead < 1024 * 1024 * 128;
st.busy.handshake.length == 0); }
return handshakes_permitted() && 0 ==
st.todo.handshake.length +
st.busy.handshake.length;
} }
var tasker = (function () { var tasker = (function () {
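handshakes_permitted() and hashing_permitted() above replace the old all-or-nothing multitask check with a byte budget: hashing may run ahead of the uploads by at most 128 MiB (the 1024 * 1024 * 128 constant), which keeps the backlog bounded on fast CPUs with slow links. A minimal Python sketch of the same gate, with the queues and byte counters passed in explicitly:

```
RUNAHEAD = 128 * 1024 * 1024  # same limit as the js above

def handshakes_permitted(multitask, todo_upload, busy_upload):
    # while multitasking, tolerate one pending/running upload; otherwise none
    lim = 1 if multitask else 0
    return lim >= len(todo_upload) + len(busy_upload)

def hashing_permitted(multitask, bytes_hashed, bytes_uploaded,
                      todo_upload, busy_upload,
                      todo_handshake, busy_handshake):
    if multitask:
        # keep hashing only while the hashed-but-not-yet-uploaded
        # backlog stays below the runahead budget
        return (bytes_hashed - bytes_uploaded) < RUNAHEAD
    # single-task mode: wait until handshakes and uploads are drained
    return (handshakes_permitted(multitask, todo_upload, busy_upload)
            and not todo_handshake and not busy_handshake)

# 200 MiB hashed but only 40 MiB uploaded -> pause hashing for now
print(hashing_permitted(True, 200 << 20, 40 << 20, [], [], [], []))  # False
```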
@@ -451,17 +418,6 @@ function up2k_init(have_crypto) {
/// hashing /// hashing
// //
// https://gist.github.com/jonleighton/958841
function buf2b64_maybe_fucky(buffer) {
var ret = '';
var view = new DataView(buffer);
for (var i = 0; i < view.byteLength; i++) {
ret += String.fromCharCode(view.getUint8(i));
}
return window.btoa(ret).replace(
/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
}
// https://gist.github.com/jonleighton/958841 // https://gist.github.com/jonleighton/958841
function buf2b64(arrayBuffer) { function buf2b64(arrayBuffer) {
var base64 = ''; var base64 = '';
@@ -502,20 +458,6 @@ function up2k_init(have_crypto) {
return base64; return base64;
} }
// https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/digest
function buf2hex(buffer) {
var hexCodes = [];
var view = new DataView(buffer);
for (var i = 0; i < view.byteLength; i += 4) {
var value = view.getUint32(i) // 4 bytes per iter
var stringValue = value.toString(16) // doesn't pad
var padding = '00000000'
var paddedValue = (padding + stringValue).slice(-padding.length)
hexCodes.push(paddedValue);
}
return hexCodes.join("");
}
function get_chunksize(filesize) { function get_chunksize(filesize) {
var chunksize = 1024 * 1024; var chunksize = 1024 * 1024;
var stepsize = 512 * 1024; var stepsize = 512 * 1024;
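The two DataView helpers removed above (buf2b64_maybe_fucky and buf2hex) duplicated what the remaining buf2b64 is there for: turning a hash buffer into URL-safe, unpadded base64. For reference, the conversion the removed buf2b64_maybe_fucky performed is a one-liner with Python's standard library:

```
import base64, hashlib

digest = hashlib.sha512(b"chunk data").digest()
# '+' -> '-', '/' -> '_' and trailing '=' stripped, matching the
# replace() chain in the removed helper
print(base64.urlsafe_b64encode(digest).decode("ascii").rstrip("="))
```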
@@ -577,6 +519,7 @@ function up2k_init(have_crypto) {
var t = st.todo.hash.shift(); var t = st.todo.hash.shift();
st.busy.hash.push(t); st.busy.hash.push(t);
st.bytes.hashed += t.size;
t.t1 = new Date().getTime(); t.t1 = new Date().getTime();
var nchunk = 0; var nchunk = 0;
@@ -602,7 +545,7 @@ function up2k_init(have_crypto) {
pb_html += '<div id="f{0}p{1}" style="width:{2}%"><div></div></div>'.format( pb_html += '<div id="f{0}p{1}" style="width:{2}%"><div></div></div>'.format(
t.n, a, pb_perc); t.n, a, pb_perc);
o('f{0}p'.format(t.n)).innerHTML = pb_html; ebi('f{0}p'.format(t.n)).innerHTML = pb_html;
var reader = new FileReader(); var reader = new FileReader();
@@ -677,7 +620,7 @@ function up2k_init(have_crypto) {
alert('{0} ms, {1} MB/s\n'.format(t.t2 - t.t1, spd.toFixed(3)) + t.hash.join('\n')); alert('{0} ms, {1} MB/s\n'.format(t.t2 - t.t1, spd.toFixed(3)) + t.hash.join('\n'));
} }
o('f{0}t'.format(t.n)).innerHTML = 'connecting'; ebi('f{0}t'.format(t.n)).innerHTML = 'connecting';
st.busy.hash.splice(st.busy.hash.indexOf(t), 1); st.busy.hash.splice(st.busy.hash.indexOf(t), 1);
st.todo.handshake.push(t); st.todo.handshake.push(t);
}; };
@@ -706,7 +649,7 @@ function up2k_init(have_crypto) {
if (response.name !== t.name) { if (response.name !== t.name) {
// file exists; server renamed us // file exists; server renamed us
t.name = response.name; t.name = response.name;
o('f{0}n'.format(t.n)).textContent = t.name; ebi('f{0}n'.format(t.n)).textContent = t.name;
} }
t.postlist = []; t.postlist = [];
@@ -736,23 +679,41 @@ function up2k_init(have_crypto) {
msg = 'uploading'; msg = 'uploading';
done = false; done = false;
} }
o('f{0}t'.format(t.n)).innerHTML = msg; ebi('f{0}t'.format(t.n)).innerHTML = msg;
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1); st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
if (done) { if (done) {
var spd1 = (t.size / ((t.t2 - t.t1) / 1000.)) / (1024 * 1024.); var spd1 = (t.size / ((t.t2 - t.t1) / 1000.)) / (1024 * 1024.);
var spd2 = (t.size / ((t.t3 - t.t2) / 1000.)) / (1024 * 1024.); var spd2 = (t.size / ((t.t3 - t.t2) / 1000.)) / (1024 * 1024.);
o('f{0}p'.format(t.n)).innerHTML = 'hash {0}, up {1} MB/s'.format( ebi('f{0}p'.format(t.n)).innerHTML = 'hash {0}, up {1} MB/s'.format(
spd1.toFixed(2), spd2.toFixed(2)); spd1.toFixed(2), spd2.toFixed(2));
} }
tasker(); tasker();
} }
else else {
var err = "";
var rsp = (xhr.responseText + '');
if (rsp.indexOf('partial upload exists') !== -1 ||
rsp.indexOf('file already exists') !== -1) {
err = rsp;
var ofs = err.lastIndexOf(' : ');
if (ofs > 0)
err = err.slice(0, ofs);
}
if (err != "") {
ebi('f{0}t'.format(t.n)).innerHTML = "ERROR";
ebi('f{0}p'.format(t.n)).innerHTML = err;
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
tasker();
return;
}
alert("server broke (error {0}):\n\"{1}\"\n".format( alert("server broke (error {0}):\n\"{1}\"\n".format(
xhr.status, xhr.status,
(xhr.response && xhr.response.err) || (xhr.response && xhr.response.err) ||
(xhr.responseText && xhr.responseText) || (xhr.responseText && xhr.responseText) ||
"no further information")); "no further information"));
}
}; };
xhr.open('POST', post_url + 'handshake.php', true); xhr.open('POST', post_url + 'handshake.php', true);
xhr.responseType = 'text'; xhr.responseType = 'text';
@@ -799,11 +760,12 @@ function up2k_init(have_crypto) {
xhr.onload = function (xev) { xhr.onload = function (xev) {
if (xhr.status == 200) { if (xhr.status == 200) {
prog(t.n, npart, col_uploaded); prog(t.n, npart, col_uploaded);
st.bytes.uploaded += cdr - car;
st.busy.upload.splice(st.busy.upload.indexOf(upt), 1); st.busy.upload.splice(st.busy.upload.indexOf(upt), 1);
t.postlist.splice(t.postlist.indexOf(npart), 1); t.postlist.splice(t.postlist.indexOf(npart), 1);
if (t.postlist.length == 0) { if (t.postlist.length == 0) {
t.t3 = new Date().getTime(); t.t3 = new Date().getTime();
o('f{0}t'.format(t.n)).innerHTML = 'verifying'; ebi('f{0}t'.format(t.n)).innerHTML = 'verifying';
st.todo.handshake.push(t); st.todo.handshake.push(t);
} }
tasker(); tasker();
@@ -834,7 +796,7 @@ function up2k_init(have_crypto) {
// //
function prog(nfile, nchunk, color, percent) { function prog(nfile, nchunk, color, percent) {
var n1 = o('f{0}p{1}'.format(nfile, nchunk)); var n1 = ebi('f{0}p{1}'.format(nfile, nchunk));
var n2 = n1.getElementsByTagName('div')[0]; var n2 = n1.getElementsByTagName('div')[0];
if (percent === undefined) { if (percent === undefined) {
n1.style.background = color; n1.style.background = color;
@@ -857,7 +819,7 @@ function up2k_init(have_crypto) {
dir.preventDefault(); dir.preventDefault();
} catch (ex) { } } catch (ex) { }
var obj = o('nthread'); var obj = ebi('nthread');
if (dir.target) { if (dir.target) {
obj.style.background = '#922'; obj.style.background = '#922';
var v = Math.floor(parseInt(obj.value)); var v = Math.floor(parseInt(obj.value));
@@ -887,24 +849,30 @@ function up2k_init(have_crypto) {
bcfg_set('multitask', multitask); bcfg_set('multitask', multitask);
} }
function tgl_ask_up() {
ask_up = !ask_up;
bcfg_set('ask_up', ask_up);
}
function nop(ev) { function nop(ev) {
ev.preventDefault(); ev.preventDefault();
this.click(); this.click();
} }
o('nthread_add').onclick = function (ev) { ebi('nthread_add').onclick = function (ev) {
ev.preventDefault(); ev.preventDefault();
bumpthread(1); bumpthread(1);
}; };
o('nthread_sub').onclick = function (ev) { ebi('nthread_sub').onclick = function (ev) {
ev.preventDefault(); ev.preventDefault();
bumpthread(-1); bumpthread(-1);
}; };
o('nthread').addEventListener('input', bumpthread, false); ebi('nthread').addEventListener('input', bumpthread, false);
o('multitask').addEventListener('click', tgl_multitask, false); ebi('multitask').addEventListener('click', tgl_multitask, false);
ebi('ask_up').addEventListener('click', tgl_ask_up, false);
var nodes = o('u2conf').getElementsByTagName('a'); var nodes = ebi('u2conf').getElementsByTagName('a');
for (var a = nodes.length - 1; a >= 0; a--) for (var a = nodes.length - 1; a >= 0; a--)
nodes[a].addEventListener('touchend', nop, false); nodes[a].addEventListener('touchend', nop, false);


@@ -194,6 +194,12 @@
#u2conf input+a { #u2conf input+a {
background: #d80; background: #d80;
} }
#u2conf input[type="checkbox"]+label {
color: #f5a;
}
#u2conf input[type="checkbox"]:checked+label {
color: #fc5;
}
#u2foot { #u2foot {
color: #fff; color: #fff;
font-style: italic; font-style: italic;


@@ -3,7 +3,8 @@
href="#" data-dest="up2k">up2k</a><i></i><a href="#" data-dest="up2k">up2k</a><i></i><a
href="#" data-dest="bup">bup</a><i></i><a href="#" data-dest="bup">bup</a><i></i><a
href="#" data-dest="mkdir">mkdir</a><i></i><a href="#" data-dest="mkdir">mkdir</a><i></i><a
href="#" data-dest="new_md">new.md</a></div> href="#" data-dest="new_md">new.md</a><i></i><a
href="#" data-dest="msg">msg</a></div>
<div id="op_bup" class="opview opbox act"> <div id="op_bup" class="opview opbox act">
<div id="u2err"></div> <div id="u2err"></div>
@@ -30,6 +31,13 @@
</form> </form>
</div> </div>
<div id="op_msg" class="opview opbox">
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="/{{ vdir }}">
<input type="text" name="msg" size="30">
<input type="submit" value="send">
</form>
</div>
<div id="op_up2k" class="opview"> <div id="op_up2k" class="opview">
<form id="u2form" method="post" enctype="multipart/form-data" onsubmit="return false;"></form> <form id="u2form" method="post" enctype="multipart/form-data" onsubmit="return false;"></form>
@@ -43,10 +51,14 @@
<input class="txtbox" id="nthread" value="2" /> <input class="txtbox" id="nthread" value="2" />
<a href="#" id="nthread_add">+</a> <a href="#" id="nthread_add">+</a>
</td> </td>
<td rowspan="2"> <td rowspan="2" style="padding-left:1.5em">
<input type="checkbox" id="multitask" /> <input type="checkbox" id="multitask" />
<label for="multitask">hash while<br />uploading</label> <label for="multitask">hash while<br />uploading</label>
</td> </td>
<td rowspan="2">
<input type="checkbox" id="ask_up" />
<label for="ask_up">ask for<br />confirmation</label>
</td>
</tr> </tr>
</table> </table>
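The new msg tab above is just a plain url-encoded POST of one msg field to the volume's own URL. Assuming a copyparty instance on 127.0.0.1:3923 with a writable root volume (both assumptions for this sketch), the same request could be sent from Python:

```
from urllib.parse import urlencode
from urllib.request import urlopen

# one url-encoded "msg" field, posted to the volume directory,
# just like the <form> in op_msg above
data = urlencode({"msg": "hello from python"}).encode("utf-8")
with urlopen("http://127.0.0.1:3923/", data=data) as r:
    print(r.getcode())
```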

copyparty/web/util.js Normal file, 109 lines

@@ -0,0 +1,109 @@
"use strict";
// error handler for mobile devices
function hcroak(msg) {
document.body.innerHTML = msg;
window.onerror = undefined;
throw 'fatal_err';
}
function croak(msg) {
document.body.textContent = msg;
window.onerror = undefined;
throw msg;
}
function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) {
return {
'&': '&amp;',
'"': '&quot;',
'<': '&lt;',
'>': '&gt;'
}[c];
});
}
function vis_exh(msg, url, lineNo, columnNo, error) {
window.onerror = undefined;
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
if (error) {
var find = ['desc', 'stack', 'trace'];
for (var a = 0; a < find.length; a++)
if (String(error[find[a]]) !== 'undefined')
html.push('<h2>' + find[a] + '</h2>' +
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
}
document.body.style.fontSize = '0.8em';
document.body.style.padding = '0 1em 1em 1em';
hcroak(html.join('\n'));
}
function ebi(id) {
return document.getElementById(id);
}
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
if (!String.prototype.endsWith) {
String.prototype.endsWith = function (search, this_len) {
if (this_len === undefined || this_len > this.length) {
this_len = this.length;
}
return this.substring(this_len - search.length, this_len) === search;
};
}
if (!String.startsWith) {
String.prototype.startsWith = function (s, i) {
i = i > 0 ? i | 0 : 0;
return this.substring(i, i + s.length) === s;
};
}
// https://stackoverflow.com/a/950146
function import_js(url, cb) {
var head = document.head || document.getElementsByTagName('head')[0];
var script = document.createElement('script');
script.type = 'text/javascript';
script.src = url;
script.onreadystatechange = cb;
script.onload = cb;
head.appendChild(script);
}
function sortTable(table, col) {
var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows
th = table.tHead.rows[0].cells,
tr = Array.prototype.slice.call(tb.rows, 0),
i, reverse = th[col].className == 'sort1' ? -1 : 1;
for (var a = 0, thl = th.length; a < thl; a++)
th[a].className = '';
th[col].className = 'sort' + reverse;
var stype = th[col].getAttribute('sort');
tr = tr.sort(function (a, b) {
var v1 = a.cells[col].textContent.trim();
var v2 = b.cells[col].textContent.trim();
if (stype == 'int') {
v1 = parseInt(v1.replace(/,/g, ''));
v2 = parseInt(v2.replace(/,/g, ''));
return reverse * (v1 - v2);
}
return reverse * (v1.localeCompare(v2));
});
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]);
}
function makeSortable(table) {
var th = table.tHead, i;
th && (th = th.rows[0]) && (th = th.cells);
if (th) i = th.length;
else return; // if no `<thead>` then do nothing
while (--i >= 0) (function (i) {
th[i].onclick = function () {
sortTable(table, i);
};
}(i));
}
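makeSortable() / sortTable() above give tables click-to-sort headers: a column marked sort="int" is compared numerically with thousands separators stripped, everything else lexically, and clicking the same header again flips the direction. The same ordering rule as a small Python sketch:

```
def sort_rows(rows, col, numeric=False, reverse=False):
    """order table rows by one column, like sortTable(): int columns are
    compared with commas stripped, the rest as plain strings"""
    def key(row):
        v = row[col].strip()
        return int(v.replace(",", "")) if numeric else v
    return sorted(rows, key=key, reverse=reverse)

rows = [["b.txt", "1,024"], ["a.txt", "2"], ["c.txt", "512"]]
print(sort_rows(rows, 1, numeric=True))   # a.txt (2 bytes) first
print(sort_rows(rows, 0, reverse=True))   # c.txt first
```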


@@ -3,6 +3,14 @@ echo not a script
exit 1 exit 1
##
## delete all partial uploads
## (supports linux/macos, probably windows+msys2)
gzip -d < .hist/up2k.snap | jq -r '.[].tnam' | while IFS= read -r f; do rm -f -- "$f"; done
gzip -d < .hist/up2k.snap | jq -r '.[].name' | while IFS= read -r f; do wc -c -- "$f" | grep -qiE '^[^0-9a-z]*0' && rm -f -- "$f"; done
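The two pipelines above read the gzipped up2k snapshot and clean up after interrupted uploads: every tnam (temporary upload file) is deleted, and every name whose file is still zero bytes long. Assuming .hist/up2k.snap is a JSON collection of upload entries with tnam/name fields (which is what the jq filters imply), and running from the volume root like the shell version, the same cleanup in Python would be roughly:

```
import gzip, json, os

with gzip.open(".hist/up2k.snap", "rt", encoding="utf-8") as f:
    snap = json.load(f)

# jq's .[] iterates both arrays and objects
entries = snap.values() if isinstance(snap, dict) else snap

for e in entries:
    tnam, name = e.get("tnam"), e.get("name")
    if tnam and os.path.isfile(tnam):
        os.remove(tnam)  # partial upload data
    if name and os.path.isfile(name) and os.path.getsize(name) == 0:
        os.remove(name)  # empty placeholder left behind
```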
## ##
## create a test payload ## create a test payload


@@ -1,4 +1,5 @@
#!/bin/bash #!/bin/bash
repacker=1
set -e set -e
# -- download latest copyparty (source.tgz and sfx), # -- download latest copyparty (source.tgz and sfx),
@@ -19,19 +20,32 @@ set -e
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py # -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py
command -v gtar && tar() { gtar "$@"; }
command -v gsed && sed() { gsed "$@"; }
td="$(mktemp -d)" td="$(mktemp -d)"
od="$(pwd)" od="$(pwd)"
cd "$td" cd "$td"
pwd pwd
# debug: if cache exists, use that instead of bothering github dl_text() {
command -v curl && exec curl "$@"
exec wget -O- "$@"
}
dl_files() {
command -v curl && exec curl -L --remote-name-all "$@"
exec wget "$@"
}
export -f dl_files
# if cache exists, use that instead of bothering github
cache="$od/.copyparty-repack.cache" cache="$od/.copyparty-repack.cache"
[ -e "$cache" ] && [ -e "$cache" ] &&
tar -xvf "$cache" || tar -xf "$cache" ||
{ {
# get download links from github # get download links from github
curl https://api.github.com/repos/9001/copyparty/releases/latest | dl_text https://api.github.com/repos/9001/copyparty/releases/latest |
( (
# prefer jq if available # prefer jq if available
jq -r '.assets[]|select(.name|test("-sfx|tar.gz")).browser_download_url' || jq -r '.assets[]|select(.name|test("-sfx|tar.gz")).browser_download_url' ||
@@ -40,10 +54,10 @@ cache="$od/.copyparty-repack.cache"
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}' awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
) | ) |
tee /dev/stderr | tee /dev/stderr |
tr -d '\r' | tr '\n' '\0' | xargs -0 curl -L --remote-name-all tr -d '\r' | tr '\n' '\0' |
xargs -0 bash -c 'dl_files "$@"' _
# debug: create cache tar -czf "$cache" *
#tar -czvf "$cache" *
} }
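With the dl_text/dl_files wrappers above, the repacker asks the GitHub API for the latest release and downloads every asset whose name matches -sfx or .tar.gz. A rough Python equivalent of that discovery-and-download step, stdlib only, with the asset filter mirroring the jq expression above:

```
import json, os, re
from urllib.request import urlopen, urlretrieve

api = "https://api.github.com/repos/9001/copyparty/releases/latest"
with urlopen(api) as r:
    rel = json.load(r)

for asset in rel["assets"]:
    if re.search(r"-sfx|tar\.gz", asset["name"]):
        url = asset["browser_download_url"]
        print("downloading", url)
        urlretrieve(url, os.path.basename(url))  # save into the cwd
```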
@@ -56,10 +70,21 @@ mv copyparty-*.tar.gz copyparty-extras/
# unpack the source code # unpack the source code
( cd copyparty-extras/ ( cd copyparty-extras/
tar -xvf *.tar.gz tar -xf *.tar.gz
) )
# use repacker from release if that is newer
p_other=copyparty-extras/copyparty-*/scripts/copyparty-repack.sh
other=$(awk -F= 'BEGIN{v=-1} NR<10&&/^repacker=/{v=$NF} END{print v}' <$p_other)
[ $repacker -lt $other ] &&
cat $p_other >"$od/$0" && cd "$od" && rm -rf "$td" && exec "$0" "$@"
# now drop the cache
rm -f "$cache"
# fix permissions # fix permissions
chmod 755 \ chmod 755 \
copyparty-extras/sfx-full/* \ copyparty-extras/sfx-full/* \
@@ -86,8 +111,10 @@ rm -rf copyparty-{0..9}*.*.*{0..9}
) )
# and include the repacker itself too # and include the repacker itself too
cp -pv "$od/$0" copyparty-extras/ cp -av "$od/$0" copyparty-extras/ ||
cp -av "$0" copyparty-extras/ ||
true
# create the bundle # create the bundle


@@ -4,10 +4,10 @@ import os
import time import time
""" """
mkdir -p /dev/shm/fusefuzz/{r,v} td=/dev/shm/; [ -e $td ] || td=$HOME; mkdir -p $td/fusefuzz/{r,v}
PYTHONPATH=.. python3 -m copyparty -v /dev/shm/fusefuzz/r::r -i 127.0.0.1 PYTHONPATH=.. python3 -m copyparty -v $td/fusefuzz/r::r -i 127.0.0.1
../bin/copyparty-fuse.py /dev/shm/fusefuzz/v http://127.0.0.1:3923/ 2 0 ../bin/copyparty-fuse.py http://127.0.0.1:3923/ $td/fusefuzz/v -cf 2 -cd 0.5
(d="$PWD"; cd /dev/shm/fusefuzz && "$d"/fusefuzz.py) (d="$PWD"; cd $td/fusefuzz && "$d"/fusefuzz.py)
""" """


@@ -3,12 +3,15 @@ set -e
echo echo
# osx support # osx support
command -v gtar >/dev/null && # port install gnutar findutils gsed coreutils
command -v gfind >/dev/null && { gtar=$(command -v gtar || command -v gnutar) || true
tar() { gtar "$@"; } [ ! -z "$gtar" ] && command -v gfind >/dev/null && {
tar() { $gtar "$@"; }
sed() { gsed "$@"; } sed() { gsed "$@"; }
find() { gfind "$@"; } find() { gfind "$@"; }
sort() { gsort "$@"; } sort() { gsort "$@"; }
command -v grealpath >/dev/null &&
realpath() { grealpath "$@"; }
} }
which md5sum 2>/dev/null >/dev/null && which md5sum 2>/dev/null >/dev/null &&


@@ -18,13 +18,16 @@ echo
# (the fancy markdown editor) # (the fancy markdown editor)
command -v gtar >/dev/null && # port install gnutar findutils gsed coreutils
command -v gfind >/dev/null && { gtar=$(command -v gtar || command -v gnutar) || true
tar() { gtar "$@"; } [ ! -z "$gtar" ] && command -v gfind >/dev/null && {
tar() { $gtar "$@"; }
sed() { gsed "$@"; } sed() { gsed "$@"; }
find() { gfind "$@"; } find() { gfind "$@"; }
sort() { gsort "$@"; } sort() { gsort "$@"; }
unexpand() { gunexpand "$@"; } unexpand() { gunexpand "$@"; }
command -v grealpath >/dev/null &&
realpath() { grealpath "$@"; }
} }
[ -e copyparty/__main__.py ] || cd .. [ -e copyparty/__main__.py ] || cd ..
@@ -59,28 +62,32 @@ cd sfx
)/pe-copyparty" )/pe-copyparty"
echo "repack of files in $old" echo "repack of files in $old"
cp -pR "$old/"*{jinja2,copyparty} . cp -pR "$old/"*{dep-j2,copyparty} .
mv {x.,}jinja2 2>/dev/null || true
} }
[ $repack ] || { [ $repack ] || {
echo collecting jinja2 echo collecting jinja2
f="../build/Jinja2-2.6.tar.gz" f="../build/Jinja2-2.11.3.tar.gz"
[ -e "$f" ] || [ -e "$f" ] ||
(url=https://files.pythonhosted.org/packages/25/c8/212b1c2fd6df9eaf536384b6c6619c4e70a3afd2dffdd00e5296ffbae940/Jinja2-2.6.tar.gz; (url=https://files.pythonhosted.org/packages/4f/e7/65300e6b32e69768ded990494809106f87da1d436418d5f1367ed3966fd7/Jinja2-2.11.3.tar.gz;
wget -O$f "$url" || curl -L "$url" >$f) wget -O$f "$url" || curl -L "$url" >$f)
tar -zxf $f tar -zxf $f
mv Jinja2-*/jinja2 . mv Jinja2-*/src/jinja2 .
rm -rf Jinja2-* jinja2/testsuite jinja2/_markupsafe/tests.py jinja2/_stringdefs.py rm -rf Jinja2-*
f=jinja2/lexer.py echo collecting markupsafe
sed -r '/.*föö.*/ raise SyntaxError/' <$f >t f="../build/MarkupSafe-1.1.1.tar.gz"
tmv $f [ -e "$f" ] ||
(url=https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz;
f=jinja2/_markupsafe/_constants.py wget -O$f "$url" || curl -L "$url" >$f)
awk '!/: [0-9]+,?$/ || /(amp|gt|lt|quot|apos|nbsp).:/' <$f >t
tmv $f tar -zxf $f
mv MarkupSafe-*/src/markupsafe .
rm -rf MarkupSafe-* markupsafe/_speedups.c
mkdir dep-j2/
mv {markupsafe,jinja2} dep-j2/
# msys2 tar is bad, make the best of it # msys2 tar is bad, make the best of it
echo collecting source echo collecting source
@@ -94,8 +101,39 @@ cd sfx
rm -f ../tar rm -f ../tar
} }
ver="$(awk '/^VERSION *= \(/ { ver=
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < ../copyparty/__version__.py)" git describe --tags >/dev/null 2>/dev/null && {
git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//; s/-g?/./g')";
t_ver=
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+$' && {
# short format (exact version number)
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g')";
}
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+-[0-9]+-g[0-9a-f]+$' && {
# long format (unreleased commit)
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g; s/(.*) (.*)/\1 "\2"/')"
}
[ -z "$t_ver" ] && {
printf 'unexpected git version format: [%s]\n' "$git_ver"
exit 1
}
dt="$(git log -1 --format=%cd --date=format:'%Y,%m,%d' | sed -E 's/,0?/, /g')"
printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt"
sed -ri '
s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/;
s/^(S_VERSION =)(.*)/#\1\2\n\1 "'"$ver"'"/;
s/^(BUILD_DT =)(.*)/#\1\2\n\1 ('"$dt"')/;
' copyparty/__version__.py
}
[ -z "$ver" ] &&
ver="$(awk '/^VERSION *= \(/ {
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"
ts=$(date -u +%s) ts=$(date -u +%s)
hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx) hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)
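The version-detection block above derives the build version from git describe --tags: an exact tag such as v0.7.6 becomes a plain numeric tuple, while an untagged commit such as v0.5.5-2-gb164aa0 becomes 0.5.5.2.b164aa0 with the abbreviated hash kept as a string, and the result is sed-patched into copyparty/__version__.py. A Python sketch of the same parsing (the function name is illustrative; the regexes follow the script above):

```
import re

def parse_git_ver(git_ver):
    """map `git describe --tags` output to (S_VERSION, VERSION tuple)"""
    ver = re.sub(r"-g?", ".", git_ver.lstrip("v"))  # v0.5.5-2-gb164aa0 -> 0.5.5.2.b164aa0
    if re.match(r"^v[0-9.]+$", git_ver):
        # short format: exact release tag, all-numeric tuple
        return ver, tuple(int(x) for x in ver.split("."))
    if re.match(r"^v[0-9.]+-[0-9]+-g[0-9a-f]+$", git_ver):
        # long format: unreleased commit, hash stays a string
        parts = ver.split(".")
        return ver, tuple(int(x) for x in parts[:-1]) + (parts[-1],)
    raise ValueError("unexpected git version format: " + git_ver)

print(parse_git_ver("v0.7.6"))             # ('0.7.6', (0, 7, 6))
print(parse_git_ver("v0.5.5-2-gb164aa0"))  # ('0.5.5.2.b164aa0', (0, 5, 5, 2, 'b164aa0'))
```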
@@ -131,6 +169,15 @@ done
sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f" sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f"
} }
find | grep -E '\.py$' |
grep -vE '__version__' |
tr '\n' '\0' |
xargs -0 python ../scripts/uncomment.py
f=dep-j2/jinja2/constants.py
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
tmv "$f"
# up2k goes from 28k to 22k laff # up2k goes from 28k to 22k laff
echo entabbening echo entabbening
find | grep -E '\.(js|css|html|py)$' | while IFS= read -r f; do find | grep -E '\.(js|css|html|py)$' | while IFS= read -r f; do
@@ -143,7 +190,7 @@ args=(--owner=1000 --group=1000)
[ "$OSTYPE" = msys ] && [ "$OSTYPE" = msys ] &&
args=() args=()
tar -cf tar "${args[@]}" --numeric-owner copyparty jinja2 tar -cf tar "${args[@]}" --numeric-owner copyparty dep-j2
echo compressing tar echo compressing tar
# detect best level; bzip2 -7 is usually better than -9 # detect best level; bzip2 -7 is usually better than -9


@@ -2,12 +2,16 @@
set -e set -e
echo echo
command -v gtar >/dev/null && # osx support
command -v gfind >/dev/null && { # port install gnutar findutils gsed coreutils
tar() { gtar "$@"; } gtar=$(command -v gtar || command -v gnutar) || true
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
tar() { $gtar "$@"; }
sed() { gsed "$@"; } sed() { gsed "$@"; }
find() { gfind "$@"; } find() { gfind "$@"; }
sort() { gsort "$@"; } sort() { gsort "$@"; }
command -v grealpath >/dev/null &&
realpath() { grealpath "$@"; }
} }
which md5sum 2>/dev/null >/dev/null && which md5sum 2>/dev/null >/dev/null &&
@@ -16,15 +20,15 @@ which md5sum 2>/dev/null >/dev/null &&
ver="$1" ver="$1"
[[ "x$ver" == x ]] && [ "x$ver" = x ] &&
{ {
echo "need argument 1: version" echo "need argument 1: version"
echo echo
exit 1 exit 1
} }
[[ -e copyparty/__main__.py ]] || cd .. [ -e copyparty/__main__.py ] || cd ..
[[ -e copyparty/__main__.py ]] || [ -e copyparty/__main__.py ] ||
{ {
echo "run me from within the project root folder" echo "run me from within the project root folder"
echo echo
@@ -35,8 +39,8 @@ mkdir -p dist
zip_path="$(pwd)/dist/copyparty-$ver.zip" zip_path="$(pwd)/dist/copyparty-$ver.zip"
tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz" tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz"
[[ -e "$zip_path" ]] || [ -e "$zip_path" ] ||
[[ -e "$tgz_path" ]] && [ -e "$tgz_path" ] &&
{ {
echo "found existing archives for this version" echo "found existing archives for this version"
echo " $zip_path" echo " $zip_path"


@@ -2,7 +2,7 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import re, os, sys, time, shutil, signal, tarfile, hashlib, platform, tempfile import os, sys, time, shutil, signal, tarfile, hashlib, platform, tempfile
import subprocess as sp import subprocess as sp
""" """
@@ -202,93 +202,6 @@ def u8(gen):
yield s yield s
def get_py_win(ret):
tops = []
p = str(os.getenv("LocalAppdata"))
if p:
tops.append(os.path.join(p, "Programs", "Python"))
progfiles = {}
for p in ["ProgramFiles", "ProgramFiles(x86)"]:
p = str(os.getenv(p))
if p:
progfiles[p] = 1
# 32bit apps get x86 for both
if p.endswith(" (x86)"):
progfiles[p[:-6]] = 1
tops += list(progfiles.keys())
for sysroot in [me, sys.executable]:
sysroot = sysroot[:3].upper()
if sysroot[1] == ":" and sysroot not in tops:
tops.append(sysroot)
# $WIRESHARK_SLOGAN
for top in tops:
try:
for name1 in u8(sorted(os.listdir(top), reverse=True)):
if name1.lower().startswith("python"):
path1 = os.path.join(top, name1)
try:
for name2 in u8(os.listdir(path1)):
if name2.lower() == "python.exe":
path2 = os.path.join(path1, name2)
ret[path2.lower()] = path2
except:
pass
except:
pass
def get_py_nix(ret):
ptn = re.compile(r"^(python|pypy)[0-9\.-]*$")
for bindir in os.getenv("PATH").split(":"):
if not bindir:
next
try:
for fn in u8(os.listdir(bindir)):
if ptn.match(fn):
fn = os.path.join(bindir, fn)
ret[fn.lower()] = fn
except:
pass
def read_py(binp):
cmd = [
binp,
"-c",
"import sys; sys.stdout.write(' '.join(str(x) for x in sys.version_info)); import jinja2",
]
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
ver, _ = p.communicate()
ver = ver.decode("utf-8").split(" ")[:3]
ver = [int(x) if x.isdigit() else 0 for x in ver]
return ver, p.returncode == 0
def get_pys():
ver, chk = read_py(sys.executable)
if chk or PY2:
return [[chk, ver, sys.executable]]
hits = {sys.executable.lower(): sys.executable}
if platform.system() == "Windows":
get_py_win(hits)
else:
get_py_nix(hits)
ret = []
for binp in hits.values():
ver, chk = read_py(binp)
ret.append([chk, ver, binp])
msg("\t".join(str(x) for x in ret[-1]))
return ret
def yieldfile(fn): def yieldfile(fn):
with open(fn, "rb") as f: with open(fn, "rb") as f:
for block in iter(lambda: f.read(64 * 1024), b""): for block in iter(lambda: f.read(64 * 1024), b""):
@@ -440,12 +353,11 @@ def confirm():
pass pass
def run(tmp, py): def run(tmp, j2ver):
global cpp global cpp
msg("OK") msg("jinja2:", j2ver or "bundled")
msg("will use:", py) msg("sfxdir:", tmp)
msg("bound to:", tmp)
# "systemd-tmpfiles-clean.timer"?? HOW do you even come up with this shit # "systemd-tmpfiles-clean.timer"?? HOW do you even come up with this shit
try: try:
@@ -457,24 +369,20 @@ def run(tmp, py):
except: except:
pass pass
fp_py = os.path.join(tmp, "py") ld = [tmp, os.path.join(tmp, "dep-j2")]
try: if j2ver:
with open(fp_py, "wb") as f: del ld[-1]
f.write(py.encode("utf-8") + b"\n")
except:
pass
# avoid loading ./copyparty.py cmd = (
cmd = [ "import sys, runpy; "
py, + "".join(['sys.path.insert(0, r"' + x + '"); ' for x in ld])
"-c", + 'runpy.run_module("copyparty", run_name="__main__")'
'import sys, runpy; sys.path.insert(0, r"' )
+ tmp cmd = [sys.executable, "-c", cmd] + list(sys.argv[1:])
+ '"); runpy.run_module("copyparty", run_name="__main__")',
] + list(sys.argv[1:])
cmd = [str(x) for x in cmd]
msg("\n", cmd, "\n") msg("\n", cmd, "\n")
cpp = sp.Popen(str(x) for x in cmd) cpp = sp.Popen(cmd)
try: try:
cpp.wait() cpp.wait()
except: except:
@@ -494,7 +402,6 @@ def bye(sig, frame):
def main(): def main():
sysver = str(sys.version).replace("\n", "\n" + " " * 18) sysver = str(sys.version).replace("\n", "\n" + " " * 18)
pktime = time.strftime("%Y-%m-%d, %H:%M:%S", time.gmtime(STAMP)) pktime = time.strftime("%Y-%m-%d, %H:%M:%S", time.gmtime(STAMP))
os.system("")
msg() msg()
msg(" this is: copyparty", VER) msg(" this is: copyparty", VER)
msg(" packed at:", pktime, "UTC,", STAMP) msg(" packed at:", pktime, "UTC,", STAMP)
@@ -526,33 +433,13 @@ def main():
signal.signal(signal.SIGTERM, bye) signal.signal(signal.SIGTERM, bye)
tmp = unpack() tmp = unpack()
fp_py = os.path.join(tmp, "py")
if os.path.exists(fp_py):
with open(fp_py, "rb") as f:
py = f.read().decode("utf-8").rstrip()
return run(tmp, py) try:
from jinja2 import __version__ as j2ver
except:
j2ver = None
pys = get_pys() return run(tmp, j2ver)
pys.sort(reverse=True)
j2, ver, py = pys[0]
if j2:
try:
os.rename(os.path.join(tmp, "jinja2"), os.path.join(tmp, "x.jinja2"))
except:
pass
return run(tmp, py)
msg("\n could not find jinja2; will use py2 + the bundled version\n")
for _, ver, py in pys:
if ver > [2, 7] and ver < [3, 0]:
return run(tmp, py)
m = "\033[1;31m\n\n\ncould not find a python with jinja2 installed; please do one of these:\n\n pip install --user jinja2\n\n install python2\n\n\033[0m"
msg(m)
confirm()
sys.exit(1)
if __name__ == "__main__": if __name__ == "__main__":
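After the rewrite above, sfx.py no longer scans the system for an interpreter that has jinja2: it always re-launches the current interpreter with a -c one-liner that puts the extracted payload on sys.path (plus the bundled dep-j2 folder, but only when jinja2 is not already importable) and then starts copyparty through runpy. A standalone sketch of that launch pattern, with the paths as placeholders:

```
import os, sys
import subprocess as sp

def launch(tmp, argv):
    """start copyparty from an unpacked sfx dir using the running python"""
    try:
        import jinja2  # noqa: F401 -- system jinja2 is good enough
        ld = [tmp]
    except ImportError:
        ld = [tmp, os.path.join(tmp, "dep-j2")]  # fall back to the bundled copy

    cmd = (
        "import sys, runpy; "
        + "".join('sys.path.insert(0, r"' + x + '"); ' for x in ld)
        + 'runpy.run_module("copyparty", run_name="__main__")'
    )
    return sp.Popen([sys.executable, "-c", cmd] + list(argv))
```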

scripts/uncomment.py Normal file, 77 lines

@@ -0,0 +1,77 @@
#!/usr/bin/env python
# coding: utf-8
from __future__ import print_function, unicode_literals
import io
import sys
import tokenize
def uncomment(fpath):
""" modified https://stackoverflow.com/a/62074206 """
with open(fpath, "rb") as f:
orig = f.read().decode("utf-8")
out = ""
for ln in orig.split("\n"):
if not ln.startswith("#"):
break
out += ln + "\n"
io_obj = io.StringIO(orig)
prev_toktype = tokenize.INDENT
last_lineno = -1
last_col = 0
for tok in tokenize.generate_tokens(io_obj.readline):
# print(repr(tok))
token_type = tok[0]
token_string = tok[1]
start_line, start_col = tok[2]
end_line, end_col = tok[3]
if start_line > last_lineno:
last_col = 0
if start_col > last_col:
out += " " * (start_col - last_col)
is_legalese = (
"copyright" in token_string.lower() or "license" in token_string.lower()
)
if token_type == tokenize.STRING:
if (
prev_toktype != tokenize.INDENT
and prev_toktype != tokenize.NEWLINE
and start_col > 0
or is_legalese
):
out += token_string
else:
out += '"a"'
elif token_type != tokenize.COMMENT or is_legalese:
out += token_string
prev_toktype = token_type
last_lineno = end_line
last_col = end_col
# out = "\n".join(x for x in out.splitlines() if x.strip())
with open(fpath, "wb") as f:
f.write(out.encode("utf-8"))
def main():
print("uncommenting", end="")
for f in sys.argv[1:]:
print(".", end="")
uncomment(f)
print("k")
if __name__ == "__main__":
main()
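uncomment.py above is what make-sfx.sh pipes the bundled sources through: it tokenizes each file, drops comments and standalone docstrings (keeping the leading shebang/coding lines and anything mentioning copyright or license), and rewrites the file in place. A quick way to see the effect, assuming the script is importable from the scripts folder:

```
import os, tempfile
from uncomment import uncomment  # scripts/uncomment.py, shown above

src = (
    "#!/usr/bin/env python\n"
    "def f():\n"
    '    "standalone docstrings are replaced with a stub"\n'
    "    x = 1  # this trailing comment is removed\n"
    "    return x\n"
)

fd, path = tempfile.mkstemp(suffix=".py")
with os.fdopen(fd, "w") as f:
    f.write(src)

uncomment(path)  # rewrites the file in place
with open(path) as f:
    print(f.read())
os.remove(path)
```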


@@ -2,10 +2,8 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function from __future__ import print_function
import io
import os import os
import sys import sys
from glob import glob
from shutil import rmtree from shutil import rmtree
setuptools_available = True setuptools_available = True
@@ -49,7 +47,7 @@ with open(here + "/README.md", "rb") as f:
about = {} about = {}
if not VERSION: if not VERSION:
with open(os.path.join(here, NAME, "__version__.py"), "rb") as f: with open(os.path.join(here, NAME, "__version__.py"), "rb") as f:
exec(f.read().decode("utf-8").split("\n\n", 1)[1], about) exec (f.read().decode("utf-8").split("\n\n", 1)[1], about)
else: else:
about["__version__"] = VERSION about["__version__"] = VERSION
@@ -110,13 +108,13 @@ args = {
"Programming Language :: Python :: 2", "Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7", "Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3", "Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy", "Programming Language :: Python :: Implementation :: PyPy",
"Environment :: Console", "Environment :: Console",


@@ -18,6 +18,8 @@ this one becomes a hyperlink to ./except/ thanks to
it is a passthrough to the markdown extension api, see https://marked.js.org/using_pro it is a passthrough to the markdown extension api, see https://marked.js.org/using_pro
in addition to the markdown extension functions, `ctor` will be called on document init
### these/ ### these/
and this one becomes ./except/these/ and this one becomes ./except/these/
@@ -36,6 +38,13 @@ which should be ./except/also-this.md
# ok # ok
now for another extension type, `copyparty_post` which is called to manipulate the generated dom instead now for another extension type, `copyparty_post` which is called to manipulate the generated dom instead
`copyparty_post` can have the following functions, all optional
* `ctor` is called on document init
* `render` is called when the dom is done but still in-memory
* `render2` is called with the live browser dom as-displayed
## post example
the values in the `ex:` columns are linkified to `example.com/$value` the values in the `ex:` columns are linkified to `example.com/$value`
| ex:foo | bar | ex:baz | | ex:foo | bar | ex:baz |
@@ -43,6 +52,8 @@ the values in the `ex:` columns are linkified to `example.com/$value`
| asdf | nice | fgsfds | | asdf | nice | fgsfds |
| more one row | hi hello | aaa | | more one row | hi hello | aaa |
and the table can be sorted by clicking the headers
the difference is that with `copyparty_pre` you'll probably break various copyparty features but if you use `copyparty_post` then future copyparty versions will probably break you the difference is that with `copyparty_pre` you'll probably break various copyparty features but if you use `copyparty_post` then future copyparty versions will probably break you
@@ -123,5 +134,8 @@ render(dom) {
} }
} }
} }
},
render2(dom) {
window.makeSortable(dom.getElementsByTagName('table')[0]);
} }
``` ```


@@ -6,6 +6,7 @@ import os
import time import time
import json import json
import shutil import shutil
import tempfile
import unittest import unittest
import subprocess as sp # nosec import subprocess as sp # nosec
@@ -31,9 +32,6 @@ class TestVFS(unittest.TestCase):
response = self.unfoo(response) response = self.unfoo(response)
self.assertEqual(util.undot(query), response) self.assertEqual(util.undot(query), response)
def absify(self, root, names):
return ["{}/{}".format(root, x).replace("//", "/") for x in names]
def ls(self, vfs, vpath, uname): def ls(self, vfs, vpath, uname):
"""helper for resolving and listing a folder""" """helper for resolving and listing a folder"""
vn, rem = vfs.get(vpath, uname, True, False) vn, rem = vfs.get(vpath, uname, True, False)
@@ -60,23 +58,31 @@ class TestVFS(unittest.TestCase):
if os.path.exists("/Volumes"): if os.path.exists("/Volumes"):
devname, _ = self.chkcmd("hdiutil", "attach", "-nomount", "ram://8192") devname, _ = self.chkcmd("hdiutil", "attach", "-nomount", "ram://8192")
devname = devname.strip()
print("devname: [{}]".format(devname))
for _ in range(10): for _ in range(10):
try: try:
_, _ = self.chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname) _, _ = self.chkcmd(
"diskutil", "eraseVolume", "HFS+", "cptd", devname
)
return "/Volumes/cptd" return "/Volumes/cptd"
except: except Exception as ex:
print('lol macos') print(repr(ex))
time.sleep(0.25) time.sleep(0.25)
raise Exception("ramdisk creation failed") raise Exception("ramdisk creation failed")
raise Exception("TODO support windows") ret = os.path.join(tempfile.gettempdir(), "copyparty-test")
try:
os.mkdir(ret)
finally:
return ret
def log(self, src, msg): def log(self, src, msg):
pass pass
def test(self): def test(self):
td = self.get_ramdisk() + "/vfs" td = os.path.join(self.get_ramdisk(), "vfs")
try: try:
shutil.rmtree(td) shutil.rmtree(td)
except OSError: except OSError:
@@ -107,7 +113,7 @@ class TestVFS(unittest.TestCase):
vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), self.log).vfs vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), self.log).vfs
self.assertEqual(vfs.nodes, {}) self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "") self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td + "/a/ab") self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab"))
self.assertEqual(vfs.uread, ["*"]) self.assertEqual(vfs.uread, ["*"])
self.assertEqual(vfs.uwrite, []) self.assertEqual(vfs.uwrite, [])
@@ -117,7 +123,7 @@ class TestVFS(unittest.TestCase):
).vfs ).vfs
self.assertEqual(vfs.nodes, {}) self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "") self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td + "/a/aa") self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa"))
self.assertEqual(vfs.uread, ["*"]) self.assertEqual(vfs.uread, ["*"])
self.assertEqual(vfs.uwrite, []) self.assertEqual(vfs.uwrite, [])
@@ -146,42 +152,63 @@ class TestVFS(unittest.TestCase):
n = n.nodes["acb"] n = n.nodes["acb"]
self.assertEqual(n.nodes, {}) self.assertEqual(n.nodes, {})
self.assertEqual(n.vpath, "a/ac/acb") self.assertEqual(n.vpath, "a/ac/acb")
self.assertEqual(n.realpath, td + "/a/ac/acb") self.assertEqual(n.realpath, os.path.join(td, "a", "ac", "acb"))
self.assertEqual(n.uread, ["k"]) self.assertEqual(n.uread, ["k"])
self.assertEqual(n.uwrite, ["*", "k"]) self.assertEqual(n.uwrite, ["*", "k"])
# something funky about the windows path normalization,
# doesn't really matter but makes the test messy, TODO?
fsdir, real, virt = self.ls(vfs, "/", "*") fsdir, real, virt = self.ls(vfs, "/", "*")
self.assertEqual(fsdir, td) self.assertEqual(fsdir, td)
self.assertEqual(real, ["b", "c"]) self.assertEqual(real, ["b", "c"])
self.assertEqual(list(virt), ["a"]) self.assertEqual(list(virt), ["a"])
fsdir, real, virt = self.ls(vfs, "a", "*") fsdir, real, virt = self.ls(vfs, "a", "*")
self.assertEqual(fsdir, td + "/a") self.assertEqual(fsdir, os.path.join(td, "a"))
self.assertEqual(real, ["aa", "ab"]) self.assertEqual(real, ["aa", "ab"])
self.assertEqual(list(virt), ["ac"]) self.assertEqual(list(virt), ["ac"])
fsdir, real, virt = self.ls(vfs, "a/ab", "*") fsdir, real, virt = self.ls(vfs, "a/ab", "*")
self.assertEqual(fsdir, td + "/a/ab") self.assertEqual(fsdir, os.path.join(td, "a", "ab"))
self.assertEqual(real, ["aba", "abb", "abc"]) self.assertEqual(real, ["aba", "abb", "abc"])
self.assertEqual(list(virt), []) self.assertEqual(list(virt), [])
fsdir, real, virt = self.ls(vfs, "a/ac", "*") fsdir, real, virt = self.ls(vfs, "a/ac", "*")
self.assertEqual(fsdir, td + "/a/ac") self.assertEqual(fsdir, os.path.join(td, "a", "ac"))
self.assertEqual(real, ["aca", "acc"]) self.assertEqual(real, ["aca", "acc"])
self.assertEqual(list(virt), []) self.assertEqual(list(virt), [])
fsdir, real, virt = self.ls(vfs, "a/ac", "k") fsdir, real, virt = self.ls(vfs, "a/ac", "k")
self.assertEqual(fsdir, td + "/a/ac") self.assertEqual(fsdir, os.path.join(td, "a", "ac"))
self.assertEqual(real, ["aca", "acc"]) self.assertEqual(real, ["aca", "acc"])
self.assertEqual(list(virt), ["acb"]) self.assertEqual(list(virt), ["acb"])
self.assertRaises(util.Pebkac, vfs.get, "a/ac/acb", "*", True, False) self.assertRaises(util.Pebkac, vfs.get, "a/ac/acb", "*", True, False)
fsdir, real, virt = self.ls(vfs, "a/ac/acb", "k") fsdir, real, virt = self.ls(vfs, "a/ac/acb", "k")
self.assertEqual(fsdir, td + "/a/ac/acb") self.assertEqual(fsdir, os.path.join(td, "a", "ac", "acb"))
self.assertEqual(real, ["acba", "acbb", "acbc"]) self.assertEqual(real, ["acba", "acbb", "acbc"])
self.assertEqual(list(virt), []) self.assertEqual(list(virt), [])
# admin-only rootfs with all-read-only subfolder
vfs = AuthSrv(Namespace(c=None, a=["k:k"], v=[".::ak", "a:a:r"]), self.log,).vfs
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td)
self.assertEqual(vfs.uread, ["k"])
self.assertEqual(vfs.uwrite, ["k"])
n = vfs.nodes["a"]
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a")
self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertEqual(n.uread, ["*"])
self.assertEqual(n.uwrite, [])
self.assertEqual(vfs.can_access("/", "*"), [False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True])
self.assertEqual(vfs.can_access("/a", "*"), [True, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False])
# breadth-first construction # breadth-first construction
vfs = AuthSrv( vfs = AuthSrv(
Namespace( Namespace(
@@ -215,20 +242,20 @@ class TestVFS(unittest.TestCase):
self.assertEqual(list(v1), ["a"]) self.assertEqual(list(v1), ["a"])
fsp, r1, v1 = self.ls(vfs, "a", "*") fsp, r1, v1 = self.ls(vfs, "a", "*")
self.assertEqual(fsp, td + "/a") self.assertEqual(fsp, os.path.join(td, "a"))
self.assertEqual(r1, ["aa", "ab"]) self.assertEqual(r1, ["aa", "ab"])
self.assertEqual(list(v1), ["ac"]) self.assertEqual(list(v1), ["ac"])
fsp1, r1, v1 = self.ls(vfs, "a/ac", "*") fsp1, r1, v1 = self.ls(vfs, "a/ac", "*")
fsp2, r2, v2 = self.ls(vfs, "b", "*") fsp2, r2, v2 = self.ls(vfs, "b", "*")
self.assertEqual(fsp1, td + "/b") self.assertEqual(fsp1, os.path.join(td, "b"))
self.assertEqual(fsp2, td + "/b") self.assertEqual(fsp2, os.path.join(td, "b"))
self.assertEqual(r1, ["ba", "bb", "bc"]) self.assertEqual(r1, ["ba", "bb", "bc"])
self.assertEqual(r1, r2) self.assertEqual(r1, r2)
self.assertEqual(list(v1), list(v2)) self.assertEqual(list(v1), list(v2))
# config file parser # config file parser
cfg_path = self.get_ramdisk() + "/test.cfg" cfg_path = os.path.join(self.get_ramdisk(), "test.cfg")
with open(cfg_path, "wb") as f: with open(cfg_path, "wb") as f:
f.write( f.write(
dedent( dedent(
@@ -256,10 +283,11 @@ class TestVFS(unittest.TestCase):
self.assertEqual(len(n.nodes), 1) self.assertEqual(len(n.nodes), 1)
n = n.nodes["dst"] n = n.nodes["dst"]
self.assertEqual(n.vpath, "dst") self.assertEqual(n.vpath, "dst")
self.assertEqual(n.realpath, td + "/src") self.assertEqual(n.realpath, os.path.join(td, "src"))
self.assertEqual(n.uread, ["a", "asd"]) self.assertEqual(n.uread, ["a", "asd"])
self.assertEqual(n.uwrite, ["asd"]) self.assertEqual(n.uwrite, ["asd"])
self.assertEqual(len(n.nodes), 0) self.assertEqual(len(n.nodes), 0)
os.chdir(tempfile.gettempdir())
shutil.rmtree(td) shutil.rmtree(td)
os.unlink(cfg_path) os.unlink(cfg_path)