mirror of
https://github.com/9001/copyparty.git
synced 2025-10-24 00:24:04 +00:00
Compare commits
117 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9f578bfec6 | ||
|
|
1f170d7d28 | ||
|
|
5ae14cf9be | ||
|
|
aaf9d53be9 | ||
|
|
75c73f7ba7 | ||
|
|
b6dba8beee | ||
|
|
94521cdc1a | ||
|
|
3365b1c355 | ||
|
|
6c957c4923 | ||
|
|
833997f04c | ||
|
|
68d51e4037 | ||
|
|
ce274d2011 | ||
|
|
280778ed43 | ||
|
|
0f558ecbbf | ||
|
|
58f9e05d93 | ||
|
|
1ec981aea7 | ||
|
|
2a90286a7c | ||
|
|
12d25d09b2 | ||
|
|
a039fae1a4 | ||
|
|
322b9abadc | ||
|
|
0aaf954cea | ||
|
|
c2d22aa3d1 | ||
|
|
6934c75bba | ||
|
|
c58cf78f86 | ||
|
|
7f0de790ab | ||
|
|
d4bb4e3a73 | ||
|
|
d25612d038 | ||
|
|
116b2351b0 | ||
|
|
69b83dfdc4 | ||
|
|
3b1839c2ce | ||
|
|
13742ebdf8 | ||
|
|
634657bea1 | ||
|
|
46e70d50b7 | ||
|
|
d64e9b85a7 | ||
|
|
fb853edbe3 | ||
|
|
cc076c1be1 | ||
|
|
98cc9a6755 | ||
|
|
7bd2b9c23a | ||
|
|
de724a1ff3 | ||
|
|
2163055dae | ||
|
|
93ed0fc10b | ||
|
|
0d98cefd40 | ||
|
|
d58988a033 | ||
|
|
2acfab1e3f | ||
|
|
b915dfe9a6 | ||
|
|
25bd5a823e | ||
|
|
1c35de4716 | ||
|
|
4c00435a0a | ||
|
|
844e3079a8 | ||
|
|
4778cb5b2c | ||
|
|
ec5d60b919 | ||
|
|
e1f4b960e8 | ||
|
|
669e46da54 | ||
|
|
ba94cc5df7 | ||
|
|
d08245c3df | ||
|
|
5c18d12cbf | ||
|
|
580a42dec7 | ||
|
|
29286e159b | ||
|
|
19bcf90e9f | ||
|
|
dae9c00742 | ||
|
|
35324ceb7c | ||
|
|
5aadd47199 | ||
|
|
7d9057cc62 | ||
|
|
c4b322b883 | ||
|
|
19b09c898a | ||
|
|
eafe2098b6 | ||
|
|
2bc6a20d71 | ||
|
|
8b502a7235 | ||
|
|
37567844af | ||
|
|
2f6c4e0e34 | ||
|
|
1c7cc4cb2b | ||
|
|
f83db3648e | ||
|
|
b164aa00d4 | ||
|
|
a2d866d0c2 | ||
|
|
2dfe4ac4c6 | ||
|
|
db65d05cb5 | ||
|
|
300c0194c7 | ||
|
|
37a0d2b087 | ||
|
|
a4959300ea | ||
|
|
223657e5f8 | ||
|
|
0c53de6767 | ||
|
|
9c309b1498 | ||
|
|
1aa1b34c80 | ||
|
|
755a2ee023 | ||
|
|
69d3359e47 | ||
|
|
a90c49b8fb | ||
|
|
b1222edb27 | ||
|
|
b967a92f69 | ||
|
|
90a5cb5e59 | ||
|
|
7aba9cb76b | ||
|
|
f550a8171d | ||
|
|
82e568d4c9 | ||
|
|
7b2a4a3d59 | ||
|
|
0265455cd1 | ||
|
|
afafc886a4 | ||
|
|
8a959f6ac4 | ||
|
|
1c3aa0d2c5 | ||
|
|
79b7d3316a | ||
|
|
fa7768583a | ||
|
|
faf49f6c15 | ||
|
|
765af31b83 | ||
|
|
b6a3c52d67 | ||
|
|
b025c2f660 | ||
|
|
e559a7c878 | ||
|
|
5c8855aafd | ||
|
|
b5fc537b89 | ||
|
|
14899d3a7c | ||
|
|
0ea7881652 | ||
|
|
ec29b59d1e | ||
|
|
9405597c15 | ||
|
|
82441978c6 | ||
|
|
e0e6291bdb | ||
|
|
b2b083fd0a | ||
|
|
f8a51b68e7 | ||
|
|
e0a19108e5 | ||
|
|
770ea68ca8 | ||
|
|
ce36c52baf |
12
.eslintrc.json
Normal file
12
.eslintrc.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"env": {
|
||||
"browser": true,
|
||||
"es2021": true
|
||||
},
|
||||
"extends": "eslint:recommended",
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 12
|
||||
},
|
||||
"rules": {
|
||||
}
|
||||
}
|
||||
2
.gitattributes
vendored
2
.gitattributes
vendored
@@ -1,4 +1,6 @@
|
||||
* text eol=lf
|
||||
|
||||
*.reg text eol=crlf
|
||||
|
||||
*.png binary
|
||||
*.gif binary
|
||||
|
||||
6
.vscode/launch.json
vendored
6
.vscode/launch.json
vendored
@@ -10,10 +10,14 @@
|
||||
"cwd": "${workspaceFolder}",
|
||||
"args": [
|
||||
//"-nw",
|
||||
"-ed",
|
||||
"-emp",
|
||||
"-e2d",
|
||||
"-e2s",
|
||||
"-a",
|
||||
"ed:wark",
|
||||
"-v",
|
||||
"srv::r:aed"
|
||||
"srv::r:aed:cnodupe"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
8
.vscode/settings.json
vendored
8
.vscode/settings.json
vendored
@@ -37,7 +37,7 @@
|
||||
"python.linting.banditEnabled": true,
|
||||
"python.linting.flake8Args": [
|
||||
"--max-line-length=120",
|
||||
"--ignore=E722,F405,E203,W503,W293",
|
||||
"--ignore=E722,F405,E203,W503,W293,E402",
|
||||
],
|
||||
"python.linting.banditArgs": [
|
||||
"--ignore=B104"
|
||||
@@ -55,6 +55,10 @@
|
||||
//
|
||||
// things you may wanna edit:
|
||||
//
|
||||
"python.pythonPath": ".venv/bin/python",
|
||||
"python.pythonPath": "/usr/bin/python3",
|
||||
"python.formatting.blackArgs": [
|
||||
"-t",
|
||||
"py27"
|
||||
],
|
||||
//"python.linting.enabled": true,
|
||||
}
|
||||
10
.vscode/tasks.json
vendored
Normal file
10
.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "pre",
|
||||
"command": "true;rm -rf inc/* inc/.hist/;mkdir -p inc;",
|
||||
"type": "shell"
|
||||
}
|
||||
]
|
||||
}
|
||||
49
README.md
49
README.md
@@ -8,11 +8,22 @@
|
||||
|
||||
turn your phone or raspi into a portable file server with resumable uploads/downloads using IE6 or any other browser
|
||||
|
||||
* server runs on anything with `py2.7` or `py3.2+`
|
||||
* server runs on anything with `py2.7` or `py3.3+`
|
||||
* *resumable* uploads need `firefox 12+` / `chrome 6+` / `safari 6+` / `IE 10+`
|
||||
* code standard: `black`
|
||||
|
||||
|
||||
## quickstart
|
||||
|
||||
download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!
|
||||
|
||||
running the sfx without arguments (for example doubleclicking it on Windows) will let anyone access the current folder; see `-h` for help if you want accounts and volumes etc
|
||||
|
||||
you may also want these, especially on servers:
|
||||
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
|
||||
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for legit https)
|
||||
|
||||
|
||||
## notes
|
||||
|
||||
* iPhone/iPad: use Firefox to download files
|
||||
@@ -38,16 +49,27 @@ turn your phone or raspi into a portable file server with resumable uploads/down
|
||||
* [x] accounts
|
||||
* [x] markdown viewer
|
||||
* [x] markdown editor
|
||||
* [x] FUSE client (read-only)
|
||||
|
||||
summary: it works! you can use it! (but technically not even close to beta)
|
||||
|
||||
|
||||
# client examples
|
||||
|
||||
* javascript: dump some state into a file (two separate examples)
|
||||
* `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
|
||||
* `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
|
||||
|
||||
* FUSE: mount a copyparty server as a local filesystem
|
||||
* cross-platform python client available in [./bin/](bin/)
|
||||
* [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
|
||||
|
||||
|
||||
# dependencies
|
||||
|
||||
* `jinja2`
|
||||
* pulls in `markupsafe` as of v2.7; use jinja 2.6 on py3.2
|
||||
|
||||
optional, enables thumbnails:
|
||||
optional, will eventually enable thumbnails:
|
||||
* `Pillow` (requires py2.7 or py3.5+)
|
||||
|
||||
|
||||
@@ -57,10 +79,13 @@ currently there are two self-contained binaries:
|
||||
* `copyparty-sfx.sh` for unix (linux and osx) -- smaller, more robust
|
||||
* `copyparty-sfx.py` for windows (unix too) -- crossplatform, beta
|
||||
|
||||
launch either of them and it'll unpack and run copyparty, assuming you have python installed of course
|
||||
launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course
|
||||
|
||||
pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `copyparty.py` and keep it in the same folder because `sys.path` is funky
|
||||
|
||||
|
||||
## sfx repack
|
||||
|
||||
if you don't need all the features you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except for either msys2 or WSL if you're on windows)
|
||||
* `724K` original size as of v0.4.0
|
||||
* `256K` after `./scripts/make-sfx.sh re no-ogv`
|
||||
@@ -72,16 +97,18 @@ the features you can opt to drop are
|
||||
|
||||
for the `re`pack to work, first run one of the sfx'es once to unpack it
|
||||
|
||||
**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a `no-ogv no-cm` repack; works on linux/macos (and windows with msys2 or WSL)
|
||||
|
||||
|
||||
# install on android
|
||||
|
||||
install [Termux](https://termux.com/) (see [ocv.me/termux](https://ocv.me/termux/)) and then copy-paste this into Termux (long-tap) all at once:
|
||||
```sh
|
||||
apt update && apt -y full-upgrade && termux-setup-storage && apt -y install curl && cd && curl -L https://github.com/9001/copyparty/raw/master/scripts/copyparty-android.sh > copyparty-android.sh && chmod 755 copyparty-android.sh && ./copyparty-android.sh -h
|
||||
apt update && apt -y full-upgrade && termux-setup-storage && apt -y install python && python -m ensurepip && python -m pip install -U copyparty
|
||||
echo $?
|
||||
```
|
||||
|
||||
after the initial setup (and restarting bash), you can launch copyparty at any time by running "copyparty" in Termux
|
||||
after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux
|
||||
|
||||
|
||||
# dev env setup
|
||||
@@ -109,13 +136,15 @@ in the `scripts` folder:
|
||||
|
||||
roughly sorted by priority
|
||||
|
||||
* up2k handle filename too long
|
||||
* up2k fails on empty files? alert then stuck
|
||||
* reduce up2k roundtrips
|
||||
* start from a chunk index and just go
|
||||
* terminate client on bad data
|
||||
* drop onto folders
|
||||
* look into android thumbnail cache file format
|
||||
* `os.copy_file_range` for up2k cloning
|
||||
* up2k partials ui
|
||||
* support pillow-simd
|
||||
* cache sha512 chunks on client
|
||||
* symlink existing files on upload
|
||||
* comment field
|
||||
* ~~look into android thumbnail cache file format~~ bad idea
|
||||
* figure out the deal with pixel3a not being connectable as hotspot
|
||||
* pixel3a having unpredictable 3sec latency in general :||||
|
||||
|
||||
41
bin/README.md
Normal file
41
bin/README.md
Normal file
@@ -0,0 +1,41 @@
|
||||
# copyparty-fuse.py
|
||||
* mount a copyparty server as a local filesystem (read-only)
|
||||
* **supports Windows!** -- expect `194 MiB/s` sequential read
|
||||
* **supports Linux** -- expect `117 MiB/s` sequential read
|
||||
* **supports macos** -- expect `85 MiB/s` sequential read
|
||||
|
||||
filecache is default-on for windows and macos;
|
||||
* macos readsize is 64kB, so speed ~32 MiB/s without the cache
|
||||
* windows readsize varies by software; explorer=1M, pv=32k
|
||||
|
||||
note that copyparty should run with `-ed` to enable dotfiles (hidden otherwise)
|
||||
|
||||
also consider using [../docs/rclone.md](../docs/rclone.md) instead for 5x performance
|
||||
|
||||
|
||||
## to run this on windows:
|
||||
* install [winfsp](https://github.com/billziss-gh/winfsp/releases/latest) and [python 3](https://www.python.org/downloads/)
|
||||
* [x] add python 3.x to PATH (it asks during install)
|
||||
* `python -m pip install --user fusepy`
|
||||
* `python ./copyparty-fuse.py n: http://192.168.1.69:3923/`
|
||||
|
||||
10% faster in [msys2](https://www.msys2.org/), 700% faster if debug prints are enabled:
|
||||
* `pacman -S mingw64/mingw-w64-x86_64-python{,-pip}`
|
||||
* `/mingw64/bin/python3 -m pip install --user fusepy`
|
||||
* `/mingw64/bin/python3 ./copyparty-fuse.py [...]`
|
||||
|
||||
you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releases/latest), let me know if you [figure out how](https://github.com/dokan-dev/dokany/wiki/FUSE)
|
||||
(winfsp's sshfs leaks, doesn't look like winfsp itself does, should be fine)
|
||||
|
||||
|
||||
|
||||
# copyparty-fuse🅱️.py
|
||||
* mount a copyparty server as a local filesystem (read-only)
|
||||
* does the same thing except more correct, `samba` approves
|
||||
* **supports Linux** -- expect `18 MiB/s` (wait what)
|
||||
* **supports Macos** -- probably
|
||||
|
||||
|
||||
|
||||
# copyparty-fuse-streaming.py
|
||||
* pretend this doesn't exist
|
||||
1100
bin/copyparty-fuse-streaming.py
Executable file
1100
bin/copyparty-fuse-streaming.py
Executable file
File diff suppressed because it is too large
Load Diff
713
bin/copyparty-fuse.py
Normal file → Executable file
713
bin/copyparty-fuse.py
Normal file → Executable file
@@ -7,47 +7,87 @@ __copyright__ = 2019
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import stat
|
||||
import errno
|
||||
import struct
|
||||
import threading
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
|
||||
try:
|
||||
from fuse import FUSE, FuseOSError, Operations
|
||||
except:
|
||||
print(
|
||||
"\n could not import fuse; these may help:\n python3 -m pip install --user fusepy\n apt install libfuse\n modprobe fuse"
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
"""
|
||||
mount a copyparty server (local or remote) as a filesystem
|
||||
|
||||
usage:
|
||||
python copyparty-fuse.py ./music http://192.168.1.69:1234/
|
||||
python copyparty-fuse.py http://192.168.1.69:3923/ ./music
|
||||
|
||||
dependencies:
|
||||
sudo apk add fuse-dev
|
||||
python3 -m pip install --user fusepy
|
||||
+ on Linux: sudo apk add fuse
|
||||
+ on Macos: https://osxfuse.github.io/
|
||||
+ on Windows: https://github.com/billziss-gh/winfsp/releases/latest
|
||||
|
||||
note:
|
||||
you probably want to run this on windows clients:
|
||||
https://github.com/9001/copyparty/blob/master/contrib/explorer-nothumbs-nofoldertypes.reg
|
||||
|
||||
MB/s
|
||||
28 cache NOthread
|
||||
24 cache thread
|
||||
29 cache NOthread NOmutex
|
||||
67 NOcache NOthread NOmutex ( ´・ω・) nyoro~n
|
||||
10 NOcache thread NOmutex
|
||||
get server cert:
|
||||
awk '/-BEGIN CERTIFICATE-/ {a=1} a; /-END CERTIFICATE-/{exit}' <(openssl s_client -connect 127.0.0.1:3923 </dev/null 2>/dev/null) >cert.pem
|
||||
"""
|
||||
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import stat
|
||||
import errno
|
||||
import struct
|
||||
import codecs
|
||||
import builtins
|
||||
import platform
|
||||
import argparse
|
||||
import threading
|
||||
import traceback
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
WINDOWS = sys.platform == "win32"
|
||||
MACOS = platform.system() == "Darwin"
|
||||
info = log = dbg = None
|
||||
|
||||
|
||||
try:
|
||||
from fuse import FUSE, FuseOSError, Operations
|
||||
except:
|
||||
if WINDOWS:
|
||||
libfuse = "install https://github.com/billziss-gh/winfsp/releases/latest"
|
||||
elif MACOS:
|
||||
libfuse = "install https://osxfuse.github.io/"
|
||||
else:
|
||||
libfuse = "apt install libfuse\n modprobe fuse"
|
||||
|
||||
print(
|
||||
"\n could not import fuse; these may help:"
|
||||
+ "\n python3 -m pip install --user fusepy\n "
|
||||
+ libfuse
|
||||
+ "\n"
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
def print(*args, **kwargs):
|
||||
try:
|
||||
builtins.print(*list(args), **kwargs)
|
||||
except:
|
||||
builtins.print(termsafe(" ".join(str(x) for x in args)), **kwargs)
|
||||
|
||||
|
||||
def termsafe(txt):
|
||||
try:
|
||||
return txt.encode(sys.stdout.encoding, "backslashreplace").decode(
|
||||
sys.stdout.encoding
|
||||
)
|
||||
except:
|
||||
return txt.encode(sys.stdout.encoding, "replace").decode(sys.stdout.encoding)
|
||||
|
||||
|
||||
def threadless_log(msg):
|
||||
print(msg + "\n", end="")
|
||||
|
||||
@@ -64,24 +104,123 @@ def rice_tid():
|
||||
|
||||
|
||||
def fancy_log(msg):
|
||||
print("{} {}\n".format(rice_tid(), msg), end="")
|
||||
print("{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg), end="")
|
||||
|
||||
|
||||
def null_log(msg):
|
||||
pass
|
||||
|
||||
|
||||
info = fancy_log
|
||||
log = fancy_log
|
||||
dbg = fancy_log
|
||||
log = null_log
|
||||
dbg = null_log
|
||||
def hexler(binary):
|
||||
return binary.replace("\r", "\\r").replace("\n", "\\n")
|
||||
return " ".join(["{}\033[36m{:02x}\033[0m".format(b, ord(b)) for b in binary])
|
||||
return " ".join(map(lambda b: format(ord(b), "02x"), binary))
|
||||
|
||||
|
||||
def register_wtf8():
|
||||
def wtf8_enc(text):
|
||||
return str(text).encode("utf-8", "surrogateescape"), len(text)
|
||||
|
||||
def wtf8_dec(binary):
|
||||
return bytes(binary).decode("utf-8", "surrogateescape"), len(binary)
|
||||
|
||||
def wtf8_search(encoding_name):
|
||||
return codecs.CodecInfo(wtf8_enc, wtf8_dec, name="wtf-8")
|
||||
|
||||
codecs.register(wtf8_search)
|
||||
|
||||
|
||||
bad_good = {}
|
||||
good_bad = {}
|
||||
|
||||
|
||||
def enwin(txt):
|
||||
return "".join([bad_good.get(x, x) for x in txt])
|
||||
|
||||
for bad, good in bad_good.items():
|
||||
txt = txt.replace(bad, good)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
def dewin(txt):
|
||||
return "".join([good_bad.get(x, x) for x in txt])
|
||||
|
||||
for bad, good in bad_good.items():
|
||||
txt = txt.replace(good, bad)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
class RecentLog(object):
|
||||
def __init__(self):
|
||||
self.mtx = threading.Lock()
|
||||
self.f = None # open("copyparty-fuse.log", "wb")
|
||||
self.q = []
|
||||
|
||||
thr = threading.Thread(target=self.printer)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def put(self, msg):
|
||||
msg = "{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg)
|
||||
if self.f:
|
||||
fmsg = " ".join([datetime.utcnow().strftime("%H%M%S.%f"), str(msg)])
|
||||
self.f.write(fmsg.encode("utf-8"))
|
||||
|
||||
with self.mtx:
|
||||
self.q.append(msg)
|
||||
if len(self.q) > 200:
|
||||
self.q = self.q[-50:]
|
||||
|
||||
def printer(self):
|
||||
while True:
|
||||
time.sleep(0.05)
|
||||
with self.mtx:
|
||||
q = self.q
|
||||
if not q:
|
||||
continue
|
||||
|
||||
self.q = []
|
||||
|
||||
print("".join(q), end="")
|
||||
|
||||
|
||||
# [windows/cmd/cpy3] python dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/
|
||||
# [windows/cmd/msys2] C:\msys64\mingw64\bin\python3 dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/
|
||||
# [windows/mty/msys2] /mingw64/bin/python3 /c/Users/ed/dev/copyparty/bin/copyparty-fuse.py q: http://192.168.1.159:1234/
|
||||
#
|
||||
# [windows] find /q/music/albums/Phant*24bit -printf '%s %p\n' | sort -n | tail -n 8 | sed -r 's/^[0-9]+ //' | while IFS= read -r x; do dd if="$x" of=/dev/null bs=4k count=8192 & done
|
||||
# [alpine] ll t; for x in t/2020_0724_16{2,3}*; do dd if="$x" of=/dev/null bs=4k count=10240 & done
|
||||
#
|
||||
# 72.4983 windows mintty msys2 fancy_log
|
||||
# 219.5781 windows cmd msys2 fancy_log
|
||||
# nope.avi windows cmd cpy3 fancy_log
|
||||
# 9.8817 windows mintty msys2 RecentLog 200 50 0.1
|
||||
# 10.2241 windows cmd cpy3 RecentLog 200 50 0.1
|
||||
# 9.8494 windows cmd msys2 RecentLog 200 50 0.1
|
||||
# 7.8061 windows mintty msys2 fancy_log <info-only>
|
||||
# 7.9961 windows mintty msys2 RecentLog <info-only>
|
||||
# 4.2603 alpine xfce4 cpy3 RecentLog
|
||||
# 4.1538 alpine xfce4 cpy3 fancy_log
|
||||
# 3.1742 alpine urxvt cpy3 fancy_log
|
||||
|
||||
|
||||
def get_tid():
|
||||
return threading.current_thread().ident
|
||||
|
||||
|
||||
def html_dec(txt):
|
||||
return (
|
||||
txt.replace("<", "<")
|
||||
.replace(">", ">")
|
||||
.replace(""", '"')
|
||||
.replace(" ", "\r")
|
||||
.replace(" ", "\n")
|
||||
.replace("&", "&")
|
||||
)
|
||||
|
||||
|
||||
class CacheNode(object):
|
||||
def __init__(self, tag, data):
|
||||
self.tag = tag
|
||||
@@ -90,10 +229,11 @@ class CacheNode(object):
|
||||
|
||||
|
||||
class Gateway(object):
|
||||
def __init__(self, base_url):
|
||||
self.base_url = base_url
|
||||
def __init__(self, ar):
|
||||
self.base_url = ar.base_url
|
||||
self.password = ar.a
|
||||
|
||||
ui = urllib.parse.urlparse(base_url)
|
||||
ui = urllib.parse.urlparse(self.base_url)
|
||||
self.web_root = ui.path.strip("/")
|
||||
try:
|
||||
self.web_host, self.web_port = ui.netloc.split(":")
|
||||
@@ -103,15 +243,25 @@ class Gateway(object):
|
||||
if ui.scheme == "http":
|
||||
self.web_port = 80
|
||||
elif ui.scheme == "https":
|
||||
raise Exception("todo")
|
||||
self.web_port = 443
|
||||
else:
|
||||
raise Exception("bad url?")
|
||||
|
||||
self.ssl_context = None
|
||||
self.use_tls = ui.scheme.lower() == "https"
|
||||
if self.use_tls:
|
||||
import ssl
|
||||
|
||||
if ar.td:
|
||||
self.ssl_context = ssl._create_unverified_context()
|
||||
elif ar.te:
|
||||
self.ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS)
|
||||
self.ssl_context.load_verify_locations(ar.te)
|
||||
|
||||
self.conns = {}
|
||||
|
||||
def quotep(self, path):
|
||||
# TODO: mojibake support
|
||||
path = path.encode("utf-8", "ignore")
|
||||
path = path.encode("wtf-8")
|
||||
return quote(path, safe="/")
|
||||
|
||||
def getconn(self, tid=None):
|
||||
@@ -121,7 +271,15 @@ class Gateway(object):
|
||||
except:
|
||||
info("new conn [{}] [{}]".format(self.web_host, self.web_port))
|
||||
|
||||
conn = http.client.HTTPConnection(self.web_host, self.web_port, timeout=260)
|
||||
args = {}
|
||||
if not self.use_tls:
|
||||
C = http.client.HTTPConnection
|
||||
else:
|
||||
C = http.client.HTTPSConnection
|
||||
if self.ssl_context:
|
||||
args = {"context": self.ssl_context}
|
||||
|
||||
conn = C(self.web_host, self.web_port, timeout=260, **args)
|
||||
|
||||
self.conns[tid] = conn
|
||||
return conn
|
||||
@@ -134,38 +292,71 @@ class Gateway(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
def sendreq(self, *args, **kwargs):
|
||||
def sendreq(self, *args, headers={}, **kwargs):
|
||||
tid = get_tid()
|
||||
if self.password:
|
||||
headers["Cookie"] = "=".join(["cppwd", self.password])
|
||||
|
||||
try:
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
c.request(*list(args), headers=headers, **kwargs)
|
||||
return c.getresponse()
|
||||
except:
|
||||
self.closeconn(tid)
|
||||
dbg("bad conn")
|
||||
|
||||
self.closeconn(tid)
|
||||
try:
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
c.request(*list(args), headers=headers, **kwargs)
|
||||
return c.getresponse()
|
||||
except:
|
||||
info("http connection failed:\n" + traceback.format_exc())
|
||||
if self.use_tls and not self.ssl_context:
|
||||
import ssl
|
||||
|
||||
cert = ssl.get_server_certificate((self.web_host, self.web_port))
|
||||
info("server certificate probably not trusted:\n" + cert)
|
||||
|
||||
raise
|
||||
|
||||
def listdir(self, path):
|
||||
web_path = "/" + "/".join([self.web_root, path]) + "?dots"
|
||||
if bad_good:
|
||||
path = dewin(path)
|
||||
|
||||
r = self.sendreq("GET", self.quotep(web_path))
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
|
||||
r = self.sendreq("GET", web_path)
|
||||
if r.status != 200:
|
||||
self.closeconn()
|
||||
raise Exception(
|
||||
log(
|
||||
"http error {} reading dir {} in {}".format(
|
||||
r.status, web_path, rice_tid()
|
||||
)
|
||||
)
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
return self.parse_html(r)
|
||||
if not r.getheader("Content-Type", "").startswith("text/html"):
|
||||
log("listdir on file: {}".format(path))
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
try:
|
||||
return self.parse_html(r)
|
||||
except:
|
||||
info(repr(path) + "\n" + traceback.format_exc())
|
||||
raise
|
||||
|
||||
def download_file_range(self, path, ofs1, ofs2):
|
||||
web_path = "/" + "/".join([self.web_root, path]) + "?raw"
|
||||
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
|
||||
log("downloading {}".format(hdr_range))
|
||||
if bad_good:
|
||||
path = dewin(path)
|
||||
|
||||
r = self.sendreq("GET", self.quotep(web_path), headers={"Range": hdr_range})
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
|
||||
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
|
||||
info(
|
||||
"DL {:4.0f}K\033[36m{:>9}-{:<9}\033[0m{}".format(
|
||||
(ofs2 - ofs1) / 1024.0, ofs1, ofs2 - 1, hexler(path)
|
||||
)
|
||||
)
|
||||
|
||||
r = self.sendreq("GET", web_path, headers={"Range": hdr_range})
|
||||
if r.status != http.client.PARTIAL_CONTENT:
|
||||
self.closeconn()
|
||||
raise Exception(
|
||||
@@ -180,7 +371,7 @@ class Gateway(object):
|
||||
ret = []
|
||||
remainder = b""
|
||||
ptn = re.compile(
|
||||
r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
|
||||
r'^<tr><td>(-|DIR|<a [^<]+</a>)</td><td><a[^>]* href="([^"]+)"[^>]*>([^<]+)</a></td><td>([^<]+)</td><td>[^<]+</td><td>([^<]+)</td></tr>$'
|
||||
)
|
||||
|
||||
while True:
|
||||
@@ -202,10 +393,23 @@ class Gateway(object):
|
||||
# print(line)
|
||||
continue
|
||||
|
||||
ftype, fname, fsize, fdate = m.groups()
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
sz = int(fsize)
|
||||
if ftype == "-":
|
||||
ftype, furl, fname, fsize, fdate = m.groups()
|
||||
fname = furl.rstrip("/").split("/")[-1]
|
||||
fname = unquote(fname)
|
||||
fname = fname.decode("wtf-8")
|
||||
if bad_good:
|
||||
fname = enwin(fname)
|
||||
|
||||
sz = 1
|
||||
ts = 60 * 60 * 24 * 2
|
||||
try:
|
||||
sz = int(fsize)
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
except:
|
||||
info("bad HTML or OS [{}] [{}]".format(fdate, fsize))
|
||||
# python cannot strptime(1959-01-01) on windows
|
||||
|
||||
if ftype != "DIR":
|
||||
ret.append([fname, self.stat_file(ts, sz), 0])
|
||||
else:
|
||||
ret.append([fname, self.stat_dir(ts, sz), 0])
|
||||
@@ -214,7 +418,7 @@ class Gateway(object):
|
||||
|
||||
def stat_dir(self, ts, sz=4096):
|
||||
return {
|
||||
"st_mode": 0o555 | stat.S_IFDIR,
|
||||
"st_mode": stat.S_IFDIR | 0o555,
|
||||
"st_uid": 1000,
|
||||
"st_gid": 1000,
|
||||
"st_size": sz,
|
||||
@@ -226,7 +430,7 @@ class Gateway(object):
|
||||
|
||||
def stat_file(self, ts, sz):
|
||||
return {
|
||||
"st_mode": 0o444 | stat.S_IFREG,
|
||||
"st_mode": stat.S_IFREG | 0o444,
|
||||
"st_uid": 1000,
|
||||
"st_gid": 1000,
|
||||
"st_size": sz,
|
||||
@@ -238,8 +442,11 @@ class Gateway(object):
|
||||
|
||||
|
||||
class CPPF(Operations):
|
||||
def __init__(self, base_url):
|
||||
self.gw = Gateway(base_url)
|
||||
def __init__(self, ar):
|
||||
self.gw = Gateway(ar)
|
||||
self.junk_fh_ctr = 3
|
||||
self.n_dircache = ar.cd
|
||||
self.n_filecache = ar.cf
|
||||
|
||||
self.dircache = []
|
||||
self.dircache_mtx = threading.Lock()
|
||||
@@ -249,12 +456,27 @@ class CPPF(Operations):
|
||||
|
||||
info("up")
|
||||
|
||||
def _describe(self):
|
||||
msg = ""
|
||||
with self.filecache_mtx:
|
||||
for n, cn in enumerate(self.filecache):
|
||||
cache_path, cache1 = cn.tag
|
||||
cache2 = cache1 + len(cn.data)
|
||||
msg += "\n{:<2} {:>7} {:>10}:{:<9} {}".format(
|
||||
n,
|
||||
len(cn.data),
|
||||
cache1,
|
||||
cache2,
|
||||
cache_path.replace("\r", "\\r").replace("\n", "\\n"),
|
||||
)
|
||||
return msg
|
||||
|
||||
def clean_dircache(self):
|
||||
"""not threadsafe"""
|
||||
now = time.time()
|
||||
cutoff = 0
|
||||
for cn in self.dircache:
|
||||
if now - cn.ts > 1:
|
||||
if now - cn.ts > self.n_dircache:
|
||||
cutoff += 1
|
||||
else:
|
||||
break
|
||||
@@ -263,8 +485,7 @@ class CPPF(Operations):
|
||||
self.dircache = self.dircache[cutoff:]
|
||||
|
||||
def get_cached_dir(self, dirpath):
|
||||
# with self.dircache_mtx:
|
||||
if True:
|
||||
with self.dircache_mtx:
|
||||
self.clean_dircache()
|
||||
for cn in self.dircache:
|
||||
if cn.tag == dirpath:
|
||||
@@ -301,9 +522,8 @@ class CPPF(Operations):
|
||||
car = None
|
||||
cdr = None
|
||||
ncn = -1
|
||||
# with self.filecache_mtx:
|
||||
if True:
|
||||
dbg("cache request from {} to {}, size {}".format(get1, get2, file_sz))
|
||||
dbg("cache request {}:{} |{}|".format(get1, get2, file_sz) + self._describe())
|
||||
with self.filecache_mtx:
|
||||
for cn in self.filecache:
|
||||
ncn += 1
|
||||
|
||||
@@ -313,6 +533,12 @@ class CPPF(Operations):
|
||||
|
||||
cache2 = cache1 + len(cn.data)
|
||||
if get2 <= cache1 or get1 >= cache2:
|
||||
# request does not overlap with cached area at all
|
||||
continue
|
||||
|
||||
if get1 < cache1 and get2 > cache2:
|
||||
# cached area does overlap, but must specifically contain
|
||||
# either the first or last byte in the requested range
|
||||
continue
|
||||
|
||||
if get1 >= cache1 and get2 <= cache2:
|
||||
@@ -323,7 +549,7 @@ class CPPF(Operations):
|
||||
buf_ofs = get1 - cache1
|
||||
buf_end = buf_ofs + (get2 - get1)
|
||||
dbg(
|
||||
"found all ({}, {} to {}, len {}) [{}:{}] = {}".format(
|
||||
"found all (#{} {}:{} |{}|) [{}:{}] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
@@ -335,11 +561,11 @@ class CPPF(Operations):
|
||||
)
|
||||
return cn.data[buf_ofs:buf_end]
|
||||
|
||||
if get2 < cache2:
|
||||
if get2 <= cache2:
|
||||
x = cn.data[: get2 - cache1]
|
||||
if not cdr or len(cdr) < len(x):
|
||||
dbg(
|
||||
"found car ({}, {} to {}, len {}) [:{}-{}] = [:{}] = {}".format(
|
||||
"found cdr (#{} {}:{} |{}|) [:{}-{}] = [:{}] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
@@ -354,11 +580,11 @@ class CPPF(Operations):
|
||||
|
||||
continue
|
||||
|
||||
if get1 > cache1:
|
||||
x = cn.data[-(cache2 - get1) :]
|
||||
if get1 >= cache1:
|
||||
x = cn.data[-(max(0, cache2 - get1)) :]
|
||||
if not car or len(car) < len(x):
|
||||
dbg(
|
||||
"found cdr ({}, {} to {}, len {}) [-({}-{}):] = [-{}:] = {}".format(
|
||||
"found car (#{} {}:{} |{}|) [-({}-{}):] = [-{}:] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
@@ -373,38 +599,52 @@ class CPPF(Operations):
|
||||
|
||||
continue
|
||||
|
||||
raise Exception("what")
|
||||
msg = "cache fallthrough\n{} {} {}\n{} {} {}\n{} {} --\n".format(
|
||||
get1,
|
||||
get2,
|
||||
get2 - get1,
|
||||
cache1,
|
||||
cache2,
|
||||
cache2 - cache1,
|
||||
get1 - cache1,
|
||||
get2 - cache2,
|
||||
)
|
||||
msg += self._describe()
|
||||
raise Exception(msg)
|
||||
|
||||
if car and cdr:
|
||||
if car and cdr and len(car) + len(cdr) == get2 - get1:
|
||||
dbg("<cache> have both")
|
||||
return car + cdr
|
||||
|
||||
ret = car + cdr
|
||||
if len(ret) == get2 - get1:
|
||||
return ret
|
||||
|
||||
raise Exception("{} + {} != {} - {}".format(len(car), len(cdr), get2, get1))
|
||||
|
||||
elif cdr:
|
||||
elif cdr and (not car or len(car) < len(cdr)):
|
||||
h_end = get1 + (get2 - get1) - len(cdr)
|
||||
h_ofs = h_end - 512 * 1024
|
||||
h_ofs = min(get1, h_end - 512 * 1024)
|
||||
|
||||
if h_ofs < 0:
|
||||
h_ofs = 0
|
||||
|
||||
buf_ofs = (get2 - get1) - len(cdr)
|
||||
buf_ofs = get1 - h_ofs
|
||||
|
||||
dbg(
|
||||
"<cache> cdr {}, car {}-{}={} [-{}:]".format(
|
||||
"<cache> cdr {}, car {}:{} |{}| [{}:]".format(
|
||||
len(cdr), h_ofs, h_end, h_end - h_ofs, buf_ofs
|
||||
)
|
||||
)
|
||||
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end)
|
||||
ret = buf[-buf_ofs:] + cdr
|
||||
if len(buf) == h_end - h_ofs:
|
||||
ret = buf[buf_ofs:] + cdr
|
||||
else:
|
||||
ret = buf[get1 - h_ofs :]
|
||||
info(
|
||||
"remote truncated {}:{} to |{}|, will return |{}|".format(
|
||||
h_ofs, h_end, len(buf), len(ret)
|
||||
)
|
||||
)
|
||||
|
||||
elif car:
|
||||
h_ofs = get1 + len(car)
|
||||
h_end = h_ofs + 1024 * 1024
|
||||
h_end = max(get2, h_ofs + 1024 * 1024)
|
||||
|
||||
if h_end > file_sz:
|
||||
h_end = file_sz
|
||||
@@ -412,7 +652,7 @@ class CPPF(Operations):
|
||||
buf_ofs = (get2 - get1) - len(car)
|
||||
|
||||
dbg(
|
||||
"<cache> car {}, cdr {}-{}={} [:{}]".format(
|
||||
"<cache> car {}, cdr {}:{} |{}| [:{}]".format(
|
||||
len(car), h_ofs, h_end, h_end - h_ofs, buf_ofs
|
||||
)
|
||||
)
|
||||
@@ -421,8 +661,23 @@ class CPPF(Operations):
|
||||
ret = car + buf[:buf_ofs]
|
||||
|
||||
else:
|
||||
h_ofs = get1 - 256 * 1024
|
||||
h_end = get2 + 1024 * 1024
|
||||
if get2 - get1 <= 1024 * 1024:
|
||||
# unless the request is for the last n bytes of the file,
|
||||
# grow the start to cache some stuff around the range
|
||||
if get2 < file_sz - 1:
|
||||
h_ofs = get1 - 1024 * 256
|
||||
else:
|
||||
h_ofs = get1 - 1024 * 32
|
||||
|
||||
# likewise grow the end unless start is 0
|
||||
if get1 > 0:
|
||||
h_end = get2 + 1024 * 1024
|
||||
else:
|
||||
h_end = get2 + 1024 * 64
|
||||
else:
|
||||
# big enough, doesn't need pads
|
||||
h_ofs = get1
|
||||
h_end = get2
|
||||
|
||||
if h_ofs < 0:
|
||||
h_ofs = 0
|
||||
@@ -434,7 +689,7 @@ class CPPF(Operations):
|
||||
buf_end = buf_ofs + get2 - get1
|
||||
|
||||
dbg(
|
||||
"<cache> {}-{}={} [{}:{}]".format(
|
||||
"<cache> {}:{} |{}| [{}:{}]".format(
|
||||
h_ofs, h_end, h_end - h_ofs, buf_ofs, buf_end
|
||||
)
|
||||
)
|
||||
@@ -443,48 +698,91 @@ class CPPF(Operations):
|
||||
ret = buf[buf_ofs:buf_end]
|
||||
|
||||
cn = CacheNode([path, h_ofs], buf)
|
||||
# with self.filecache_mtx:
|
||||
if True:
|
||||
if len(self.filecache) > 6:
|
||||
with self.filecache_mtx:
|
||||
if len(self.filecache) >= self.n_filecache:
|
||||
self.filecache = self.filecache[1:] + [cn]
|
||||
else:
|
||||
self.filecache.append(cn)
|
||||
|
||||
return ret
|
||||
|
||||
def readdir(self, path, fh=None):
|
||||
def _readdir(self, path, fh=None):
|
||||
path = path.strip("/")
|
||||
log("readdir {}".format(path))
|
||||
log("readdir [{}] [{}]".format(hexler(path), fh))
|
||||
|
||||
ret = self.gw.listdir(path)
|
||||
if not self.n_dircache:
|
||||
return ret
|
||||
|
||||
# with self.dircache_mtx:
|
||||
if True:
|
||||
with self.dircache_mtx:
|
||||
cn = CacheNode(path, ret)
|
||||
self.dircache.append(cn)
|
||||
self.clean_dircache()
|
||||
|
||||
# import pprint; pprint.pprint(ret)
|
||||
return ret
|
||||
|
||||
def readdir(self, path, fh=None):
|
||||
return [".", ".."] + self._readdir(path, fh)
|
||||
|
||||
def read(self, path, length, offset, fh=None):
|
||||
req_max = 1024 * 1024 * 8
|
||||
cache_max = 1024 * 1024 * 2
|
||||
if length > req_max:
|
||||
# windows actually doing 240 MiB read calls, sausage
|
||||
info("truncate |{}| to {}MiB".format(length, req_max >> 20))
|
||||
length = req_max
|
||||
|
||||
path = path.strip("/")
|
||||
|
||||
ofs2 = offset + length
|
||||
log("read {} @ {} len {} end {}".format(path, offset, length, ofs2))
|
||||
|
||||
file_sz = self.getattr(path)["st_size"]
|
||||
log(
|
||||
"read {} |{}| {}:{} max {}".format(
|
||||
hexler(path), length, offset, ofs2, file_sz
|
||||
)
|
||||
)
|
||||
if ofs2 > file_sz:
|
||||
ofs2 = file_sz
|
||||
log("truncate to len {} end {}".format(ofs2 - offset, ofs2))
|
||||
log("truncate to |{}| :{}".format(ofs2 - offset, ofs2))
|
||||
|
||||
if file_sz == 0 or offset >= ofs2:
|
||||
return b""
|
||||
|
||||
# toggle cache here i suppose
|
||||
# return self.get_cached_file(path, offset, ofs2, file_sz)
|
||||
return self.gw.download_file_range(path, offset, ofs2)
|
||||
if self.n_filecache and length <= cache_max:
|
||||
ret = self.get_cached_file(path, offset, ofs2, file_sz)
|
||||
else:
|
||||
ret = self.gw.download_file_range(path, offset, ofs2)
|
||||
|
||||
return ret
|
||||
|
||||
fn = "cppf-{}-{}-{}".format(time.time(), offset, length)
|
||||
if False:
|
||||
with open(fn, "wb", len(ret)) as f:
|
||||
f.write(ret)
|
||||
elif self.n_filecache:
|
||||
ret2 = self.gw.download_file_range(path, offset, ofs2)
|
||||
if ret != ret2:
|
||||
info(fn)
|
||||
for v in [ret, ret2]:
|
||||
try:
|
||||
info(len(v))
|
||||
except:
|
||||
info("uhh " + repr(v))
|
||||
|
||||
with open(fn + ".bad", "wb") as f:
|
||||
f.write(ret)
|
||||
with open(fn + ".good", "wb") as f:
|
||||
f.write(ret2)
|
||||
|
||||
raise Exception("cache bork")
|
||||
|
||||
return ret
|
||||
|
||||
def getattr(self, path, fh=None):
|
||||
log("getattr [{}]".format(hexler(path)))
|
||||
if WINDOWS:
|
||||
path = enwin(path) # windows occasionally decodes f0xx to xx
|
||||
|
||||
path = path.strip("/")
|
||||
try:
|
||||
dirpath, fname = path.rsplit("/", 1)
|
||||
@@ -492,23 +790,38 @@ class CPPF(Operations):
|
||||
dirpath = ""
|
||||
fname = path
|
||||
|
||||
log("getattr {}".format(path))
|
||||
|
||||
if not path:
|
||||
return self.gw.stat_dir(time.time())
|
||||
ret = self.gw.stat_dir(time.time())
|
||||
# dbg("=" + repr(ret))
|
||||
return ret
|
||||
|
||||
cn = self.get_cached_dir(dirpath)
|
||||
if cn:
|
||||
log("cache ok")
|
||||
dents = cn.data
|
||||
else:
|
||||
log("cache miss")
|
||||
dents = self.readdir(dirpath)
|
||||
dbg("cache miss")
|
||||
dents = self._readdir(dirpath)
|
||||
|
||||
for cache_name, cache_stat, _ in dents:
|
||||
# if "qw" in cache_name and "qw" in fname:
|
||||
# info(
|
||||
# "cmp\n [{}]\n [{}]\n\n{}\n".format(
|
||||
# hexler(cache_name),
|
||||
# hexler(fname),
|
||||
# "\n".join(traceback.format_stack()[:-1]),
|
||||
# )
|
||||
# )
|
||||
|
||||
if cache_name == fname:
|
||||
# dbg("=" + repr(cache_stat))
|
||||
return cache_stat
|
||||
|
||||
fun = info
|
||||
if MACOS and path.split('/')[-1].startswith('._'):
|
||||
fun = dbg
|
||||
|
||||
fun("=ENOENT ({})".format(hexler(path)))
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
access = None
|
||||
@@ -521,17 +834,179 @@ class CPPF(Operations):
|
||||
releasedir = None
|
||||
statfs = None
|
||||
|
||||
if False:
|
||||
# incorrect semantics but good for debugging stuff like samba and msys2
|
||||
def access(self, path, mode):
|
||||
log("@@ access [{}] [{}]".format(path, mode))
|
||||
return 1 if self.getattr(path) else 0
|
||||
|
||||
def flush(self, path, fh):
|
||||
log("@@ flush [{}] [{}]".format(path, fh))
|
||||
return True
|
||||
|
||||
def getxattr(self, *args):
|
||||
log("@@ getxattr [{}]".format("] [".join(str(x) for x in args)))
|
||||
return False
|
||||
|
||||
def listxattr(self, *args):
|
||||
log("@@ listxattr [{}]".format("] [".join(str(x) for x in args)))
|
||||
return False
|
||||
|
||||
def open(self, path, flags):
|
||||
log("@@ open [{}] [{}]".format(path, flags))
|
||||
return 42
|
||||
|
||||
def opendir(self, fh):
|
||||
log("@@ opendir [{}]".format(fh))
|
||||
return 69
|
||||
|
||||
def release(self, ino, fi):
|
||||
log("@@ release [{}] [{}]".format(ino, fi))
|
||||
return True
|
||||
|
||||
def releasedir(self, ino, fi):
|
||||
log("@@ releasedir [{}] [{}]".format(ino, fi))
|
||||
return True
|
||||
|
||||
def statfs(self, path):
|
||||
log("@@ statfs [{}]".format(path))
|
||||
return {}
|
||||
|
||||
if sys.platform == "win32":
|
||||
# quick compat for /mingw64/bin/python3 (msys2)
|
||||
def _open(self, path):
|
||||
try:
|
||||
x = self.getattr(path)
|
||||
if x["st_mode"] <= 0:
|
||||
raise Exception()
|
||||
|
||||
self.junk_fh_ctr += 1
|
||||
if self.junk_fh_ctr > 32000: # TODO untested
|
||||
self.junk_fh_ctr = 4
|
||||
|
||||
return self.junk_fh_ctr
|
||||
|
||||
except Exception as ex:
|
||||
log("open ERR {}".format(repr(ex)))
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
def open(self, path, flags):
|
||||
dbg("open [{}] [{}]".format(hexler(path), flags))
|
||||
return self._open(path)
|
||||
|
||||
def opendir(self, path):
|
||||
dbg("opendir [{}]".format(hexler(path)))
|
||||
return self._open(path)
|
||||
|
||||
def flush(self, path, fh):
|
||||
dbg("flush [{}] [{}]".format(hexler(path), fh))
|
||||
|
||||
def release(self, ino, fi):
|
||||
dbg("release [{}] [{}]".format(hexler(ino), fi))
|
||||
|
||||
def releasedir(self, ino, fi):
|
||||
dbg("releasedir [{}] [{}]".format(hexler(ino), fi))
|
||||
|
||||
def access(self, path, mode):
|
||||
dbg("access [{}] [{}]".format(hexler(path), mode))
|
||||
try:
|
||||
x = self.getattr(path)
|
||||
if x["st_mode"] <= 0:
|
||||
raise Exception()
|
||||
except:
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
|
||||
class TheArgparseFormatter(
|
||||
argparse.RawTextHelpFormatter, argparse.ArgumentDefaultsHelpFormatter
|
||||
):
|
||||
pass
|
||||
|
||||
|
||||
def main():
|
||||
try:
|
||||
local, remote = sys.argv[1:]
|
||||
except:
|
||||
print("need arg 1: local directory")
|
||||
print("need arg 2: root url")
|
||||
return
|
||||
global info, log, dbg
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
|
||||
FUSE(CPPF(remote), local, foreground=True, nothreads=True)
|
||||
# if nothreads=False also uncomment the `with *_mtx` things
|
||||
# filecache helps for reads that are ~64k or smaller;
|
||||
# linux generally does 128k so the cache is a slowdown,
|
||||
# windows likes to use 4k and 64k so cache is required,
|
||||
# value is numChunks (1~3M each) to keep in the cache
|
||||
nf = 24
|
||||
|
||||
# dircache is always a boost,
|
||||
# only want to disable it for tests etc,
|
||||
# value is numSec until an entry goes stale
|
||||
nd = 1
|
||||
|
||||
where = "local directory"
|
||||
if WINDOWS:
|
||||
where += " or DRIVE:"
|
||||
|
||||
ex_pre = "\n " + os.path.basename(__file__) + " "
|
||||
examples = ["http://192.168.1.69:3923/music/ ./music"]
|
||||
if WINDOWS:
|
||||
examples.append("http://192.168.1.69:3923/music/ M:")
|
||||
|
||||
ap = argparse.ArgumentParser(
|
||||
formatter_class=TheArgparseFormatter,
|
||||
epilog="example:" + ex_pre + ex_pre.join(examples),
|
||||
)
|
||||
ap.add_argument(
|
||||
"-cd", metavar="NUM_SECONDS", type=float, default=nd, help="directory cache"
|
||||
)
|
||||
ap.add_argument(
|
||||
"-cf", metavar="NUM_BLOCKS", type=int, default=nf, help="file cache"
|
||||
)
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password")
|
||||
ap.add_argument("-d", action="store_true", help="enable debug")
|
||||
ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
|
||||
ap.add_argument("-td", action="store_true", help="disable certificate check")
|
||||
ap.add_argument("base_url", type=str, help="remote copyparty URL to mount")
|
||||
ap.add_argument("local_path", type=str, help=where + " to mount it on")
|
||||
ar = ap.parse_args()
|
||||
|
||||
if ar.d:
|
||||
# windows terminals are slow (cmd.exe, mintty)
|
||||
# otoh fancy_log beats RecentLog on linux
|
||||
logger = RecentLog().put if WINDOWS else fancy_log
|
||||
|
||||
info = logger
|
||||
log = logger
|
||||
dbg = logger
|
||||
else:
|
||||
# debug=off, speed is dontcare
|
||||
info = fancy_log
|
||||
log = null_log
|
||||
dbg = null_log
|
||||
|
||||
if WINDOWS:
|
||||
os.system("rem")
|
||||
|
||||
for ch in '<>:"\\|?*':
|
||||
# microsoft maps illegal characters to f0xx
|
||||
# (e000 to f8ff is basic-plane private-use)
|
||||
bad_good[ch] = chr(ord(ch) + 0xF000)
|
||||
|
||||
for n in range(0, 0x100):
|
||||
# map surrogateescape to another private-use area
|
||||
bad_good[chr(n + 0xDC00)] = chr(n + 0xF100)
|
||||
|
||||
for k, v in bad_good.items():
|
||||
good_bad[v] = k
|
||||
|
||||
register_wtf8()
|
||||
|
||||
try:
|
||||
with open("/etc/fuse.conf", "rb") as f:
|
||||
allow_other = b"\nuser_allow_other" in f.read()
|
||||
except:
|
||||
allow_other = WINDOWS or MACOS
|
||||
|
||||
args = {"foreground": True, "nothreads": True, "allow_other": allow_other}
|
||||
if not MACOS:
|
||||
args["nonempty"] = True
|
||||
|
||||
FUSE(CPPF(ar), ar.local_path, encoding="wtf-8", **args)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
592
bin/copyparty-fuseb.py
Executable file
592
bin/copyparty-fuseb.py
Executable file
@@ -0,0 +1,592 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
"""copyparty-fuseb: remote copyparty as a local filesystem"""
|
||||
__author__ = "ed <copyparty@ocv.me>"
|
||||
__copyright__ = 2020
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import stat
|
||||
import errno
|
||||
import struct
|
||||
import threading
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
|
||||
try:
|
||||
import fuse
|
||||
from fuse import Fuse
|
||||
|
||||
fuse.fuse_python_api = (0, 2)
|
||||
if not hasattr(fuse, "__version__"):
|
||||
raise Exception("your fuse-python is way old")
|
||||
except:
|
||||
print(
|
||||
"\n could not import fuse; these may help:\n python3 -m pip install --user fuse-python\n apt install libfuse\n modprobe fuse\n"
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
"""
|
||||
mount a copyparty server (local or remote) as a filesystem
|
||||
|
||||
usage:
|
||||
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas
|
||||
|
||||
dependencies:
|
||||
sudo apk add fuse-dev python3-dev
|
||||
python3 -m pip install --user fuse-python
|
||||
|
||||
fork of copyparty-fuse.py based on fuse-python which
|
||||
appears to be more compliant than fusepy? since this works with samba
|
||||
(probably just my garbage code tbh)
|
||||
"""
|
||||
|
||||
|
||||
def threadless_log(msg):
    """Log *msg* to stdout with no thread-id prefix (single-threaded use)."""
    sys.stdout.write(msg + "\n")
|
||||
|
||||
|
||||
def boring_log(msg):
    """Log *msg* prefixed by the current thread-id in plain (uncolored) hex."""
    tid = threading.current_thread().ident
    text = "\033[36m{:012x}\033[0m {}\n".format(tid, msg)
    # NOTE(review): slicing off the first 4 chars truncates the opening
    # color escape — presumably a deliberate alignment hack; confirm
    print(text[4:], end="")
|
||||
|
||||
|
||||
def rice_tid():
    """Render the current thread-id as 5 colorized hex byte-pairs.

    The low 5 bytes of the id are each printed as two hex digits on a
    256-color background selected by the byte value, then reset.
    """
    packed = struct.pack(b">Q", threading.current_thread().ident)
    cells = []
    for byte in struct.unpack(b"B" * 5, packed[-5:]):
        cells.append("\033[1;37;48;5;{}m{:02x}".format(byte, byte))
    return "".join(cells) + "\033[0m"
|
||||
|
||||
|
||||
def fancy_log(msg):
    """Log *msg* prefixed by the colorized thread-id from rice_tid()."""
    line = "{} {}\n".format(rice_tid(), msg)
    print(line, end="")
|
||||
|
||||
|
||||
def null_log(msg):
    """Discard *msg*; assigned over log/dbg to silence a log level."""
    return None
|
||||
|
||||
|
||||
# module-level log functions; three severity levels, each independently
# switchable between fancy_log (colorized tid prefix) and null_log (off)
info = fancy_log
log = fancy_log
dbg = fancy_log
# default config: keep info, silence log and dbg
# (reassigned here rather than edited above so a level is easy to
#  re-enable while debugging)
log = null_log
dbg = null_log
|
||||
|
||||
|
||||
def get_tid():
    """Return the integer id of the calling thread."""
    return threading.get_ident()
|
||||
|
||||
|
||||
def html_dec(txt):
    """Decode the four HTML entities copyparty emits in directory listings.

    "&amp;" must be decoded last: otherwise a double-escaped sequence
    such as "&amp;lt;" would incorrectly collapse all the way to "<"
    instead of stopping at "&lt;".

    (The entity string literals in this function had been destroyed by
    an HTML rendering pass; restored to the evident originals.)
    """
    for entity, char in (
        ("&lt;", "<"),
        ("&gt;", ">"),
        ("&quot;", '"'),
        ("&amp;", "&"),
    ):
        txt = txt.replace(entity, char)
    return txt
|
||||
|
||||
|
||||
class CacheNode(object):
    """One cache entry: an identifying tag, a payload, and its creation time.

    Used both for the dircache (tag = dir path, data = listing) and the
    filecache (tag = [path, offset], data = bytes).
    """

    def __init__(self, tag, data):
        self.ts = time.time()  # creation time; checked for expiry
        self.tag = tag  # cache key
        self.data = data  # cached payload
|
||||
|
||||
|
||||
class Stat(fuse.Stat):
    """fuse.Stat with defaults: all fields zeroed except nlink=1 and
    uid/gid 1000 (the typical first regular user on linux)."""

    def __init__(self):
        for zeroed in (
            "st_mode",
            "st_ino",
            "st_dev",
            "st_size",
            "st_atime",
            "st_mtime",
            "st_ctime",
        ):
            setattr(self, zeroed, 0)

        self.st_nlink = 1
        self.st_uid = 1000
        self.st_gid = 1000
||||
|
||||
|
||||
class Gateway(object):
    """HTTP client for one copyparty server.

    Keeps one persistent HTTPConnection per calling thread (keyed by
    thread-id in self.conns) and parses the server's HTML directory
    listings into (name, Stat, 0) triples.
    """

    def __init__(self, base_url):
        # base_url: e.g. "http://host:3923/some/root" -- https is not
        # implemented (raises "todo" below)
        self.base_url = base_url

        ui = urllib.parse.urlparse(base_url)
        self.web_root = ui.path.strip("/")
        try:
            # netloc with explicit port, "host:port"
            self.web_host, self.web_port = ui.netloc.split(":")
            self.web_port = int(self.web_port)
        except:
            # no port given; derive it from the scheme
            self.web_host = ui.netloc
            if ui.scheme == "http":
                self.web_port = 80
            elif ui.scheme == "https":
                raise Exception("todo")
            else:
                raise Exception("bad url?")

        # thread-id -> HTTPConnection
        self.conns = {}

    def quotep(self, path):
        """Percent-encode *path* for use in a request line."""
        # TODO: mojibake support
        path = path.encode("utf-8", "ignore")
        return quote(path, safe="/")

    def getconn(self, tid=None):
        """Return the calling thread's connection, creating it on demand."""
        tid = tid or get_tid()
        try:
            return self.conns[tid]
        except:
            info("new conn [{}] [{}]".format(self.web_host, self.web_port))

            conn = http.client.HTTPConnection(self.web_host, self.web_port, timeout=260)

            self.conns[tid] = conn
            return conn

    def closeconn(self, tid=None):
        """Close and forget the calling thread's connection (best-effort)."""
        tid = tid or get_tid()
        try:
            self.conns[tid].close()
            del self.conns[tid]
        except:
            pass

    def sendreq(self, *args, **kwargs):
        """Send a request on the thread's connection; on any failure,
        reconnect once and retry (handles stale keepalive sockets)."""
        tid = get_tid()
        try:
            c = self.getconn(tid)
            c.request(*list(args), **kwargs)
            return c.getresponse()
        except:
            self.closeconn(tid)
            c = self.getconn(tid)
            c.request(*list(args), **kwargs)
            return c.getresponse()

    def listdir(self, path):
        """Fetch and parse the directory listing for *path*.

        Returns a list of [name, Stat, 0]; raises on non-200.
        """
        # "?dots" asks the server to include dotfiles
        web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
        r = self.sendreq("GET", web_path)
        if r.status != 200:
            self.closeconn()
            raise Exception(
                "http error {} reading dir {} in {}".format(
                    r.status, web_path, rice_tid()
                )
            )

        return self.parse_html(r)

    def download_file_range(self, path, ofs1, ofs2):
        """Download bytes [ofs1, ofs2) of *path* via an HTTP Range request.

        Raises unless the server replies 206 Partial Content.
        """
        web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
        # Range header is inclusive on both ends, hence ofs2 - 1
        hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
        log("downloading {}".format(hdr_range))

        r = self.sendreq("GET", web_path, headers={"Range": hdr_range})
        if r.status != http.client.PARTIAL_CONTENT:
            self.closeconn()
            raise Exception(
                "http error {} reading file {} range {} in {}".format(
                    r.status, web_path, hdr_range, rice_tid()
                )
            )

        return r.read()

    def parse_html(self, datasrc):
        """Stream-parse a copyparty HTML listing from *datasrc*.

        Reads 4 KiB chunks, keeps the trailing partial line as a
        remainder between chunks, and matches one <tr> per file/dir.
        Returns a list of [name, Stat, 0].
        """
        ret = []
        remainder = b""
        # groups: type ("-" file / "DIR"), link text, size, date
        ptn = re.compile(
            r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
        )

        while True:
            buf = remainder + datasrc.read(4096)
            # print('[{}]'.format(buf.decode('utf-8')))
            if not buf:
                break

            remainder = b""
            endpos = buf.rfind(b"\n")
            if endpos >= 0:
                # keep the incomplete last line for the next iteration
                remainder = buf[endpos + 1 :]
                buf = buf[:endpos]

            lines = buf.decode("utf-8").split("\n")
            for line in lines:
                m = ptn.match(line)
                if not m:
                    # print(line)
                    continue

                ftype, fname, fsize, fdate = m.groups()
                fname = html_dec(fname)
                # NOTE(review): strptime is naive local time -- assumes the
                # server reports timestamps in this machine's timezone
                ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
                sz = int(fsize)
                if ftype == "-":
                    ret.append([fname, self.stat_file(ts, sz), 0])
                else:
                    ret.append([fname, self.stat_dir(ts, sz), 0])

        return ret

    def stat_dir(self, ts, sz=4096):
        """Build a read-only directory Stat (mode 0555) with all times = ts."""
        ret = Stat()
        ret.st_mode = stat.S_IFDIR | 0o555
        ret.st_nlink = 2
        ret.st_size = sz
        ret.st_atime = ts
        ret.st_mtime = ts
        ret.st_ctime = ts
        return ret

    def stat_file(self, ts, sz):
        """Build a read-only file Stat (mode 0444) with all times = ts."""
        ret = Stat()
        ret.st_mode = stat.S_IFREG | 0o444
        ret.st_size = sz
        ret.st_atime = ts
        ret.st_mtime = ts
        ret.st_ctime = ts
        return ret
|
||||
|
||||
|
||||
class CPPF(Fuse):
    """Read-only FUSE filesystem backed by a copyparty server.

    Maintains a short-lived directory cache and an optional file cache
    of recently downloaded byte ranges.  The mutexes are created but
    currently unused (their `with` blocks are commented out) since the
    mount runs single-threaded.
    """

    def __init__(self, *args, **kwargs):
        Fuse.__init__(self, *args, **kwargs)

        # filled in by the option parser (-o url=...), consumed by init2()
        self.url = None

        # list of CacheNode(dirpath, listing), oldest first
        self.dircache = []
        self.dircache_mtx = threading.Lock()

        # list of CacheNode([path, offset], bytes), oldest first
        self.filecache = []
        self.filecache_mtx = threading.Lock()

    def init2(self):
        """Second-phase init, after option parsing has set self.url."""
        # TODO figure out how python-fuse wanted this to go
        self.gw = Gateway(self.url)  # .decode('utf-8'))
        info("up")

    def clean_dircache(self):
        """not threadsafe"""
        # entries are appended in order, so expired ones (older than 1s)
        # form a prefix of the list; count and drop that prefix
        now = time.time()
        cutoff = 0
        for cn in self.dircache:
            if now - cn.ts > 1:
                cutoff += 1
            else:
                break

        if cutoff > 0:
            self.dircache = self.dircache[cutoff:]

    def get_cached_dir(self, dirpath):
        """Return the CacheNode for *dirpath* if fresh, else None."""
        # with self.dircache_mtx:
        if True:
            self.clean_dircache()
            for cn in self.dircache:
                if cn.tag == dirpath:
                    return cn

        return None

    """
    ,-------------------------------,  g1>=c1, g2<=c2
    |cache1                   cache2|  buf[g1-c1:(g1-c1)+(g2-g1)]
    `-------------------------------'
            ,---------------,
            |get1       get2|
            `---------------'
    __________________________________________________________________________

    ,-------------------------------,  g2<=c2, (g2>=c1)
    |cache1                   cache2|  cdr=buf[:g2-c1]
    `-------------------------------'  dl car; g1-512K:c1
    ,---------------,
    |get1       get2|
    `---------------'
    __________________________________________________________________________

    ,-------------------------------,  g1>=c1, (g1<=c2)
    |cache1                   cache2|  car=buf[c2-g1:]
    `-------------------------------'  dl cdr; c2:c2+1M
                    ,---------------,
                    |get1       get2|
                    `---------------'
    """

    def get_cached_file(self, path, get1, get2, file_sz):
        """Return bytes [get1, get2) of *path*, using/filling the cache.

        Scans the filecache for full or partial overlaps (see the
        diagram above): "car" is a cached head of the range, "cdr" a
        cached tail; missing parts are downloaded with generous padding
        so neighboring reads hit the cache.
        """
        car = None
        cdr = None
        ncn = -1
        # with self.filecache_mtx:
        if True:
            dbg("cache request from {} to {}, size {}".format(get1, get2, file_sz))
            for cn in self.filecache:
                ncn += 1

                cache_path, cache1 = cn.tag
                if cache_path != path:
                    continue

                cache2 = cache1 + len(cn.data)
                if get2 <= cache1 or get1 >= cache2:
                    # no overlap at all
                    continue

                if get1 >= cache1 and get2 <= cache2:
                    # fully contained in this entry;
                    # keep cache entry alive by moving it to the end
                    self.filecache = (
                        self.filecache[:ncn] + self.filecache[ncn + 1 :] + [cn]
                    )
                    buf_ofs = get1 - cache1
                    buf_end = buf_ofs + (get2 - get1)
                    dbg(
                        "found all ({}, {} to {}, len {}) [{}:{}] = {}".format(
                            ncn,
                            cache1,
                            cache2,
                            len(cn.data),
                            buf_ofs,
                            buf_end,
                            buf_end - buf_ofs,
                        )
                    )
                    return cn.data[buf_ofs:buf_end]

                if get2 < cache2:
                    # entry covers the tail of the request; keep the
                    # largest such candidate as cdr
                    x = cn.data[: get2 - cache1]
                    if not cdr or len(cdr) < len(x):
                        dbg(
                            "found car ({}, {} to {}, len {}) [:{}-{}] = [:{}] = {}".format(
                                ncn,
                                cache1,
                                cache2,
                                len(cn.data),
                                get2,
                                cache1,
                                get2 - cache1,
                                len(x),
                            )
                        )
                        cdr = x

                    continue

                if get1 > cache1:
                    # entry covers the head of the request; keep the
                    # largest such candidate as car
                    x = cn.data[-(cache2 - get1) :]
                    if not car or len(car) < len(x):
                        dbg(
                            "found cdr ({}, {} to {}, len {}) [-({}-{}):] = [-{}:] = {}".format(
                                ncn,
                                cache1,
                                cache2,
                                len(cn.data),
                                cache2,
                                get1,
                                cache2 - get1,
                                len(x),
                            )
                        )
                        car = x

                    continue

                # overlap cases are exhaustive above; this is a bug trap
                raise Exception("what")

        if car and cdr:
            dbg("<cache> have both")

            # head + tail must exactly cover the request
            ret = car + cdr
            if len(ret) == get2 - get1:
                return ret

            raise Exception("{} + {} != {} - {}".format(len(car), len(cdr), get2, get1))

        elif cdr:
            # have the tail; download the missing head plus 512 KiB of
            # preceding padding
            h_end = get1 + (get2 - get1) - len(cdr)
            h_ofs = h_end - 512 * 1024

            if h_ofs < 0:
                h_ofs = 0

            buf_ofs = (get2 - get1) - len(cdr)

            dbg(
                "<cache> cdr {}, car {}-{}={} [-{}:]".format(
                    len(cdr), h_ofs, h_end, h_end - h_ofs, buf_ofs
                )
            )

            buf = self.gw.download_file_range(path, h_ofs, h_end)
            ret = buf[-buf_ofs:] + cdr

        elif car:
            # have the head; download the missing tail plus up to 1 MiB
            # of trailing padding (clamped to eof)
            h_ofs = get1 + len(car)
            h_end = h_ofs + 1024 * 1024

            if h_end > file_sz:
                h_end = file_sz

            buf_ofs = (get2 - get1) - len(car)

            dbg(
                "<cache> car {}, cdr {}-{}={} [:{}]".format(
                    len(car), h_ofs, h_end, h_end - h_ofs, buf_ofs
                )
            )

            buf = self.gw.download_file_range(path, h_ofs, h_end)
            ret = car + buf[:buf_ofs]

        else:
            # nothing cached; download the range padded by 256 KiB before
            # and 1 MiB after, and insert the whole padded buffer into
            # the cache (max 7 entries, FIFO eviction)
            h_ofs = get1 - 256 * 1024
            h_end = get2 + 1024 * 1024

            if h_ofs < 0:
                h_ofs = 0

            if h_end > file_sz:
                h_end = file_sz

            buf_ofs = get1 - h_ofs
            buf_end = buf_ofs + get2 - get1

            dbg(
                "<cache> {}-{}={} [{}:{}]".format(
                    h_ofs, h_end, h_end - h_ofs, buf_ofs, buf_end
                )
            )

            buf = self.gw.download_file_range(path, h_ofs, h_end)
            ret = buf[buf_ofs:buf_end]

            cn = CacheNode([path, h_ofs], buf)
            # with self.filecache_mtx:
            if True:
                if len(self.filecache) > 6:
                    self.filecache = self.filecache[1:] + [cn]
                else:
                    self.filecache.append(cn)

        return ret

    def _readdir(self, path):
        """List *path* on the server and refresh the dircache with it."""
        path = path.strip("/")
        log("readdir {}".format(path))

        ret = self.gw.listdir(path)

        # with self.dircache_mtx:
        if True:
            cn = CacheNode(path, ret)
            self.dircache.append(cn)
            self.clean_dircache()

        return ret

    def readdir(self, path, offset):
        """FUSE readdir: yield Direntry objects starting at *offset*."""
        for e in self._readdir(path)[offset:]:
            # log("yield [{}]".format(e[0]))
            yield fuse.Direntry(e[0])

    def open(self, path, flags):
        """FUSE open: read-only filesystem, so reject any write access."""
        if (flags & (os.O_RDONLY | os.O_WRONLY | os.O_RDWR)) != os.O_RDONLY:
            return -errno.EACCES

        # NOTE(review): returns the stat object (or getattr's error code)
        # rather than a file handle -- presumably fuse-python tolerates
        # any truthy return here; confirm against the bindings
        st = self.getattr(path)
        try:
            if st.st_nlink > 0:
                return st
        except:
            return st  # -int(os.errcode)

    def read(self, path, length, offset, fh=None, *args):
        """FUSE read: return bytes [offset, offset+length) of *path*.

        The requested range is clamped to the file size; currently
        bypasses get_cached_file (see the toggle comment below).
        """
        if args:
            log("unexpected args [" + "] [".join(repr(x) for x in args) + "]")
            raise Exception()

        path = path.strip("/")

        ofs2 = offset + length
        log("read {} @ {} len {} end {}".format(path, offset, length, ofs2))

        st = self.getattr(path)
        try:
            file_sz = st.st_size
        except:
            # getattr returned an error code, not a stat; propagate it
            return st  # -int(os.errcode)

        if ofs2 > file_sz:
            ofs2 = file_sz
            log("truncate to len {} end {}".format(ofs2 - offset, ofs2))

        if file_sz == 0 or offset >= ofs2:
            return b""

        # toggle cache here i suppose
        # return self.get_cached_file(path, offset, ofs2, file_sz)
        return self.gw.download_file_range(path, offset, ofs2)

    def getattr(self, path):
        """FUSE getattr: look *path* up in its parent's (cached) listing.

        Returns a Stat, or -errno.ENOENT if the entry does not exist.
        """
        log("getattr [{}]".format(path))

        path = path.strip("/")
        try:
            dirpath, fname = path.rsplit("/", 1)
        except:
            # no slash: entry in the mount root
            dirpath = ""
            fname = path

        if not path:
            # the mount root itself
            ret = self.gw.stat_dir(time.time())
            dbg("=root")
            return ret

        cn = self.get_cached_dir(dirpath)
        if cn:
            log("cache ok")
            dents = cn.data
        else:
            log("cache miss")
            dents = self._readdir(dirpath)

        for cache_name, cache_stat, _ in dents:
            if cache_name == fname:
                dbg("=file")
                return cache_stat

        log("=404")
        return -errno.ENOENT
|
||||
|
||||
|
||||
def main():
    """Parse fuse-python mount options, connect, and serve until killed."""
    # cpython bug #7980: first strptime call is not thread-safe, so
    # prime it on the main thread before any workers exist
    time.strptime("19970815", "%Y%m%d")  # python#7980

    server = CPPF()
    # the url is passed as a mount option: -o url=http://...
    server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
    server.parse(values=server, errex=1)
    if not server.url or not str(server.url).startswith("http"):
        print("\nerror:")
        print(" need argument: -o url=<...>")
        print(" need argument: mount-path")
        print("example:")
        print(
            " ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas"
        )
        sys.exit(1)

    server.init2()
    # run the fuse loop in a daemon thread and idle the main thread,
    # so ctrl-c can still terminate the process
    threading.Thread(target=server.main, daemon=True).start()
    while True:
        time.sleep(9001)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -118,7 +118,7 @@ printf ']}' >> /dev/shm/$salt.hs
|
||||
|
||||
printf '\033[36m'
|
||||
|
||||
#curl "http://$target:1234$posturl/handshake.php" -H "Content-Type: text/plain;charset=UTF-8" -H "Cookie: cppwd=$passwd" --data "$(cat "/dev/shm/$salt.hs")" | tee /dev/shm/$salt.res
|
||||
#curl "http://$target:3923$posturl/handshake.php" -H "Content-Type: text/plain;charset=UTF-8" -H "Cookie: cppwd=$passwd" --data "$(cat "/dev/shm/$salt.hs")" | tee /dev/shm/$salt.res
|
||||
|
||||
{
|
||||
{
|
||||
@@ -135,7 +135,7 @@ EOF
|
||||
cat /dev/shm/$salt.hs
|
||||
} |
|
||||
tee /dev/shm/$salt.hsb |
|
||||
ncat $target 1234 |
|
||||
ncat $target 3923 |
|
||||
tee /dev/shm/$salt.hs1r
|
||||
|
||||
wark="$(cat /dev/shm/$salt.hs1r | getwark)"
|
||||
@@ -190,7 +190,7 @@ EOF
|
||||
nchunk=$((nchunk+1))
|
||||
|
||||
done |
|
||||
ncat $target 1234 |
|
||||
ncat $target 3923 |
|
||||
tee /dev/shm/$salt.pr
|
||||
|
||||
t=$(date +%s.%N)
|
||||
@@ -201,7 +201,7 @@ t=$(date +%s.%N)
|
||||
|
||||
printf '\033[36m'
|
||||
|
||||
ncat $target 1234 < /dev/shm/$salt.hsb |
|
||||
ncat $target 3923 < /dev/shm/$salt.hsb |
|
||||
tee /dev/shm/$salt.hs2r |
|
||||
grep -E '"hash": ?\[ *\]'
|
||||
|
||||
|
||||
22
contrib/README.md
Normal file
22
contrib/README.md
Normal file
@@ -0,0 +1,22 @@
|
||||
### [`copyparty.bat`](copyparty.bat)
|
||||
* launches copyparty with no arguments (anon read+write within same folder)
|
||||
* intended for windows machines with no python.exe in PATH
|
||||
* works on windows, linux and macos
|
||||
* assumes `copyparty-sfx.py` was renamed to `copyparty.py` in the same folder as `copyparty.bat`
|
||||
|
||||
### [`index.html`](index.html)
|
||||
* drop-in redirect from an httpd to copyparty
|
||||
* assumes the webserver and copyparty is running on the same server/IP
|
||||
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript
|
||||
|
||||
### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
|
||||
disables thumbnails and folder-type detection in windows explorer, makes it way faster (especially for slow/networked locations (such as copyparty-fuse))
|
||||
|
||||
# OS integration
|
||||
init-scripts to start copyparty as a service
|
||||
* [`systemd/copyparty.service`](systemd/copyparty.service)
|
||||
* [`openrc/copyparty`](openrc/copyparty)
|
||||
|
||||
# Reverse-proxy
|
||||
copyparty has basic support for running behind another webserver
|
||||
* [`nginx/copyparty.conf`](nginx/copyparty.conf)
|
||||
33
contrib/copyparty.bat
Normal file
33
contrib/copyparty.bat
Normal file
@@ -0,0 +1,33 @@
|
||||
exec python "$(dirname "$0")"/copyparty.py
|
||||
|
||||
@rem on linux, the above will execute and the script will terminate
|
||||
@rem on windows, the rest of this script will run
|
||||
|
||||
@echo off
|
||||
cls
|
||||
|
||||
set py=
|
||||
for /f %%i in ('where python 2^>nul') do (
|
||||
set "py=%%i"
|
||||
goto c1
|
||||
)
|
||||
:c1
|
||||
|
||||
if [%py%] == [] (
|
||||
for /f %%i in ('where /r "%localappdata%\programs\python" python 2^>nul') do (
|
||||
set "py=%%i"
|
||||
goto c2
|
||||
)
|
||||
)
|
||||
:c2
|
||||
|
||||
if [%py%] == [] set "py=c:\python27\python.exe"
|
||||
|
||||
if not exist "%py%" (
|
||||
echo could not find python
|
||||
echo(
|
||||
pause
|
||||
exit /b
|
||||
)
|
||||
|
||||
start cmd /c %py% "%~dp0\copyparty.py"
|
||||
31
contrib/explorer-nothumbs-nofoldertypes.reg
Normal file
31
contrib/explorer-nothumbs-nofoldertypes.reg
Normal file
@@ -0,0 +1,31 @@
|
||||
Windows Registry Editor Version 5.00
|
||||
|
||||
; this will do 3 things, all optional:
|
||||
; 1) disable thumbnails
|
||||
; 2) delete all existing folder type settings/detections
|
||||
; 3) disable folder type detection (force default columns)
|
||||
;
|
||||
; this makes the file explorer way faster,
|
||||
; especially on slow/networked locations
|
||||
|
||||
|
||||
; =====================================================================
|
||||
; 1) disable thumbnails
|
||||
|
||||
[HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Explorer\Advanced]
|
||||
"IconsOnly"=dword:00000001
|
||||
|
||||
|
||||
; =====================================================================
|
||||
; 2) delete all existing folder type settings/detections
|
||||
|
||||
[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags]
|
||||
|
||||
[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\BagMRU]
|
||||
|
||||
|
||||
; =====================================================================
|
||||
; 3) disable folder type detection
|
||||
|
||||
[HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags\AllFolders\Shell]
|
||||
"FolderType"="NotSpecified"
|
||||
43
contrib/index.html
Normal file
43
contrib/index.html
Normal file
@@ -0,0 +1,43 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>⇆🎉 redirect</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<style>
|
||||
|
||||
html, body {
|
||||
font-family: sans-serif;
|
||||
}
|
||||
body {
|
||||
padding: 1em 2em;
|
||||
font-size: 1.5em;
|
||||
}
|
||||
a {
|
||||
font-size: 1.2em;
|
||||
padding: .1em;
|
||||
}
|
||||
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<span id="desc">you probably want</span> <a id="redir" href="//10.13.1.1:3923/">copyparty</a>
|
||||
<script>
|
||||
|
||||
var a = document.getElementById('redir'),
|
||||
proto = window.location.protocol.indexOf('https') === 0 ? 'https' : 'http',
|
||||
loc = window.location.hostname || '127.0.0.1',
|
||||
port = a.getAttribute('href').split(':').pop().split('/')[0],
|
||||
url = proto + '://' + loc + ':' + port + '/';
|
||||
|
||||
a.setAttribute('href', url);
|
||||
document.getElementById('desc').innerHTML = 'redirecting to';
|
||||
|
||||
setTimeout(function() {
|
||||
window.location.href = url;
|
||||
}, 500);
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
26
contrib/nginx/copyparty.conf
Normal file
26
contrib/nginx/copyparty.conf
Normal file
@@ -0,0 +1,26 @@
|
||||
upstream cpp {
|
||||
server 127.0.0.1:3923;
|
||||
keepalive 120;
|
||||
}
|
||||
server {
|
||||
listen 443 ssl;
|
||||
listen [::]:443 ssl;
|
||||
|
||||
server_name fs.example.com;
|
||||
|
||||
location / {
|
||||
proxy_pass http://cpp;
|
||||
proxy_redirect off;
|
||||
# disable buffering (next 4 lines)
|
||||
proxy_http_version 1.1;
|
||||
client_max_body_size 0;
|
||||
proxy_buffering off;
|
||||
proxy_request_buffering off;
|
||||
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Connection "Keep-Alive";
|
||||
}
|
||||
}
|
||||
18
contrib/openrc/copyparty
Normal file
18
contrib/openrc/copyparty
Normal file
@@ -0,0 +1,18 @@
|
||||
#!/sbin/openrc-run
|
||||
|
||||
# this will start `/usr/local/bin/copyparty-sfx.py`
|
||||
# and share '/mnt' with anonymous read+write
|
||||
#
|
||||
# installation:
|
||||
# cp -pv copyparty /etc/init.d && rc-update add copyparty
|
||||
#
|
||||
# you may want to:
|
||||
# change '/usr/bin/python' to another interpreter
|
||||
# change '/mnt::a' to another location or permission-set
|
||||
|
||||
name="$SVCNAME"
|
||||
command_background=true
|
||||
pidfile="/var/run/$SVCNAME.pid"
|
||||
|
||||
command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
|
||||
command_args="-q -v /mnt::a"
|
||||
19
contrib/systemd/copyparty.service
Normal file
19
contrib/systemd/copyparty.service
Normal file
@@ -0,0 +1,19 @@
|
||||
# this will start `/usr/local/bin/copyparty-sfx.py`
|
||||
# and share '/mnt' with anonymous read+write
|
||||
#
|
||||
# installation:
|
||||
# cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
|
||||
#
|
||||
# you may want to:
|
||||
# change '/usr/bin/python' to another interpreter
|
||||
# change '/mnt::a' to another location or permission-set
|
||||
|
||||
[Unit]
|
||||
Description=copyparty file server
|
||||
|
||||
[Service]
|
||||
ExecStart=/usr/bin/python /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
|
||||
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
@@ -8,7 +8,10 @@ __copyright__ = 2019
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import shutil
|
||||
import filecmp
|
||||
import locale
|
||||
@@ -18,7 +21,13 @@ from textwrap import dedent
|
||||
from .__init__ import E, WINDOWS, VT100
|
||||
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
|
||||
from .svchub import SvcHub
|
||||
from .util import py_desc
|
||||
from .util import py_desc, align_tab
|
||||
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
import ssl
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
|
||||
class RiceFormatter(argparse.HelpFormatter):
|
||||
@@ -84,9 +93,78 @@ def ensure_cert():
|
||||
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
|
||||
|
||||
|
||||
def configure_ssl_ver(al):
|
||||
def terse_sslver(txt):
|
||||
txt = txt.lower()
|
||||
for c in ["_", "v", "."]:
|
||||
txt = txt.replace(c, "")
|
||||
|
||||
return txt.replace("tls10", "tls1")
|
||||
|
||||
# oh man i love openssl
|
||||
# check this out
|
||||
# hold my beer
|
||||
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
|
||||
sslver = terse_sslver(al.ssl_ver).split(",")
|
||||
flags = [k for k in ssl.__dict__ if ptn.match(k)]
|
||||
# SSLv2 SSLv3 TLSv1 TLSv1_1 TLSv1_2 TLSv1_3
|
||||
if "help" in sslver:
|
||||
avail = [terse_sslver(x[6:]) for x in flags]
|
||||
avail = " ".join(sorted(avail) + ["all"])
|
||||
print("\navailable ssl/tls versions:\n " + avail)
|
||||
sys.exit(0)
|
||||
|
||||
al.ssl_flags_en = 0
|
||||
al.ssl_flags_de = 0
|
||||
for flag in sorted(flags):
|
||||
ver = terse_sslver(flag[6:])
|
||||
num = getattr(ssl, flag)
|
||||
if ver in sslver:
|
||||
al.ssl_flags_en |= num
|
||||
else:
|
||||
al.ssl_flags_de |= num
|
||||
|
||||
if sslver == ["all"]:
|
||||
x = al.ssl_flags_en
|
||||
al.ssl_flags_en = al.ssl_flags_de
|
||||
al.ssl_flags_de = x
|
||||
|
||||
for k in ["ssl_flags_en", "ssl_flags_de"]:
|
||||
num = getattr(al, k)
|
||||
print("{}: {:8x} ({})".format(k, num, num))
|
||||
|
||||
# think i need that beer now
|
||||
|
||||
|
||||
def configure_ssl_ciphers(al):
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
if al.ssl_ver:
|
||||
ctx.options &= ~al.ssl_flags_en
|
||||
ctx.options |= al.ssl_flags_de
|
||||
|
||||
is_help = al.ciphers == "help"
|
||||
|
||||
if al.ciphers:
|
||||
try:
|
||||
ctx.set_ciphers(al.ciphers)
|
||||
except:
|
||||
if not is_help:
|
||||
print("\n\033[1;31mfailed to set ciphers\033[0m\n")
|
||||
|
||||
if not hasattr(ctx, "get_ciphers"):
|
||||
print("cannot read cipher list: openssl or python too old")
|
||||
else:
|
||||
ciphers = [x["description"] for x in ctx.get_ciphers()]
|
||||
print("\n ".join(["\nenabled ciphers:"] + align_tab(ciphers) + [""]))
|
||||
|
||||
if is_help:
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def main():
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
if WINDOWS:
|
||||
os.system("") # enables colors
|
||||
os.system("rem") # enables colors
|
||||
|
||||
desc = py_desc().replace("[", "\033[1;30m[")
|
||||
|
||||
@@ -94,7 +172,8 @@ def main():
|
||||
print(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc))
|
||||
|
||||
ensure_locale()
|
||||
ensure_cert()
|
||||
if HAVE_SSL:
|
||||
ensure_cert()
|
||||
|
||||
ap = argparse.ArgumentParser(
|
||||
formatter_class=RiceFormatter,
|
||||
@@ -103,41 +182,89 @@ def main():
|
||||
epilog=dedent(
|
||||
"""
|
||||
-a takes username:password,
|
||||
-v takes src:dst:permset:permset:... where "permset" is
|
||||
accesslevel followed by username (no separator)
|
||||
-v takes src:dst:permset:permset:cflag:cflag:...
|
||||
where "permset" is accesslevel followed by username (no separator)
|
||||
and "cflag" is config flags to set on this volume
|
||||
|
||||
list of cflags:
|
||||
cnodupe rejects existing files (instead of symlinking them)
|
||||
|
||||
example:\033[35m
|
||||
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed \033[36m
|
||||
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
|
||||
mount current directory at "/" with
|
||||
* r (read-only) for everyone
|
||||
* a (read+write) for ed
|
||||
mount ../inc at "/dump" with
|
||||
* w (write-only) for everyone
|
||||
* a (read+write) for ed \033[0m
|
||||
* a (read+write) for ed
|
||||
* reject duplicate files \033[0m
|
||||
|
||||
if no accounts or volumes are configured,
|
||||
current folder will be read/write for everyone
|
||||
|
||||
consider the config file for more flexible account/volume management,
|
||||
including dynamic reload at runtime (and being more readable w)
|
||||
|
||||
values for --urlform:
|
||||
"stash" dumps the data to file and returns length + checksum
|
||||
"save,get" dumps to file and returns the page like a GET
|
||||
"print,get" prints the data in the log and returns GET
|
||||
(leave out the ",get" to return an error instead)
|
||||
|
||||
see "--ciphers help" for available ssl/tls ciphers,
|
||||
see "--ssl-ver help" for available ssl/tls versions,
|
||||
default is what python considers safe, usually >= TLS1
|
||||
"""
|
||||
),
|
||||
)
|
||||
ap.add_argument(
|
||||
"-c", metavar="PATH", type=str, action="append", help="add config file"
|
||||
)
|
||||
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind")
|
||||
ap.add_argument("-p", metavar="PORT", type=int, default=1234, help="port to bind")
|
||||
ap.add_argument("-nc", metavar="NUM", type=int, default=16, help="max num clients")
|
||||
ap.add_argument(
|
||||
"-j", metavar="CORES", type=int, default=1, help="max num cpu cores"
|
||||
)
|
||||
# fmt: off
|
||||
ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
|
||||
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
|
||||
ap.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
|
||||
ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
|
||||
ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
|
||||
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
|
||||
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
|
||||
ap.add_argument("-q", action="store_true", help="quiet")
|
||||
ap.add_argument("-ed", action="store_true", help="enable ?dots")
|
||||
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
|
||||
ap.add_argument("-e2d", action="store_true", help="enable up2k database")
|
||||
ap.add_argument("-e2s", action="store_true", help="enable up2k db-scanner")
|
||||
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
|
||||
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
|
||||
ap.add_argument("-nih", action="store_true", help="no info hostname")
|
||||
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
|
||||
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
|
||||
ap.add_argument("--urlform", type=str, default="print,get", help="how to handle url-forms")
|
||||
|
||||
ap2 = ap.add_argument_group('SSL/TLS options')
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
|
||||
ap2.add_argument("--ssl-ver", type=str, help="ssl/tls versions to allow")
|
||||
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
|
||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
|
||||
al = ap.parse_args()
|
||||
# fmt: on
|
||||
|
||||
al.i = al.i.split(",")
|
||||
try:
|
||||
if "-" in al.p:
|
||||
lo, hi = [int(x) for x in al.p.split("-")]
|
||||
al.p = list(range(lo, hi + 1))
|
||||
else:
|
||||
al.p = [int(x) for x in al.p.split(",")]
|
||||
except:
|
||||
raise Exception("invalid value for -p")
|
||||
|
||||
if HAVE_SSL:
|
||||
if al.ssl_ver:
|
||||
configure_ssl_ver(al)
|
||||
|
||||
if al.ciphers:
|
||||
configure_ssl_ciphers(al)
|
||||
else:
|
||||
print("\033[33m ssl module does not exist; cannot enable https\033[0m\n")
|
||||
|
||||
SvcHub(al).run()
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# coding: utf-8
|
||||
|
||||
VERSION = (0, 4, 3)
|
||||
CODENAME = "NIH"
|
||||
BUILD_DT = (2020, 5, 17)
|
||||
VERSION = (0, 7, 5)
|
||||
CODENAME = "keeping track"
|
||||
BUILD_DT = (2021, 2, 12)
|
||||
|
||||
S_VERSION = ".".join(map(str, VERSION))
|
||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
|
||||
@@ -12,11 +12,12 @@ from .util import undot, Pebkac, fsdec, fsenc
|
||||
class VFS(object):
|
||||
"""single level in the virtual fs"""
|
||||
|
||||
def __init__(self, realpath, vpath, uread=[], uwrite=[]):
|
||||
def __init__(self, realpath, vpath, uread=[], uwrite=[], flags={}):
|
||||
self.realpath = realpath # absolute path on host filesystem
|
||||
self.vpath = vpath # absolute path in the virtual filesystem
|
||||
self.uread = uread # users who can read this
|
||||
self.uwrite = uwrite # users who can write this
|
||||
self.flags = flags # config switches
|
||||
self.nodes = {} # child nodes
|
||||
|
||||
def add(self, src, dst):
|
||||
@@ -36,6 +37,7 @@ class VFS(object):
|
||||
"{}/{}".format(self.vpath, name).lstrip("/"),
|
||||
self.uread,
|
||||
self.uwrite,
|
||||
self.flags,
|
||||
)
|
||||
self.nodes[name] = vn
|
||||
return vn.add(src, dst)
|
||||
@@ -104,7 +106,7 @@ class VFS(object):
|
||||
real.sort()
|
||||
if not rem:
|
||||
for name, vn2 in sorted(self.nodes.items()):
|
||||
if uname in vn2.uread:
|
||||
if uname in vn2.uread or "*" in vn2.uread:
|
||||
virt_vis[name] = vn2
|
||||
|
||||
# no vfs nodes in the list of real inodes
|
||||
@@ -128,16 +130,15 @@ class VFS(object):
|
||||
class AuthSrv(object):
|
||||
"""verifies users against given paths"""
|
||||
|
||||
def __init__(self, args, log_func):
|
||||
self.log_func = log_func
|
||||
def __init__(self, args, log_func, warn_anonwrite=True):
|
||||
self.args = args
|
||||
|
||||
self.warn_anonwrite = True
|
||||
self.log_func = log_func
|
||||
self.warn_anonwrite = warn_anonwrite
|
||||
|
||||
if WINDOWS:
|
||||
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)")
|
||||
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
|
||||
else:
|
||||
self.re_vol = re.compile(r"^([^:]*):([^:]*):(.*)")
|
||||
self.re_vol = re.compile(r"^([^:]*):([^:]*):(.*)$")
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.reload()
|
||||
@@ -161,7 +162,7 @@ class AuthSrv(object):
|
||||
|
||||
yield prev, True
|
||||
|
||||
def _parse_config_file(self, fd, user, mread, mwrite, mount):
|
||||
def _parse_config_file(self, fd, user, mread, mwrite, mflags, mount):
|
||||
vol_src = None
|
||||
vol_dst = None
|
||||
for ln in [x.decode("utf-8").strip() for x in fd]:
|
||||
@@ -191,6 +192,7 @@ class AuthSrv(object):
|
||||
mount[vol_dst] = vol_src
|
||||
mread[vol_dst] = []
|
||||
mwrite[vol_dst] = []
|
||||
mflags[vol_dst] = {}
|
||||
continue
|
||||
|
||||
lvl, uname = ln.split(" ")
|
||||
@@ -198,6 +200,9 @@ class AuthSrv(object):
|
||||
mread[vol_dst].append(uname)
|
||||
if lvl in "wa":
|
||||
mwrite[vol_dst].append(uname)
|
||||
if lvl == "c":
|
||||
# config option, currently switches only
|
||||
mflags[vol_dst][uname] = True
|
||||
|
||||
def reload(self):
|
||||
"""
|
||||
@@ -210,6 +215,7 @@ class AuthSrv(object):
|
||||
user = {} # username:password
|
||||
mread = {} # mountpoint:[username]
|
||||
mwrite = {} # mountpoint:[username]
|
||||
mflags = {} # mountpoint:[flag]
|
||||
mount = {} # dst:src (mountpoint:realpath)
|
||||
|
||||
if self.args.a:
|
||||
@@ -220,20 +226,25 @@ class AuthSrv(object):
|
||||
if self.args.v:
|
||||
# list of src:dst:permset:permset:...
|
||||
# permset is [rwa]username
|
||||
for vol_match in [self.re_vol.match(x) for x in self.args.v]:
|
||||
try:
|
||||
src, dst, perms = vol_match.groups()
|
||||
except:
|
||||
raise Exception("invalid -v argument")
|
||||
for v_str in self.args.v:
|
||||
m = self.re_vol.match(v_str)
|
||||
if not m:
|
||||
raise Exception("invalid -v argument: [{}]".format(v_str))
|
||||
|
||||
src, dst, perms = m.groups()
|
||||
# print("\n".join([src, dst, perms]))
|
||||
src = fsdec(os.path.abspath(fsenc(src)))
|
||||
dst = dst.strip("/")
|
||||
mount[dst] = src
|
||||
mread[dst] = []
|
||||
mwrite[dst] = []
|
||||
mflags[dst] = {}
|
||||
|
||||
perms = perms.split(":")
|
||||
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
|
||||
if lvl == "c":
|
||||
# config option, currently switches only
|
||||
mflags[dst][uname] = True
|
||||
if uname == "":
|
||||
uname = "*"
|
||||
if lvl in "ra":
|
||||
@@ -244,14 +255,15 @@ class AuthSrv(object):
|
||||
if self.args.c:
|
||||
for cfg_fn in self.args.c:
|
||||
with open(cfg_fn, "rb") as f:
|
||||
self._parse_config_file(f, user, mread, mwrite, mount)
|
||||
self._parse_config_file(f, user, mread, mwrite, mflags, mount)
|
||||
|
||||
self.all_writable = []
|
||||
if not mount:
|
||||
# -h says our defaults are CWD at root and read/write for everyone
|
||||
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
|
||||
elif "" not in mount:
|
||||
# there's volumes but no root; make root inaccessible
|
||||
vfs = VFS(os.path.abspath("."), "", [], [])
|
||||
vfs = VFS(os.path.abspath("."), "")
|
||||
|
||||
maxdepth = 0
|
||||
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
|
||||
@@ -261,12 +273,18 @@ class AuthSrv(object):
|
||||
|
||||
if dst == "":
|
||||
# rootfs was mapped; fully replaces the default CWD vfs
|
||||
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst])
|
||||
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst], mflags[dst])
|
||||
continue
|
||||
|
||||
v = vfs.add(mount[dst], dst)
|
||||
v.uread = mread[dst]
|
||||
v.uwrite = mwrite[dst]
|
||||
v.flags = mflags[dst]
|
||||
if v.uwrite:
|
||||
self.all_writable.append(v)
|
||||
|
||||
if vfs.uwrite and vfs not in self.all_writable:
|
||||
self.all_writable.append(vfs)
|
||||
|
||||
missing_users = {}
|
||||
for d in [mread, mwrite]:
|
||||
|
||||
@@ -73,7 +73,7 @@ class MpWorker(object):
|
||||
if PY2:
|
||||
sck = pickle.loads(sck) # nosec
|
||||
|
||||
self.log("%s %s" % addr, "-" * 4 + "C-qpop")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-qpop\033[0m" % ("-" * 4,))
|
||||
self.httpsrv.accept(sck, addr)
|
||||
|
||||
with self.mutex:
|
||||
|
||||
@@ -28,7 +28,7 @@ class BrokerThr(object):
|
||||
def put(self, want_retval, dest, *args):
|
||||
if dest == "httpconn":
|
||||
sck, addr = args
|
||||
self.log("%s %s" % addr, "-" * 4 + "C-qpop")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-qpop\033[0m" % ("-" * 4,))
|
||||
self.httpsrv.accept(sck, addr)
|
||||
|
||||
else:
|
||||
|
||||
@@ -6,6 +6,8 @@ import stat
|
||||
import gzip
|
||||
import time
|
||||
import json
|
||||
import socket
|
||||
import ctypes
|
||||
from datetime import datetime
|
||||
import calendar
|
||||
|
||||
@@ -14,9 +16,6 @@ from .util import * # noqa # pylint: disable=unused-wildcard-import
|
||||
|
||||
if not PY2:
|
||||
unicode = str
|
||||
from html import escape as html_escape
|
||||
else:
|
||||
from cgi import escape as html_escape # pylint: disable=no-name-in-module
|
||||
|
||||
|
||||
class HttpCli(object):
|
||||
@@ -25,9 +24,11 @@ class HttpCli(object):
|
||||
"""
|
||||
|
||||
def __init__(self, conn):
|
||||
self.t0 = time.time()
|
||||
self.conn = conn
|
||||
self.s = conn.s
|
||||
self.sr = conn.sr
|
||||
self.ip = conn.addr[0]
|
||||
self.addr = conn.addr
|
||||
self.args = conn.args
|
||||
self.auth = conn.auth
|
||||
@@ -36,13 +37,13 @@ class HttpCli(object):
|
||||
|
||||
self.bufsz = 1024 * 32
|
||||
self.absolute_urls = False
|
||||
self.out_headers = {}
|
||||
self.out_headers = {"Access-Control-Allow-Origin": "*"}
|
||||
|
||||
def log(self, msg):
|
||||
self.log_func(self.log_src, msg)
|
||||
|
||||
def _check_nonfatal(self, ex):
|
||||
return ex.code in [403, 404]
|
||||
return ex.code < 400 or ex.code == 404
|
||||
|
||||
def _assert_safe_rem(self, rem):
|
||||
# sanity check to prevent any disasters
|
||||
@@ -83,11 +84,16 @@ class HttpCli(object):
|
||||
v = self.headers.get("connection", "").lower()
|
||||
self.keepalive = not v.startswith("close")
|
||||
|
||||
v = self.headers.get("x-forwarded-for", None)
|
||||
if v is not None and self.conn.addr[0] in ["127.0.0.1", "::1"]:
|
||||
self.ip = v.split(",")[0]
|
||||
self.log_src = self.conn.set_rproxy(self.ip)
|
||||
|
||||
self.uname = "*"
|
||||
if "cookie" in self.headers:
|
||||
cookies = self.headers["cookie"].split(";")
|
||||
for k, v in [x.split("=", 1) for x in cookies]:
|
||||
if k != "cppwd":
|
||||
if k.strip() != "cppwd":
|
||||
continue
|
||||
|
||||
v = unescape_cookie(v)
|
||||
@@ -123,11 +129,30 @@ class HttpCli(object):
|
||||
self.uparam = uparam
|
||||
self.vpath = unquotep(vpath)
|
||||
|
||||
ua = self.headers.get("user-agent", "")
|
||||
if ua.startswith("rclone/"):
|
||||
uparam["raw"] = True
|
||||
uparam["dots"] = True
|
||||
|
||||
if hasattr(self.s, "cipher"):
|
||||
self.ssl_suf = "".join(
|
||||
[
|
||||
" \033[3{}m{}".format(c, s)
|
||||
for c, s in zip([6, 3, 6], self.s.cipher())
|
||||
]
|
||||
)
|
||||
else:
|
||||
self.ssl_suf = ""
|
||||
|
||||
try:
|
||||
if self.mode in ["GET", "HEAD"]:
|
||||
return self.handle_get() and self.keepalive
|
||||
elif self.mode == "POST":
|
||||
return self.handle_post() and self.keepalive
|
||||
elif self.mode == "PUT":
|
||||
return self.handle_put() and self.keepalive
|
||||
elif self.mode == "OPTIONS":
|
||||
return self.handle_options() and self.keepalive
|
||||
else:
|
||||
raise Pebkac(400, 'invalid HTTP mode "{0}"'.format(self.mode))
|
||||
|
||||
@@ -135,7 +160,7 @@ class HttpCli(object):
|
||||
try:
|
||||
# self.log("pebkac at httpcli.run #2: " + repr(ex))
|
||||
self.keepalive = self._check_nonfatal(ex)
|
||||
self.loud_reply(str(ex), status=ex.code)
|
||||
self.loud_reply("{}: {}".format(str(ex), self.vpath), status=ex.code)
|
||||
return self.keepalive
|
||||
except Pebkac:
|
||||
return False
|
||||
@@ -143,9 +168,7 @@ class HttpCli(object):
|
||||
def send_headers(self, length, status=200, mime=None, headers={}):
|
||||
response = ["HTTP/1.1 {} {}".format(status, HTTPCODE[status])]
|
||||
|
||||
if length is None:
|
||||
self.keepalive = False
|
||||
else:
|
||||
if length is not None:
|
||||
response.append("Content-Length: " + str(length))
|
||||
|
||||
# close if unknown length, otherwise take client's preference
|
||||
@@ -176,7 +199,8 @@ class HttpCli(object):
|
||||
self.send_headers(len(body), status, mime, headers)
|
||||
|
||||
try:
|
||||
self.s.sendall(body)
|
||||
if self.mode != "HEAD":
|
||||
self.s.sendall(body)
|
||||
except:
|
||||
raise Pebkac(400, "client d/c while replying body")
|
||||
|
||||
@@ -184,7 +208,7 @@ class HttpCli(object):
|
||||
|
||||
def loud_reply(self, body, *args, **kwargs):
|
||||
self.log(body.rstrip())
|
||||
self.reply(b"<pre>" + body.encode("utf-8"), *list(args), **kwargs)
|
||||
self.reply(b"<pre>" + body.encode("utf-8") + b"\r\n", *list(args), **kwargs)
|
||||
|
||||
def handle_get(self):
|
||||
logmsg = "{:4} {}".format(self.mode, self.req)
|
||||
@@ -197,7 +221,7 @@ class HttpCli(object):
|
||||
|
||||
logmsg += " [\033[36m" + rval + "\033[0m]"
|
||||
|
||||
self.log(logmsg)
|
||||
self.log(logmsg + self.ssl_suf)
|
||||
|
||||
# "embedded" resources
|
||||
if self.vpath.startswith(".cpr"):
|
||||
@@ -230,8 +254,32 @@ class HttpCli(object):
|
||||
|
||||
return self.tx_browser()
|
||||
|
||||
def handle_options(self):
|
||||
self.log("OPTIONS " + self.req + self.ssl_suf)
|
||||
self.send_headers(
|
||||
None,
|
||||
204,
|
||||
headers={
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Access-Control-Allow-Methods": "*",
|
||||
"Access-Control-Allow-Headers": "*",
|
||||
},
|
||||
)
|
||||
return True
|
||||
|
||||
def handle_put(self):
|
||||
self.log("PUT " + self.req + self.ssl_suf)
|
||||
|
||||
if self.headers.get("expect", "").lower() == "100-continue":
|
||||
try:
|
||||
self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n")
|
||||
except:
|
||||
raise Pebkac(400, "client d/c before 100 continue")
|
||||
|
||||
return self.handle_stash()
|
||||
|
||||
def handle_post(self):
|
||||
self.log("POST " + self.req)
|
||||
self.log("POST " + self.req + self.ssl_suf)
|
||||
|
||||
if self.headers.get("expect", "").lower() == "100-continue":
|
||||
try:
|
||||
@@ -243,6 +291,9 @@ class HttpCli(object):
|
||||
if not ctype:
|
||||
raise Pebkac(400, "you can't post without a content-type header")
|
||||
|
||||
if "raw" in self.uparam:
|
||||
return self.handle_stash()
|
||||
|
||||
if "multipart/form-data" in ctype:
|
||||
return self.handle_post_multipart()
|
||||
|
||||
@@ -253,7 +304,63 @@ class HttpCli(object):
|
||||
if "application/octet-stream" in ctype:
|
||||
return self.handle_post_binary()
|
||||
|
||||
raise Pebkac(405, "don't know how to handle {} POST".format(ctype))
|
||||
if "application/x-www-form-urlencoded" in ctype:
|
||||
opt = self.args.urlform
|
||||
if "stash" in opt:
|
||||
return self.handle_stash()
|
||||
|
||||
if "save" in opt:
|
||||
post_sz, _, _, path = self.dump_to_file()
|
||||
self.log("urlform: {} bytes, {}".format(post_sz, path))
|
||||
elif "print" in opt:
|
||||
reader, _ = self.get_body_reader()
|
||||
for buf in reader:
|
||||
buf = buf.decode("utf-8", "replace")
|
||||
self.log("urlform:\n {}\n".format(buf))
|
||||
|
||||
if "get" in opt:
|
||||
return self.handle_get()
|
||||
|
||||
raise Pebkac(405, "POST({}) is disabled".format(ctype))
|
||||
|
||||
raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
|
||||
|
||||
def get_body_reader(self):
|
||||
remains = int(self.headers.get("content-length", None))
|
||||
if remains is None:
|
||||
self.keepalive = False
|
||||
return read_socket_unbounded(self.sr), remains
|
||||
else:
|
||||
return read_socket(self.sr, remains), remains
|
||||
|
||||
def dump_to_file(self):
|
||||
reader, remains = self.get_body_reader()
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
fdir = os.path.join(vfs.realpath, rem)
|
||||
|
||||
addr = self.ip.replace(":", ".")
|
||||
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
|
||||
path = os.path.join(fdir, fn)
|
||||
|
||||
with open(path, "wb", 512 * 1024) as f:
|
||||
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
|
||||
|
||||
return post_sz, sha_b64, remains, path
|
||||
|
||||
def handle_stash(self):
|
||||
post_sz, sha_b64, remains, path = self.dump_to_file()
|
||||
spd = self._spd(post_sz)
|
||||
self.log("{} wrote {}/{} bytes to {}".format(spd, post_sz, remains, path))
|
||||
self.reply("{}\n{}\n".format(post_sz, sha_b64).encode("utf-8"))
|
||||
return True
|
||||
|
||||
def _spd(self, nbytes, add=True):
|
||||
if add:
|
||||
self.conn.nbyte += nbytes
|
||||
|
||||
spd1 = get_spd(nbytes, self.t0)
|
||||
spd2 = get_spd(self.conn.nbyte, self.conn.t0)
|
||||
return spd1 + " " + spd2
|
||||
|
||||
def handle_post_multipart(self):
|
||||
self.parser = MultipartParser(self.log, self.sr, self.headers)
|
||||
@@ -314,9 +421,11 @@ class HttpCli(object):
|
||||
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
|
||||
body["vdir"] = self.vpath
|
||||
body["rdir"] = os.path.join(vfs.realpath, rem)
|
||||
body["addr"] = self.addr[0]
|
||||
body["vtop"] = vfs.vpath
|
||||
body["ptop"] = vfs.realpath
|
||||
body["prel"] = rem
|
||||
body["addr"] = self.ip
|
||||
body["flag"] = vfs.flags
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
|
||||
response = x.get()
|
||||
@@ -338,7 +447,10 @@ class HttpCli(object):
|
||||
except KeyError:
|
||||
raise Pebkac(400, "need hash and wark headers for binary POST")
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", wark, chash)
|
||||
vfs, _ = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
ptop = vfs.realpath
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", ptop, wark, chash)
|
||||
response = x.get()
|
||||
chunksize, cstart, path, lastmod = response
|
||||
|
||||
@@ -383,8 +495,8 @@ class HttpCli(object):
|
||||
|
||||
self.log("clone {} done".format(cstart[0]))
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", wark, chash)
|
||||
num_left = x.get()
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
|
||||
num_left, path = x.get()
|
||||
|
||||
if not WINDOWS and num_left == 0:
|
||||
times = (int(time.time()), int(lastmod))
|
||||
@@ -394,7 +506,9 @@ class HttpCli(object):
|
||||
except:
|
||||
self.log("failed to utime ({}, {})".format(path, times))
|
||||
|
||||
self.loud_reply("thank")
|
||||
spd = self._spd(post_sz)
|
||||
self.log("{} thank".format(spd))
|
||||
self.reply(b"thank")
|
||||
return True
|
||||
|
||||
def handle_login(self):
|
||||
@@ -407,7 +521,7 @@ class HttpCli(object):
|
||||
msg = "naw dude"
|
||||
pwd = "x" # nosec
|
||||
|
||||
h = {"Set-Cookie": "cppwd={}; Path=/".format(pwd)}
|
||||
h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)}
|
||||
html = self.conn.tpl_msg.render(h1=msg, h2='<a href="/">ack</a>', redir="/")
|
||||
self.reply(html.encode("utf-8"), headers=h)
|
||||
return True
|
||||
@@ -438,10 +552,9 @@ class HttpCli(object):
|
||||
raise Pebkac(500, "mkdir failed, check the logs")
|
||||
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
esc_paths = [quotep(vpath), html_escape(vpath)]
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}">go to /{}</a>'.format(
|
||||
quotep(vpath), html_escape(vpath, quote=False)
|
||||
),
|
||||
h2='<a href="/{}">go to /{}</a>'.format(*esc_paths),
|
||||
pre="aight",
|
||||
click=True,
|
||||
)
|
||||
@@ -474,7 +587,7 @@ class HttpCli(object):
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}?edit">go to /{}?edit</a>'.format(
|
||||
quotep(vpath), html_escape(vpath, quote=False)
|
||||
quotep(vpath), html_escape(vpath)
|
||||
),
|
||||
pre="aight",
|
||||
click=True,
|
||||
@@ -496,33 +609,40 @@ class HttpCli(object):
|
||||
self.log("discarding incoming file without filename")
|
||||
# fallthrough
|
||||
|
||||
fn = os.devnull
|
||||
if p_file and not nullwrite:
|
||||
fdir = os.path.join(vfs.realpath, rem)
|
||||
fn = os.path.join(fdir, sanitize_fn(p_file))
|
||||
fname = sanitize_fn(p_file)
|
||||
|
||||
if not os.path.isdir(fsenc(fdir)):
|
||||
raise Pebkac(404, "that folder does not exist")
|
||||
|
||||
# TODO broker which avoid this race and
|
||||
# provides a new filename if taken (same as up2k)
|
||||
if os.path.exists(fsenc(fn)):
|
||||
fn += ".{:.6f}-{}".format(time.time(), self.addr[0])
|
||||
# using current-time instead of t0 cause clients
|
||||
# may reuse a name for multiple files in one post
|
||||
suffix = ".{:.6f}-{}".format(time.time(), self.ip)
|
||||
open_args = {"fdir": fdir, "suffix": suffix}
|
||||
else:
|
||||
open_args = {}
|
||||
fname = os.devnull
|
||||
fdir = ""
|
||||
|
||||
try:
|
||||
with open(fsenc(fn), "wb") as f:
|
||||
self.log("writing to {0}".format(fn))
|
||||
with ren_open(fname, "wb", 512 * 1024, **open_args) as f:
|
||||
f, fname = f["orz"]
|
||||
self.log("writing to {}/{}".format(fdir, fname))
|
||||
sz, sha512_hex, _ = hashcopy(self.conn, p_data, f)
|
||||
if sz == 0:
|
||||
raise Pebkac(400, "empty files in post")
|
||||
|
||||
files.append([sz, sha512_hex])
|
||||
self.conn.nbyte += sz
|
||||
|
||||
except Pebkac:
|
||||
if fn != os.devnull:
|
||||
os.rename(fsenc(fn), fsenc(fn + ".PARTIAL"))
|
||||
if fname != os.devnull:
|
||||
fp = os.path.join(fdir, fname)
|
||||
suffix = ".PARTIAL"
|
||||
try:
|
||||
os.rename(fsenc(fp), fsenc(fp + suffix))
|
||||
except:
|
||||
fp = fp[: -len(suffix)]
|
||||
os.rename(fsenc(fp), fsenc(fp + suffix))
|
||||
|
||||
raise
|
||||
|
||||
@@ -546,7 +666,9 @@ class HttpCli(object):
|
||||
# truncated SHA-512 prevents length extension attacks;
|
||||
# using SHA-512/224, optionally SHA-512/256 = :64
|
||||
|
||||
self.log(msg)
|
||||
vspd = self._spd(sz_total, False)
|
||||
self.log("{} {}".format(vspd, msg))
|
||||
|
||||
if not nullwrite:
|
||||
# TODO this is bad
|
||||
log_fn = "up.{:.6f}.txt".format(t0)
|
||||
@@ -556,7 +678,7 @@ class HttpCli(object):
|
||||
"\n".join(
|
||||
unicode(x)
|
||||
for x in [
|
||||
":".join(unicode(x) for x in self.addr),
|
||||
":".join(unicode(x) for x in [self.ip, self.addr[1]]),
|
||||
msg.rstrip(),
|
||||
]
|
||||
)
|
||||
@@ -568,7 +690,7 @@ class HttpCli(object):
|
||||
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}">return to /{}</a>'.format(
|
||||
quotep(self.vpath), html_escape(self.vpath, quote=False)
|
||||
quotep(self.vpath), html_escape(self.vpath)
|
||||
),
|
||||
pre=msg,
|
||||
)
|
||||
@@ -605,7 +727,7 @@ class HttpCli(object):
|
||||
return True
|
||||
|
||||
fp = os.path.join(vfs.realpath, rem)
|
||||
srv_lastmod = -1
|
||||
srv_lastmod = srv_lastmod3 = -1
|
||||
try:
|
||||
st = os.stat(fsenc(fp))
|
||||
srv_lastmod = st.st_mtime
|
||||
@@ -616,7 +738,16 @@ class HttpCli(object):
|
||||
|
||||
# if file exists, chekc that timestamp matches the client's
|
||||
if srv_lastmod >= 0:
|
||||
if cli_lastmod3 not in [-1, srv_lastmod3]:
|
||||
same_lastmod = cli_lastmod3 in [-1, srv_lastmod3]
|
||||
if not same_lastmod:
|
||||
# some filesystems/transports limit precision to 1sec, hopefully floored
|
||||
same_lastmod = (
|
||||
srv_lastmod == int(srv_lastmod)
|
||||
and cli_lastmod3 > srv_lastmod3
|
||||
and cli_lastmod3 - srv_lastmod3 < 1000
|
||||
)
|
||||
|
||||
if not same_lastmod:
|
||||
response = json.dumps(
|
||||
{
|
||||
"ok": False,
|
||||
@@ -647,7 +778,7 @@ class HttpCli(object):
|
||||
if p_field != "body":
|
||||
raise Pebkac(400, "expected body, got {}".format(p_field))
|
||||
|
||||
with open(fp, "wb") as f:
|
||||
with open(fp, "wb", 512 * 1024) as f:
|
||||
sz, sha512, _ = hashcopy(self.conn, p_data, f)
|
||||
|
||||
new_lastmod = os.stat(fsenc(fp)).st_mtime
|
||||
@@ -672,9 +803,12 @@ class HttpCli(object):
|
||||
cli_dt = time.strptime(cli_lastmod, "%a, %d %b %Y %H:%M:%S GMT")
|
||||
cli_ts = calendar.timegm(cli_dt)
|
||||
return file_lastmod, int(file_ts) > int(cli_ts)
|
||||
except:
|
||||
self.log("bad lastmod format: {}".format(cli_lastmod))
|
||||
self.log(" expected format: {}".format(file_lastmod))
|
||||
except Exception as ex:
|
||||
self.log(
|
||||
"lastmod {}\nremote: [{}]\n local: [{}]".format(
|
||||
repr(ex), cli_lastmod, file_lastmod
|
||||
)
|
||||
)
|
||||
return file_lastmod, file_lastmod != cli_lastmod
|
||||
|
||||
return file_lastmod, True
|
||||
@@ -697,6 +831,8 @@ class HttpCli(object):
|
||||
editions[ext or "plain"] = [fs_path, st.st_size]
|
||||
except:
|
||||
pass
|
||||
if not self.vpath.startswith(".cpr/"):
|
||||
break
|
||||
|
||||
if not editions:
|
||||
raise Pebkac(404)
|
||||
@@ -791,6 +927,7 @@ class HttpCli(object):
|
||||
|
||||
logtail += " [\033[36m{}-{}\033[0m]".format(lower, upper)
|
||||
|
||||
use_sendfile = False
|
||||
if decompress:
|
||||
open_func = gzip.open
|
||||
open_args = [fsenc(fs_path), "rb"]
|
||||
@@ -800,10 +937,18 @@ class HttpCli(object):
|
||||
open_func = open
|
||||
# 512 kB is optimal for huge files, use 64k
|
||||
open_args = [fsenc(fs_path), "rb", 64 * 1024]
|
||||
use_sendfile = (
|
||||
not self.ssl_suf
|
||||
and not self.args.no_sendfile
|
||||
and hasattr(os, "sendfile")
|
||||
)
|
||||
|
||||
#
|
||||
# send reply
|
||||
|
||||
if not is_compressed:
|
||||
self.out_headers["Cache-Control"] = "no-cache"
|
||||
|
||||
self.out_headers["Accept-Ranges"] = "bytes"
|
||||
self.send_headers(
|
||||
length=upper - lower,
|
||||
@@ -817,29 +962,19 @@ class HttpCli(object):
|
||||
self.log(logmsg)
|
||||
return True
|
||||
|
||||
ret = True
|
||||
with open_func(*open_args) as f:
|
||||
remains = upper - lower
|
||||
f.seek(lower)
|
||||
while remains > 0:
|
||||
# time.sleep(0.01)
|
||||
buf = f.read(4096)
|
||||
if not buf:
|
||||
break
|
||||
if use_sendfile:
|
||||
remains = sendfile_kern(lower, upper, f, self.s)
|
||||
else:
|
||||
remains = sendfile_py(lower, upper, f, self.s)
|
||||
|
||||
if remains < len(buf):
|
||||
buf = buf[:remains]
|
||||
if remains > 0:
|
||||
logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
|
||||
|
||||
remains -= len(buf)
|
||||
|
||||
try:
|
||||
self.s.sendall(buf)
|
||||
except:
|
||||
logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
|
||||
self.log(logmsg)
|
||||
return False
|
||||
|
||||
self.log(logmsg)
|
||||
return True
|
||||
spd = self._spd((upper - lower) - remains)
|
||||
self.log("{}, {}".format(logmsg, spd))
|
||||
return ret
|
||||
|
||||
def tx_md(self, fs_path):
|
||||
logmsg = "{:4} {} ".format("", self.req)
|
||||
@@ -873,8 +1008,10 @@ class HttpCli(object):
|
||||
|
||||
targs = {
|
||||
"edit": "edit" in self.uparam,
|
||||
"title": html_escape(self.vpath, quote=False),
|
||||
"title": html_escape(self.vpath),
|
||||
"lastmod": int(ts_md * 1000),
|
||||
"md_plug": "true" if self.args.emp else "false",
|
||||
"md_chk_rate": self.args.mcr,
|
||||
"md": "",
|
||||
}
|
||||
sz_html = len(template.render(**targs).encode("utf-8"))
|
||||
@@ -914,7 +1051,7 @@ class HttpCli(object):
|
||||
else:
|
||||
vpath += "/" + node
|
||||
|
||||
vpnodes.append([quotep(vpath) + "/", html_escape(node, quote=False)])
|
||||
vpnodes.append([quotep(vpath) + "/", html_escape(node)])
|
||||
|
||||
vn, rem = self.auth.vfs.get(
|
||||
self.vpath, self.uname, self.readable, self.writable
|
||||
@@ -929,6 +1066,10 @@ class HttpCli(object):
|
||||
if abspath.endswith(".md") and "raw" not in self.uparam:
|
||||
return self.tx_md(abspath)
|
||||
|
||||
bad = "{0}.hist{0}up2k.".format(os.sep)
|
||||
if abspath.endswith(bad + "db") or abspath.endswith(bad + "snap"):
|
||||
raise Pebkac(403)
|
||||
|
||||
return self.tx_file(abspath)
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname)
|
||||
@@ -989,7 +1130,12 @@ class HttpCli(object):
|
||||
dt = datetime.utcfromtimestamp(inf.st_mtime)
|
||||
dt = dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
item = [margin, quotep(href), html_escape(fn, quote=False), sz, dt]
|
||||
try:
|
||||
ext = "---" if is_dir else fn.rsplit(".", 1)[1]
|
||||
except:
|
||||
ext = "%"
|
||||
|
||||
item = [margin, quotep(href), html_escape(fn), sz, ext, dt]
|
||||
if is_dir:
|
||||
dirs.append(item)
|
||||
else:
|
||||
@@ -1002,6 +1148,45 @@ class HttpCli(object):
|
||||
with open(fsenc(fn), "rb") as f:
|
||||
logues[n] = f.read().decode("utf-8")
|
||||
|
||||
if False:
|
||||
# this is a mistake
|
||||
md = None
|
||||
for fn in [x[2] for x in files]:
|
||||
if fn.lower() == "readme.md":
|
||||
fn = os.path.join(abspath, fn)
|
||||
with open(fn, "rb") as f:
|
||||
md = f.read().decode("utf-8")
|
||||
|
||||
break
|
||||
|
||||
srv_info = []
|
||||
|
||||
try:
|
||||
if not self.args.nih:
|
||||
srv_info.append(str(socket.gethostname()).split(".")[0])
|
||||
except:
|
||||
self.log("#wow #whoa")
|
||||
pass
|
||||
|
||||
try:
|
||||
# some fuses misbehave
|
||||
if not self.args.nid:
|
||||
if WINDOWS:
|
||||
bfree = ctypes.c_ulonglong(0)
|
||||
ctypes.windll.kernel32.GetDiskFreeSpaceExW(
|
||||
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
|
||||
)
|
||||
srv_info.append(humansize(bfree.value) + " free")
|
||||
else:
|
||||
sv = os.statvfs(abspath)
|
||||
free = humansize(sv.f_frsize * sv.f_bfree, True)
|
||||
total = humansize(sv.f_frsize * sv.f_blocks, True)
|
||||
|
||||
srv_info.append(free + " free")
|
||||
srv_info.append(total)
|
||||
except:
|
||||
pass
|
||||
|
||||
ts = ""
|
||||
# ts = "?{}".format(time.time())
|
||||
|
||||
@@ -1015,7 +1200,8 @@ class HttpCli(object):
|
||||
ts=ts,
|
||||
prologue=logues[0],
|
||||
epilogue=logues[1],
|
||||
title=html_escape(self.vpath, quote=False),
|
||||
title=html_escape(self.vpath),
|
||||
srv_info="</span> /// <span>".join(srv_info),
|
||||
)
|
||||
self.reply(html.encode("utf-8", "replace"))
|
||||
return True
|
||||
|
||||
@@ -3,9 +3,15 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
import ssl
|
||||
import time
|
||||
import socket
|
||||
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
import ssl
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
try:
|
||||
import jinja2
|
||||
except ImportError:
|
||||
@@ -41,9 +47,11 @@ class HttpConn(object):
|
||||
self.auth = hsrv.auth
|
||||
self.cert_path = hsrv.cert_path
|
||||
|
||||
self.t0 = time.time()
|
||||
self.nbyte = 0
|
||||
self.workload = 0
|
||||
self.log_func = hsrv.log
|
||||
self.log_src = "{} \033[36m{}".format(addr[0], addr[1]).ljust(26)
|
||||
self.set_rproxy()
|
||||
|
||||
env = jinja2.Environment()
|
||||
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
|
||||
@@ -53,15 +61,27 @@ class HttpConn(object):
|
||||
self.tpl_md = env.get_template("md.html")
|
||||
self.tpl_mde = env.get_template("mde.html")
|
||||
|
||||
def set_rproxy(self, ip=None):
|
||||
if ip is None:
|
||||
color = 36
|
||||
ip = self.addr[0]
|
||||
self.rproxy = None
|
||||
else:
|
||||
color = 34
|
||||
self.rproxy = ip
|
||||
|
||||
self.ip = ip
|
||||
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
|
||||
return self.log_src
|
||||
|
||||
def respath(self, res_name):
|
||||
return os.path.join(E.mod, "web", res_name)
|
||||
|
||||
def log(self, msg):
|
||||
self.log_func(self.log_src, msg)
|
||||
|
||||
def run(self):
|
||||
def _detect_https(self):
|
||||
method = None
|
||||
self.sr = None
|
||||
if self.cert_path:
|
||||
try:
|
||||
method = self.s.recv(4, socket.MSG_PEEK)
|
||||
@@ -86,16 +106,52 @@ class HttpConn(object):
|
||||
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
||||
return
|
||||
|
||||
if method not in [None, b"GET ", b"HEAD", b"POST"]:
|
||||
return method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]
|
||||
|
||||
def run(self):
|
||||
self.sr = None
|
||||
if self.args.https_only:
|
||||
is_https = True
|
||||
elif self.args.http_only or not HAVE_SSL:
|
||||
is_https = False
|
||||
else:
|
||||
is_https = self._detect_https()
|
||||
|
||||
if is_https:
|
||||
if self.sr:
|
||||
self.log("\033[1;31mTODO: cannot do https in jython\033[0m")
|
||||
return
|
||||
|
||||
self.log_src = self.log_src.replace("[36m", "[35m")
|
||||
try:
|
||||
self.s = ssl.wrap_socket(
|
||||
self.s, server_side=True, certfile=self.cert_path
|
||||
)
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
ctx.load_cert_chain(self.cert_path)
|
||||
if self.args.ssl_ver:
|
||||
ctx.options &= ~self.args.ssl_flags_en
|
||||
ctx.options |= self.args.ssl_flags_de
|
||||
# print(repr(ctx.options))
|
||||
|
||||
if self.args.ssl_log:
|
||||
try:
|
||||
ctx.keylog_filename = self.args.ssl_log
|
||||
except:
|
||||
self.log("keylog failed; openssl or python too old")
|
||||
|
||||
if self.args.ciphers:
|
||||
ctx.set_ciphers(self.args.ciphers)
|
||||
|
||||
self.s = ctx.wrap_socket(self.s, server_side=True)
|
||||
if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
|
||||
overlap = [y[::-1] for y in self.s.shared_ciphers()]
|
||||
lines = [str(x) for x in (["TLS cipher overlap:"] + overlap)]
|
||||
self.log("\n".join(lines))
|
||||
for k, v in [
|
||||
["compression", self.s.compression()],
|
||||
["ALPN proto", self.s.selected_alpn_protocol()],
|
||||
["NPN proto", self.s.selected_npn_protocol()],
|
||||
]:
|
||||
self.log("TLS {}: {}".format(k, v or "nah"))
|
||||
|
||||
except Exception as ex:
|
||||
em = str(ex)
|
||||
|
||||
|
||||
@@ -38,7 +38,7 @@ class HttpSrv(object):
|
||||
|
||||
def accept(self, sck, addr):
|
||||
"""takes an incoming tcp connection and creates a thread to handle it"""
|
||||
self.log("%s %s" % addr, "-" * 5 + "C-cthr")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-cthr\033[0m" % ("-" * 5,))
|
||||
thr = threading.Thread(target=self.thr_client, args=(sck, addr))
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
@@ -66,11 +66,11 @@ class HttpSrv(object):
|
||||
thr.start()
|
||||
|
||||
try:
|
||||
self.log("%s %s" % addr, "-" * 6 + "C-crun")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-crun\033[0m" % ("-" * 6,))
|
||||
cli.run()
|
||||
|
||||
finally:
|
||||
self.log("%s %s" % addr, "-" * 7 + "C-done")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-cdone\033[0m" % ("-" * 7,))
|
||||
try:
|
||||
sck.shutdown(socket.SHUT_RDWR)
|
||||
sck.close()
|
||||
|
||||
@@ -9,6 +9,7 @@ from datetime import datetime, timedelta
|
||||
import calendar
|
||||
|
||||
from .__init__ import PY2, WINDOWS, MACOS, VT100
|
||||
from .authsrv import AuthSrv
|
||||
from .tcpsrv import TcpSrv
|
||||
from .up2k import Up2k
|
||||
from .util import mp
|
||||
@@ -38,6 +39,10 @@ class SvcHub(object):
|
||||
self.tcpsrv = TcpSrv(self)
|
||||
self.up2k = Up2k(self)
|
||||
|
||||
if self.args.e2d and self.args.e2s:
|
||||
auth = AuthSrv(self.args, self.log, False)
|
||||
self.up2k.build_indexes(auth.all_writable)
|
||||
|
||||
# decide which worker impl to use
|
||||
if self.check_mp_enable():
|
||||
from .broker_mp import BrokerMp as Broker
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import re
|
||||
import time
|
||||
import socket
|
||||
import select
|
||||
|
||||
from .util import chkcmd, Counter
|
||||
|
||||
@@ -23,55 +24,73 @@ class TcpSrv(object):
|
||||
|
||||
ip = "127.0.0.1"
|
||||
eps = {ip: "local only"}
|
||||
if self.args.i != ip:
|
||||
eps = self.detect_interfaces(self.args.i) or {self.args.i: "external"}
|
||||
nonlocals = [x for x in self.args.i if x != ip]
|
||||
if nonlocals:
|
||||
eps = self.detect_interfaces(self.args.i)
|
||||
if not eps:
|
||||
for x in nonlocals:
|
||||
eps[x] = "external"
|
||||
|
||||
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
|
||||
self.log(
|
||||
"tcpsrv",
|
||||
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
|
||||
ip, self.args.p, desc
|
||||
),
|
||||
)
|
||||
for port in sorted(self.args.p):
|
||||
self.log(
|
||||
"tcpsrv",
|
||||
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
|
||||
ip, port, desc
|
||||
),
|
||||
)
|
||||
|
||||
self.srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
self.srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
self.srv = []
|
||||
for ip in self.args.i:
|
||||
for port in self.args.p:
|
||||
self.srv.append(self._listen(ip, port))
|
||||
|
||||
def _listen(self, ip, port):
|
||||
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
try:
|
||||
self.srv.bind((self.args.i, self.args.p))
|
||||
srv.bind((ip, port))
|
||||
return srv
|
||||
except (OSError, socket.error) as ex:
|
||||
if ex.errno == 98:
|
||||
raise Exception(
|
||||
"\033[1;31mport {} is busy on interface {}\033[0m".format(
|
||||
self.args.p, self.args.i
|
||||
)
|
||||
)
|
||||
|
||||
if ex.errno == 99:
|
||||
raise Exception(
|
||||
"\033[1;31minterface {} does not exist\033[0m".format(self.args.i)
|
||||
)
|
||||
if ex.errno in [98, 48]:
|
||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||
elif ex.errno in [99, 49]:
|
||||
e = "\033[1;31minterface {} does not exist\033[0m".format(ip)
|
||||
else:
|
||||
raise
|
||||
raise Exception(e)
|
||||
|
||||
def run(self):
|
||||
self.srv.listen(self.args.nc)
|
||||
|
||||
self.log("tcpsrv", "listening @ {0}:{1}".format(self.args.i, self.args.p))
|
||||
for srv in self.srv:
|
||||
srv.listen(self.args.nc)
|
||||
ip, port = srv.getsockname()
|
||||
self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port))
|
||||
|
||||
while True:
|
||||
self.log("tcpsrv", "-" * 1 + "C-ncli")
|
||||
self.log("tcpsrv", "\033[1;30m|%sC-ncli\033[0m" % ("-" * 1,))
|
||||
if self.num_clients.v >= self.args.nc:
|
||||
time.sleep(0.1)
|
||||
continue
|
||||
|
||||
self.log("tcpsrv", "-" * 2 + "C-acc1")
|
||||
sck, addr = self.srv.accept()
|
||||
self.log("%s %s" % addr, "-" * 3 + "C-acc2")
|
||||
self.num_clients.add()
|
||||
self.hub.broker.put(False, "httpconn", sck, addr)
|
||||
self.log("tcpsrv", "\033[1;30m|%sC-acc1\033[0m" % ("-" * 2,))
|
||||
ready, _, _ = select.select(self.srv, [], [])
|
||||
for srv in ready:
|
||||
sck, addr = srv.accept()
|
||||
sip, sport = srv.getsockname()
|
||||
self.log(
|
||||
"%s %s" % addr,
|
||||
"\033[1;30m|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
|
||||
"-" * 3, sip, sport % 8, sport
|
||||
),
|
||||
)
|
||||
self.num_clients.add()
|
||||
self.hub.broker.put(False, "httpconn", sck, addr)
|
||||
|
||||
def shutdown(self):
|
||||
self.log("tcpsrv", "ok bye")
|
||||
|
||||
def detect_interfaces(self, listen_ip):
|
||||
def detect_interfaces(self, listen_ips):
|
||||
eps = {}
|
||||
|
||||
# get all ips and their interfaces
|
||||
@@ -85,8 +104,9 @@ class TcpSrv(object):
|
||||
for ln in ip_addr.split("\n"):
|
||||
try:
|
||||
ip, dev = r.match(ln.rstrip()).groups()
|
||||
if listen_ip in ["0.0.0.0", ip]:
|
||||
eps[ip] = dev
|
||||
for lip in listen_ips:
|
||||
if lip in ["0.0.0.0", ip]:
|
||||
eps[ip] = dev
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -113,11 +133,12 @@ class TcpSrv(object):
|
||||
|
||||
s.close()
|
||||
|
||||
if default_route and listen_ip in ["0.0.0.0", default_route]:
|
||||
desc = "\033[32mexternal"
|
||||
try:
|
||||
eps[default_route] += ", " + desc
|
||||
except:
|
||||
eps[default_route] = desc
|
||||
for lip in listen_ips:
|
||||
if default_route and lip in ["0.0.0.0", default_route]:
|
||||
desc = "\033[32mexternal"
|
||||
try:
|
||||
eps[default_route] += ", " + desc
|
||||
except:
|
||||
eps[default_route] = desc
|
||||
|
||||
return eps
|
||||
|
||||
@@ -6,6 +6,9 @@ import os
|
||||
import re
|
||||
import time
|
||||
import math
|
||||
import json
|
||||
import gzip
|
||||
import stat
|
||||
import shutil
|
||||
import base64
|
||||
import hashlib
|
||||
@@ -13,7 +16,15 @@ import threading
|
||||
from copy import deepcopy
|
||||
|
||||
from .__init__ import WINDOWS
|
||||
from .util import Pebkac, Queue, fsenc, sanitize_fn
|
||||
from .util import Pebkac, Queue, fsdec, fsenc, sanitize_fn, ren_open, atomic_move
|
||||
|
||||
HAVE_SQLITE3 = False
|
||||
try:
|
||||
import sqlite3
|
||||
|
||||
HAVE_SQLITE3 = True
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class Up2k(object):
|
||||
@@ -22,20 +33,21 @@ class Up2k(object):
|
||||
* documentation
|
||||
* registry persistence
|
||||
* ~/.config flatfiles for active jobs
|
||||
* wark->path database for finished uploads
|
||||
"""
|
||||
|
||||
def __init__(self, broker):
|
||||
self.broker = broker
|
||||
self.args = broker.args
|
||||
self.log = broker.log
|
||||
self.persist = self.args.e2d
|
||||
|
||||
# config
|
||||
self.salt = "hunter2" # TODO: config
|
||||
|
||||
# state
|
||||
self.registry = {}
|
||||
self.mutex = threading.Lock()
|
||||
self.registry = {}
|
||||
self.db = {}
|
||||
|
||||
if WINDOWS:
|
||||
# usually fails to set lastmod too quickly
|
||||
@@ -44,54 +56,309 @@ class Up2k(object):
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
if self.persist:
|
||||
thr = threading.Thread(target=self._snapshot)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
# static
|
||||
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
|
||||
|
||||
if self.persist and not HAVE_SQLITE3:
|
||||
m = "could not initialize sqlite3, will use in-memory registry only"
|
||||
self.log("up2k", m)
|
||||
|
||||
def _vis_job_progress(self, job):
|
||||
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
|
||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
return "{:5.1f}% {}".format(perc, path)
|
||||
|
||||
def _vis_reg_progress(self, reg):
|
||||
ret = []
|
||||
for _, job in reg.items():
|
||||
ret.append(self._vis_job_progress(job))
|
||||
|
||||
return ret
|
||||
|
||||
def register_vpath(self, ptop):
|
||||
with self.mutex:
|
||||
if ptop in self.registry:
|
||||
return None
|
||||
|
||||
reg = {}
|
||||
path = os.path.join(ptop, ".hist", "up2k.snap")
|
||||
if self.persist and os.path.exists(path):
|
||||
with gzip.GzipFile(path, "rb") as f:
|
||||
j = f.read().decode("utf-8")
|
||||
|
||||
reg = json.loads(j)
|
||||
for _, job in reg.items():
|
||||
job["poke"] = time.time()
|
||||
|
||||
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
|
||||
m = [m] + self._vis_reg_progress(reg)
|
||||
self.log("up2k", "\n".join(m))
|
||||
|
||||
self.registry[ptop] = reg
|
||||
if not self.persist or not HAVE_SQLITE3:
|
||||
return None
|
||||
|
||||
try:
|
||||
os.mkdir(os.path.join(ptop, ".hist"))
|
||||
except:
|
||||
pass
|
||||
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
if ptop in self.db:
|
||||
# self.db[ptop].close()
|
||||
return None
|
||||
|
||||
try:
|
||||
db = self._open_db(db_path)
|
||||
self.db[ptop] = db
|
||||
return db
|
||||
except Exception as ex:
|
||||
m = "failed to open [{}]: {}".format(ptop, repr(ex))
|
||||
self.log("up2k", m)
|
||||
|
||||
return None
|
||||
|
||||
def build_indexes(self, writeables):
|
||||
tops = [d.realpath for d in writeables]
|
||||
for top in tops:
|
||||
db = self.register_vpath(top)
|
||||
if db:
|
||||
# can be symlink so don't `and d.startswith(top)``
|
||||
excl = set([d for d in tops if d != top])
|
||||
self._build_dir([db, 0, time.time()], top, excl, top)
|
||||
self._drop_lost(db, top)
|
||||
db.commit()
|
||||
|
||||
def _build_dir(self, dbw, top, excl, cdir):
|
||||
try:
|
||||
inodes = [fsdec(x) for x in os.listdir(fsenc(cdir))]
|
||||
except Exception as ex:
|
||||
self.log("up2k", "listdir: {} @ [{}]".format(repr(ex), cdir))
|
||||
return
|
||||
|
||||
histdir = os.path.join(top, ".hist")
|
||||
for inode in inodes:
|
||||
abspath = os.path.join(cdir, inode)
|
||||
try:
|
||||
inf = os.stat(fsenc(abspath))
|
||||
except Exception as ex:
|
||||
self.log("up2k", "stat: {} @ [{}]".format(repr(ex), abspath))
|
||||
continue
|
||||
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
if abspath in excl or abspath == histdir:
|
||||
continue
|
||||
# self.log("up2k", " dir: {}".format(abspath))
|
||||
self._build_dir(dbw, top, excl, abspath)
|
||||
else:
|
||||
# self.log("up2k", "file: {}".format(abspath))
|
||||
rp = abspath[len(top) :].replace("\\", "/").strip("/")
|
||||
c = dbw[0].execute("select * from up where rp = ?", (rp,))
|
||||
in_db = list(c.fetchall())
|
||||
if in_db:
|
||||
_, dts, dsz, _ = in_db[0]
|
||||
if len(in_db) > 1:
|
||||
m = "WARN: multiple entries: [{}] => [{}] ({})"
|
||||
self.log("up2k", m.format(top, rp, len(in_db)))
|
||||
dts = -1
|
||||
|
||||
if dts == inf.st_mtime and dsz == inf.st_size:
|
||||
continue
|
||||
|
||||
m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
|
||||
top, rp, dts, inf.st_mtime, dsz, inf.st_size
|
||||
)
|
||||
self.log("up2k", m)
|
||||
self.db_rm(dbw[0], rp)
|
||||
dbw[1] += 1
|
||||
in_db = None
|
||||
|
||||
self.log("up2k", "file: {}".format(abspath))
|
||||
try:
|
||||
hashes = self._hashlist_from_file(abspath)
|
||||
except Exception as ex:
|
||||
self.log("up2k", "hash: {} @ [{}]".format(repr(ex), abspath))
|
||||
continue
|
||||
|
||||
wark = self._wark_from_hashlist(inf.st_size, hashes)
|
||||
self.db_add(dbw[0], wark, rp, inf.st_mtime, inf.st_size)
|
||||
dbw[1] += 1
|
||||
td = time.time() - dbw[2]
|
||||
if dbw[1] > 1024 or td > 60:
|
||||
self.log("up2k", "commit {} files".format(dbw[1]))
|
||||
dbw[0].commit()
|
||||
dbw[1] = 0
|
||||
dbw[2] = time.time()
|
||||
|
||||
def _drop_lost(self, db, top):
|
||||
rm = []
|
||||
c = db.execute("select * from up")
|
||||
for dwark, dts, dsz, drp in c:
|
||||
abspath = os.path.join(top, drp)
|
||||
try:
|
||||
if not os.path.exists(fsenc(abspath)):
|
||||
rm.append(drp)
|
||||
except Exception as ex:
|
||||
self.log("up2k", "stat-rm: {} @ [{}]".format(repr(ex), abspath))
|
||||
|
||||
if not rm:
|
||||
return
|
||||
|
||||
self.log("up2k", "forgetting {} deleted files".format(len(rm)))
|
||||
for rp in rm:
|
||||
self.db_rm(db, rp)
|
||||
|
||||
def _open_db(self, db_path):
|
||||
conn = sqlite3.connect(db_path, check_same_thread=False)
|
||||
try:
|
||||
c = conn.execute(r"select * from kv where k = 'sver'")
|
||||
rows = c.fetchall()
|
||||
if rows:
|
||||
ver = rows[0][1]
|
||||
else:
|
||||
self.log("up2k", "WARN: no sver in kv, DB corrupt?")
|
||||
ver = "unknown"
|
||||
|
||||
if ver == "1":
|
||||
try:
|
||||
nfiles = next(conn.execute("select count(w) from up"))[0]
|
||||
self.log("up2k", "found DB at {} |{}|".format(db_path, nfiles))
|
||||
return conn
|
||||
except Exception as ex:
|
||||
m = "WARN: could not list files, DB corrupt?\n " + repr(ex)
|
||||
self.log("up2k", m)
|
||||
|
||||
m = "REPLACING unsupported DB (v.{}) at {}".format(ver, db_path)
|
||||
self.log("up2k", m)
|
||||
conn.close()
|
||||
os.unlink(db_path)
|
||||
conn = sqlite3.connect(db_path, check_same_thread=False)
|
||||
except:
|
||||
pass
|
||||
|
||||
# sqlite is variable-width only, no point in using char/nchar/varchar
|
||||
for cmd in [
|
||||
r"create table kv (k text, v text)",
|
||||
r"create table up (w text, mt int, sz int, rp text)",
|
||||
r"insert into kv values ('sver', '1')",
|
||||
r"create index up_w on up(w)",
|
||||
]:
|
||||
conn.execute(cmd)
|
||||
|
||||
conn.commit()
|
||||
self.log("up2k", "created DB at {}".format(db_path))
|
||||
return conn
|
||||
|
||||
def handle_json(self, cj):
|
||||
self.register_vpath(cj["ptop"])
|
||||
cj["name"] = sanitize_fn(cj["name"])
|
||||
cj["poke"] = time.time()
|
||||
wark = self._get_wark(cj)
|
||||
now = time.time()
|
||||
job = None
|
||||
with self.mutex:
|
||||
# TODO use registry persistence here to symlink any matching wark
|
||||
if wark in self.registry:
|
||||
job = self.registry[wark]
|
||||
if job["rdir"] != cj["rdir"] or job["name"] != cj["name"]:
|
||||
src = os.path.join(job["rdir"], job["name"])
|
||||
dst = os.path.join(cj["rdir"], cj["name"])
|
||||
db = self.db.get(cj["ptop"], None)
|
||||
reg = self.registry[cj["ptop"]]
|
||||
if db:
|
||||
cur = db.execute(r"select * from up where w = ?", (wark,))
|
||||
for _, dtime, dsize, dp_rel in cur:
|
||||
dp_abs = os.path.join(cj["ptop"], dp_rel).replace("\\", "/")
|
||||
# relying on path.exists to return false on broken symlinks
|
||||
if os.path.exists(fsenc(dp_abs)):
|
||||
try:
|
||||
prel, name = dp_rel.rsplit("/", 1)
|
||||
except:
|
||||
prel = ""
|
||||
name = dp_rel
|
||||
|
||||
job = {
|
||||
"name": name,
|
||||
"prel": prel,
|
||||
"vtop": cj["vtop"],
|
||||
"ptop": cj["ptop"],
|
||||
"flag": cj["flag"],
|
||||
"size": dsize,
|
||||
"lmod": dtime,
|
||||
"hash": [],
|
||||
"need": [],
|
||||
}
|
||||
break
|
||||
|
||||
if job and wark in reg:
|
||||
del reg[wark]
|
||||
|
||||
if job or wark in reg:
|
||||
job = job or reg[wark]
|
||||
if job["prel"] == cj["prel"] and job["name"] == cj["name"]:
|
||||
# ensure the files haven't been deleted manually
|
||||
names = [job[x] for x in ["name", "tnam"] if x in job]
|
||||
for fn in names:
|
||||
path = os.path.join(job["ptop"], job["prel"], fn)
|
||||
try:
|
||||
if os.path.getsize(path) > 0:
|
||||
# upload completed or both present
|
||||
break
|
||||
except:
|
||||
# missing; restart
|
||||
job = None
|
||||
break
|
||||
else:
|
||||
# file contents match, but not the path
|
||||
src = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
dst = os.path.join(cj["ptop"], cj["prel"], cj["name"])
|
||||
vsrc = os.path.join(job["vtop"], job["prel"], job["name"])
|
||||
vsrc = vsrc.replace("\\", "/") # just for prints anyways
|
||||
if job["need"]:
|
||||
self.log("up2k", "unfinished:\n {0}\n {1}".format(src, dst))
|
||||
err = "partial upload exists at a different location; please resume uploading here instead:\n{0}{1} ".format(
|
||||
job["vdir"], job["name"]
|
||||
)
|
||||
err = "partial upload exists at a different location; please resume uploading here instead:\n"
|
||||
err += vsrc + " "
|
||||
raise Pebkac(400, err)
|
||||
elif "nodupe" in job["flag"]:
|
||||
self.log("up2k", "dupe-reject:\n {0}\n {1}".format(src, dst))
|
||||
err = "upload rejected, file already exists:\n " + vsrc + " "
|
||||
raise Pebkac(400, err)
|
||||
else:
|
||||
# symlink to the client-provided name,
|
||||
# returning the previous upload info
|
||||
job = deepcopy(job)
|
||||
suffix = self._suffix(dst, now, job["addr"])
|
||||
job["name"] = cj["name"] + suffix
|
||||
self._symlink(src, dst + suffix)
|
||||
else:
|
||||
for k in ["ptop", "vtop", "prel"]:
|
||||
job[k] = cj[k]
|
||||
|
||||
pdir = os.path.join(cj["ptop"], cj["prel"])
|
||||
job["name"] = self._untaken(pdir, cj["name"], now, cj["addr"])
|
||||
dst = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
os.unlink(fsenc(dst)) # TODO ed pls
|
||||
self._symlink(src, dst)
|
||||
|
||||
if not job:
|
||||
job = {
|
||||
"wark": wark,
|
||||
"t0": now,
|
||||
"addr": cj["addr"],
|
||||
"vdir": cj["vdir"],
|
||||
"rdir": cj["rdir"],
|
||||
# client-provided, sanitized by _get_wark:
|
||||
"name": cj["name"],
|
||||
"size": cj["size"],
|
||||
"lmod": cj["lmod"],
|
||||
"hash": deepcopy(cj["hash"]),
|
||||
"need": [],
|
||||
}
|
||||
|
||||
path = os.path.join(job["rdir"], job["name"])
|
||||
job["name"] += self._suffix(path, now, cj["addr"])
|
||||
# client-provided, sanitized by _get_wark: name, size, lmod
|
||||
for k in [
|
||||
"addr",
|
||||
"vtop",
|
||||
"ptop",
|
||||
"prel",
|
||||
"flag",
|
||||
"name",
|
||||
"size",
|
||||
"lmod",
|
||||
"poke",
|
||||
]:
|
||||
job[k] = cj[k]
|
||||
|
||||
# one chunk may occur multiple times in a file;
|
||||
# filter to unique values for the list of missing chunks
|
||||
# (preserve order to reduce disk thrashing)
|
||||
job["need"] = []
|
||||
lut = {}
|
||||
for k in cj["hash"]:
|
||||
if k not in lut:
|
||||
@@ -108,13 +375,12 @@ class Up2k(object):
|
||||
"wark": wark,
|
||||
}
|
||||
|
||||
def _suffix(self, fpath, ts, ip):
|
||||
def _untaken(self, fdir, fname, ts, ip):
|
||||
# TODO broker which avoid this race and
|
||||
# provides a new filename if taken (same as bup)
|
||||
if not os.path.exists(fsenc(fpath)):
|
||||
return ""
|
||||
|
||||
return ".{:.6f}-{}".format(ts, ip)
|
||||
suffix = ".{:.6f}-{}".format(ts, ip)
|
||||
with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
|
||||
return f["orz"][1]
|
||||
|
||||
def _symlink(self, src, dst):
|
||||
# TODO store this in linktab so we never delete src if there are links to it
|
||||
@@ -141,40 +407,58 @@ class Up2k(object):
|
||||
lsrc = "../" * (len(lsrc) - 1) + "/".join(lsrc)
|
||||
os.symlink(fsenc(lsrc), fsenc(ldst))
|
||||
except (AttributeError, OSError) as ex:
|
||||
self.log("up2k", "cannot symlink; creating copy")
|
||||
self.log("up2k", "cannot symlink; creating copy: " + repr(ex))
|
||||
shutil.copy2(fsenc(src), fsenc(dst))
|
||||
|
||||
def handle_chunk(self, wark, chash):
|
||||
def handle_chunk(self, ptop, wark, chash):
|
||||
with self.mutex:
|
||||
job = self.registry.get(wark)
|
||||
job = self.registry[ptop].get(wark, None)
|
||||
if not job:
|
||||
raise Pebkac(404, "unknown wark")
|
||||
raise Pebkac(400, "unknown wark")
|
||||
|
||||
if chash not in job["need"]:
|
||||
raise Pebkac(200, "already got that but thanks??")
|
||||
|
||||
nchunk = [n for n, v in enumerate(job["hash"]) if v == chash]
|
||||
if not nchunk:
|
||||
raise Pebkac(404, "unknown chunk")
|
||||
raise Pebkac(400, "unknown chunk")
|
||||
|
||||
job["poke"] = time.time()
|
||||
|
||||
chunksize = self._get_chunksize(job["size"])
|
||||
ofs = [chunksize * x for x in nchunk]
|
||||
|
||||
path = os.path.join(job["rdir"], job["name"])
|
||||
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
|
||||
|
||||
return [chunksize, ofs, path, job["lmod"]]
|
||||
|
||||
def confirm_chunk(self, wark, chash):
|
||||
def confirm_chunk(self, ptop, wark, chash):
|
||||
with self.mutex:
|
||||
job = self.registry[wark]
|
||||
job = self.registry[ptop][wark]
|
||||
pdir = os.path.join(job["ptop"], job["prel"])
|
||||
src = os.path.join(pdir, job["tnam"])
|
||||
dst = os.path.join(pdir, job["name"])
|
||||
|
||||
job["need"].remove(chash)
|
||||
ret = len(job["need"])
|
||||
if ret > 0:
|
||||
return ret, src
|
||||
|
||||
if WINDOWS and ret == 0:
|
||||
path = os.path.join(job["rdir"], job["name"])
|
||||
self.lastmod_q.put([path, (int(time.time()), int(job["lmod"]))])
|
||||
atomic_move(src, dst)
|
||||
|
||||
return ret
|
||||
if WINDOWS:
|
||||
self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))])
|
||||
|
||||
db = self.db.get(job["ptop"], None)
|
||||
if db:
|
||||
rp = os.path.join(job["prel"], job["name"]).replace("\\", "/")
|
||||
self.db_rm(db, rp)
|
||||
self.db_add(db, job["wark"], rp, job["lmod"], job["size"])
|
||||
db.commit()
|
||||
del self.registry[ptop][wark]
|
||||
# in-memory registry is reserved for unfinished uploads
|
||||
|
||||
return ret, dst
|
||||
|
||||
def _get_chunksize(self, filesize):
|
||||
chunksize = 1024 * 1024
|
||||
@@ -188,6 +472,13 @@ class Up2k(object):
|
||||
chunksize += stepsize
|
||||
stepsize *= mul
|
||||
|
||||
def db_rm(self, db, rp):
|
||||
db.execute("delete from up where rp = ?", (rp,))
|
||||
|
||||
def db_add(self, db, wark, rp, ts, sz):
|
||||
v = (wark, ts, sz, rp)
|
||||
db.execute("insert into up values (?,?,?,?)", v)
|
||||
|
||||
def _get_wark(self, cj):
|
||||
if len(cj["name"]) > 1024 or len(cj["hash"]) > 512 * 1024: # 16TiB
|
||||
raise Pebkac(400, "name or numchunks not according to spec")
|
||||
@@ -204,9 +495,13 @@ class Up2k(object):
|
||||
except:
|
||||
cj["lmod"] = int(time.time())
|
||||
|
||||
# server-reproducible file identifier, independent of name or location
|
||||
ident = [self.salt, str(cj["size"])]
|
||||
ident.extend(cj["hash"])
|
||||
wark = self._wark_from_hashlist(cj["size"], cj["hash"])
|
||||
return wark
|
||||
|
||||
def _wark_from_hashlist(self, filesize, hashes):
|
||||
""" server-reproducible file identifier, independent of name or location """
|
||||
ident = [self.salt, str(filesize)]
|
||||
ident.extend(hashes)
|
||||
ident = "\n".join(ident)
|
||||
|
||||
hasher = hashlib.sha512()
|
||||
@@ -216,10 +511,47 @@ class Up2k(object):
|
||||
wark = base64.urlsafe_b64encode(digest)
|
||||
return wark.decode("utf-8").rstrip("=")
|
||||
|
||||
def _hashlist_from_file(self, path):
|
||||
fsz = os.path.getsize(path)
|
||||
csz = self._get_chunksize(fsz)
|
||||
ret = []
|
||||
last_print = time.time()
|
||||
with open(path, "rb", 512 * 1024) as f:
|
||||
while fsz > 0:
|
||||
now = time.time()
|
||||
td = now - last_print
|
||||
if td >= 0.3:
|
||||
last_print = now
|
||||
print(" {} \n\033[A".format(fsz), end="")
|
||||
|
||||
hashobj = hashlib.sha512()
|
||||
rem = min(csz, fsz)
|
||||
fsz -= rem
|
||||
while rem > 0:
|
||||
buf = f.read(min(rem, 64 * 1024))
|
||||
if not buf:
|
||||
raise Exception("EOF at " + str(f.tell()))
|
||||
|
||||
hashobj.update(buf)
|
||||
rem -= len(buf)
|
||||
|
||||
digest = hashobj.digest()[:32]
|
||||
digest = base64.urlsafe_b64encode(digest)
|
||||
ret.append(digest.decode("utf-8").rstrip("="))
|
||||
|
||||
return ret
|
||||
|
||||
def _new_upload(self, job):
|
||||
self.registry[job["wark"]] = job
|
||||
path = os.path.join(job["rdir"], job["name"])
|
||||
with open(fsenc(path), "wb") as f:
|
||||
self.registry[job["ptop"]][job["wark"]] = job
|
||||
pdir = os.path.join(job["ptop"], job["prel"])
|
||||
job["name"] = self._untaken(pdir, job["name"], job["t0"], job["addr"])
|
||||
# if len(job["name"].split(".")) > 8:
|
||||
# raise Exception("aaa")
|
||||
|
||||
tnam = job["name"] + ".PARTIAL"
|
||||
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
|
||||
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
|
||||
f, job["tnam"] = f["orz"]
|
||||
f.seek(job["size"] - 1)
|
||||
f.write(b"e")
|
||||
|
||||
@@ -236,3 +568,58 @@ class Up2k(object):
|
||||
os.utime(fsenc(path), times)
|
||||
except:
|
||||
self.log("lmod", "failed to utime ({}, {})".format(path, times))
|
||||
|
||||
def _snapshot(self):
|
||||
persist_interval = 30 # persist unfinished uploads index every 30 sec
|
||||
discard_interval = 21600 # drop unfinished uploads after 6 hours inactivity
|
||||
prev = {}
|
||||
while True:
|
||||
time.sleep(persist_interval)
|
||||
with self.mutex:
|
||||
for k, reg in self.registry.items():
|
||||
self._snap_reg(prev, k, reg, discard_interval)
|
||||
|
||||
def _snap_reg(self, prev, k, reg, discard_interval):
|
||||
now = time.time()
|
||||
rm = [x for x in reg.values() if now - x["poke"] > discard_interval]
|
||||
if rm:
|
||||
m = "dropping {} abandoned uploads in {}".format(len(rm), k)
|
||||
vis = [self._vis_job_progress(x) for x in rm]
|
||||
self.log("up2k", "\n".join([m] + vis))
|
||||
for job in rm:
|
||||
del reg[job["wark"]]
|
||||
try:
|
||||
# remove the filename reservation
|
||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
if os.path.getsize(path) == 0:
|
||||
os.unlink(path)
|
||||
|
||||
if len(job["hash"]) == len(job["need"]):
|
||||
# PARTIAL is empty, delete that too
|
||||
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
|
||||
os.unlink(path)
|
||||
except:
|
||||
pass
|
||||
|
||||
path = os.path.join(k, ".hist", "up2k.snap")
|
||||
if not reg:
|
||||
if k not in prev or prev[k] is not None:
|
||||
prev[k] = None
|
||||
if os.path.exists(path):
|
||||
os.unlink(path)
|
||||
return
|
||||
|
||||
newest = max(x["poke"] for _, x in reg.items()) if reg else 0
|
||||
etag = [len(reg), newest]
|
||||
if etag == prev.get(k, None):
|
||||
return
|
||||
|
||||
path2 = "{}.{}".format(path, os.getpid())
|
||||
j = json.dumps(reg, indent=2, sort_keys=True).encode("utf-8")
|
||||
with gzip.GzipFile(path2, "wb") as f:
|
||||
f.write(j)
|
||||
|
||||
atomic_move(path2, path)
|
||||
|
||||
self.log("up2k", "snap: {} |{}|".format(path, len(reg.keys())))
|
||||
prev[k] = etag
|
||||
|
||||
@@ -2,13 +2,17 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import base64
|
||||
import select
|
||||
import struct
|
||||
import hashlib
|
||||
import platform
|
||||
import threading
|
||||
import mimetypes
|
||||
import contextlib
|
||||
import subprocess as sp # nosec
|
||||
|
||||
from .__init__ import PY2, WINDOWS
|
||||
@@ -42,6 +46,7 @@ if WINDOWS and PY2:
|
||||
|
||||
HTTPCODE = {
|
||||
200: "OK",
|
||||
204: "No Content",
|
||||
206: "Partial Content",
|
||||
304: "Not Modified",
|
||||
400: "Bad Request",
|
||||
@@ -94,6 +99,80 @@ class Unrecv(object):
|
||||
self.buf = buf + self.buf
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def ren_open(fname, *args, **kwargs):
|
||||
fdir = kwargs.pop("fdir", None)
|
||||
suffix = kwargs.pop("suffix", None)
|
||||
|
||||
if fname == os.devnull:
|
||||
with open(fname, *args, **kwargs) as f:
|
||||
yield {"orz": [f, fname]}
|
||||
return
|
||||
|
||||
orig_name = fname
|
||||
bname = fname
|
||||
ext = ""
|
||||
while True:
|
||||
ofs = bname.rfind(".")
|
||||
if ofs < 0 or ofs < len(bname) - 7:
|
||||
# doesn't look like an extension anymore
|
||||
break
|
||||
|
||||
ext = bname[ofs:] + ext
|
||||
bname = bname[:ofs]
|
||||
|
||||
b64 = ""
|
||||
while True:
|
||||
try:
|
||||
if fdir:
|
||||
fpath = os.path.join(fdir, fname)
|
||||
else:
|
||||
fpath = fname
|
||||
|
||||
if suffix and os.path.exists(fpath):
|
||||
fpath += suffix
|
||||
fname += suffix
|
||||
ext += suffix
|
||||
|
||||
with open(fsenc(fpath), *args, **kwargs) as f:
|
||||
if b64:
|
||||
fp2 = "fn-trunc.{}.txt".format(b64)
|
||||
fp2 = os.path.join(fdir, fp2)
|
||||
with open(fsenc(fp2), "wb") as f2:
|
||||
f2.write(orig_name.encode("utf-8"))
|
||||
|
||||
yield {"orz": [f, fname]}
|
||||
return
|
||||
|
||||
except OSError as ex_:
|
||||
ex = ex_
|
||||
if ex.errno != 36:
|
||||
raise
|
||||
|
||||
if not b64:
|
||||
b64 = (bname + ext).encode("utf-8", "replace")
|
||||
b64 = hashlib.sha512(b64).digest()[:12]
|
||||
b64 = base64.urlsafe_b64encode(b64).decode("utf-8").rstrip("=")
|
||||
|
||||
badlen = len(fname)
|
||||
while len(fname) >= badlen:
|
||||
if len(bname) < 8:
|
||||
raise ex
|
||||
|
||||
if len(bname) > len(ext):
|
||||
# drop the last letter of the filename
|
||||
bname = bname[:-1]
|
||||
else:
|
||||
try:
|
||||
# drop the leftmost sub-extension
|
||||
_, ext = ext.split(".", 1)
|
||||
except:
|
||||
# okay do the first letter then
|
||||
ext = "." + ext[2:]
|
||||
|
||||
fname = "{}~{}{}".format(bname, b64, ext)
|
||||
|
||||
|
||||
class MultipartParser(object):
|
||||
def __init__(self, log_func, sr, http_headers):
|
||||
self.sr = sr
|
||||
@@ -333,6 +412,31 @@ def read_header(sr):
|
||||
return ret[:ofs].decode("utf-8", "surrogateescape").split("\r\n")
|
||||
|
||||
|
||||
def humansize(sz, terse=False):
|
||||
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
|
||||
if sz < 1024:
|
||||
break
|
||||
|
||||
sz /= 1024.0
|
||||
|
||||
ret = " ".join([str(sz)[:4].rstrip("."), unit])
|
||||
|
||||
if not terse:
|
||||
return ret
|
||||
|
||||
return ret.replace("iB", "").replace(" ", "")
|
||||
|
||||
|
||||
def get_spd(nbyte, t0, t=None):
|
||||
if t is None:
|
||||
t = time.time()
|
||||
|
||||
bps = nbyte / ((t - t0) + 0.001)
|
||||
s1 = humansize(nbyte).replace(" ", "\033[33m").replace("iB", "")
|
||||
s2 = humansize(bps).replace(" ", "\033[35m").replace("iB", "")
|
||||
return "{} \033[0m{}/s\033[0m".format(s1, s2)
|
||||
|
||||
|
||||
def undot(path):
|
||||
ret = []
|
||||
for node in path.split("/"):
|
||||
@@ -382,6 +486,21 @@ def exclude_dotfiles(filepaths):
|
||||
yield fpath
|
||||
|
||||
|
||||
def html_escape(s, quote=False):
|
||||
"""html.escape but also newlines"""
|
||||
s = (
|
||||
s.replace("&", "&")
|
||||
.replace("<", "<")
|
||||
.replace(">", ">")
|
||||
.replace("\r", " ")
|
||||
.replace("\n", " ")
|
||||
)
|
||||
if quote:
|
||||
s = s.replace('"', """).replace("'", "'")
|
||||
|
||||
return s
|
||||
|
||||
|
||||
def quotep(txt):
|
||||
"""url quoter which deals with bytes correctly"""
|
||||
btxt = w8enc(txt)
|
||||
@@ -396,8 +515,8 @@ def quotep(txt):
|
||||
def unquotep(txt):
|
||||
"""url unquoter which deals with bytes correctly"""
|
||||
btxt = w8enc(txt)
|
||||
unq1 = btxt.replace(b"+", b" ")
|
||||
unq2 = unquote(unq1)
|
||||
# btxt = btxt.replace(b"+", b" ")
|
||||
unq2 = unquote(btxt)
|
||||
return w8dec(unq2)
|
||||
|
||||
|
||||
@@ -430,6 +549,16 @@ else:
|
||||
fsdec = w8dec
|
||||
|
||||
|
||||
def atomic_move(src, dst):
|
||||
if not PY2:
|
||||
os.replace(src, dst)
|
||||
else:
|
||||
if os.path.exists(dst):
|
||||
os.unlink(dst)
|
||||
|
||||
os.rename(src, dst)
|
||||
|
||||
|
||||
def read_socket(sr, total_size):
|
||||
remains = total_size
|
||||
while remains > 0:
|
||||
@@ -445,6 +574,15 @@ def read_socket(sr, total_size):
|
||||
yield buf
|
||||
|
||||
|
||||
def read_socket_unbounded(sr):
|
||||
while True:
|
||||
buf = sr.recv(32 * 1024)
|
||||
if not buf:
|
||||
return
|
||||
|
||||
yield buf
|
||||
|
||||
|
||||
def hashcopy(actor, fin, fout):
|
||||
u32_lim = int((2 ** 31) * 0.9)
|
||||
hashobj = hashlib.sha512()
|
||||
@@ -464,6 +602,46 @@ def hashcopy(actor, fin, fout):
|
||||
return tlen, hashobj.hexdigest(), digest_b64
|
||||
|
||||
|
||||
def sendfile_py(lower, upper, f, s):
|
||||
remains = upper - lower
|
||||
f.seek(lower)
|
||||
while remains > 0:
|
||||
# time.sleep(0.01)
|
||||
buf = f.read(min(4096, remains))
|
||||
if not buf:
|
||||
return remains
|
||||
|
||||
try:
|
||||
s.sendall(buf)
|
||||
remains -= len(buf)
|
||||
except:
|
||||
return remains
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def sendfile_kern(lower, upper, f, s):
|
||||
out_fd = s.fileno()
|
||||
in_fd = f.fileno()
|
||||
ofs = lower
|
||||
while ofs < upper:
|
||||
try:
|
||||
req = min(2 ** 30, upper - ofs)
|
||||
select.select([], [out_fd], [], 10)
|
||||
n = os.sendfile(out_fd, in_fd, ofs, req)
|
||||
except Exception as ex:
|
||||
# print("sendfile: " + repr(ex))
|
||||
n = 0
|
||||
|
||||
if n <= 0:
|
||||
return upper - ofs
|
||||
|
||||
ofs += n
|
||||
# print("sendfile: ok, sent {} now, {} total, {} remains".format(n, ofs - lower, upper - ofs))
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def unescape_cookie(orig):
|
||||
# mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn
|
||||
ret = ""
|
||||
@@ -540,7 +718,26 @@ def py_desc():
|
||||
)
|
||||
|
||||
|
||||
def align_tab(lines):
|
||||
rows = []
|
||||
ncols = 0
|
||||
for ln in lines:
|
||||
row = [x for x in ln.split(" ") if x]
|
||||
ncols = max(ncols, len(row))
|
||||
rows.append(row)
|
||||
|
||||
lens = [0] * ncols
|
||||
for row in rows:
|
||||
for n, col in enumerate(row):
|
||||
lens[n] = max(lens[n], len(col))
|
||||
|
||||
return ["".join(x.ljust(y + 2) for x, y in zip(row, lens)) for row in rows]
|
||||
|
||||
|
||||
class Pebkac(Exception):
|
||||
def __init__(self, code, msg=None):
|
||||
super(Pebkac, self).__init__(msg or HTTPCODE[code])
|
||||
self.code = code
|
||||
|
||||
def __repr__(self):
|
||||
return "Pebkac({}, {})".format(self.code, repr(self.args))
|
||||
|
||||
12
copyparty/web/Makefile
Normal file
12
copyparty/web/Makefile
Normal file
@@ -0,0 +1,12 @@
|
||||
# run me to zopfli all the static files
|
||||
# which should help on really slow connections
|
||||
# but then why are you using copyparty in the first place
|
||||
|
||||
pk: $(addsuffix .gz, $(wildcard *.js *.css))
|
||||
un: $(addsuffix .un, $(wildcard *.gz))
|
||||
|
||||
%.gz: %
|
||||
pigz -11 -J 34 -I 5730 $<
|
||||
|
||||
%.un: %
|
||||
pigz -d $<
|
||||
@@ -131,6 +131,17 @@ a {
|
||||
.logue {
|
||||
padding: .2em 1.5em;
|
||||
}
|
||||
#srv_info {
|
||||
opacity: .5;
|
||||
font-size: .8em;
|
||||
color: #fc5;
|
||||
position: absolute;
|
||||
top: .5em;
|
||||
left: 2em;
|
||||
}
|
||||
#srv_info span {
|
||||
color: #fff;
|
||||
}
|
||||
a.play {
|
||||
color: #e70;
|
||||
}
|
||||
|
||||
@@ -34,13 +34,14 @@
|
||||
<th></th>
|
||||
<th>File Name</th>
|
||||
<th sort="int">File Size</th>
|
||||
<th>T</th>
|
||||
<th>Date</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
|
||||
{%- for f in files %}
|
||||
<tr><td>{{ f[0] }}</td><td><a href="{{ f[1] }}">{{ f[2] }}</a></td><td>{{ f[3] }}</td><td>{{ f[4] }}</td></tr>
|
||||
<tr><td>{{ f[0] }}</td><td><a href="{{ f[1] }}">{{ f[2] }}</a></td><td>{{ f[3] }}</td><td>{{ f[4] }}</td><td>{{ f[5] }}</td></tr>
|
||||
{%- endfor %}
|
||||
|
||||
</tbody>
|
||||
@@ -53,6 +54,10 @@
|
||||
|
||||
<h2><a href="?h">control-panel</a></h2>
|
||||
|
||||
{%- if srv_info %}
|
||||
<div id="srv_info"><span>{{ srv_info }}</span></div>
|
||||
{%- endif %}
|
||||
|
||||
<div id="widget">
|
||||
<div id="wtoggle">♫</div>
|
||||
<div id="widgeti">
|
||||
@@ -63,6 +68,8 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script src="/.cpr/util.js{{ ts }}"></script>
|
||||
|
||||
{%- if can_read %}
|
||||
<script src="/.cpr/browser.js{{ ts }}"></script>
|
||||
{%- endif %}
|
||||
|
||||
@@ -1,117 +1,25 @@
|
||||
"use strict";
|
||||
|
||||
// error handler for mobile devices
|
||||
function hcroak(msg) {
|
||||
document.body.innerHTML = msg;
|
||||
window.onerror = undefined;
|
||||
throw 'fatal_err';
|
||||
}
|
||||
function croak(msg) {
|
||||
document.body.textContent = msg;
|
||||
window.onerror = undefined;
|
||||
throw msg;
|
||||
}
|
||||
function esc(txt) {
|
||||
return txt.replace(/[&"<>]/g, function (c) {
|
||||
return {
|
||||
'&': '&',
|
||||
'"': '"',
|
||||
'<': '<',
|
||||
'>': '>'
|
||||
}[c];
|
||||
});
|
||||
}
|
||||
window.onerror = function (msg, url, lineNo, columnNo, error) {
|
||||
window.onerror = undefined;
|
||||
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
|
||||
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
|
||||
|
||||
if (error) {
|
||||
var find = ['desc', 'stack', 'trace'];
|
||||
for (var a = 0; a < find.length; a++)
|
||||
if (String(error[find[a]]) !== 'undefined')
|
||||
html.push('<h2>' + find[a] + '</h2>' +
|
||||
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
|
||||
}
|
||||
document.body.style.fontSize = '0.8em';
|
||||
document.body.style.padding = '0 1em 1em 1em';
|
||||
hcroak(html.join('\n'));
|
||||
};
|
||||
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
||||
if (!String.prototype.endsWith) {
|
||||
String.prototype.endsWith = function (search, this_len) {
|
||||
if (this_len === undefined || this_len > this.length) {
|
||||
this_len = this.length;
|
||||
}
|
||||
return this.substring(this_len - search.length, this_len) === search;
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var script = document.createElement('script');
|
||||
script.type = 'text/javascript';
|
||||
script.src = url;
|
||||
|
||||
script.onreadystatechange = cb;
|
||||
script.onload = cb;
|
||||
|
||||
head.appendChild(script);
|
||||
}
|
||||
|
||||
|
||||
function o(id) {
|
||||
return document.getElementById(id);
|
||||
}
|
||||
window.onerror = vis_exh;
|
||||
|
||||
function dbg(msg) {
|
||||
o('path').innerHTML = msg;
|
||||
ebi('path').innerHTML = msg;
|
||||
}
|
||||
|
||||
function ev(e) {
|
||||
e = e || window.event;
|
||||
e.preventDefault ? e.preventDefault() : (e.returnValue = false);
|
||||
|
||||
if (e.preventDefault)
|
||||
e.preventDefault()
|
||||
|
||||
if (e.stopPropagation)
|
||||
e.stopPropagation();
|
||||
|
||||
e.returnValue = false;
|
||||
return e;
|
||||
}
|
||||
|
||||
|
||||
function sortTable(table, col) {
|
||||
var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows
|
||||
th = table.tHead.rows[0].cells,
|
||||
tr = Array.prototype.slice.call(tb.rows, 0),
|
||||
i, reverse = th[col].className == 'sort1' ? -1 : 1;
|
||||
for (var a = 0, thl = th.length; a < thl; a++)
|
||||
th[a].className = '';
|
||||
th[col].className = 'sort' + reverse;
|
||||
var stype = th[col].getAttribute('sort');
|
||||
tr = tr.sort(function (a, b) {
|
||||
var v1 = a.cells[col].textContent.trim();
|
||||
var v2 = b.cells[col].textContent.trim();
|
||||
if (stype == 'int') {
|
||||
v1 = parseInt(v1.replace(/,/g, ''));
|
||||
v2 = parseInt(v2.replace(/,/g, ''));
|
||||
return reverse * (v1 - v2);
|
||||
}
|
||||
return reverse * (v1.localeCompare(v2));
|
||||
});
|
||||
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]);
|
||||
}
|
||||
function makeSortable(table) {
|
||||
var th = table.tHead, i;
|
||||
th && (th = th.rows[0]) && (th = th.cells);
|
||||
if (th) i = th.length;
|
||||
else return; // if no `<thead>` then do nothing
|
||||
while (--i >= 0) (function (i) {
|
||||
th[i].onclick = function () {
|
||||
sortTable(table, i);
|
||||
};
|
||||
}(i));
|
||||
}
|
||||
makeSortable(o('files'));
|
||||
makeSortable(ebi('files'));
|
||||
|
||||
|
||||
// extract songs + add play column
|
||||
@@ -124,9 +32,9 @@ var mp = (function () {
|
||||
'tracks': tracks,
|
||||
'cover_url': ''
|
||||
};
|
||||
var re_audio = new RegExp('\.(opus|ogg|m4a|aac|mp3|wav|flac)$', 'i');
|
||||
var re_audio = /\.(opus|ogg|m4a|aac|mp3|wav|flac)$/i;
|
||||
|
||||
var trs = document.getElementById('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
|
||||
var trs = ebi('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
|
||||
for (var a = 0, aa = trs.length; a < aa; a++) {
|
||||
var tds = trs[a].getElementsByTagName('td');
|
||||
var link = tds[1].getElementsByTagName('a')[0];
|
||||
@@ -142,7 +50,7 @@ var mp = (function () {
|
||||
}
|
||||
|
||||
for (var a = 0, aa = tracks.length; a < aa; a++)
|
||||
o('trk' + a).onclick = ev_play;
|
||||
ebi('trk' + a).onclick = ev_play;
|
||||
|
||||
ret.vol = localStorage.getItem('vol');
|
||||
if (ret.vol !== null)
|
||||
@@ -169,8 +77,8 @@ var mp = (function () {
|
||||
// toggle player widget
|
||||
var widget = (function () {
|
||||
var ret = {};
|
||||
var widget = document.getElementById('widget');
|
||||
var wtoggle = document.getElementById('wtoggle');
|
||||
var widget = ebi('widget');
|
||||
var wtoggle = ebi('wtoggle');
|
||||
var touchmode = false;
|
||||
var side_open = false;
|
||||
var was_paused = true;
|
||||
@@ -199,7 +107,7 @@ var widget = (function () {
|
||||
ret.paused = function (paused) {
|
||||
if (was_paused != paused) {
|
||||
was_paused = paused;
|
||||
o('bplay').innerHTML = paused ? '▶' : '⏸';
|
||||
ebi('bplay').innerHTML = paused ? '▶' : '⏸';
|
||||
}
|
||||
};
|
||||
var click_handler = function (e) {
|
||||
@@ -223,8 +131,8 @@ var widget = (function () {
|
||||
// buffer/position bar
|
||||
var pbar = (function () {
|
||||
var r = {};
|
||||
r.bcan = o('barbuf');
|
||||
r.pcan = o('barpos');
|
||||
r.bcan = ebi('barbuf');
|
||||
r.pcan = ebi('barpos');
|
||||
r.bctx = r.bcan.getContext('2d');
|
||||
r.pctx = r.pcan.getContext('2d');
|
||||
|
||||
@@ -289,7 +197,7 @@ var pbar = (function () {
|
||||
// volume bar
|
||||
var vbar = (function () {
|
||||
var r = {};
|
||||
r.can = o('pvol');
|
||||
r.can = ebi('pvol');
|
||||
r.ctx = r.can.getContext('2d');
|
||||
|
||||
var bctx = r.ctx;
|
||||
@@ -386,7 +294,7 @@ var vbar = (function () {
|
||||
else
|
||||
play(0);
|
||||
};
|
||||
o('bplay').onclick = function (e) {
|
||||
ebi('bplay').onclick = function (e) {
|
||||
ev(e);
|
||||
if (mp.au) {
|
||||
if (mp.au.paused)
|
||||
@@ -397,15 +305,15 @@ var vbar = (function () {
|
||||
else
|
||||
play(0);
|
||||
};
|
||||
o('bprev').onclick = function (e) {
|
||||
ebi('bprev').onclick = function (e) {
|
||||
ev(e);
|
||||
bskip(-1);
|
||||
};
|
||||
o('bnext').onclick = function (e) {
|
||||
ebi('bnext').onclick = function (e) {
|
||||
ev(e);
|
||||
bskip(1);
|
||||
};
|
||||
o('barpos').onclick = function (e) {
|
||||
ebi('barpos').onclick = function (e) {
|
||||
if (!mp.au) {
|
||||
//dbg((new Date()).getTime());
|
||||
return play(0);
|
||||
@@ -414,8 +322,12 @@ var vbar = (function () {
|
||||
var rect = pbar.pcan.getBoundingClientRect();
|
||||
var x = e.clientX - rect.left;
|
||||
var mul = x * 1.0 / rect.width;
|
||||
var seek = mp.au.duration * mul;
|
||||
console.log('seek: ' + seek);
|
||||
if (!isFinite(seek))
|
||||
return;
|
||||
|
||||
mp.au.currentTime = mp.au.duration * mul;
|
||||
mp.au.currentTime = seek;
|
||||
|
||||
if (mp.au === mp.au_native)
|
||||
// hack: ogv.js breaks on .play() during playback
|
||||
@@ -471,7 +383,7 @@ function ev_play(e) {
|
||||
|
||||
|
||||
function setclass(id, clas) {
|
||||
o(id).setAttribute('class', clas);
|
||||
ebi(id).setAttribute('class', clas);
|
||||
}
|
||||
|
||||
|
||||
@@ -542,7 +454,8 @@ function play(tid, call_depth) {
|
||||
mp.au.tid = tid;
|
||||
mp.au.src = url;
|
||||
mp.au.volume = mp.expvol();
|
||||
setclass('trk' + tid, 'play act');
|
||||
var oid = 'trk' + tid;
|
||||
setclass(oid, 'play act');
|
||||
|
||||
try {
|
||||
if (hack_attempt_play)
|
||||
@@ -551,7 +464,11 @@ function play(tid, call_depth) {
|
||||
if (mp.au.paused)
|
||||
autoplay_blocked();
|
||||
|
||||
location.hash = 'trk' + tid;
|
||||
var o = ebi(oid);
|
||||
o.setAttribute('id', 'thx_js');
|
||||
location.hash = oid;
|
||||
o.setAttribute('id', oid);
|
||||
|
||||
pbar.drawbuf();
|
||||
return true;
|
||||
}
|
||||
@@ -567,7 +484,6 @@ function play(tid, call_depth) {
|
||||
function evau_error(e) {
|
||||
var err = '';
|
||||
var eplaya = (e && e.target) || (window.event && window.event.srcElement);
|
||||
var url = eplaya.src;
|
||||
|
||||
switch (eplaya.error.code) {
|
||||
case eplaya.error.MEDIA_ERR_ABORTED:
|
||||
@@ -608,26 +524,27 @@ function show_modal(html) {
|
||||
|
||||
// hide fullscreen message
|
||||
function unblocked() {
|
||||
var dom = o('blocked');
|
||||
var dom = ebi('blocked');
|
||||
if (dom)
|
||||
dom.parentNode.removeChild(dom);
|
||||
}
|
||||
|
||||
|
||||
// show ui to manually start playback of a linked song
|
||||
function autoplay_blocked(tid) {
|
||||
function autoplay_blocked() {
|
||||
show_modal(
|
||||
'<div id="blk_play"><a id="blk_go"></a></div>' +
|
||||
'<div id="blk_abrt"><a id="blk_na">Cancel<br />(show file list)</a></div>');
|
||||
'<div id="blk_play"><a href="#" id="blk_go"></a></div>' +
|
||||
'<div id="blk_abrt"><a href="#" id="blk_na">Cancel<br />(show file list)</a></div>');
|
||||
|
||||
var go = o('blk_go');
|
||||
var na = o('blk_na');
|
||||
var go = ebi('blk_go');
|
||||
var na = ebi('blk_na');
|
||||
|
||||
var fn = mp.tracks[mp.au.tid].split(/\//).pop();
|
||||
fn = decodeURIComponent(fn.replace(/\+/g, ' '));
|
||||
|
||||
go.textContent = 'Play "' + fn + '"';
|
||||
go.onclick = function () {
|
||||
go.onclick = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
unblocked();
|
||||
mp.au.play();
|
||||
};
|
||||
|
||||
@@ -109,8 +109,12 @@ h2 a, h4 a, h6 a {
|
||||
#mp ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em {
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
@@ -289,6 +293,32 @@ blink {
|
||||
text-decoration: underline;
|
||||
border: none;
|
||||
}
|
||||
#mh a:hover {
|
||||
color: #000;
|
||||
background: #ddd;
|
||||
}
|
||||
#toolsbox {
|
||||
overflow: hidden;
|
||||
display: inline-block;
|
||||
background: #eee;
|
||||
height: 1.5em;
|
||||
padding: 0 .2em;
|
||||
margin: 0 .2em;
|
||||
position: absolute;
|
||||
}
|
||||
#toolsbox.open {
|
||||
height: auto;
|
||||
overflow: visible;
|
||||
background: #eee;
|
||||
box-shadow: 0 .2em .2em #ccc;
|
||||
padding-bottom: .2em;
|
||||
}
|
||||
#toolsbox a {
|
||||
display: block;
|
||||
}
|
||||
#toolsbox a+a {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -332,8 +362,12 @@ blink {
|
||||
html.dark #m>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em {
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
@@ -371,6 +405,17 @@ blink {
|
||||
color: #ccc;
|
||||
background: none;
|
||||
}
|
||||
html.dark #mh a:hover {
|
||||
background: #333;
|
||||
color: #fff;
|
||||
}
|
||||
html.dark #toolsbox {
|
||||
background: #222;
|
||||
}
|
||||
html.dark #toolsbox.open {
|
||||
box-shadow: 0 .2em .2em #069;
|
||||
border-radius: 0 0 .4em .4em;
|
||||
}
|
||||
}
|
||||
|
||||
@media screen and (min-width: 66em) {
|
||||
@@ -541,7 +586,8 @@ blink {
|
||||
color: #240;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em {
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #940;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,7 +17,14 @@
|
||||
<a id="save" href="?edit">save</a>
|
||||
<a id="sbs" href="#">sbs</a>
|
||||
<a id="nsbs" href="#">editor</a>
|
||||
<a id="help" href="#">help</a>
|
||||
<div id="toolsbox">
|
||||
<a id="tools" href="#">tools</a>
|
||||
<a id="fmt_table" href="#">prettify table (ctrl-k)</a>
|
||||
<a id="iter_uni" href="#">non-ascii: iterate (ctrl-u)</a>
|
||||
<a id="mark_uni" href="#">non-ascii: markup</a>
|
||||
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
|
||||
<a id="help" href="#">help</a>
|
||||
</div>
|
||||
{%- else %}
|
||||
<a href="?edit">edit (basic)</a>
|
||||
<a href="?edit2">edit (fancy)</a>
|
||||
@@ -26,7 +33,7 @@
|
||||
</div>
|
||||
<div id="toc"></div>
|
||||
<div id="mtw">
|
||||
<textarea id="mt">{{ md }}</textarea>
|
||||
<textarea id="mt" autocomplete="off">{{ md }}</textarea>
|
||||
</div>
|
||||
<div id="mw">
|
||||
<div id="ml">
|
||||
@@ -40,12 +47,15 @@
|
||||
|
||||
{%- if edit %}
|
||||
<div id="helpbox">
|
||||
<textarea>
|
||||
<textarea autocomplete="off">
|
||||
|
||||
write markdown (most html is 🙆 too)
|
||||
|
||||
## hotkey list
|
||||
* `Ctrl-S` to save
|
||||
* `Ctrl-E` to toggle mode
|
||||
* `Ctrl-K` to prettyprint a table
|
||||
* `Ctrl-U` to iterate non-ascii chars
|
||||
* `Ctrl-H` / `Ctrl-Shift-H` to create a header
|
||||
* `TAB` / `Shift-TAB` to indent/dedent a selection
|
||||
|
||||
@@ -113,8 +123,12 @@ write markdown (most html is 🙆 too)
|
||||
|
||||
<script>
|
||||
|
||||
var link_md_as_html = false; // TODO (does nothing)
|
||||
var last_modified = {{ lastmod }};
|
||||
var md_opt = {
|
||||
link_md_as_html: false,
|
||||
allow_plugins: {{ md_plug }},
|
||||
modpoll_freq: {{ md_chk_rate }}
|
||||
};
|
||||
|
||||
(function () {
|
||||
var btn = document.getElementById("lightswitch");
|
||||
@@ -131,17 +145,11 @@ var last_modified = {{ lastmod }};
|
||||
toggle();
|
||||
})();
|
||||
|
||||
if (!String.startsWith) {
|
||||
String.prototype.startsWith = function(s, i) {
|
||||
i = i>0 ? i|0 : 0;
|
||||
return this.substring(i, i + s.length) === s;
|
||||
};
|
||||
}
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js"></script>
|
||||
<script src="/.cpr/deps/marked.full.js"></script>
|
||||
<script src="/.cpr/md.js"></script>
|
||||
{%- if edit %}
|
||||
<script src="/.cpr/md2.js"></script>
|
||||
<script src="/.cpr/md2.js"></script>
|
||||
{%- endif %}
|
||||
</body></html>
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
var dom_toc = document.getElementById('toc');
|
||||
var dom_wrap = document.getElementById('mw');
|
||||
var dom_hbar = document.getElementById('mh');
|
||||
var dom_nav = document.getElementById('mn');
|
||||
var dom_pre = document.getElementById('mp');
|
||||
var dom_src = document.getElementById('mt');
|
||||
var dom_navtgl = document.getElementById('navtoggle');
|
||||
"use strict";
|
||||
|
||||
var dom_toc = ebi('toc');
|
||||
var dom_wrap = ebi('mw');
|
||||
var dom_hbar = ebi('mh');
|
||||
var dom_nav = ebi('mn');
|
||||
var dom_pre = ebi('mp');
|
||||
var dom_src = ebi('mt');
|
||||
var dom_navtgl = ebi('navtoggle');
|
||||
|
||||
|
||||
// chrome 49 needs this
|
||||
@@ -18,6 +20,10 @@ var dbg = function () { };
|
||||
// dbg = console.log
|
||||
|
||||
|
||||
// plugins
|
||||
var md_plug = {};
|
||||
|
||||
|
||||
function hesc(txt) {
|
||||
return txt.replace(/&/g, "&").replace(/</g, "<").replace(/>/g, ">");
|
||||
}
|
||||
@@ -30,7 +36,7 @@ function cls(dom, name, add) {
|
||||
}
|
||||
|
||||
|
||||
function static(obj) {
|
||||
function statify(obj) {
|
||||
return JSON.parse(JSON.stringify(obj));
|
||||
}
|
||||
|
||||
@@ -154,13 +160,110 @@ function copydom(src, dst, lv) {
|
||||
}
|
||||
|
||||
|
||||
function md_plug_err(ex, js) {
|
||||
var errbox = ebi('md_errbox');
|
||||
if (errbox)
|
||||
errbox.parentNode.removeChild(errbox);
|
||||
|
||||
if (!ex)
|
||||
return;
|
||||
|
||||
var msg = (ex + '').split('\n')[0];
|
||||
var ln = ex.lineNumber;
|
||||
var o = null;
|
||||
if (ln) {
|
||||
msg = "Line " + ln + ", " + msg;
|
||||
var lns = js.split('\n');
|
||||
if (ln < lns.length) {
|
||||
o = document.createElement('span');
|
||||
o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block';
|
||||
o.textContent = lns[ln - 1];
|
||||
}
|
||||
}
|
||||
errbox = document.createElement('div');
|
||||
errbox.setAttribute('id', 'md_errbox');
|
||||
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
|
||||
errbox.textContent = msg;
|
||||
errbox.onclick = function () {
|
||||
alert('' + ex.stack);
|
||||
};
|
||||
if (o) {
|
||||
errbox.appendChild(o);
|
||||
errbox.style.padding = '.25em .5em';
|
||||
}
|
||||
dom_nav.appendChild(errbox);
|
||||
|
||||
try {
|
||||
console.trace();
|
||||
}
|
||||
catch (ex2) { }
|
||||
}
|
||||
|
||||
|
||||
function load_plug(md_text, plug_type) {
|
||||
if (!md_opt.allow_plugins)
|
||||
return md_text;
|
||||
|
||||
var find = '\n```copyparty_' + plug_type + '\n';
|
||||
var ofs = md_text.indexOf(find);
|
||||
if (ofs === -1)
|
||||
return md_text;
|
||||
|
||||
var ofs2 = md_text.indexOf('\n```', ofs + 1);
|
||||
if (ofs2 == -1)
|
||||
return md_text;
|
||||
|
||||
var js = md_text.slice(ofs + find.length, ofs2 + 1);
|
||||
var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
|
||||
|
||||
var old_plug = md_plug[plug_type];
|
||||
if (!old_plug || old_plug[1] != js) {
|
||||
js = 'const x = { ' + js + ' }; x;';
|
||||
try {
|
||||
var x = eval(js);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug[plug_type] = null;
|
||||
md_plug_err(ex, js);
|
||||
return md;
|
||||
}
|
||||
if (x['ctor']) {
|
||||
x['ctor']();
|
||||
delete x['ctor'];
|
||||
}
|
||||
md_plug[plug_type] = [x, js];
|
||||
}
|
||||
|
||||
return md;
|
||||
}
|
||||
|
||||
|
||||
function convert_markdown(md_text, dest_dom) {
|
||||
marked.setOptions({
|
||||
md_text = md_text.replace(/\r/g, '');
|
||||
|
||||
md_plug_err(null);
|
||||
md_text = load_plug(md_text, 'pre');
|
||||
md_text = load_plug(md_text, 'post');
|
||||
|
||||
var marked_opts = {
|
||||
//headerPrefix: 'h-',
|
||||
breaks: true,
|
||||
gfm: true
|
||||
});
|
||||
var md_html = marked(md_text);
|
||||
};
|
||||
|
||||
var ext = md_plug['pre'];
|
||||
if (ext)
|
||||
Object.assign(marked_opts, ext[0]);
|
||||
|
||||
try {
|
||||
var md_html = marked(md_text, marked_opts);
|
||||
}
|
||||
catch (ex) {
|
||||
if (ext)
|
||||
md_plug_err(ex, ext[1]);
|
||||
|
||||
throw ex;
|
||||
}
|
||||
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
||||
|
||||
var nodes = md_dom.getElementsByTagName('a');
|
||||
@@ -196,7 +299,7 @@ function convert_markdown(md_text, dest_dom) {
|
||||
}
|
||||
|
||||
// separate <code> for each line in <pre>
|
||||
var nodes = md_dom.getElementsByTagName('pre');
|
||||
nodes = md_dom.getElementsByTagName('pre');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
var el = nodes[a];
|
||||
|
||||
@@ -209,7 +312,7 @@ function convert_markdown(md_text, dest_dom) {
|
||||
continue;
|
||||
|
||||
var nline = parseInt(el.getAttribute('data-ln')) + 1;
|
||||
var lines = el.innerHTML.replace(/\r?\n<\/code>$/i, '</code>').split(/\r?\n/g);
|
||||
var lines = el.innerHTML.replace(/\n<\/code>$/i, '</code>').split(/\n/g);
|
||||
for (var b = 0; b < lines.length - 1; b++)
|
||||
lines[b] += '</code>\n<code data-ln="' + (nline + b) + '">';
|
||||
|
||||
@@ -242,12 +345,29 @@ function convert_markdown(md_text, dest_dom) {
|
||||
el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>';
|
||||
}
|
||||
|
||||
ext = md_plug['post'];
|
||||
if (ext && ext[0].render)
|
||||
try {
|
||||
ext[0].render(md_dom);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug_err(ex, ext[1]);
|
||||
}
|
||||
|
||||
copydom(md_dom, dest_dom, 0);
|
||||
|
||||
if (ext && ext[0].render2)
|
||||
try {
|
||||
ext[0].render2(dest_dom);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug_err(ex, ext[1]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function init_toc() {
|
||||
var loader = document.getElementById('ml');
|
||||
var loader = ebi('ml');
|
||||
loader.parentNode.removeChild(loader);
|
||||
|
||||
var anchors = []; // list of toc entries, complex objects
|
||||
@@ -281,7 +401,12 @@ function init_toc() {
|
||||
|
||||
elm.childNodes[0].setAttribute('ctr', ctr.slice(0, lv).join('.'));
|
||||
|
||||
html.push('<li>' + elm.innerHTML + '</li>');
|
||||
var elm2 = elm.cloneNode(true);
|
||||
elm2.childNodes[0].textContent = elm.textContent;
|
||||
while (elm2.childNodes.length > 1)
|
||||
elm2.removeChild(elm2.childNodes[1]);
|
||||
|
||||
html.push('<li>' + elm2.innerHTML + '</li>');
|
||||
|
||||
if (anchor != null)
|
||||
anchors.push(anchor);
|
||||
|
||||
@@ -77,32 +77,52 @@ html.dark #mt {
|
||||
background: #f97;
|
||||
border-radius: .15em;
|
||||
}
|
||||
html.dark #save.force-save {
|
||||
color: #fca;
|
||||
background: #720;
|
||||
}
|
||||
#save.disabled {
|
||||
opacity: .4;
|
||||
}
|
||||
#helpbox,
|
||||
#toast {
|
||||
background: #f7f7f7;
|
||||
border-radius: .4em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#helpbox {
|
||||
display: none;
|
||||
position: fixed;
|
||||
background: #f7f7f7;
|
||||
box-shadow: 0 .5em 2em #777;
|
||||
border-radius: .4em;
|
||||
padding: 2em;
|
||||
top: 4em;
|
||||
overflow-y: auto;
|
||||
box-shadow: 0 .5em 2em #777;
|
||||
height: calc(100% - 12em);
|
||||
left: calc(50% - 15em);
|
||||
right: 0;
|
||||
width: 30em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#helpclose {
|
||||
display: block;
|
||||
}
|
||||
html.dark #helpbox {
|
||||
background: #222;
|
||||
box-shadow: 0 .5em 2em #444;
|
||||
}
|
||||
html.dark #helpbox,
|
||||
html.dark #toast {
|
||||
background: #222;
|
||||
border: 1px solid #079;
|
||||
border-width: 1px 0;
|
||||
}
|
||||
#toast {
|
||||
font-weight: bold;
|
||||
text-align: center;
|
||||
padding: .6em 0;
|
||||
position: fixed;
|
||||
z-index: 9001;
|
||||
top: 30%;
|
||||
transition: opacity 0.2s ease-in-out;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
# mt {opacity: .5;top:1px}
|
||||
|
||||
@@ -1,16 +1,25 @@
|
||||
"use strict";
|
||||
|
||||
|
||||
// server state
|
||||
var server_md = dom_src.value;
|
||||
|
||||
|
||||
// the non-ascii whitelist
|
||||
var esc_uni_whitelist = '\\n\\t\\x20-\\x7eÆØÅæøå';
|
||||
var js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
|
||||
|
||||
|
||||
// dom nodes
|
||||
var dom_swrap = document.getElementById('mtw');
|
||||
var dom_sbs = document.getElementById('sbs');
|
||||
var dom_nsbs = document.getElementById('nsbs');
|
||||
var dom_swrap = ebi('mtw');
|
||||
var dom_sbs = ebi('sbs');
|
||||
var dom_nsbs = ebi('nsbs');
|
||||
var dom_tbox = ebi('toolsbox');
|
||||
var dom_ref = (function () {
|
||||
var d = document.createElement('div');
|
||||
d.setAttribute('id', 'mtr');
|
||||
dom_swrap.appendChild(d);
|
||||
d = document.getElementById('mtr');
|
||||
d = ebi('mtr');
|
||||
// hide behind the textarea (offsetTop is not computed if display:none)
|
||||
dom_src.style.zIndex = '4';
|
||||
d.style.zIndex = '3';
|
||||
@@ -99,7 +108,7 @@ var draw_md = (function () {
|
||||
map_src = genmap(dom_ref, map_src);
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
|
||||
cls(document.getElementById('save'), 'disabled', src == server_md);
|
||||
cls(ebi('save'), 'disabled', src == server_md);
|
||||
|
||||
var t1 = new Date().getTime();
|
||||
delay = t1 - t0 > 100 ? 25 : 1;
|
||||
@@ -135,7 +144,7 @@ redraw = (function () {
|
||||
onresize();
|
||||
}
|
||||
function modetoggle() {
|
||||
mode = dom_nsbs.innerHTML;
|
||||
var mode = dom_nsbs.innerHTML;
|
||||
dom_nsbs.innerHTML = mode == 'editor' ? 'preview' : 'editor';
|
||||
mode += ' single';
|
||||
dom_wrap.setAttribute('class', mode);
|
||||
@@ -164,14 +173,14 @@ redraw = (function () {
|
||||
dst.scrollTop = 0;
|
||||
return;
|
||||
}
|
||||
if (y + 8 + src.clientHeight > src.scrollHeight) {
|
||||
if (y + 48 + src.clientHeight > src.scrollHeight) {
|
||||
dst.scrollTop = dst.scrollHeight - dst.clientHeight;
|
||||
return;
|
||||
}
|
||||
y += src.clientHeight / 2;
|
||||
var sy1 = -1, sy2 = -1, dy1 = -1, dy2 = -1;
|
||||
for (var a = 1; a < nlines + 1; a++) {
|
||||
if (srcmap[a] === null || dstmap[a] === null)
|
||||
if (srcmap[a] == null || dstmap[a] == null)
|
||||
continue;
|
||||
|
||||
if (srcmap[a] > y) {
|
||||
@@ -214,14 +223,108 @@ redraw = (function () {
|
||||
})();
|
||||
|
||||
|
||||
// modification checker
|
||||
function Modpoll() {
|
||||
this.skip_one = true;
|
||||
this.disabled = false;
|
||||
|
||||
this.periodic = function () {
|
||||
var that = this;
|
||||
setTimeout(function () {
|
||||
that.periodic();
|
||||
}, 1000 * md_opt.modpoll_freq);
|
||||
|
||||
var skip = null;
|
||||
|
||||
if (ebi('toast'))
|
||||
skip = 'toast';
|
||||
|
||||
else if (this.skip_one)
|
||||
skip = 'saved';
|
||||
|
||||
else if (this.disabled)
|
||||
skip = 'disabled';
|
||||
|
||||
if (skip) {
|
||||
console.log('modpoll skip, ' + skip);
|
||||
this.skip_one = false;
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('modpoll...');
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + new Date().getTime();
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.modpoll = this;
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = this.cb;
|
||||
xhr.send();
|
||||
}
|
||||
|
||||
this.cb = function () {
|
||||
if (this.modpoll.disabled || this.modpoll.skip_one) {
|
||||
console.log('modpoll abort');
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200) {
|
||||
console.log('modpoll err ' + this.status + ": " + this.responseText);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!this.responseText)
|
||||
return;
|
||||
|
||||
var server_ref = server_md.replace(/\r/g, '');
|
||||
var server_now = this.responseText.replace(/\r/g, '');
|
||||
|
||||
if (server_ref != server_now) {
|
||||
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
|
||||
this.modpoll.disabled = true;
|
||||
var msg = [
|
||||
"The document has changed on the server.<br />" +
|
||||
"The changes will NOT be loaded into your editor automatically.",
|
||||
|
||||
"Press F5 or CTRL-R to refresh the page,<br />" +
|
||||
"replacing your document with the server copy.",
|
||||
|
||||
"You can click this message to ignore and contnue."
|
||||
];
|
||||
return toast(false, "box-shadow:0 1em 2em rgba(64,64,64,0.8);font-weight:normal",
|
||||
36, "<p>" + msg.join('</p>\n<p>') + '</p>');
|
||||
}
|
||||
|
||||
console.log('modpoll eq');
|
||||
}
|
||||
|
||||
if (md_opt.modpoll_freq > 0)
|
||||
this.periodic();
|
||||
|
||||
return this;
|
||||
}
|
||||
var modpoll = new Modpoll();
|
||||
|
||||
|
||||
window.onbeforeunload = function (e) {
|
||||
if ((ebi("save").getAttribute('class') + '').indexOf('disabled') >= 0)
|
||||
return; //nice (todo)
|
||||
|
||||
e.preventDefault(); //ff
|
||||
e.returnValue = ''; //chrome
|
||||
};
|
||||
|
||||
|
||||
// save handler
|
||||
function save(e) {
|
||||
if (e) e.preventDefault();
|
||||
var save_btn = document.getElementById("save"),
|
||||
var save_btn = ebi("save"),
|
||||
save_cls = save_btn.getAttribute('class') + '';
|
||||
|
||||
if (save_cls.indexOf('disabled') >= 0) {
|
||||
alert('there is nothing to save');
|
||||
toast(true, ";font-size:2em;color:#c90", 9, "no changes");
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -238,13 +341,15 @@ function save(e) {
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
|
||||
var url = (document.location + '').split('?')[0] + '?raw';
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_cb;
|
||||
xhr.btn = save_btn;
|
||||
xhr.txt = txt;
|
||||
|
||||
modpoll.skip_one = true; // skip one iteration while we save
|
||||
xhr.send(fd);
|
||||
}
|
||||
|
||||
@@ -289,19 +394,24 @@ function save_cb() {
|
||||
this.btn.classList.remove('force-save');
|
||||
//alert('save OK -- wrote ' + r.size + ' bytes.\n\nsha512: ' + r.sha512);
|
||||
|
||||
run_savechk(r.lastmod, this.txt, this.btn, 0);
|
||||
}
|
||||
|
||||
function run_savechk(lastmod, txt, btn, ntry) {
|
||||
// download the saved doc from the server and compare
|
||||
var url = (document.location + '').split('?')[0] + '?raw';
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + new Date().getTime();
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_chk;
|
||||
xhr.btn = this.save_btn;
|
||||
xhr.txt = this.txt;
|
||||
xhr.lastmod = r.lastmod;
|
||||
xhr.onreadystatechange = savechk_cb;
|
||||
xhr.lastmod = lastmod;
|
||||
xhr.txt = txt;
|
||||
xhr.btn = btn;
|
||||
xhr.ntry = ntry;
|
||||
xhr.send();
|
||||
}
|
||||
|
||||
function save_chk() {
|
||||
function savechk_cb() {
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
@@ -313,6 +423,14 @@ function save_chk() {
|
||||
var doc1 = this.txt.replace(/\r\n/g, "\n");
|
||||
var doc2 = this.responseText.replace(/\r\n/g, "\n");
|
||||
if (doc1 != doc2) {
|
||||
var that = this;
|
||||
if (that.ntry < 10) {
|
||||
// qnap funny, try a few more times
|
||||
setTimeout(function () {
|
||||
run_savechk(that.lastmod, that.txt, that.btn, that.ntry + 1)
|
||||
}, 100);
|
||||
return;
|
||||
}
|
||||
alert(
|
||||
'Error! The document on the server does not appear to have saved correctly (your editor contents and the server copy is not identical). Place the document on your clipboard for now and check the server logs for hints\n\n' +
|
||||
'Length: yours=' + doc1.length + ', server=' + doc2.length
|
||||
@@ -325,18 +443,44 @@ function save_chk() {
|
||||
last_modified = this.lastmod;
|
||||
server_md = this.txt;
|
||||
draw_md();
|
||||
toast(true, ";font-size:6em;font-family:serif;color:#9b4", 4,
|
||||
'OK✔️<span style="font-size:.2em;color:#999;position:absolute">' + this.ntry + '</span>');
|
||||
|
||||
var ok = document.createElement('div');
|
||||
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
|
||||
ok.innerHTML = 'OK✔️';
|
||||
var parent = document.getElementById('m');
|
||||
document.documentElement.appendChild(ok);
|
||||
setTimeout(function () {
|
||||
ok.style.opacity = 0;
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
modpoll.disabled = false;
|
||||
}
|
||||
|
||||
function toast(autoclose, style, width, msg) {
|
||||
var ok = ebi("toast");
|
||||
if (ok)
|
||||
ok.parentNode.removeChild(ok);
|
||||
}, 750);
|
||||
|
||||
style = "width:" + width + "em;left:calc(50% - " + (width / 2) + "em);" + style;
|
||||
ok = document.createElement('div');
|
||||
ok.setAttribute('id', 'toast');
|
||||
ok.setAttribute('style', style);
|
||||
ok.innerHTML = msg;
|
||||
var parent = ebi('m');
|
||||
document.documentElement.appendChild(ok);
|
||||
|
||||
var hide = function (delay) {
|
||||
delay = delay || 0;
|
||||
|
||||
setTimeout(function () {
|
||||
ok.style.opacity = 0;
|
||||
}, delay);
|
||||
|
||||
setTimeout(function () {
|
||||
if (ok.parentNode)
|
||||
ok.parentNode.removeChild(ok);
|
||||
}, delay + 250);
|
||||
}
|
||||
|
||||
ok.onclick = function () {
|
||||
hide(0);
|
||||
};
|
||||
|
||||
if (autoclose)
|
||||
hide(500);
|
||||
}
|
||||
|
||||
|
||||
@@ -409,6 +553,9 @@ function setsel(s) {
|
||||
dom_src.value = [s.pre, s.sel, s.post].join('');
|
||||
dom_src.setSelectionRange(s.car, s.cdr, dom_src.selectionDirection);
|
||||
dom_src.oninput();
|
||||
// support chrome:
|
||||
dom_src.blur();
|
||||
dom_src.focus();
|
||||
}
|
||||
|
||||
|
||||
@@ -482,7 +629,8 @@ function md_newline() {
|
||||
var s = linebounds(true),
|
||||
ln = s.md.substring(s.n1, s.n2),
|
||||
m1 = /^( *)([0-9]+)(\. +)/.exec(ln),
|
||||
m2 = /^[ \t>+-]*(\* )?/.exec(ln);
|
||||
m2 = /^[ \t>+-]*(\* )?/.exec(ln),
|
||||
drop = dom_src.selectionEnd - dom_src.selectionStart;
|
||||
|
||||
var pre = m2[0];
|
||||
if (m1 !== null)
|
||||
@@ -494,7 +642,7 @@ function md_newline() {
|
||||
|
||||
s.pre = s.md.substring(0, s.car) + '\n' + pre;
|
||||
s.sel = '';
|
||||
s.post = s.md.substring(s.car);
|
||||
s.post = s.md.substring(s.car + drop);
|
||||
s.car = s.cdr = s.pre.length;
|
||||
setsel(s);
|
||||
return false;
|
||||
@@ -504,11 +652,21 @@ function md_newline() {
|
||||
// backspace
|
||||
function md_backspace() {
|
||||
var s = linebounds(true),
|
||||
ln = s.md.substring(s.n1, s.n2),
|
||||
m = /^[ \t>+-]*(\* )?([0-9]+\. +)?/.exec(ln);
|
||||
o0 = dom_src.selectionStart,
|
||||
left = s.md.slice(s.n1, o0),
|
||||
m = /^[ \t>+-]*(\* )?([0-9]+\. +)?/.exec(left);
|
||||
|
||||
// if car is in whitespace area, do nothing
|
||||
if (/^\s*$/.test(left))
|
||||
return true;
|
||||
|
||||
// same if selection
|
||||
if (o0 != dom_src.selectionEnd)
|
||||
return true;
|
||||
|
||||
// same if line is all-whitespace or non-markup
|
||||
var v = m[0].replace(/[^ ]/g, " ");
|
||||
if (v === m[0] || v.length !== ln.length)
|
||||
if (v === m[0] || v.length !== left.length)
|
||||
return true;
|
||||
|
||||
s.pre = s.md.substring(0, s.n1) + v;
|
||||
@@ -520,6 +678,248 @@ function md_backspace() {
|
||||
}
|
||||
|
||||
|
||||
// paragraph jump
|
||||
function md_p_jump(down) {
|
||||
var txt = dom_src.value,
|
||||
ofs = dom_src.selectionStart;
|
||||
|
||||
if (down) {
|
||||
while (txt[ofs] == '\n' && --ofs > 0);
|
||||
ofs = txt.indexOf("\n\n", ofs);
|
||||
if (ofs < 0)
|
||||
ofs = txt.length - 1;
|
||||
|
||||
while (txt[ofs] == '\n' && ++ofs < txt.length - 1);
|
||||
}
|
||||
else {
|
||||
txt += '\n\n';
|
||||
while (ofs > 1 && txt[ofs - 1] == '\n') ofs--;
|
||||
ofs = Math.max(0, txt.lastIndexOf("\n\n", ofs - 1));
|
||||
while (txt[ofs] == '\n' && ++ofs < txt.length - 1);
|
||||
}
|
||||
|
||||
dom_src.setSelectionRange(ofs, ofs, "none");
|
||||
}
|
||||
|
||||
|
||||
function reLastIndexOf(txt, ptn, end) {
|
||||
var ofs = (typeof end !== 'undefined') ? end : txt.length;
|
||||
end = ofs;
|
||||
while (ofs >= 0) {
|
||||
var sub = txt.slice(ofs, end);
|
||||
if (ptn.test(sub))
|
||||
return ofs;
|
||||
|
||||
ofs--;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
// table formatter
|
||||
function fmt_table(e) {
|
||||
if (e) e.preventDefault();
|
||||
//dom_tbox.setAttribute('class', '');
|
||||
|
||||
var txt = dom_src.value,
|
||||
ofs = dom_src.selectionStart,
|
||||
//o0 = txt.lastIndexOf('\n\n', ofs),
|
||||
//o1 = txt.indexOf('\n\n', ofs);
|
||||
o0 = reLastIndexOf(txt, /\n\s*\n/m, ofs),
|
||||
o1 = txt.slice(ofs).search(/\n\s*\n|\n\s*$/m);
|
||||
// note \s contains \n but its fine
|
||||
|
||||
if (o0 < 0)
|
||||
o0 = 0;
|
||||
else {
|
||||
// seek past the hit
|
||||
var m = /\n\s*\n/m.exec(txt.slice(o0));
|
||||
o0 += m[0].length;
|
||||
}
|
||||
|
||||
o1 = o1 < 0 ? txt.length : o1 + ofs;
|
||||
|
||||
var err = 'cannot format table due to ',
|
||||
tab = txt.slice(o0, o1).split(/\s*\n/),
|
||||
re_ind = /^\s*/,
|
||||
ind = tab[1].match(re_ind)[0],
|
||||
r0_ind = tab[0].slice(0, ind.length),
|
||||
lpipe = tab[1].indexOf('|') < tab[1].indexOf('-'),
|
||||
rpipe = tab[1].lastIndexOf('|') > tab[1].lastIndexOf('-'),
|
||||
re_lpipe = lpipe ? /^\s*\|\s*/ : /^\s*/,
|
||||
re_rpipe = rpipe ? /\s*\|\s*$/ : /\s*$/,
|
||||
ncols;
|
||||
|
||||
// the second row defines the table,
|
||||
// need to process that first
|
||||
var tmp = tab[0];
|
||||
tab[0] = tab[1];
|
||||
tab[1] = tmp;
|
||||
|
||||
for (var a = 0; a < tab.length; a++) {
|
||||
var row_name = (a == 1) ? 'header' : 'row#' + (a + 1);
|
||||
|
||||
var ind2 = tab[a].match(re_ind)[0];
|
||||
if (ind != ind2 && a != 1) // the table can be a list entry or something, ignore [0]
|
||||
return alert(err + 'indentation mismatch on row#2 and ' + row_name + ',\n' + tab[a]);
|
||||
|
||||
var t = tab[a].slice(ind.length);
|
||||
t = t.replace(re_lpipe, "");
|
||||
t = t.replace(re_rpipe, "");
|
||||
tab[a] = t.split(/\s*\|\s*/g);
|
||||
|
||||
if (a == 0)
|
||||
ncols = tab[a].length;
|
||||
else if (ncols < tab[a].length)
|
||||
return alert(err + 'num.columns(' + row_name + ') exceeding row#2; ' + ncols + ' < ' + tab[a].length);
|
||||
|
||||
// if row has less columns than row2, fill them in
|
||||
while (tab[a].length < ncols)
|
||||
tab[a].push('');
|
||||
}
|
||||
|
||||
// aight now swap em back
|
||||
tmp = tab[0];
|
||||
tab[0] = tab[1];
|
||||
tab[1] = tmp;
|
||||
|
||||
var re_align = /^ *(:?)-+(:?) *$/;
|
||||
var align = [];
|
||||
for (var col = 0; col < tab[1].length; col++) {
|
||||
var m = tab[1][col].match(re_align);
|
||||
if (!m)
|
||||
return alert(err + 'invalid column specification, row#2, col ' + (col + 1) + ', [' + tab[1][col] + ']');
|
||||
|
||||
if (m[2]) {
|
||||
if (m[1])
|
||||
align.push('c');
|
||||
else
|
||||
align.push('r');
|
||||
}
|
||||
else
|
||||
align.push('l');
|
||||
}
|
||||
|
||||
var pad = [];
|
||||
var tmax = 0;
|
||||
for (var col = 0; col < ncols; col++) {
|
||||
var max = 0;
|
||||
for (var row = 0; row < tab.length; row++)
|
||||
if (row != 1)
|
||||
max = Math.max(max, tab[row][col].length);
|
||||
|
||||
var s = '';
|
||||
for (var n = 0; n < max; n++)
|
||||
s += ' ';
|
||||
|
||||
pad.push(s);
|
||||
tmax = Math.max(max, tmax);
|
||||
}
|
||||
|
||||
var dashes = '';
|
||||
for (var a = 0; a < tmax; a++)
|
||||
dashes += '-';
|
||||
|
||||
var ret = [];
|
||||
for (var row = 0; row < tab.length; row++) {
|
||||
var ln = [];
|
||||
for (var col = 0; col < tab[row].length; col++) {
|
||||
var p = pad[col];
|
||||
var s = tab[row][col];
|
||||
|
||||
if (align[col] == 'l') {
|
||||
s = (s + p).slice(0, p.length);
|
||||
}
|
||||
else if (align[col] == 'r') {
|
||||
s = (p + s).slice(-p.length);
|
||||
}
|
||||
else {
|
||||
var pt = p.length - s.length;
|
||||
var pl = p.slice(0, Math.floor(pt / 2));
|
||||
var pr = p.slice(0, pt - pl.length);
|
||||
s = pl + s + pr;
|
||||
}
|
||||
|
||||
if (row == 1) {
|
||||
if (align[col] == 'l')
|
||||
s = dashes.slice(0, p.length);
|
||||
else if (align[col] == 'r')
|
||||
s = dashes.slice(0, p.length - 1) + ':';
|
||||
else
|
||||
s = ':' + dashes.slice(0, p.length - 2) + ':';
|
||||
}
|
||||
ln.push(s);
|
||||
}
|
||||
ret.push(ind + '| ' + ln.join(' | ') + ' |');
|
||||
}
|
||||
|
||||
// restore any markup in the row0 gutter
|
||||
ret[0] = r0_ind + ret[0].slice(ind.length);
|
||||
|
||||
ret = {
|
||||
"pre": txt.slice(0, o0),
|
||||
"sel": ret.join('\n'),
|
||||
"post": txt.slice(o1),
|
||||
"car": o0,
|
||||
"cdr": o0
|
||||
};
|
||||
setsel(ret);
|
||||
}
|
||||
|
||||
|
||||
// show unicode
|
||||
function mark_uni(e) {
|
||||
if (e) e.preventDefault();
|
||||
dom_tbox.setAttribute('class', '');
|
||||
|
||||
var txt = dom_src.value,
|
||||
ptn = new RegExp('([^' + js_uni_whitelist + ']+)', 'g'),
|
||||
mod = txt.replace(/\r/g, "").replace(ptn, "\u2588\u2770$1\u2771");
|
||||
|
||||
if (txt == mod) {
|
||||
alert('no results; no modifications were made');
|
||||
return;
|
||||
}
|
||||
dom_src.value = mod;
|
||||
}
|
||||
|
||||
|
||||
// iterate unicode
|
||||
function iter_uni(e) {
|
||||
if (e) e.preventDefault();
|
||||
|
||||
var txt = dom_src.value,
|
||||
ofs = dom_src.selectionDirection == "forward" ? dom_src.selectionEnd : dom_src.selectionStart,
|
||||
re = new RegExp('([^' + js_uni_whitelist + ']+)'),
|
||||
m = re.exec(txt.slice(ofs));
|
||||
|
||||
if (!m) {
|
||||
alert('no more hits from cursor onwards');
|
||||
return;
|
||||
}
|
||||
ofs += m.index;
|
||||
|
||||
dom_src.setSelectionRange(ofs, ofs + m[0].length, "forward");
|
||||
dom_src.oninput();
|
||||
// support chrome:
|
||||
dom_src.blur();
|
||||
dom_src.focus();
|
||||
}
|
||||
|
||||
|
||||
// configure whitelist
|
||||
function cfg_uni(e) {
|
||||
if (e) e.preventDefault();
|
||||
|
||||
var reply = prompt("unicode whitelist", esc_uni_whitelist);
|
||||
if (reply === null)
|
||||
return;
|
||||
|
||||
esc_uni_whitelist = reply;
|
||||
js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
|
||||
}
|
||||
|
||||
|
||||
// hotkeys / toolbar
|
||||
(function () {
|
||||
function keydown(ev) {
|
||||
@@ -531,6 +931,11 @@ function md_backspace() {
|
||||
save();
|
||||
return false;
|
||||
}
|
||||
if (ev.code == "Escape" || kc == 27) {
|
||||
var d = ebi('helpclose');
|
||||
if (d)
|
||||
d.click();
|
||||
}
|
||||
if (document.activeElement == dom_src) {
|
||||
if (ev.code == "Tab" || kc == 9) {
|
||||
md_indent(ev.shiftKey);
|
||||
@@ -562,16 +967,44 @@ function md_backspace() {
|
||||
if (!ctrl && !ev.shiftKey && kc == 8) {
|
||||
return md_backspace();
|
||||
}
|
||||
if (ctrl && (ev.code == "KeyK")) {
|
||||
fmt_table();
|
||||
return false;
|
||||
}
|
||||
if (ctrl && (ev.code == "KeyU")) {
|
||||
iter_uni();
|
||||
return false;
|
||||
}
|
||||
if (ctrl && (ev.code == "KeyE")) {
|
||||
dom_nsbs.click();
|
||||
//fmt_table();
|
||||
return false;
|
||||
}
|
||||
var up = ev.code == "ArrowUp" || kc == 38;
|
||||
var dn = ev.code == "ArrowDown" || kc == 40;
|
||||
if (ctrl && (up || dn)) {
|
||||
md_p_jump(dn);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
document.onkeydown = keydown;
|
||||
document.getElementById('save').onclick = save;
|
||||
ebi('save').onclick = save;
|
||||
})();
|
||||
|
||||
|
||||
document.getElementById('help').onclick = function (e) {
|
||||
ebi('tools').onclick = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
var dom = document.getElementById('helpbox');
|
||||
var is_open = dom_tbox.getAttribute('class') != 'open';
|
||||
dom_tbox.setAttribute('class', is_open ? 'open' : '');
|
||||
};
|
||||
|
||||
|
||||
ebi('help').onclick = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
dom_tbox.setAttribute('class', '');
|
||||
|
||||
var dom = ebi('helpbox');
|
||||
var dtxt = dom.getElementsByTagName('textarea');
|
||||
if (dtxt.length > 0) {
|
||||
convert_markdown(dtxt[0].value, dom);
|
||||
@@ -579,12 +1012,18 @@ document.getElementById('help').onclick = function (e) {
|
||||
}
|
||||
|
||||
dom.style.display = 'block';
|
||||
document.getElementById('helpclose').onclick = function () {
|
||||
ebi('helpclose').onclick = function () {
|
||||
dom.style.display = 'none';
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
ebi('fmt_table').onclick = fmt_table;
|
||||
ebi('mark_uni').onclick = mark_uni;
|
||||
ebi('iter_uni').onclick = iter_uni;
|
||||
ebi('cfg_uni').onclick = cfg_uni;
|
||||
|
||||
|
||||
// blame steen
|
||||
action_stack = (function () {
|
||||
var hist = {
|
||||
@@ -690,13 +1129,12 @@ action_stack = (function () {
|
||||
ref = newtxt;
|
||||
dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length);
|
||||
if (hist.un.length > 0)
|
||||
dbg(static(hist.un.slice(-1)[0]));
|
||||
dbg(statify(hist.un.slice(-1)[0]));
|
||||
if (hist.re.length > 0)
|
||||
dbg(static(hist.re.slice(-1)[0]));
|
||||
dbg(statify(hist.re.slice(-1)[0]));
|
||||
}
|
||||
|
||||
return {
|
||||
push: push,
|
||||
undo: undo,
|
||||
redo: redo,
|
||||
push: schedule_push,
|
||||
@@ -706,7 +1144,7 @@ action_stack = (function () {
|
||||
})();
|
||||
|
||||
/*
|
||||
document.getElementById('help').onclick = function () {
|
||||
ebi('help').onclick = function () {
|
||||
var c1 = getComputedStyle(dom_src).cssText.split(';');
|
||||
var c2 = getComputedStyle(dom_ref).cssText.split(';');
|
||||
var max = Math.min(c1.length, c2.length);
|
||||
@@ -714,4 +1152,4 @@ document.getElementById('help').onclick = function () {
|
||||
if (c1[a] !== c2[a])
|
||||
console.log(c1[a] + '\n' + c2[a]);
|
||||
}
|
||||
*/
|
||||
*/
|
||||
|
||||
@@ -160,8 +160,12 @@ h2 {
|
||||
.mdo ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em {
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
@@ -253,8 +257,12 @@ html.dark .mdo>ul,
|
||||
html.dark .mdo>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em {
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
|
||||
@@ -17,13 +17,17 @@
|
||||
</div>
|
||||
</div>
|
||||
<div id="m">
|
||||
<textarea id="mt" style="display:none">{{ md }}</textarea>
|
||||
<textarea id="mt" style="display:none" autocomplete="off">{{ md }}</textarea>
|
||||
</div>
|
||||
</div>
|
||||
<script>
|
||||
|
||||
var link_md_as_html = false; // TODO (does nothing)
|
||||
var last_modified = {{ lastmod }};
|
||||
var md_opt = {
|
||||
link_md_as_html: false,
|
||||
allow_plugins: {{ md_plug }},
|
||||
modpoll_freq: {{ md_chk_rate }}
|
||||
};
|
||||
|
||||
var lightswitch = (function () {
|
||||
var fun = function () {
|
||||
@@ -39,6 +43,7 @@ var lightswitch = (function () {
|
||||
})();
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js"></script>
|
||||
<script src="/.cpr/deps/easymde.js"></script>
|
||||
<script src="/.cpr/mde.js"></script>
|
||||
</body></html>
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
var dom_wrap = document.getElementById('mw');
|
||||
var dom_nav = document.getElementById('mn');
|
||||
var dom_doc = document.getElementById('m');
|
||||
var dom_md = document.getElementById('mt');
|
||||
"use strict";
|
||||
|
||||
var dom_wrap = ebi('mw');
|
||||
var dom_nav = ebi('mn');
|
||||
var dom_doc = ebi('m');
|
||||
var dom_md = ebi('mt');
|
||||
|
||||
(function () {
|
||||
var n = document.location + '';
|
||||
@@ -63,7 +65,7 @@ var mde = (function () {
|
||||
mde.codemirror.on("change", function () {
|
||||
md_changed(mde);
|
||||
});
|
||||
var loader = document.getElementById('ml');
|
||||
var loader = ebi('ml');
|
||||
loader.parentNode.removeChild(loader);
|
||||
return mde;
|
||||
})();
|
||||
@@ -121,7 +123,7 @@ function save(mde) {
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
|
||||
var url = (document.location + '').split('?')[0] + '?raw';
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
@@ -213,7 +215,7 @@ function save_chk() {
|
||||
var ok = document.createElement('div');
|
||||
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
|
||||
ok.innerHTML = 'OK✔️';
|
||||
var parent = document.getElementById('m');
|
||||
var parent = ebi('m');
|
||||
document.documentElement.appendChild(ok);
|
||||
setTimeout(function () {
|
||||
ok.style.opacity = 0;
|
||||
|
||||
@@ -1,61 +1,6 @@
|
||||
"use strict";
|
||||
|
||||
// error handler for mobile devices
|
||||
function hcroak(msg) {
|
||||
document.body.innerHTML = msg;
|
||||
window.onerror = undefined;
|
||||
throw 'fatal_err';
|
||||
}
|
||||
function croak(msg) {
|
||||
document.body.textContent = msg;
|
||||
window.onerror = undefined;
|
||||
throw msg;
|
||||
}
|
||||
function esc(txt) {
|
||||
return txt.replace(/[&"<>]/g, function (c) {
|
||||
return {
|
||||
'&': '&',
|
||||
'"': '"',
|
||||
'<': '<',
|
||||
'>': '>'
|
||||
}[c];
|
||||
});
|
||||
}
|
||||
window.onerror = function (msg, url, lineNo, columnNo, error) {
|
||||
window.onerror = undefined;
|
||||
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
|
||||
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
|
||||
|
||||
if (error) {
|
||||
var find = ['desc', 'stack', 'trace'];
|
||||
for (var a = 0; a < find.length; a++)
|
||||
if (String(error[find[a]]) !== 'undefined')
|
||||
html.push('<h2>' + find[a] + '</h2>' +
|
||||
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
|
||||
}
|
||||
document.body.style.fontSize = '0.8em';
|
||||
document.body.style.padding = '0 1em 1em 1em';
|
||||
hcroak(html.join('\n'));
|
||||
};
|
||||
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var script = document.createElement('script');
|
||||
script.type = 'text/javascript';
|
||||
script.src = url;
|
||||
|
||||
script.onreadystatechange = cb;
|
||||
script.onload = cb;
|
||||
|
||||
head.appendChild(script);
|
||||
}
|
||||
|
||||
|
||||
function o(id) {
|
||||
return document.getElementById(id);
|
||||
}
|
||||
window.onerror = vis_exh;
|
||||
|
||||
|
||||
(function () {
|
||||
@@ -88,12 +33,12 @@ function goto(dest) {
|
||||
for (var a = obj.length - 1; a >= 0; a--)
|
||||
obj[a].classList.remove('act');
|
||||
|
||||
var obj = document.querySelectorAll('#ops>a');
|
||||
obj = document.querySelectorAll('#ops>a');
|
||||
for (var a = obj.length - 1; a >= 0; a--)
|
||||
obj[a].classList.remove('act');
|
||||
|
||||
if (dest) {
|
||||
document.getElementById('op_' + dest).classList.add('act');
|
||||
ebi('op_' + dest).classList.add('act');
|
||||
document.querySelector('#ops>a[data-dest=' + dest + ']').classList.add('act');
|
||||
|
||||
var fn = window['goto_' + dest];
|
||||
@@ -121,7 +66,7 @@ function goto_up2k() {
|
||||
if (op !== null && op !== '.')
|
||||
goto(op);
|
||||
}
|
||||
document.getElementById('ops').style.display = 'block';
|
||||
ebi('ops').style.display = 'block';
|
||||
})();
|
||||
|
||||
|
||||
@@ -150,21 +95,21 @@ function up2k_init(have_crypto) {
|
||||
|
||||
// show modal message
|
||||
function showmodal(msg) {
|
||||
o('u2notbtn').innerHTML = msg;
|
||||
o('u2btn').style.display = 'none';
|
||||
o('u2notbtn').style.display = 'block';
|
||||
o('u2conf').style.opacity = '0.5';
|
||||
ebi('u2notbtn').innerHTML = msg;
|
||||
ebi('u2btn').style.display = 'none';
|
||||
ebi('u2notbtn').style.display = 'block';
|
||||
ebi('u2conf').style.opacity = '0.5';
|
||||
}
|
||||
|
||||
// hide modal message
|
||||
function unmodal() {
|
||||
o('u2notbtn').style.display = 'none';
|
||||
o('u2btn').style.display = 'block';
|
||||
o('u2conf').style.opacity = '1';
|
||||
o('u2notbtn').innerHTML = '';
|
||||
ebi('u2notbtn').style.display = 'none';
|
||||
ebi('u2btn').style.display = 'block';
|
||||
ebi('u2conf').style.opacity = '1';
|
||||
ebi('u2notbtn').innerHTML = '';
|
||||
}
|
||||
|
||||
var post_url = o('op_bup').getElementsByTagName('form')[0].getAttribute('action');
|
||||
var post_url = ebi('op_bup').getElementsByTagName('form')[0].getAttribute('action');
|
||||
if (post_url && post_url.charAt(post_url.length - 1) !== '/')
|
||||
post_url += '/';
|
||||
|
||||
@@ -181,25 +126,25 @@ function up2k_init(have_crypto) {
|
||||
import_js('/.cpr/deps/sha512.js', unmodal);
|
||||
|
||||
if (is_https)
|
||||
o('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best';
|
||||
ebi('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best';
|
||||
else
|
||||
o('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance';
|
||||
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance';
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// show uploader if the user only has write-access
|
||||
if (!o('files'))
|
||||
if (!ebi('files'))
|
||||
goto('up2k');
|
||||
|
||||
// shows or clears an error message in the basic uploader ui
|
||||
function setmsg(msg) {
|
||||
if (msg !== undefined) {
|
||||
o('u2err').setAttribute('class', 'err');
|
||||
o('u2err').innerHTML = msg;
|
||||
ebi('u2err').setAttribute('class', 'err');
|
||||
ebi('u2err').innerHTML = msg;
|
||||
}
|
||||
else {
|
||||
o('u2err').setAttribute('class', '');
|
||||
o('u2err').innerHTML = '';
|
||||
ebi('u2err').setAttribute('class', '');
|
||||
ebi('u2err').innerHTML = '';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -210,7 +155,7 @@ function up2k_init(have_crypto) {
|
||||
}
|
||||
|
||||
// handle user intent to use the basic uploader instead
|
||||
o('u2nope').onclick = function (e) {
|
||||
ebi('u2nope').onclick = function (e) {
|
||||
e.preventDefault();
|
||||
setmsg('');
|
||||
goto('bup');
|
||||
@@ -229,9 +174,9 @@ function up2k_init(have_crypto) {
|
||||
function cfg_get(name) {
|
||||
var val = localStorage.getItem(name);
|
||||
if (val === null)
|
||||
return parseInt(o(name).value);
|
||||
return parseInt(ebi(name).value);
|
||||
|
||||
o(name).value = val;
|
||||
ebi(name).value = val;
|
||||
return val;
|
||||
}
|
||||
|
||||
@@ -242,7 +187,7 @@ function up2k_init(have_crypto) {
|
||||
else
|
||||
val = (val == '1');
|
||||
|
||||
o(name).checked = val;
|
||||
ebi(name).checked = val;
|
||||
return val;
|
||||
}
|
||||
|
||||
@@ -250,12 +195,13 @@ function up2k_init(have_crypto) {
|
||||
localStorage.setItem(
|
||||
name, val ? '1' : '0');
|
||||
|
||||
o(name).checked = val;
|
||||
ebi(name).checked = val;
|
||||
return val;
|
||||
}
|
||||
|
||||
var parallel_uploads = cfg_get('nthread');
|
||||
var multitask = bcfg_get('multitask', true);
|
||||
var ask_up = bcfg_get('ask_up', true);
|
||||
|
||||
var col_hashing = '#00bbff';
|
||||
var col_hashed = '#004466';
|
||||
@@ -284,9 +230,9 @@ function up2k_init(have_crypto) {
|
||||
return un2k("this is the basic uploader; up2k needs at least<br />chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1");
|
||||
|
||||
function nav() {
|
||||
o('file' + fdom_ctr).click();
|
||||
ebi('file' + fdom_ctr).click();
|
||||
}
|
||||
o('u2btn').addEventListener('click', nav, false);
|
||||
ebi('u2btn').addEventListener('click', nav, false);
|
||||
|
||||
function ondrag(ev) {
|
||||
ev.stopPropagation();
|
||||
@@ -294,8 +240,8 @@ function up2k_init(have_crypto) {
|
||||
ev.dataTransfer.dropEffect = 'copy';
|
||||
ev.dataTransfer.effectAllowed = 'copy';
|
||||
}
|
||||
o('u2btn').addEventListener('dragover', ondrag, false);
|
||||
o('u2btn').addEventListener('dragenter', ondrag, false);
|
||||
ebi('u2btn').addEventListener('dragover', ondrag, false);
|
||||
ebi('u2btn').addEventListener('dragenter', ondrag, false);
|
||||
|
||||
function gotfile(ev) {
|
||||
ev.stopPropagation();
|
||||
@@ -317,6 +263,7 @@ function up2k_init(have_crypto) {
|
||||
|
||||
more_one_file();
|
||||
var bad_files = [];
|
||||
var good_files = [];
|
||||
for (var a = 0; a < files.length; a++) {
|
||||
var fobj = files[a];
|
||||
if (is_itemlist) {
|
||||
@@ -330,9 +277,32 @@ function up2k_init(have_crypto) {
|
||||
throw 1;
|
||||
}
|
||||
catch (ex) {
|
||||
bad_files.push([a, fobj.name]);
|
||||
bad_files.push(fobj.name);
|
||||
continue;
|
||||
}
|
||||
good_files.push(fobj);
|
||||
}
|
||||
|
||||
if (bad_files.length > 0) {
|
||||
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length);
|
||||
for (var a = 0; a < bad_files.length; a++)
|
||||
msg += '-- ' + bad_files[a] + '\n';
|
||||
|
||||
if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))
|
||||
msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';
|
||||
|
||||
alert(msg);
|
||||
}
|
||||
|
||||
var msg = ['upload these ' + good_files.length + ' files?'];
|
||||
for (var a = 0; a < good_files.length; a++)
|
||||
msg.push(good_files[a].name);
|
||||
|
||||
if (ask_up && !confirm(msg.join('\n')))
|
||||
return;
|
||||
|
||||
for (var a = 0; a < good_files.length; a++) {
|
||||
var fobj = good_files[a];
|
||||
var now = new Date().getTime();
|
||||
var lmod = fobj.lastModified || now;
|
||||
var entry = {
|
||||
@@ -357,31 +327,20 @@ function up2k_init(have_crypto) {
|
||||
var tr = document.createElement('tr');
|
||||
tr.innerHTML = '<td id="f{0}n"></td><td id="f{0}t">hashing</td><td id="f{0}p" class="prog"></td>'.format(st.files.length);
|
||||
tr.getElementsByTagName('td')[0].textContent = entry.name;
|
||||
o('u2tab').appendChild(tr);
|
||||
ebi('u2tab').appendChild(tr);
|
||||
|
||||
st.files.push(entry);
|
||||
st.todo.hash.push(entry);
|
||||
}
|
||||
|
||||
if (bad_files.length > 0) {
|
||||
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length);
|
||||
for (var a = 0; a < bad_files.length; a++)
|
||||
msg += '-- ' + bad_files[a][1] + '\n';
|
||||
|
||||
if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))
|
||||
msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';
|
||||
|
||||
alert(msg);
|
||||
}
|
||||
}
|
||||
o('u2btn').addEventListener('drop', gotfile, false);
|
||||
ebi('u2btn').addEventListener('drop', gotfile, false);
|
||||
|
||||
function more_one_file() {
|
||||
fdom_ctr++;
|
||||
var elm = document.createElement('div')
|
||||
elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" />'.format(fdom_ctr);
|
||||
o('u2form').appendChild(elm);
|
||||
o('file' + fdom_ctr).addEventListener('change', gotfile, false);
|
||||
ebi('u2form').appendChild(elm);
|
||||
ebi('file' + fdom_ctr).addEventListener('change', gotfile, false);
|
||||
}
|
||||
more_one_file();
|
||||
|
||||
@@ -391,16 +350,17 @@ function up2k_init(have_crypto) {
|
||||
//
|
||||
|
||||
function handshakes_permitted() {
|
||||
return multitask || (
|
||||
st.todo.upload.length == 0 &&
|
||||
st.busy.upload.length == 0);
|
||||
var lim = multitask ? 1 : 0;
|
||||
return lim >=
|
||||
st.todo.upload.length +
|
||||
st.busy.upload.length;
|
||||
}
|
||||
|
||||
function hashing_permitted() {
|
||||
return multitask || (
|
||||
handshakes_permitted() &&
|
||||
st.todo.handshake.length == 0 &&
|
||||
st.busy.handshake.length == 0);
|
||||
var lim = multitask ? 1 : 0;
|
||||
return handshakes_permitted() && lim >=
|
||||
st.todo.handshake.length +
|
||||
st.busy.handshake.length;
|
||||
}
|
||||
|
||||
var tasker = (function () {
|
||||
@@ -451,17 +411,6 @@ function up2k_init(have_crypto) {
|
||||
/// hashing
|
||||
//
|
||||
|
||||
// https://gist.github.com/jonleighton/958841
|
||||
function buf2b64_maybe_fucky(buffer) {
|
||||
var ret = '';
|
||||
var view = new DataView(buffer);
|
||||
for (var i = 0; i < view.byteLength; i++) {
|
||||
ret += String.fromCharCode(view.getUint8(i));
|
||||
}
|
||||
return window.btoa(ret).replace(
|
||||
/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
|
||||
}
|
||||
|
||||
// https://gist.github.com/jonleighton/958841
|
||||
function buf2b64(arrayBuffer) {
|
||||
var base64 = '';
|
||||
@@ -502,20 +451,6 @@ function up2k_init(have_crypto) {
|
||||
return base64;
|
||||
}
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/digest
|
||||
function buf2hex(buffer) {
|
||||
var hexCodes = [];
|
||||
var view = new DataView(buffer);
|
||||
for (var i = 0; i < view.byteLength; i += 4) {
|
||||
var value = view.getUint32(i) // 4 bytes per iter
|
||||
var stringValue = value.toString(16) // doesn't pad
|
||||
var padding = '00000000'
|
||||
var paddedValue = (padding + stringValue).slice(-padding.length)
|
||||
hexCodes.push(paddedValue);
|
||||
}
|
||||
return hexCodes.join("");
|
||||
}
|
||||
|
||||
function get_chunksize(filesize) {
|
||||
var chunksize = 1024 * 1024;
|
||||
var stepsize = 512 * 1024;
|
||||
@@ -602,7 +537,7 @@ function up2k_init(have_crypto) {
|
||||
pb_html += '<div id="f{0}p{1}" style="width:{2}%"><div></div></div>'.format(
|
||||
t.n, a, pb_perc);
|
||||
|
||||
o('f{0}p'.format(t.n)).innerHTML = pb_html;
|
||||
ebi('f{0}p'.format(t.n)).innerHTML = pb_html;
|
||||
|
||||
var reader = new FileReader();
|
||||
|
||||
@@ -677,7 +612,7 @@ function up2k_init(have_crypto) {
|
||||
alert('{0} ms, {1} MB/s\n'.format(t.t2 - t.t1, spd.toFixed(3)) + t.hash.join('\n'));
|
||||
}
|
||||
|
||||
o('f{0}t'.format(t.n)).innerHTML = 'connecting';
|
||||
ebi('f{0}t'.format(t.n)).innerHTML = 'connecting';
|
||||
st.busy.hash.splice(st.busy.hash.indexOf(t), 1);
|
||||
st.todo.handshake.push(t);
|
||||
};
|
||||
@@ -706,7 +641,7 @@ function up2k_init(have_crypto) {
|
||||
if (response.name !== t.name) {
|
||||
// file exists; server renamed us
|
||||
t.name = response.name;
|
||||
o('f{0}n'.format(t.n)).textContent = t.name;
|
||||
ebi('f{0}n'.format(t.n)).textContent = t.name;
|
||||
}
|
||||
|
||||
t.postlist = [];
|
||||
@@ -736,23 +671,41 @@ function up2k_init(have_crypto) {
|
||||
msg = 'uploading';
|
||||
done = false;
|
||||
}
|
||||
o('f{0}t'.format(t.n)).innerHTML = msg;
|
||||
ebi('f{0}t'.format(t.n)).innerHTML = msg;
|
||||
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
|
||||
|
||||
if (done) {
|
||||
var spd1 = (t.size / ((t.t2 - t.t1) / 1000.)) / (1024 * 1024.);
|
||||
var spd2 = (t.size / ((t.t3 - t.t2) / 1000.)) / (1024 * 1024.);
|
||||
o('f{0}p'.format(t.n)).innerHTML = 'hash {0}, up {1} MB/s'.format(
|
||||
ebi('f{0}p'.format(t.n)).innerHTML = 'hash {0}, up {1} MB/s'.format(
|
||||
spd1.toFixed(2), spd2.toFixed(2));
|
||||
}
|
||||
tasker();
|
||||
}
|
||||
else
|
||||
else {
|
||||
var err = "";
|
||||
var rsp = (xhr.responseText + '');
|
||||
if (rsp.indexOf('partial upload exists') !== -1 ||
|
||||
rsp.indexOf('file already exists') !== -1) {
|
||||
err = rsp;
|
||||
var ofs = err.lastIndexOf(' : ');
|
||||
if (ofs > 0)
|
||||
err = err.slice(0, ofs);
|
||||
}
|
||||
if (err != "") {
|
||||
ebi('f{0}t'.format(t.n)).innerHTML = "ERROR";
|
||||
ebi('f{0}p'.format(t.n)).innerHTML = err;
|
||||
|
||||
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
|
||||
tasker();
|
||||
return;
|
||||
}
|
||||
alert("server broke (error {0}):\n\"{1}\"\n".format(
|
||||
xhr.status,
|
||||
(xhr.response && xhr.response.err) ||
|
||||
(xhr.responseText && xhr.responseText) ||
|
||||
"no further information"));
|
||||
}
|
||||
};
|
||||
xhr.open('POST', post_url + 'handshake.php', true);
|
||||
xhr.responseType = 'text';
|
||||
@@ -803,7 +756,7 @@ function up2k_init(have_crypto) {
|
||||
t.postlist.splice(t.postlist.indexOf(npart), 1);
|
||||
if (t.postlist.length == 0) {
|
||||
t.t3 = new Date().getTime();
|
||||
o('f{0}t'.format(t.n)).innerHTML = 'verifying';
|
||||
ebi('f{0}t'.format(t.n)).innerHTML = 'verifying';
|
||||
st.todo.handshake.push(t);
|
||||
}
|
||||
tasker();
|
||||
@@ -834,7 +787,7 @@ function up2k_init(have_crypto) {
|
||||
//
|
||||
|
||||
function prog(nfile, nchunk, color, percent) {
|
||||
var n1 = o('f{0}p{1}'.format(nfile, nchunk));
|
||||
var n1 = ebi('f{0}p{1}'.format(nfile, nchunk));
|
||||
var n2 = n1.getElementsByTagName('div')[0];
|
||||
if (percent === undefined) {
|
||||
n1.style.background = color;
|
||||
@@ -857,7 +810,7 @@ function up2k_init(have_crypto) {
|
||||
dir.preventDefault();
|
||||
} catch (ex) { }
|
||||
|
||||
var obj = o('nthread');
|
||||
var obj = ebi('nthread');
|
||||
if (dir.target) {
|
||||
obj.style.background = '#922';
|
||||
var v = Math.floor(parseInt(obj.value));
|
||||
@@ -887,24 +840,30 @@ function up2k_init(have_crypto) {
|
||||
bcfg_set('multitask', multitask);
|
||||
}
|
||||
|
||||
function tgl_ask_up() {
|
||||
ask_up = !ask_up;
|
||||
bcfg_set('ask_up', ask_up);
|
||||
}
|
||||
|
||||
function nop(ev) {
|
||||
ev.preventDefault();
|
||||
this.click();
|
||||
}
|
||||
|
||||
o('nthread_add').onclick = function (ev) {
|
||||
ebi('nthread_add').onclick = function (ev) {
|
||||
ev.preventDefault();
|
||||
bumpthread(1);
|
||||
};
|
||||
o('nthread_sub').onclick = function (ev) {
|
||||
ebi('nthread_sub').onclick = function (ev) {
|
||||
ev.preventDefault();
|
||||
bumpthread(-1);
|
||||
};
|
||||
|
||||
o('nthread').addEventListener('input', bumpthread, false);
|
||||
o('multitask').addEventListener('click', tgl_multitask, false);
|
||||
ebi('nthread').addEventListener('input', bumpthread, false);
|
||||
ebi('multitask').addEventListener('click', tgl_multitask, false);
|
||||
ebi('ask_up').addEventListener('click', tgl_ask_up, false);
|
||||
|
||||
var nodes = o('u2conf').getElementsByTagName('a');
|
||||
var nodes = ebi('u2conf').getElementsByTagName('a');
|
||||
for (var a = nodes.length - 1; a >= 0; a--)
|
||||
nodes[a].addEventListener('touchend', nop, false);
|
||||
|
||||
|
||||
@@ -194,6 +194,12 @@
|
||||
#u2conf input+a {
|
||||
background: #d80;
|
||||
}
|
||||
#u2conf input[type="checkbox"]+label {
|
||||
color: #f5a;
|
||||
}
|
||||
#u2conf input[type="checkbox"]:checked+label {
|
||||
color: #fc5;
|
||||
}
|
||||
#u2foot {
|
||||
color: #fff;
|
||||
font-style: italic;
|
||||
|
||||
@@ -3,7 +3,8 @@
|
||||
href="#" data-dest="up2k">up2k</a><i></i><a
|
||||
href="#" data-dest="bup">bup</a><i></i><a
|
||||
href="#" data-dest="mkdir">mkdir</a><i></i><a
|
||||
href="#" data-dest="new_md">new.md</a></div>
|
||||
href="#" data-dest="new_md">new.md</a><i></i><a
|
||||
href="#" data-dest="msg">msg</a></div>
|
||||
|
||||
<div id="op_bup" class="opview opbox act">
|
||||
<div id="u2err"></div>
|
||||
@@ -30,6 +31,13 @@
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="op_msg" class="opview opbox">
|
||||
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="/{{ vdir }}">
|
||||
<input type="text" name="msg" size="30">
|
||||
<input type="submit" value="send">
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="op_up2k" class="opview">
|
||||
<form id="u2form" method="post" enctype="multipart/form-data" onsubmit="return false;"></form>
|
||||
|
||||
@@ -43,10 +51,14 @@
|
||||
<input class="txtbox" id="nthread" value="2" />
|
||||
<a href="#" id="nthread_add">+</a>
|
||||
</td>
|
||||
<td rowspan="2">
|
||||
<td rowspan="2" style="padding-left:1.5em">
|
||||
<input type="checkbox" id="multitask" />
|
||||
<label for="multitask">hash while<br />uploading</label>
|
||||
</td>
|
||||
<td rowspan="2">
|
||||
<input type="checkbox" id="ask_up" />
|
||||
<label for="ask_up">ask for<br />confirmation</label>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
|
||||
109
copyparty/web/util.js
Normal file
109
copyparty/web/util.js
Normal file
@@ -0,0 +1,109 @@
|
||||
"use strict";
|
||||
|
||||
// error handler for mobile devices
|
||||
function hcroak(msg) {
|
||||
document.body.innerHTML = msg;
|
||||
window.onerror = undefined;
|
||||
throw 'fatal_err';
|
||||
}
|
||||
function croak(msg) {
|
||||
document.body.textContent = msg;
|
||||
window.onerror = undefined;
|
||||
throw msg;
|
||||
}
|
||||
function esc(txt) {
|
||||
return txt.replace(/[&"<>]/g, function (c) {
|
||||
return {
|
||||
'&': '&',
|
||||
'"': '"',
|
||||
'<': '<',
|
||||
'>': '>'
|
||||
}[c];
|
||||
});
|
||||
}
|
||||
function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
window.onerror = undefined;
|
||||
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
|
||||
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
|
||||
|
||||
if (error) {
|
||||
var find = ['desc', 'stack', 'trace'];
|
||||
for (var a = 0; a < find.length; a++)
|
||||
if (String(error[find[a]]) !== 'undefined')
|
||||
html.push('<h2>' + find[a] + '</h2>' +
|
||||
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
|
||||
}
|
||||
document.body.style.fontSize = '0.8em';
|
||||
document.body.style.padding = '0 1em 1em 1em';
|
||||
hcroak(html.join('\n'));
|
||||
}
|
||||
|
||||
|
||||
function ebi(id) {
|
||||
return document.getElementById(id);
|
||||
}
|
||||
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
||||
if (!String.prototype.endsWith) {
|
||||
String.prototype.endsWith = function (search, this_len) {
|
||||
if (this_len === undefined || this_len > this.length) {
|
||||
this_len = this.length;
|
||||
}
|
||||
return this.substring(this_len - search.length, this_len) === search;
|
||||
};
|
||||
}
|
||||
if (!String.startsWith) {
|
||||
String.prototype.startsWith = function (s, i) {
|
||||
i = i > 0 ? i | 0 : 0;
|
||||
return this.substring(i, i + s.length) === s;
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var script = document.createElement('script');
|
||||
script.type = 'text/javascript';
|
||||
script.src = url;
|
||||
|
||||
script.onreadystatechange = cb;
|
||||
script.onload = cb;
|
||||
|
||||
head.appendChild(script);
|
||||
}
|
||||
|
||||
|
||||
function sortTable(table, col) {
|
||||
var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows
|
||||
th = table.tHead.rows[0].cells,
|
||||
tr = Array.prototype.slice.call(tb.rows, 0),
|
||||
i, reverse = th[col].className == 'sort1' ? -1 : 1;
|
||||
for (var a = 0, thl = th.length; a < thl; a++)
|
||||
th[a].className = '';
|
||||
th[col].className = 'sort' + reverse;
|
||||
var stype = th[col].getAttribute('sort');
|
||||
tr = tr.sort(function (a, b) {
|
||||
var v1 = a.cells[col].textContent.trim();
|
||||
var v2 = b.cells[col].textContent.trim();
|
||||
if (stype == 'int') {
|
||||
v1 = parseInt(v1.replace(/,/g, ''));
|
||||
v2 = parseInt(v2.replace(/,/g, ''));
|
||||
return reverse * (v1 - v2);
|
||||
}
|
||||
return reverse * (v1.localeCompare(v2));
|
||||
});
|
||||
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]);
|
||||
}
|
||||
function makeSortable(table) {
|
||||
var th = table.tHead, i;
|
||||
th && (th = th.rows[0]) && (th = th.cells);
|
||||
if (th) i = th.length;
|
||||
else return; // if no `<thead>` then do nothing
|
||||
while (--i >= 0) (function (i) {
|
||||
th[i].onclick = function () {
|
||||
sortTable(table, i);
|
||||
};
|
||||
}(i));
|
||||
}
|
||||
@@ -3,6 +3,14 @@ echo not a script
|
||||
exit 1
|
||||
|
||||
|
||||
##
|
||||
## delete all partial uploads
|
||||
## (supports linux/macos, probably windows+msys2)
|
||||
|
||||
gzip -d < .hist/up2k.snap | jq -r '.[].tnam' | while IFS= read -r f; do rm -f -- "$f"; done
|
||||
gzip -d < .hist/up2k.snap | jq -r '.[].name' | while IFS= read -r f; do wc -c -- "$f" | grep -qiE '^[^0-9a-z]*0' && rm -f -- "$f"; done
|
||||
|
||||
|
||||
##
|
||||
## create a test payload
|
||||
|
||||
@@ -13,7 +21,7 @@ head -c $((2*1024*1024*1024)) /dev/zero | openssl enc -aes-256-ctr -pass pass:hu
|
||||
## testing multiple parallel uploads
|
||||
## usage: para | tee log
|
||||
|
||||
para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}; do for ((n=0;n<s;n++)); do curl -sF "act=bput" -F "f=@garbage.file" http://127.0.0.1:1234/ 2>&1 & done; wait; echo; done; done; }
|
||||
para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}; do for ((n=0;n<s;n++)); do curl -sF "act=bput" -F "f=@garbage.file" http://127.0.0.1:3923/ 2>&1 & done; wait; echo; done; done; }
|
||||
|
||||
|
||||
##
|
||||
@@ -36,13 +44,13 @@ for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qwe,rty;asd
|
||||
|
||||
fn=$(printf '\xba\xdc\xab.cab')
|
||||
echo asdf > "$fn"
|
||||
curl --cookie cppwd=wark -sF "act=bput" -F "f=@$fn" http://127.0.0.1:1234/moji/%ED%91/
|
||||
curl --cookie cppwd=wark -sF "act=bput" -F "f=@$fn" http://127.0.0.1:3923/moji/%ED%91/
|
||||
|
||||
|
||||
##
|
||||
## test compression
|
||||
|
||||
wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:1234/.cpr/deps/ogv.js -O- | md5sum; p=~ed/dev/copyparty/copyparty/web/deps/ogv.js.gz; md5sum $p; gzip -d < $p | md5sum
|
||||
wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:3923/.cpr/deps/ogv.js -O- | md5sum; p=~ed/dev/copyparty/copyparty/web/deps/ogv.js.gz; md5sum $p; gzip -d < $p | md5sum
|
||||
|
||||
|
||||
##
|
||||
@@ -113,3 +121,12 @@ function convert_markdown(md_text, dest_dom) {
|
||||
var end = tsh.slice(-2);
|
||||
console.log("render", end.pop() - end.pop(), (tsh[tsh.length - 1] - tsh[0]) / (tsh.length - 1));
|
||||
}
|
||||
|
||||
|
||||
##
|
||||
## tmpfiles.d meme
|
||||
|
||||
mk() { rm -rf /tmp/foo; sudo -u ed bash -c 'mkdir /tmp/foo; echo hi > /tmp/foo/bar'; }
|
||||
mk && t0="$(date)" && while true; do date -s "$(date '+ 1 hour')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
|
||||
mk && sudo -u ed flock /tmp/foo sleep 40 & sleep 1; ps aux | grep -E 'sleep 40$' && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
|
||||
mk && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; tar -cf/dev/null /tmp/foo; done; echo "$t0"
|
||||
|
||||
35
docs/pretend-youre-qnap.patch
Normal file
35
docs/pretend-youre-qnap.patch
Normal file
@@ -0,0 +1,35 @@
|
||||
diff --git a/copyparty/httpcli.py b/copyparty/httpcli.py
|
||||
index 2d3c1ad..e1e85a0 100644
|
||||
--- a/copyparty/httpcli.py
|
||||
+++ b/copyparty/httpcli.py
|
||||
@@ -864,6 +864,30 @@ class HttpCli(object):
|
||||
#
|
||||
# send reply
|
||||
|
||||
+ try:
|
||||
+ fakefn = self.conn.hsrv.fakefn
|
||||
+ fakectr = self.conn.hsrv.fakectr
|
||||
+ fakedata = self.conn.hsrv.fakedata
|
||||
+ except:
|
||||
+ fakefn = b''
|
||||
+ fakectr = 0
|
||||
+ fakedata = b''
|
||||
+
|
||||
+ self.log('\n{} {}\n{}'.format(fakefn, fakectr, open_args[0]))
|
||||
+ if fakefn == open_args[0] and fakectr > 0:
|
||||
+ self.reply(fakedata, mime=guess_mime(req_path)[0])
|
||||
+ self.conn.hsrv.fakectr = fakectr - 1
|
||||
+ else:
|
||||
+ with open_func(*open_args) as f:
|
||||
+ fakedata = f.read()
|
||||
+
|
||||
+ self.conn.hsrv.fakefn = open_args[0]
|
||||
+ self.conn.hsrv.fakedata = fakedata
|
||||
+ self.conn.hsrv.fakectr = 15
|
||||
+ self.reply(fakedata, mime=guess_mime(req_path)[0])
|
||||
+
|
||||
+ return True
|
||||
+
|
||||
self.out_headers["Accept-Ranges"] = "bytes"
|
||||
self.send_headers(
|
||||
length=upper - lower,
|
||||
62
docs/rclone.md
Normal file
62
docs/rclone.md
Normal file
@@ -0,0 +1,62 @@
|
||||
# using rclone to mount a remote copyparty server as a local filesystem
|
||||
|
||||
speed estimates with server and client on the same win10 machine:
|
||||
* `1070 MiB/s` with rclone as both server and client
|
||||
* `570 MiB/s` with rclone-client and `copyparty -ed -j16` as server
|
||||
* `220 MiB/s` with rclone-client and `copyparty -ed` as server
|
||||
* `100 MiB/s` with [../bin/copyparty-fuse.py](../bin/copyparty-fuse.py) as client
|
||||
|
||||
when server is on another machine (1gbit LAN),
|
||||
* `75 MiB/s` with [../bin/copyparty-fuse.py](../bin/copyparty-fuse.py) as client
|
||||
* `92 MiB/s` with rclone-client and `copyparty -ed` as server
|
||||
* `103 MiB/s` (connection max) with `copyparty -ed -j16` and all the others
|
||||
|
||||
|
||||
# creating the config file
|
||||
|
||||
if you want to use password auth, add `headers = Cookie,cppwd=fgsfds` below
|
||||
|
||||
|
||||
### on windows clients:
|
||||
```
|
||||
(
|
||||
echo [cpp]
|
||||
echo type = http
|
||||
echo url = http://127.0.0.1:3923/
|
||||
) > %userprofile%\.config\rclone\rclone.conf
|
||||
```
|
||||
|
||||
also install the windows dependencies: [winfsp](https://github.com/billziss-gh/winfsp/releases/latest)
|
||||
|
||||
|
||||
### on unix clients:
|
||||
```
|
||||
cat > ~/.config/rclone/rclone.conf <<'EOF'
|
||||
[cpp]
|
||||
type = http
|
||||
url = http://127.0.0.1:3923/
|
||||
EOF
|
||||
```
|
||||
|
||||
|
||||
# mounting the copyparty server locally
|
||||
```
|
||||
rclone.exe mount --vfs-cache-max-age 5s --attr-timeout 5s --dir-cache-time 5s cpp: Z:
|
||||
```
|
||||
|
||||
|
||||
# use rclone as server too, replacing copyparty
|
||||
|
||||
feels out of place but is too good not to mention
|
||||
|
||||
```
|
||||
rclone.exe serve http --read-only .
|
||||
```
|
||||
|
||||
* `webdav` gives write-access but `http` is twice as fast
|
||||
* `ftp` is buggy, avoid
|
||||
|
||||
|
||||
# bugs
|
||||
|
||||
* rclone-client throws an exception if you try to read an empty file (should return zero bytes)
|
||||
129
scripts/copyparty-repack.sh
Executable file
129
scripts/copyparty-repack.sh
Executable file
@@ -0,0 +1,129 @@
|
||||
#!/bin/bash
|
||||
repacker=1
|
||||
set -e
|
||||
|
||||
# -- download latest copyparty (source.tgz and sfx),
|
||||
# -- build minimal sfx versions,
|
||||
# -- create a .tar.gz bundle
|
||||
#
|
||||
# convenient for deploying updates to inconvenient locations
|
||||
# (and those are usually linux so bash is good inaff)
|
||||
# (but that said this even has macos support)
|
||||
#
|
||||
# bundle will look like:
|
||||
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty
|
||||
# -rw-r--r-- 0 ed ed 491318 Nov 19 00:40 copyparty-extras/copyparty-0.5.4.tar.gz
|
||||
# -rwxr-xr-x 0 ed ed 30254 Nov 17 23:58 copyparty-extras/copyparty-fuse.py
|
||||
# -rwxr-xr-x 0 ed ed 481403 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.sh
|
||||
# -rwxr-xr-x 0 ed ed 506043 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.py
|
||||
# -rwxr-xr-x 0 ed ed 167699 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.sh
|
||||
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py
|
||||
|
||||
|
||||
command -v gtar && tar() { gtar "$@"; }
|
||||
command -v gsed && sed() { gsed "$@"; }
|
||||
td="$(mktemp -d)"
|
||||
od="$(pwd)"
|
||||
cd "$td"
|
||||
pwd
|
||||
|
||||
|
||||
dl_text() {
|
||||
command -v curl && exec curl "$@"
|
||||
exec wget -O- "$@"
|
||||
}
|
||||
dl_files() {
|
||||
command -v curl && exec curl -L --remote-name-all "$@"
|
||||
exec wget "$@"
|
||||
}
|
||||
export -f dl_files
|
||||
|
||||
|
||||
# if cache exists, use that instead of bothering github
|
||||
cache="$od/.copyparty-repack.cache"
|
||||
[ -e "$cache" ] &&
|
||||
tar -xf "$cache" ||
|
||||
{
|
||||
# get download links from github
|
||||
dl_text https://api.github.com/repos/9001/copyparty/releases/latest |
|
||||
(
|
||||
# prefer jq if available
|
||||
jq -r '.assets[]|select(.name|test("-sfx|tar.gz")).browser_download_url' ||
|
||||
|
||||
# fallback to awk (sorry)
|
||||
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
|
||||
tar -czf "$cache" *
|
||||
}
|
||||
|
||||
|
||||
# move src into copyparty-extras/,
|
||||
# move sfx into copyparty-extras/sfx-full/
|
||||
mkdir -p copyparty-extras/sfx-{full,lite}
|
||||
mv copyparty-sfx.* copyparty-extras/sfx-full/
|
||||
mv copyparty-*.tar.gz copyparty-extras/
|
||||
|
||||
|
||||
# unpack the source code
|
||||
( cd copyparty-extras/
|
||||
tar -xf *.tar.gz
|
||||
)
|
||||
|
||||
|
||||
# use repacker from release if that is newer
|
||||
p_other=copyparty-extras/copyparty-*/scripts/copyparty-repack.sh
|
||||
other=$(awk -F= 'BEGIN{v=-1} NR<10&&/^repacker=/{v=$NF} END{print v}' <$p_other)
|
||||
[ $repacker -lt $other ] &&
|
||||
cat $p_other >"$od/$0" && cd "$od" && rm -rf "$td" && exec "$0" "$@"
|
||||
|
||||
|
||||
# now drop the cache
|
||||
rm -f "$cache"
|
||||
|
||||
|
||||
# fix permissions
|
||||
chmod 755 \
|
||||
copyparty-extras/sfx-full/* \
|
||||
copyparty-extras/copyparty-*/{scripts,bin}/*
|
||||
|
||||
|
||||
# extract and repack the sfx with less features enabled
|
||||
( cd copyparty-extras/sfx-full/
|
||||
./copyparty-sfx.py -h
|
||||
cd ../copyparty-*/
|
||||
./scripts/make-sfx.sh re no-ogv no-cm
|
||||
)
|
||||
|
||||
|
||||
# put new sfx into copyparty-extras/sfx-lite/,
|
||||
# fuse client into copyparty-extras/,
|
||||
# copy lite-sfx.py to ./copyparty,
|
||||
# delete extracted source code
|
||||
( cd copyparty-extras/
|
||||
mv copyparty-*/dist/* sfx-lite/
|
||||
mv copyparty-*/bin/copyparty-fuse.py .
|
||||
cp -pv sfx-lite/copyparty-sfx.py ../copyparty
|
||||
rm -rf copyparty-{0..9}*.*.*{0..9}
|
||||
)
|
||||
|
||||
|
||||
# and include the repacker itself too
|
||||
cp -av "$od/$0" copyparty-extras/ ||
|
||||
cp -av "$0" copyparty-extras/ ||
|
||||
true
|
||||
|
||||
|
||||
# create the bundle
|
||||
fn=copyparty-$(date +%Y-%m%d-%H%M%S).tgz
|
||||
tar -czvf "$od/$fn" *
|
||||
cd "$od"
|
||||
rm -rf "$td"
|
||||
|
||||
|
||||
echo
|
||||
echo "done, here's your bundle:"
|
||||
ls -al "$fn"
|
||||
100
scripts/fusefuzz.py
Executable file
100
scripts/fusefuzz.py
Executable file
@@ -0,0 +1,100 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
"""
|
||||
td=/dev/shm/; [ -e $td ] || td=$HOME; mkdir -p $td/fusefuzz/{r,v}
|
||||
PYTHONPATH=.. python3 -m copyparty -v $td/fusefuzz/r::r -i 127.0.0.1
|
||||
../bin/copyparty-fuse.py http://127.0.0.1:3923/ $td/fusefuzz/v -cf 2 -cd 0.5
|
||||
(d="$PWD"; cd $td/fusefuzz && "$d"/fusefuzz.py)
|
||||
"""
|
||||
|
||||
|
||||
def chk(fsz, rsz, ofs0, shift, ofs, rf, vf):
    """read rsz bytes at offset ofs from both the reference file (rf) and
    the fuse-mounted file (vf), verify the contents match, and return the
    two buffers; on mismatch, dump both buffers to disk and raise"""
    # reposition both handles when the reference file is not already at ofs
    if rf.tell() != ofs:
        rf.seek(ofs)
        vf.seek(ofs)

    buf_r = rf.read(rsz)
    buf_v = vf.read(rsz)

    print(f"fsz {fsz} rsz {rsz} ofs {ofs0} shift {shift} ofs {ofs} = {len(buf_r)}")

    if buf_r != buf_v:
        # write both buffers out (buf.0 = real, buf.1 = fuse) for inspection
        for idx, data in enumerate((buf_r, buf_v)):
            with open("buf." + str(idx), "wb") as dump:
                dump.write(data)

        raise Exception(f"{len(buf_r)} != {len(buf_v)}")

    return buf_r, buf_v
|
||||
|
||||
|
||||
def main():
    # NOTE(review): reconstructed from a whitespace-mangled paste; nesting
    # below follows the loop semantics but should be confirmed upstream.
    #
    # fuzzes the fuse client by reading the real files in ./r/ and the
    # fuse-mounted mirrors in ./v/ with many combinations of file size,
    # read size, start offset and per-read offset drift, comparing bytes
    v = "v"
    # a few tiny files (0..4 bytes) to poke the dentry/file caches
    for n in range(5):
        with open(f"r/{n}", "wb") as f:
            f.write(b"h" * n)

    rand = os.urandom(7919)  # prime
    # file sizes straddling 2 MiB to hit chunk boundaries off-by-one
    for fsz in range(1024 * 1024 * 2 - 3, 1024 * 1024 * 2 + 3):
        with open("r/f", "wb", fsz) as f:
            # repeat the random block and trim to exactly fsz bytes
            f.write((rand * int(fsz / len(rand) + 1))[:fsz])

        # read sizes straddling 64 KiB
        for rsz in range(64 * 1024 - 2, 64 * 1024 + 2):
            # candidate start offsets: file start, and just before EOF
            # minus zero/one/two read-sizes
            ofslist = [0, 1, 2]
            for n in range(3):
                ofslist.append(fsz - n)
                ofslist.append(fsz - (rsz * 1 + n))
                ofslist.append(fsz - (rsz * 2 + n))

            for ofs0 in ofslist:
                # shift drifts the offset between consecutive reads
                for shift in range(-3, 3):
                    print(f"fsz {fsz} rsz {rsz} ofs {ofs0} shift {shift}")
                    ofs = ofs0
                    if ofs < 0 or ofs >= fsz:
                        continue

                    # touch the small files to churn the fuse caches
                    for n in range(1, 3):
                        with open(f"{v}/{n}", "rb") as f:
                            f.read()

                    prev_ofs = -99
                    with open("r/f", "rb", rsz) as rf:
                        with open(f"{v}/f", "rb", rsz) as vf:
                            # sequential forward scan with drift;
                            # stops at EOF or when the offset stalls
                            while True:
                                ofs += shift
                                if ofs < 0 or ofs > fsz or ofs == prev_ofs:
                                    break

                                prev_ofs = ofs

                                rb, vb = chk(fsz, rsz, ofs0, shift, ofs, rf, vf)

                                if not rb:
                                    break

                                ofs += len(rb)

                    # churn the caches again before the backwards pass
                    for n in range(1, 3):
                        with open(f"{v}/{n}", "rb") as f:
                            f.read()

                    with open("r/f", "rb", rsz) as rf:
                        with open(f"{v}/f", "rb", rsz) as vf:
                            # two backwards-stepping reads from the last ofs
                            for n in range(2):
                                ofs += shift
                                if ofs < 0 or ofs > fsz:
                                    break

                                rb, vb = chk(fsz, rsz, ofs0, shift, ofs, rf, vf)

                                ofs -= rsz

        # bumping fsz, sleep away the dentry cache in cppf
        time.sleep(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -3,12 +3,15 @@ set -e
|
||||
echo
|
||||
|
||||
# osx support
|
||||
command -v gtar >/dev/null &&
|
||||
command -v gfind >/dev/null && {
|
||||
tar() { gtar "$@"; }
|
||||
# port install gnutar findutils gsed coreutils
|
||||
gtar=$(command -v gtar || command -v gnutar) || true
|
||||
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
|
||||
tar() { $gtar "$@"; }
|
||||
sed() { gsed "$@"; }
|
||||
find() { gfind "$@"; }
|
||||
sort() { gsort "$@"; }
|
||||
command -v grealpath >/dev/null &&
|
||||
realpath() { grealpath "$@"; }
|
||||
}
|
||||
|
||||
which md5sum 2>/dev/null >/dev/null &&
|
||||
|
||||
@@ -18,13 +18,16 @@ echo
|
||||
# (the fancy markdown editor)
|
||||
|
||||
|
||||
command -v gtar >/dev/null &&
|
||||
command -v gfind >/dev/null && {
|
||||
tar() { gtar "$@"; }
|
||||
# port install gnutar findutils gsed coreutils
|
||||
gtar=$(command -v gtar || command -v gnutar) || true
|
||||
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
|
||||
tar() { $gtar "$@"; }
|
||||
sed() { gsed "$@"; }
|
||||
find() { gfind "$@"; }
|
||||
sort() { gsort "$@"; }
|
||||
unexpand() { gunexpand "$@"; }
|
||||
command -v grealpath >/dev/null &&
|
||||
realpath() { grealpath "$@"; }
|
||||
}
|
||||
|
||||
[ -e copyparty/__main__.py ] || cd ..
|
||||
@@ -59,28 +62,32 @@ cd sfx
|
||||
)/pe-copyparty"
|
||||
|
||||
echo "repack of files in $old"
|
||||
cp -pR "$old/"*{jinja2,copyparty} .
|
||||
mv {x.,}jinja2 2>/dev/null || true
|
||||
cp -pR "$old/"*{dep-j2,copyparty} .
|
||||
}
|
||||
|
||||
[ $repack ] || {
|
||||
echo collecting jinja2
|
||||
f="../build/Jinja2-2.6.tar.gz"
|
||||
f="../build/Jinja2-2.11.3.tar.gz"
|
||||
[ -e "$f" ] ||
|
||||
(url=https://files.pythonhosted.org/packages/25/c8/212b1c2fd6df9eaf536384b6c6619c4e70a3afd2dffdd00e5296ffbae940/Jinja2-2.6.tar.gz;
|
||||
(url=https://files.pythonhosted.org/packages/4f/e7/65300e6b32e69768ded990494809106f87da1d436418d5f1367ed3966fd7/Jinja2-2.11.3.tar.gz;
|
||||
wget -O$f "$url" || curl -L "$url" >$f)
|
||||
|
||||
tar -zxf $f
|
||||
mv Jinja2-*/jinja2 .
|
||||
rm -rf Jinja2-* jinja2/testsuite jinja2/_markupsafe/tests.py jinja2/_stringdefs.py
|
||||
mv Jinja2-*/src/jinja2 .
|
||||
rm -rf Jinja2-*
|
||||
|
||||
f=jinja2/lexer.py
|
||||
sed -r '/.*föö.*/ raise SyntaxError/' <$f >t
|
||||
tmv $f
|
||||
|
||||
f=jinja2/_markupsafe/_constants.py
|
||||
awk '!/: [0-9]+,?$/ || /(amp|gt|lt|quot|apos|nbsp).:/' <$f >t
|
||||
tmv $f
|
||||
echo collecting markupsafe
|
||||
f="../build/MarkupSafe-1.1.1.tar.gz"
|
||||
[ -e "$f" ] ||
|
||||
(url=https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz;
|
||||
wget -O$f "$url" || curl -L "$url" >$f)
|
||||
|
||||
tar -zxf $f
|
||||
mv MarkupSafe-*/src/markupsafe .
|
||||
rm -rf MarkupSafe-* markupsafe/_speedups.c
|
||||
|
||||
mkdir dep-j2/
|
||||
mv {markupsafe,jinja2} dep-j2/
|
||||
|
||||
# msys2 tar is bad, make the best of it
|
||||
echo collecting source
|
||||
@@ -94,8 +101,39 @@ cd sfx
|
||||
rm -f ../tar
|
||||
}
|
||||
|
||||
ver="$(awk '/^VERSION *= \(/ {
|
||||
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < ../copyparty/__version__.py)"
|
||||
ver=
|
||||
git describe --tags >/dev/null 2>/dev/null && {
|
||||
git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
|
||||
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//; s/-g?/./g')";
|
||||
t_ver=
|
||||
|
||||
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+$' && {
|
||||
# short format (exact version number)
|
||||
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g')";
|
||||
}
|
||||
|
||||
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+-[0-9]+-g[0-9a-f]+$' && {
|
||||
# long format (unreleased commit)
|
||||
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g; s/(.*) (.*)/\1 "\2"/')"
|
||||
}
|
||||
|
||||
[ -z "$t_ver" ] && {
|
||||
printf 'unexpected git version format: [%s]\n' "$git_ver"
|
||||
exit 1
|
||||
}
|
||||
|
||||
dt="$(git log -1 --format=%cd --date=format:'%Y,%m,%d' | sed -E 's/,0?/, /g')"
|
||||
printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt"
|
||||
sed -ri '
|
||||
s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/;
|
||||
s/^(S_VERSION =)(.*)/#\1\2\n\1 "'"$ver"'"/;
|
||||
s/^(BUILD_DT =)(.*)/#\1\2\n\1 ('"$dt"')/;
|
||||
' copyparty/__version__.py
|
||||
}
|
||||
|
||||
[ -z "$ver" ] &&
|
||||
ver="$(awk '/^VERSION *= \(/ {
|
||||
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"
|
||||
|
||||
ts=$(date -u +%s)
|
||||
hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)
|
||||
@@ -131,6 +169,15 @@ done
|
||||
sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f"
|
||||
}
|
||||
|
||||
find | grep -E '\.py$' |
|
||||
grep -vE '__version__' |
|
||||
tr '\n' '\0' |
|
||||
xargs -0 python ../scripts/uncomment.py
|
||||
|
||||
f=dep-j2/jinja2/constants.py
|
||||
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
|
||||
tmv "$f"
|
||||
|
||||
# up2k goes from 28k to 22k laff
|
||||
echo entabbening
|
||||
find | grep -E '\.(js|css|html|py)$' | while IFS= read -r f; do
|
||||
@@ -143,7 +190,7 @@ args=(--owner=1000 --group=1000)
|
||||
[ "$OSTYPE" = msys ] &&
|
||||
args=()
|
||||
|
||||
tar -cf tar "${args[@]}" --numeric-owner copyparty jinja2
|
||||
tar -cf tar "${args[@]}" --numeric-owner copyparty dep-j2
|
||||
|
||||
echo compressing tar
|
||||
# detect best level; bzip2 -7 is usually better than -9
|
||||
|
||||
@@ -2,12 +2,16 @@
|
||||
set -e
|
||||
echo
|
||||
|
||||
command -v gtar >/dev/null &&
|
||||
command -v gfind >/dev/null && {
|
||||
tar() { gtar "$@"; }
|
||||
# osx support
|
||||
# port install gnutar findutils gsed coreutils
|
||||
gtar=$(command -v gtar || command -v gnutar) || true
|
||||
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
|
||||
tar() { $gtar "$@"; }
|
||||
sed() { gsed "$@"; }
|
||||
find() { gfind "$@"; }
|
||||
sort() { gsort "$@"; }
|
||||
command -v grealpath >/dev/null &&
|
||||
realpath() { grealpath "$@"; }
|
||||
}
|
||||
|
||||
which md5sum 2>/dev/null >/dev/null &&
|
||||
@@ -16,15 +20,15 @@ which md5sum 2>/dev/null >/dev/null &&
|
||||
|
||||
ver="$1"
|
||||
|
||||
[[ "x$ver" == x ]] &&
|
||||
[ "x$ver" = x ] &&
|
||||
{
|
||||
echo "need argument 1: version"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
[[ -e copyparty/__main__.py ]] || cd ..
|
||||
[[ -e copyparty/__main__.py ]] ||
|
||||
[ -e copyparty/__main__.py ] || cd ..
|
||||
[ -e copyparty/__main__.py ] ||
|
||||
{
|
||||
echo "run me from within the project root folder"
|
||||
echo
|
||||
@@ -35,8 +39,8 @@ mkdir -p dist
|
||||
zip_path="$(pwd)/dist/copyparty-$ver.zip"
|
||||
tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz"
|
||||
|
||||
[[ -e "$zip_path" ]] ||
|
||||
[[ -e "$tgz_path" ]] &&
|
||||
[ -e "$zip_path" ] ||
|
||||
[ -e "$tgz_path" ] &&
|
||||
{
|
||||
echo "found existing archives for this version"
|
||||
echo " $zip_path"
|
||||
|
||||
226
scripts/sfx.py
226
scripts/sfx.py
@@ -2,7 +2,7 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re, os, sys, stat, time, shutil, tarfile, hashlib, platform, tempfile
|
||||
import os, sys, time, shutil, signal, tarfile, hashlib, platform, tempfile
|
||||
import subprocess as sp
|
||||
|
||||
"""
|
||||
@@ -29,6 +29,7 @@ STAMP = None
|
||||
PY2 = sys.version_info[0] == 2
|
||||
sys.dont_write_bytecode = True
|
||||
me = os.path.abspath(os.path.realpath(__file__))
|
||||
cpp = None
|
||||
|
||||
|
||||
def eprint(*args, **kwargs):
|
||||
@@ -191,91 +192,14 @@ def makesfx(tar_src, ver, ts):
|
||||
# skip 0
|
||||
|
||||
|
||||
def get_py_win(ret):
|
||||
tops = []
|
||||
p = str(os.getenv("LocalAppdata"))
|
||||
if p:
|
||||
tops.append(os.path.join(p, "Programs", "Python"))
|
||||
|
||||
progfiles = {}
|
||||
for p in ["ProgramFiles", "ProgramFiles(x86)"]:
|
||||
p = str(os.getenv(p))
|
||||
if p:
|
||||
progfiles[p] = 1
|
||||
# 32bit apps get x86 for both
|
||||
if p.endswith(" (x86)"):
|
||||
progfiles[p[:-6]] = 1
|
||||
|
||||
tops += list(progfiles.keys())
|
||||
|
||||
for sysroot in [me, sys.executable]:
|
||||
sysroot = sysroot[:3].upper()
|
||||
if sysroot[1] == ":" and sysroot not in tops:
|
||||
tops.append(sysroot)
|
||||
|
||||
# $WIRESHARK_SLOGAN
|
||||
for top in tops:
|
||||
try:
|
||||
for name1 in sorted(os.listdir(top), reverse=True):
|
||||
if name1.lower().startswith("python"):
|
||||
path1 = os.path.join(top, name1)
|
||||
try:
|
||||
for name2 in os.listdir(path1):
|
||||
if name2.lower() == "python.exe":
|
||||
path2 = os.path.join(path1, name2)
|
||||
ret[path2.lower()] = path2
|
||||
except:
|
||||
pass
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def get_py_nix(ret):
|
||||
ptn = re.compile(r"^(python|pypy)[0-9\.-]*$")
|
||||
for bindir in os.getenv("PATH").split(":"):
|
||||
if not bindir:
|
||||
next
|
||||
|
||||
try:
|
||||
for fn in os.listdir(bindir):
|
||||
if ptn.match(fn):
|
||||
fn = os.path.join(bindir, fn)
|
||||
ret[fn.lower()] = fn
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def read_py(binp):
|
||||
cmd = [
|
||||
binp,
|
||||
"-c",
|
||||
"import sys; sys.stdout.write(' '.join(str(x) for x in sys.version_info)); import jinja2",
|
||||
]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
ver, _ = p.communicate()
|
||||
ver = ver.decode("utf-8").split(" ")[:3]
|
||||
ver = [int(x) if x.isdigit() else 0 for x in ver]
|
||||
return ver, p.returncode == 0
|
||||
|
||||
|
||||
def get_pys():
|
||||
ver, chk = read_py(sys.executable)
|
||||
if chk or PY2:
|
||||
return [[chk, ver, sys.executable]]
|
||||
|
||||
hits = {sys.executable.lower(): sys.executable}
|
||||
if platform.system() == "Windows":
|
||||
get_py_win(hits)
|
||||
else:
|
||||
get_py_nix(hits)
|
||||
|
||||
ret = []
|
||||
for binp in hits.values():
|
||||
ver, chk = read_py(binp)
|
||||
ret.append([chk, ver, binp])
|
||||
msg("\t".join(str(x) for x in ret[-1]))
|
||||
|
||||
return ret
|
||||
def u8(gen):
|
||||
try:
|
||||
for s in gen:
|
||||
yield s.decode("utf-8", "ignore")
|
||||
except:
|
||||
yield s
|
||||
for s in gen:
|
||||
yield s
|
||||
|
||||
|
||||
def yieldfile(fn):
|
||||
@@ -295,17 +219,19 @@ def hashfile(fn):
|
||||
def unpack():
|
||||
"""unpacks the tar yielded by `data`"""
|
||||
name = "pe-copyparty"
|
||||
tag = "v" + str(STAMP)
|
||||
withpid = "{}.{}".format(name, os.getpid())
|
||||
top = tempfile.gettempdir()
|
||||
final = os.path.join(top, name)
|
||||
mine = os.path.join(top, withpid)
|
||||
tar = os.path.join(mine, "tar")
|
||||
tag_mine = os.path.join(mine, "v" + str(STAMP))
|
||||
tag_final = os.path.join(final, "v" + str(STAMP))
|
||||
|
||||
if os.path.exists(tag_final):
|
||||
msg("found early")
|
||||
return final
|
||||
try:
|
||||
if tag in os.listdir(final):
|
||||
msg("found early")
|
||||
return final
|
||||
except:
|
||||
pass
|
||||
|
||||
nwrite = 0
|
||||
os.mkdir(mine)
|
||||
@@ -328,12 +254,15 @@ def unpack():
|
||||
|
||||
os.remove(tar)
|
||||
|
||||
with open(tag_mine, "wb") as f:
|
||||
with open(os.path.join(mine, tag), "wb") as f:
|
||||
f.write(b"h\n")
|
||||
|
||||
if os.path.exists(tag_final):
|
||||
msg("found late")
|
||||
return final
|
||||
try:
|
||||
if tag in os.listdir(final):
|
||||
msg("found late")
|
||||
return final
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.path.islink(final):
|
||||
@@ -352,7 +281,7 @@ def unpack():
|
||||
msg("reloc fail,", mine)
|
||||
return mine
|
||||
|
||||
for fn in os.listdir(top):
|
||||
for fn in u8(os.listdir(top)):
|
||||
if fn.startswith(name) and fn not in [name, withpid]:
|
||||
try:
|
||||
old = os.path.join(top, fn)
|
||||
@@ -418,44 +347,61 @@ def get_payload():
|
||||
def confirm():
|
||||
msg()
|
||||
msg("*** hit enter to exit ***")
|
||||
raw_input() if PY2 else input()
|
||||
|
||||
|
||||
def run(tmp, py):
|
||||
msg("OK")
|
||||
msg("will use:", py)
|
||||
msg("bound to:", tmp)
|
||||
|
||||
fp_py = os.path.join(tmp, "py")
|
||||
with open(fp_py, "wb") as f:
|
||||
f.write(py.encode("utf-8") + b"\n")
|
||||
|
||||
# avoid loading ./copyparty.py
|
||||
cmd = [
|
||||
py,
|
||||
"-c",
|
||||
'import sys, runpy; sys.path.insert(0, r"'
|
||||
+ tmp
|
||||
+ '"); runpy.run_module("copyparty", run_name="__main__")',
|
||||
] + list(sys.argv[1:])
|
||||
|
||||
msg("\n", cmd, "\n")
|
||||
p = sp.Popen(str(x) for x in cmd)
|
||||
try:
|
||||
p.wait()
|
||||
raw_input() if PY2 else input()
|
||||
except:
|
||||
p.wait()
|
||||
pass
|
||||
|
||||
if p.returncode != 0:
|
||||
|
||||
def run(tmp, j2ver):
|
||||
global cpp
|
||||
|
||||
msg("jinja2:", j2ver or "bundled")
|
||||
msg("sfxdir:", tmp)
|
||||
|
||||
# "systemd-tmpfiles-clean.timer"?? HOW do you even come up with this shit
|
||||
try:
|
||||
import fcntl
|
||||
|
||||
fd = os.open(tmp, os.O_RDONLY)
|
||||
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
|
||||
tmp = os.readlink(tmp) # can't flock a symlink, even with O_NOFOLLOW
|
||||
except:
|
||||
pass
|
||||
|
||||
ld = [tmp, os.path.join(tmp, "dep-j2")]
|
||||
if j2ver:
|
||||
del ld[-1]
|
||||
|
||||
cmd = (
|
||||
"import sys, runpy; "
|
||||
+ "".join(['sys.path.insert(0, r"' + x + '"); ' for x in ld])
|
||||
+ 'runpy.run_module("copyparty", run_name="__main__")'
|
||||
)
|
||||
cmd = [sys.executable, "-c", cmd] + list(sys.argv[1:])
|
||||
|
||||
cmd = [str(x) for x in cmd]
|
||||
msg("\n", cmd, "\n")
|
||||
cpp = sp.Popen(cmd)
|
||||
try:
|
||||
cpp.wait()
|
||||
except:
|
||||
cpp.wait()
|
||||
|
||||
if cpp.returncode != 0:
|
||||
confirm()
|
||||
|
||||
sys.exit(p.returncode)
|
||||
sys.exit(cpp.returncode)
|
||||
|
||||
|
||||
def bye(sig, frame):
|
||||
if cpp is not None:
|
||||
cpp.terminate()
|
||||
|
||||
|
||||
def main():
|
||||
sysver = str(sys.version).replace("\n", "\n" + " " * 18)
|
||||
pktime = time.strftime("%Y-%m-%d, %H:%M:%S", time.gmtime(STAMP))
|
||||
os.system("")
|
||||
msg()
|
||||
msg(" this is: copyparty", VER)
|
||||
msg(" packed at:", pktime, "UTC,", STAMP)
|
||||
@@ -484,34 +430,16 @@ def main():
|
||||
|
||||
# skip 0
|
||||
|
||||
signal.signal(signal.SIGTERM, bye)
|
||||
|
||||
tmp = unpack()
|
||||
fp_py = os.path.join(tmp, "py")
|
||||
if os.path.exists(fp_py):
|
||||
with open(fp_py, "rb") as f:
|
||||
py = f.read().decode("utf-8").rstrip()
|
||||
|
||||
return run(tmp, py)
|
||||
try:
|
||||
from jinja2 import __version__ as j2ver
|
||||
except:
|
||||
j2ver = None
|
||||
|
||||
pys = get_pys()
|
||||
pys.sort(reverse=True)
|
||||
j2, ver, py = pys[0]
|
||||
if j2:
|
||||
try:
|
||||
os.rename(os.path.join(tmp, "jinja2"), os.path.join(tmp, "x.jinja2"))
|
||||
except:
|
||||
pass
|
||||
|
||||
return run(tmp, py)
|
||||
|
||||
msg("\n could not find jinja2; will use py2 + the bundled version\n")
|
||||
for _, ver, py in pys:
|
||||
if ver > [2, 7] and ver < [3, 0]:
|
||||
return run(tmp, py)
|
||||
|
||||
m = "\033[1;31m\n\n\ncould not find a python with jinja2 installed; please do one of these:\n\n pip install --user jinja2\n\n install python2\n\n\033[0m"
|
||||
msg(m)
|
||||
confirm()
|
||||
sys.exit(1)
|
||||
return run(tmp, j2ver)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -32,8 +32,12 @@ dir="$(
|
||||
|
||||
# detect available pythons
|
||||
(IFS=:; for d in $PATH; do
|
||||
printf '%s\n' "$d"/python* "$d"/pypy* | tac;
|
||||
done) | grep -E '(python|pypy)[0-9\.-]*$' > $dir/pys || true
|
||||
printf '%s\n' "$d"/python* "$d"/pypy*;
|
||||
done) |
|
||||
(sed -E 's/(.*\/[^/0-9]+)([0-9]?[^/]*)$/\2 \1/' || cat) |
|
||||
(sort -nr || cat) |
|
||||
(sed -E 's/([^ ]*) (.*)/\2\1/' || cat) |
|
||||
grep -E '/(python|pypy)[0-9\.-]*$' >$dir/pys || true
|
||||
|
||||
# see if we made a choice before
|
||||
[ -z "$pybin" ] && pybin="$(cat $dir/py 2>/dev/null || true)"
|
||||
|
||||
164
scripts/speedtest-fs.py
Normal file
164
scripts/speedtest-fs.py
Normal file
@@ -0,0 +1,164 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
import time
|
||||
import signal
|
||||
import traceback
|
||||
import threading
|
||||
from queue import Queue
|
||||
|
||||
|
||||
"""speedtest-fs: filesystem performance estimate"""
|
||||
__author__ = "ed <copyparty@ocv.me>"
|
||||
__copyright__ = 2020
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
|
||||
def get_spd(nbyte, nsec):
    """format a byte count and a duration as a human-readable
    throughput summary string"""
    if not nsec:
        # zero (or falsy) duration; avoid dividing by zero
        return "0.000 MB 0.000 sec 0.000 MB/s"

    megabytes = nbyte / (1024 * 1024.0)
    rate = megabytes / nsec
    return f"{megabytes:.3f} MB {nsec:.3f} sec {rate:.3f} MB/s"
|
||||
|
||||
|
||||
class Inf(object):
    """collects progress messages, errors and per-file transfer reports
    from the worker threads, and prints a rolling status line from a
    background daemon thread"""

    def __init__(self, t0):
        self.msgs = []
        self.errors = []
        self.reports = []
        # two locks: msgs is drained by the printer thread,
        # reports/counters are updated by the worker threads
        self.mtx_msgs = threading.Lock()
        self.mtx_reports = threading.Lock()

        # running totals across all workers
        self.n_byte = 0
        self.n_sec = 0
        self.n_done = 0  # number of workers that have exited
        self.t0 = t0

        # daemon printer thread; dies together with the main thread
        thr = threading.Thread(target=self.print_msgs)
        thr.daemon = True
        thr.start()

    def msg(self, fn, n_read):
        # queue one progress line for the printer thread
        with self.mtx_msgs:
            self.msgs.append(f"{fn} {n_read}")

    def err(self, fn):
        # record the active exception; must be called from an except block
        with self.mtx_reports:
            self.errors.append(f"{fn}\n{traceback.format_exc()}")

    def print_msgs(self):
        # printer loop: every 20ms, swap out the queued messages and
        # print the newest ones prefixed with the current aggregate speed
        while True:
            time.sleep(0.02)
            with self.mtx_msgs:
                msgs = self.msgs
                self.msgs = []

            if not msgs:
                continue

            # cap output at the 64 most recent lines per tick
            msgs = msgs[-64:]
            msgs = [f"{get_spd(self.n_byte, self.n_sec)} {x}" for x in msgs]
            print("\n".join(msgs))

    def report(self, fn, n_byte, n_sec):
        # record one finished file and fold it into the running totals
        with self.mtx_reports:
            self.reports.append([n_byte, n_sec, fn])
            self.n_byte += n_byte
            self.n_sec += n_sec

    def done(self):
        # a worker thread has finished and will not report again
        with self.mtx_reports:
            self.n_done += 1
|
||||
|
||||
|
||||
def get_files(dir_path):
    """walk dir_path depth-first, yielding the path of every regular file"""
    for entry in os.listdir(dir_path):
        path = os.path.join(dir_path, entry)
        mode = os.stat(path).st_mode

        # recurse into subdirectories; yield plain files as we find them
        if stat.S_ISDIR(mode):
            yield from get_files(path)
        elif stat.S_ISREG(mode):
            yield path
|
||||
|
||||
|
||||
def worker(q, inf, read_sz):
    """queue consumer: read each file handed over on q in read_sz chunks,
    reporting progress, timing and errors to inf; a falsy queue item
    (the None sentinel) stops the worker"""
    while True:
        fn = q.get()
        if not fn:
            break

        n_read = 0
        try:
            t0 = time.time()
            with open(fn, "rb") as f:
                # iter() with a b"" sentinel pulls chunks until EOF
                for buf in iter(lambda: f.read(read_sz), b""):
                    n_read += len(buf)
                    inf.msg(fn, n_read)

            inf.report(fn, n_read, time.time() - t0)
        except:
            # best-effort; a failed file is logged, not fatal
            inf.err(fn)

    inf.done()
|
||||
|
||||
|
||||
def sighandler(signo, frame):
    # hard-exit on SIGINT: os._exit skips cleanup/atexit so the
    # daemon worker/printer threads cannot delay shutdown
    os._exit(0)
|
||||
|
||||
|
||||
def main():
    """read every file under the target directory (argv[1], default cwd)
    with a pool of reader threads, then print per-file and aggregate
    throughput"""
    # ctrl-c should kill the process immediately, daemon threads included
    signal.signal(signal.SIGINT, sighandler)

    root = "."
    if len(sys.argv) > 1:
        root = sys.argv[1]

    t0 = time.time()
    q = Queue(256)
    inf = Inf(t0)

    num_threads = 8
    read_sz = 32 * 1024
    for _ in range(num_threads):
        thr = threading.Thread(target=worker, args=(q, inf, read_sz,))
        thr.daemon = True
        thr.start()

    for fn in get_files(root):
        q.put(fn)

    # one None sentinel per worker so each thread shuts down
    for _ in range(num_threads):
        q.put(None)

    # poll until every worker has called inf.done()
    while inf.n_done < num_threads:
        time.sleep(0.1)

    t2 = time.time()
    print("\n")

    # reports are [n_byte, n_sec, fn]; ascending sort puts the 64
    # largest files at the tail
    log = inf.reports
    log.sort()
    for nbyte, nsec, fn in log[-64:]:
        print(f"{get_spd(nbyte, nsec)} {fn}")

    print()
    print("\n".join(inf.errors))

    # aggregate speed is measured against wall-clock time,
    # not the sum of per-file durations
    print(get_spd(inf.n_byte, t2 - t0))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
77
scripts/uncomment.py
Normal file
77
scripts/uncomment.py
Normal file
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import io
|
||||
import sys
|
||||
import tokenize
|
||||
|
||||
|
||||
def uncomment(fpath):
    """strip comments and replace non-docstring string statements in a
    python file in-place, preserving layout; keeps copyright/license text
    (modified https://stackoverflow.com/a/62074206)"""

    with open(fpath, "rb") as f:
        orig = f.read().decode("utf-8")

    # keep the leading run of `#` lines verbatim (shebang, coding
    # declaration, license header)
    out = ""
    for ln in orig.split("\n"):
        if not ln.startswith("#"):
            break

        out += ln + "\n"

    io_obj = io.StringIO(orig)
    prev_toktype = tokenize.INDENT
    # last_lineno/last_col track where the previous token ended so the
    # original spacing between tokens can be reproduced
    last_lineno = -1
    last_col = 0
    for tok in tokenize.generate_tokens(io_obj.readline):
        # print(repr(tok))
        token_type = tok[0]
        token_string = tok[1]
        start_line, start_col = tok[2]
        end_line, end_col = tok[3]

        # new physical line: indentation restarts from column 0
        if start_line > last_lineno:
            last_col = 0

        # re-insert the whitespace between the previous token and this one
        if start_col > last_col:
            out += " " * (start_col - last_col)

        # never drop text that looks like legal boilerplate
        is_legalese = (
            "copyright" in token_string.lower() or "license" in token_string.lower()
        )

        if token_type == tokenize.STRING:
            # a string after INDENT/NEWLINE at column 0 is a freestanding
            # statement (docstring); replace it with a placeholder so
            # offsets stay valid, unless it is legalese
            if (
                prev_toktype != tokenize.INDENT
                and prev_toktype != tokenize.NEWLINE
                and start_col > 0
                or is_legalese
            ):
                out += token_string
            else:
                out += '"a"'
        elif token_type != tokenize.COMMENT or is_legalese:
            # drop comments; keep everything else verbatim
            out += token_string

        prev_toktype = token_type
        last_lineno = end_line
        last_col = end_col

    # out = "\n".join(x for x in out.splitlines() if x.strip())

    with open(fpath, "wb") as f:
        f.write(out.encode("utf-8"))
|
||||
|
||||
|
||||
def main():
    """strip comments from every file given on the command line,
    printing one dot per file as a progress indicator"""
    print("uncommenting", end="")
    for fpath in sys.argv[1:]:
        print(".", end="")
        uncomment(fpath)

    print("k")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
1
setup.py
1
setup.py
@@ -110,7 +110,6 @@ args = {
|
||||
"Programming Language :: Python :: 2",
|
||||
"Programming Language :: Python :: 2.7",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.2",
|
||||
"Programming Language :: Python :: 3.3",
|
||||
"Programming Language :: Python :: 3.4",
|
||||
"Programming Language :: Python :: 3.5",
|
||||
|
||||
141
srv/extend.md
Normal file
141
srv/extend.md
Normal file
@@ -0,0 +1,141 @@
|
||||
# hi
|
||||
this showcases my worst idea yet; *extending markdown with inline javascript*
|
||||
|
||||
due to obvious reasons it's disabled by default, and can be enabled with `-emp`
|
||||
|
||||
the examples are by no means correct, they're as much of a joke as this feature itself
|
||||
|
||||
|
||||
### sub-header
|
||||
nothing special about this one
|
||||
|
||||
|
||||
## except/
|
||||
this one becomes a hyperlink to ./except/ thanks to
|
||||
* the `copyparty_pre` plugin at the end of this file
|
||||
* which is invoked as a markdown filter every time the document is modified
|
||||
* which looks for headers ending with a `/` and rewrites all headers below that
|
||||
|
||||
it is a passthrough to the markdown extension api, see https://marked.js.org/using_pro
|
||||
|
||||
in addition to the markdown extension functions, `ctor` will be called on document init
|
||||
|
||||
|
||||
### these/
|
||||
and this one becomes ./except/these/
|
||||
|
||||
|
||||
#### ones.md
|
||||
finally ./except/these/ones.md
|
||||
|
||||
|
||||
### also-this.md
|
||||
which should be ./except/also-this.md
|
||||
|
||||
|
||||
|
||||
|
||||
# ok
|
||||
now for another extension type, `copyparty_post` which is called to manipulate the generated dom instead
|
||||
|
||||
`copyparty_post` can have the following functions, all optional
|
||||
* `ctor` is called on document init
|
||||
* `render` is called when the dom is done but still in-memory
|
||||
* `render2` is called with the live browser dom as-displayed
|
||||
|
||||
## post example
|
||||
|
||||
the values in the `ex:` columns are linkified to `example.com/$value`
|
||||
|
||||
| ex:foo | bar | ex:baz |
|
||||
| ------------ | -------- | ------ |
|
||||
| asdf | nice | fgsfds |
|
||||
| more one row | hi hello | aaa |
|
||||
|
||||
and the table can be sorted by clicking the headers
|
||||
|
||||
the difference is that with `copyparty_pre` you'll probably break various copyparty features but if you use `copyparty_post` then future copyparty versions will probably break you
|
||||
|
||||
|
||||
|
||||
|
||||
# heres the plugins
|
||||
if there is anything below this line in the preview then the plugin feature is disabled (good)
|
||||
|
||||
|
||||
|
||||
|
||||
```copyparty_pre
|
||||
ctor() {
|
||||
md_plug['h'] = {
|
||||
on: false,
|
||||
lv: -1,
|
||||
path: []
|
||||
}
|
||||
},
|
||||
walkTokens(token) {
|
||||
if (token.type == 'heading') {
|
||||
var h = md_plug['h'],
|
||||
is_dir = token.text.endsWith('/');
|
||||
|
||||
if (h.lv >= token.depth) {
|
||||
h.on = false;
|
||||
}
|
||||
if (!h.on && is_dir) {
|
||||
h.on = true;
|
||||
h.lv = token.depth;
|
||||
h.path = [token.text];
|
||||
}
|
||||
else if (h.on && h.lv < token.depth) {
|
||||
h.path = h.path.slice(0, token.depth - h.lv);
|
||||
h.path.push(token.text);
|
||||
}
|
||||
if (!h.on)
|
||||
return false;
|
||||
|
||||
var path = h.path.join('');
|
||||
var emoji = is_dir ? '📂' : '📜';
|
||||
token.tokens[0].text = '<a href="' + path + '">' + emoji + ' ' + path + '</a>';
|
||||
}
|
||||
if (token.type == 'paragraph') {
|
||||
//console.log(JSON.parse(JSON.stringify(token.tokens)));
|
||||
for (var a = 0; a < token.tokens.length; a++) {
|
||||
var t = token.tokens[a];
|
||||
if (t.type == 'text' || t.type == 'strong' || t.type == 'em') {
|
||||
var ret = '', text = t.text;
|
||||
for (var b = 0; b < text.length; b++)
|
||||
ret += (Math.random() > 0.5) ? text[b] : text[b].toUpperCase();
|
||||
|
||||
t.text = ret;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
|
||||
```copyparty_post
|
||||
render(dom) {
|
||||
var ths = dom.querySelectorAll('th');
|
||||
for (var a = 0; a < ths.length; a++) {
|
||||
var th = ths[a];
|
||||
if (th.textContent.indexOf('ex:') === 0) {
|
||||
th.textContent = th.textContent.slice(3);
|
||||
var nrow = 0;
|
||||
while ((th = th.previousSibling) != null)
|
||||
nrow++;
|
||||
|
||||
var trs = ths[a].parentNode.parentNode.parentNode.querySelectorAll('tr');
|
||||
for (var b = 1; b < trs.length; b++) {
|
||||
var td = trs[b].childNodes[nrow];
|
||||
td.innerHTML = '<a href="//example.com/' + td.innerHTML + '">' + td.innerHTML + '</a>';
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
render2(dom) {
|
||||
window.makeSortable(dom.getElementsByTagName('table')[0]);
|
||||
}
|
||||
```
|
||||
26
srv/test.md
26
srv/test.md
@@ -1,5 +1,16 @@
|
||||
### hello world
|
||||
|
||||
* qwe
|
||||
* asd
|
||||
* zxc
|
||||
* 573
|
||||
* one
|
||||
* two
|
||||
|
||||
* |||
|
||||
|--|--|
|
||||
|listed|table|
|
||||
|
||||
```
|
||||
[72....................................................................]
|
||||
[80............................................................................]
|
||||
@@ -21,6 +32,8 @@
|
||||
l[i]=1I;(){}o0O</> var foo = "$(`bar`)"; a's'd
|
||||
```
|
||||
|
||||
🔍🌽.📕.🍙🔎
|
||||
|
||||
[](#s1)
|
||||
[s1](#s1)
|
||||
[#s1](#s1)
|
||||
@@ -121,6 +134,15 @@ a newline toplevel
|
||||
| a table | on the right |
|
||||
| second row | foo bar |
|
||||
|
||||
||
|
||||
--|:-:|-:
|
||||
a table | big text in this | aaakbfddd
|
||||
second row | centred | bbb
|
||||
|
||||
||
|
||||
--|--|--
|
||||
foo
|
||||
|
||||
* list entry
|
||||
* [x] yes
|
||||
* [ ] no
|
||||
@@ -209,3 +231,7 @@ unrelated neat stuff:
|
||||
awk '/./ {printf "%s %d\n", $0, NR; next} 1' <test.md >ln.md
|
||||
gawk '{print gensub(/([a-zA-Z\.])/,NR" \\1","1")}' <test.md >ln.md
|
||||
```
|
||||
|
||||
a|b|c
|
||||
--|--|--
|
||||
foo
|
||||
|
||||
@@ -3,8 +3,10 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import time
|
||||
import json
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
import subprocess as sp # nosec
|
||||
|
||||
@@ -30,9 +32,6 @@ class TestVFS(unittest.TestCase):
|
||||
response = self.unfoo(response)
|
||||
self.assertEqual(util.undot(query), response)
|
||||
|
||||
def absify(self, root, names):
|
||||
return ["{}/{}".format(root, x).replace("//", "/") for x in names]
|
||||
|
||||
def ls(self, vfs, vpath, uname):
|
||||
"""helper for resolving and listing a folder"""
|
||||
vn, rem = vfs.get(vpath, uname, True, False)
|
||||
@@ -59,16 +58,31 @@ class TestVFS(unittest.TestCase):
|
||||
|
||||
if os.path.exists("/Volumes"):
|
||||
devname, _ = self.chkcmd("hdiutil", "attach", "-nomount", "ram://8192")
|
||||
_, _ = self.chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
|
||||
return "/Volumes/cptd"
|
||||
devname = devname.strip()
|
||||
print("devname: [{}]".format(devname))
|
||||
for _ in range(10):
|
||||
try:
|
||||
_, _ = self.chkcmd(
|
||||
"diskutil", "eraseVolume", "HFS+", "cptd", devname
|
||||
)
|
||||
return "/Volumes/cptd"
|
||||
except Exception as ex:
|
||||
print(repr(ex))
|
||||
time.sleep(0.25)
|
||||
|
||||
raise Exception("TODO support windows")
|
||||
raise Exception("ramdisk creation failed")
|
||||
|
||||
ret = os.path.join(tempfile.gettempdir(), "copyparty-test")
|
||||
try:
|
||||
os.mkdir(ret)
|
||||
finally:
|
||||
return ret
|
||||
|
||||
def log(self, src, msg):
|
||||
pass
|
||||
|
||||
def test(self):
|
||||
td = self.get_ramdisk() + "/vfs"
|
||||
td = os.path.join(self.get_ramdisk(), "vfs")
|
||||
try:
|
||||
shutil.rmtree(td)
|
||||
except OSError:
|
||||
@@ -99,7 +113,7 @@ class TestVFS(unittest.TestCase):
|
||||
vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), self.log).vfs
|
||||
self.assertEqual(vfs.nodes, {})
|
||||
self.assertEqual(vfs.vpath, "")
|
||||
self.assertEqual(vfs.realpath, td + "/a/ab")
|
||||
self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab"))
|
||||
self.assertEqual(vfs.uread, ["*"])
|
||||
self.assertEqual(vfs.uwrite, [])
|
||||
|
||||
@@ -109,7 +123,7 @@ class TestVFS(unittest.TestCase):
|
||||
).vfs
|
||||
self.assertEqual(vfs.nodes, {})
|
||||
self.assertEqual(vfs.vpath, "")
|
||||
self.assertEqual(vfs.realpath, td + "/a/aa")
|
||||
self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa"))
|
||||
self.assertEqual(vfs.uread, ["*"])
|
||||
self.assertEqual(vfs.uwrite, [])
|
||||
|
||||
@@ -138,42 +152,63 @@ class TestVFS(unittest.TestCase):
|
||||
n = n.nodes["acb"]
|
||||
self.assertEqual(n.nodes, {})
|
||||
self.assertEqual(n.vpath, "a/ac/acb")
|
||||
self.assertEqual(n.realpath, td + "/a/ac/acb")
|
||||
self.assertEqual(n.realpath, os.path.join(td, "a", "ac", "acb"))
|
||||
self.assertEqual(n.uread, ["k"])
|
||||
self.assertEqual(n.uwrite, ["*", "k"])
|
||||
|
||||
# something funky about the windows path normalization,
|
||||
# doesn't really matter but makes the test messy, TODO?
|
||||
|
||||
fsdir, real, virt = self.ls(vfs, "/", "*")
|
||||
self.assertEqual(fsdir, td)
|
||||
self.assertEqual(real, ["b", "c"])
|
||||
self.assertEqual(list(virt), ["a"])
|
||||
|
||||
fsdir, real, virt = self.ls(vfs, "a", "*")
|
||||
self.assertEqual(fsdir, td + "/a")
|
||||
self.assertEqual(fsdir, os.path.join(td, "a"))
|
||||
self.assertEqual(real, ["aa", "ab"])
|
||||
self.assertEqual(list(virt), ["ac"])
|
||||
|
||||
fsdir, real, virt = self.ls(vfs, "a/ab", "*")
|
||||
self.assertEqual(fsdir, td + "/a/ab")
|
||||
self.assertEqual(fsdir, os.path.join(td, "a", "ab"))
|
||||
self.assertEqual(real, ["aba", "abb", "abc"])
|
||||
self.assertEqual(list(virt), [])
|
||||
|
||||
fsdir, real, virt = self.ls(vfs, "a/ac", "*")
|
||||
self.assertEqual(fsdir, td + "/a/ac")
|
||||
self.assertEqual(fsdir, os.path.join(td, "a", "ac"))
|
||||
self.assertEqual(real, ["aca", "acc"])
|
||||
self.assertEqual(list(virt), [])
|
||||
|
||||
fsdir, real, virt = self.ls(vfs, "a/ac", "k")
|
||||
self.assertEqual(fsdir, td + "/a/ac")
|
||||
self.assertEqual(fsdir, os.path.join(td, "a", "ac"))
|
||||
self.assertEqual(real, ["aca", "acc"])
|
||||
self.assertEqual(list(virt), ["acb"])
|
||||
|
||||
self.assertRaises(util.Pebkac, vfs.get, "a/ac/acb", "*", True, False)
|
||||
|
||||
fsdir, real, virt = self.ls(vfs, "a/ac/acb", "k")
|
||||
self.assertEqual(fsdir, td + "/a/ac/acb")
|
||||
self.assertEqual(fsdir, os.path.join(td, "a", "ac", "acb"))
|
||||
self.assertEqual(real, ["acba", "acbb", "acbc"])
|
||||
self.assertEqual(list(virt), [])
|
||||
|
||||
# admin-only rootfs with all-read-only subfolder
|
||||
vfs = AuthSrv(Namespace(c=None, a=["k:k"], v=[".::ak", "a:a:r"]), self.log,).vfs
|
||||
self.assertEqual(len(vfs.nodes), 1)
|
||||
self.assertEqual(vfs.vpath, "")
|
||||
self.assertEqual(vfs.realpath, td)
|
||||
self.assertEqual(vfs.uread, ["k"])
|
||||
self.assertEqual(vfs.uwrite, ["k"])
|
||||
n = vfs.nodes["a"]
|
||||
self.assertEqual(len(vfs.nodes), 1)
|
||||
self.assertEqual(n.vpath, "a")
|
||||
self.assertEqual(n.realpath, os.path.join(td, "a"))
|
||||
self.assertEqual(n.uread, ["*"])
|
||||
self.assertEqual(n.uwrite, [])
|
||||
self.assertEqual(vfs.can_access("/", "*"), [False, False])
|
||||
self.assertEqual(vfs.can_access("/", "k"), [True, True])
|
||||
self.assertEqual(vfs.can_access("/a", "*"), [True, False])
|
||||
self.assertEqual(vfs.can_access("/a", "k"), [True, False])
|
||||
|
||||
# breadth-first construction
|
||||
vfs = AuthSrv(
|
||||
Namespace(
|
||||
@@ -207,20 +242,20 @@ class TestVFS(unittest.TestCase):
|
||||
self.assertEqual(list(v1), ["a"])
|
||||
|
||||
fsp, r1, v1 = self.ls(vfs, "a", "*")
|
||||
self.assertEqual(fsp, td + "/a")
|
||||
self.assertEqual(fsp, os.path.join(td, "a"))
|
||||
self.assertEqual(r1, ["aa", "ab"])
|
||||
self.assertEqual(list(v1), ["ac"])
|
||||
|
||||
fsp1, r1, v1 = self.ls(vfs, "a/ac", "*")
|
||||
fsp2, r2, v2 = self.ls(vfs, "b", "*")
|
||||
self.assertEqual(fsp1, td + "/b")
|
||||
self.assertEqual(fsp2, td + "/b")
|
||||
self.assertEqual(fsp1, os.path.join(td, "b"))
|
||||
self.assertEqual(fsp2, os.path.join(td, "b"))
|
||||
self.assertEqual(r1, ["ba", "bb", "bc"])
|
||||
self.assertEqual(r1, r2)
|
||||
self.assertEqual(list(v1), list(v2))
|
||||
|
||||
# config file parser
|
||||
cfg_path = self.get_ramdisk() + "/test.cfg"
|
||||
cfg_path = os.path.join(self.get_ramdisk(), "test.cfg")
|
||||
with open(cfg_path, "wb") as f:
|
||||
f.write(
|
||||
dedent(
|
||||
@@ -248,10 +283,11 @@ class TestVFS(unittest.TestCase):
|
||||
self.assertEqual(len(n.nodes), 1)
|
||||
n = n.nodes["dst"]
|
||||
self.assertEqual(n.vpath, "dst")
|
||||
self.assertEqual(n.realpath, td + "/src")
|
||||
self.assertEqual(n.realpath, os.path.join(td, "src"))
|
||||
self.assertEqual(n.uread, ["a", "asd"])
|
||||
self.assertEqual(n.uwrite, ["asd"])
|
||||
self.assertEqual(len(n.nodes), 0)
|
||||
|
||||
os.chdir(tempfile.gettempdir())
|
||||
shutil.rmtree(td)
|
||||
os.unlink(cfg_path)
|
||||
|
||||
Reference in New Issue
Block a user