mirror of https://github.com/9001/copyparty.git (synced 2025-10-24 16:43:55 +00:00)
Compare commits
130 Commits
| SHA1 |
|---|
| 7d9057cc62 |
| c4b322b883 |
| 19b09c898a |
| eafe2098b6 |
| 2bc6a20d71 |
| 8b502a7235 |
| 37567844af |
| 2f6c4e0e34 |
| 1c7cc4cb2b |
| f83db3648e |
| b164aa00d4 |
| a2d866d0c2 |
| 2dfe4ac4c6 |
| db65d05cb5 |
| 300c0194c7 |
| 37a0d2b087 |
| a4959300ea |
| 223657e5f8 |
| 0c53de6767 |
| 9c309b1498 |
| 1aa1b34c80 |
| 755a2ee023 |
| 69d3359e47 |
| a90c49b8fb |
| b1222edb27 |
| b967a92f69 |
| 90a5cb5e59 |
| 7aba9cb76b |
| f550a8171d |
| 82e568d4c9 |
| 7b2a4a3d59 |
| 0265455cd1 |
| afafc886a4 |
| 8a959f6ac4 |
| 1c3aa0d2c5 |
| 79b7d3316a |
| fa7768583a |
| faf49f6c15 |
| 765af31b83 |
| b6a3c52d67 |
| b025c2f660 |
| e559a7c878 |
| 5c8855aafd |
| b5fc537b89 |
| 14899d3a7c |
| 0ea7881652 |
| ec29b59d1e |
| 9405597c15 |
| 82441978c6 |
| e0e6291bdb |
| b2b083fd0a |
| f8a51b68e7 |
| e0a19108e5 |
| 770ea68ca8 |
| ce36c52baf |
| a7da1dd233 |
| 678ef296b4 |
| 9e5627d805 |
| 5958ee4439 |
| 7127e57f0e |
| ee9c6dc8aa |
| 92779b3f48 |
| 2f1baf17d4 |
| 583da3d4a9 |
| bf9ff78bcc |
| 2cb07792cc |
| 47bc8bb466 |
| 94ad1f5732 |
| 09557fbe83 |
| 1c0f44fa4e |
| fc4d59d2d7 |
| 12345fbacc |
| 2e33c8d222 |
| db5f07f164 |
| e050e69a43 |
| 27cb1d4fc7 |
| 5d6a740947 |
| da3f68c363 |
| d7d1c3685c |
| dab3407beb |
| 592987a54a |
| 8dca8326f7 |
| 633481fae3 |
| e7b99e6fb7 |
| 2a6a3aedd0 |
| 866c74c841 |
| dad92bde26 |
| a994e034f7 |
| 2801c04f2e |
| 316e3abfab |
| c15ecb6c8e |
| ee96005026 |
| 5b55d05a20 |
| 2f09c62c4e |
| 1cc8b873d4 |
| 15d5859750 |
| a1ecef8020 |
| e0a38ceeee |
| c4bea13be5 |
| 5dcefab183 |
| 28e3178ac5 |
| 23b021a98b |
| 0cda38f53d |
| 6e43ee7cc7 |
| da1094db84 |
| 717d8dc7d9 |
| 75e68d3427 |
| d9c71c11fd |
| 706f30033e |
| 04047f3a72 |
| 060368e93d |
| bef2e92cef |
| 334c07cc0c |
| ee284dd282 |
| c53126d373 |
| 00f05941d4 |
| 1c49b71606 |
| fc5c815824 |
| 836463bab2 |
| 9e3a560ea6 |
| 8786416428 |
| 53f22c25c9 |
| c2016ba037 |
| 5283837e6d |
| 82f2200f55 |
| 5cf49928b6 |
| eec3efd683 |
| bf0aac2cbd |
| 10652427bc |
| a4b0c810a4 |
12 .eslintrc.json Normal file

@@ -0,0 +1,12 @@
{
    "env": {
        "browser": true,
        "es2021": true
    },
    "extends": "eslint:recommended",
    "parserOptions": {
        "ecmaVersion": 12
    },
    "rules": {
    }
}

14 .gitignore vendored

@@ -8,17 +8,15 @@ copyparty.egg-info/
buildenv/
build/
dist/
*.rst
.env/
sfx/
.venv/

# sublime
# ide
*.sublime-workspace

# winmerge
*.bak

# other licenses
contrib/

# deps
copyparty/web/deps
# derived
copyparty/web/deps/
srv/

6 .vscode/launch.json vendored

@@ -9,13 +9,13 @@
      "console": "integratedTerminal",
      "cwd": "${workspaceFolder}",
      "args": [
        "-j",
        "0",
        //"-nw",
        "-ed",
        "-emp",
        "-a",
        "ed:wark",
        "-v",
        "/home/ed/inc:inc:r:aed"
        "srv::r:aed"
      ]
    },
    {

4 .vscode/settings.json vendored

@@ -37,7 +37,7 @@
    "python.linting.banditEnabled": true,
    "python.linting.flake8Args": [
        "--max-line-length=120",
        "--ignore=E722,F405,E203,W503,W293",
        "--ignore=E722,F405,E203,W503,W293,E402",
    ],
    "python.linting.banditArgs": [
        "--ignore=B104"
@@ -55,6 +55,6 @@
    //
    // things you may wanna edit:
    //
    "python.pythonPath": ".env/bin/python",
    "python.pythonPath": "/usr/bin/python3",
    //"python.linting.enabled": true,
}

59 README.md

@@ -1,6 +1,6 @@
# ⇆🎉 copyparty

* http file sharing hub (py2/py3)
* http file sharing hub (py2/py3) [(on PyPI)](https://pypi.org/project/copyparty/)
* MIT-Licensed, 2019-05-26, ed @ irc.rizon.net


@@ -19,6 +19,8 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* Android-Chrome: set max "parallel uploads" for 200% upload speed (android bug)
* Android-Firefox: takes a while to select files (in order to avoid the above android-chrome issue)
* Desktop-Firefox: may use gigabytes of RAM if your connection is great and your files are massive
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
  * because no browsers currently implement the media-query to do this properly orz


## status

@@ -34,8 +36,22 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [ ] download as zip
* [x] volumes
* [x] accounts
* [x] markdown viewer
* [x] markdown editor
* [x] FUSE client (read-only)

summary: close to beta
summary: it works! you can use it! (but technically not even close to beta)

# client examples

* javascript: dump some state into a file (two separate examples)
  * `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
  * `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`

* FUSE: mount a copyparty server as a local filesystem
  * cross-platform python client available in [./bin/](bin/)
  * [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)

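the same two calls can be made from python with nothing but the standard library; a rough sketch, assuming a copyparty instance reachable over plain http at 127.0.0.1:3923 (the host, port and payload below are placeholders, not part of the README):

```python
# sketch: upload a blob via PUT, then drop a text message via POST (?raw),
# mirroring the two javascript examples above; host/port/payload are assumptions
import json
import urllib.request

foo = {"some": "state"}

# PUT the JSON-serialized state to the volume root; per the README this
# "dumps some state into a file" on the server
req = urllib.request.Request(
    "http://127.0.0.1:3923/",
    data=json.dumps(foo).encode("utf-8"),
    method="PUT",
)
print(urllib.request.urlopen(req).status)

# POST a plain-text message to /msgs?raw, like the XMLHttpRequest example
req = urllib.request.Request(
    "http://127.0.0.1:3923/msgs?raw",
    data=b"foo",
    method="POST",
)
print(urllib.request.urlopen(req).status)
```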
# dependencies

@@ -47,22 +63,49 @@ optional, enables thumbnails:
* `Pillow` (requires py2.7 or py3.5+)

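since Pillow is optional and only enables thumbnails, the feature simply switches off when the package is missing; a generic illustration of that optional-dependency pattern (a sketch only, not copyparty's actual code):

```python
# sketch: probe for the optional Pillow dependency and degrade gracefully
# if it is missing (illustrative only; not copyparty's implementation)
try:
    from PIL import Image  # provided by the Pillow package
    HAVE_PILLOW = True
except ImportError:
    Image = None
    HAVE_PILLOW = False

def make_thumbnail(src_path, dst_path, size=(256, 256)):
    """downscale src_path into dst_path, or report that thumbnails are off"""
    if not HAVE_PILLOW:
        return False  # feature disabled; caller serves the original file instead
    with Image.open(src_path) as im:
        im.thumbnail(size)
        im.save(dst_path)
    return True
```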
# sfx

currently there are two self-contained binaries:
* `copyparty-sfx.sh` for unix (linux and osx) -- smaller, more robust
* `copyparty-sfx.py` for windows (unix too) -- crossplatform, beta

launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course

pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `copyparty.py` and keep it in the same folder because `sys.path` is funky


## sfx repack

if you don't need all the features you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except for either msys2 or WSL if you're on windows)
* `724K` original size as of v0.4.0
* `256K` after `./scripts/make-sfx.sh re no-ogv`
* `164K` after `./scripts/make-sfx.sh re no-ogv no-cm`

the features you can opt to drop are
* `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files
* `cm`/easymde, the "fancy" markdown editor

for the `re`pack to work, first run one of the sfx'es once to unpack it

**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a `no-ogv no-cm` repack; works on linux/macos (and windows with msys2 or WSL)


# install on android

install [Termux](https://termux.com/) (see [ocv.me/termux](https://ocv.me/termux/)) and then copy-paste this into Termux (long-tap) all at once:
```sh
apt update && apt -y full-upgrade && termux-setup-storage && apt -y install curl && cd && curl -L https://github.com/9001/copyparty/raw/master/scripts/copyparty-android.sh > copyparty-android.sh && chmod 755 copyparty-android.sh && ./copyparty-android.sh -h
apt update && apt -y full-upgrade && termux-setup-storage && apt -y install python && python -m ensurepip && python -m pip install -U copyparty
echo $?
```

after the initial setup (and restarting bash), you can launch copyparty at any time by running "copyparty" in Termux
after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux


# dev env setup

```sh
python3 -m venv .env
. .env/bin/activate
python3 -m venv .venv
. .venv/bin/activate
pip install jinja2 # mandatory deps
pip install Pillow # thumbnail deps
pip install black bandit pylint flake8 # vscode tooling
```

@@ -76,12 +119,16 @@ in the `scripts` folder:
* run `make -C deps-docker` to build all dependencies
* create github release with `make-tgz-release.sh`
* upload to pypi with `make-pypi-release.(sh|bat)`
* create sfx with `make-sfx.sh`


# todo

roughly sorted by priority

* up2k handle filename too long
* up2k fails on empty files? alert then stuck
* drop onto folders
* look into android thumbnail cache file format
* support pillow-simd
* cache sha512 chunks on client

36 bin/README.md Normal file

@@ -0,0 +1,36 @@
# copyparty-fuse.py
* mount a copyparty server as a local filesystem (read-only)
* **supports Windows!** -- expect `194 MiB/s` sequential read
* **supports Linux** -- expect `117 MiB/s` sequential read
* **supports macos** -- expect `85 MiB/s` sequential read

filecache is default-on for windows and macos;
* macos readsize is 64kB, so speed ~32 MiB/s without the cache
* windows readsize varies by software; explorer=1M, pv=32k

note that copyparty should run with `-ed` to enable dotfiles (hidden otherwise)

also consider using [../docs/rclone.md](../docs/rclone.md) instead for 5x performance


## to run this on windows:
* install [winfsp](https://github.com/billziss-gh/winfsp/releases/latest) and [python 3](https://www.python.org/downloads/)
  * [x] add python 3.x to PATH (it asks during install)
* `python -m pip install --user fusepy`
* `python ./copyparty-fuse.py n: http://192.168.1.69:3923/`

10% faster in [msys2](https://www.msys2.org/), 700% faster if debug prints are enabled:
* `pacman -S mingw64/mingw-w64-x86_64-python{,-pip}`
* `/mingw64/bin/python3 -m pip install --user fusepy`
* `/mingw64/bin/python3 ./copyparty-fuse.py [...]`

you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releases/latest), let me know if you [figure out how](https://github.com/dokan-dev/dokany/wiki/FUSE)
(winfsp's sshfs leaks, doesn't look like winfsp itself does, should be fine)



# copyparty-fuse🅱️.py
* mount a copyparty server as a local filesystem (read-only)
* does the same thing except more correct, `samba` approves
* **supports Linux** -- expect `18 MiB/s` (wait what)
* **supports Macos** -- probably

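under the hood both clients are thin HTTP wrappers: directory listings are scraped from the `?dots` listing pages, and file contents are fetched with `?raw` plus a `Range` header, expecting a `206 Partial Content` reply (see `Gateway.download_file_range` in the diff below). a minimal standalone sketch of that request, with host, path, byte range and the optional password as placeholder values:

```python
# sketch: fetch the first 64 KiB of a file from copyparty the same way
# copyparty-fuse.py's Gateway.download_file_range does; host/port/path
# and the password are assumptions, not values from this repo
import http.client
from urllib.parse import quote

host, port = "192.168.1.69", 3923
path = "/music/some.flac"     # hypothetical remote file
ofs1, ofs2 = 0, 65536         # half-open byte range [ofs1, ofs2)

conn = http.client.HTTPConnection(host, port, timeout=260)
headers = {"Range": "bytes={}-{}".format(ofs1, ofs2 - 1)}
# headers["Cookie"] = "cppwd=" + password  # only if the volume needs auth

conn.request("GET", quote(path, safe="/") + "?raw", headers=headers)
r = conn.getresponse()
if r.status != http.client.PARTIAL_CONTENT:  # 206
    raise Exception("http error {}".format(r.status))

buf = r.read()
print(len(buf), "bytes")
```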
@@ -7,47 +7,83 @@ __copyright__ = 2019
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import stat
|
||||
import errno
|
||||
import struct
|
||||
import threading
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
|
||||
try:
|
||||
from fuse import FUSE, FuseOSError, Operations
|
||||
except:
|
||||
print("\n could not import fuse;\n pip install fusepy\n")
|
||||
raise
|
||||
|
||||
|
||||
"""
|
||||
mount a copyparty server (local or remote) as a filesystem
|
||||
|
||||
usage:
|
||||
python copyparty-fuse.py ./music http://192.168.1.69:1234/
|
||||
python copyparty-fuse.py ./music http://192.168.1.69:3923/
|
||||
|
||||
dependencies:
|
||||
sudo apk add fuse-dev
|
||||
python3 -m venv ~/pe/ve.fusepy
|
||||
. ~/pe/ve.fusepy/bin/activate
|
||||
pip install fusepy
|
||||
python3 -m pip install --user fusepy
|
||||
+ on Linux: sudo apk add fuse
|
||||
+ on Macos: https://osxfuse.github.io/
|
||||
+ on Windows: https://github.com/billziss-gh/winfsp/releases/latest
|
||||
|
||||
|
||||
MB/s
|
||||
28 cache NOthread
|
||||
24 cache thread
|
||||
29 cache NOthread NOmutex
|
||||
67 NOcache NOthread NOmutex ( ´・ω・) nyoro~n
|
||||
10 NOcache thread NOmutex
|
||||
get server cert:
|
||||
awk '/-BEGIN CERTIFICATE-/ {a=1} a; /-END CERTIFICATE-/{exit}' <(openssl s_client -connect 127.0.0.1:3923 </dev/null 2>/dev/null) >cert.pem
|
||||
"""
|
||||
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import stat
|
||||
import errno
|
||||
import struct
|
||||
import codecs
|
||||
import builtins
|
||||
import platform
|
||||
import argparse
|
||||
import threading
|
||||
import traceback
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
WINDOWS = sys.platform == "win32"
|
||||
MACOS = platform.system() == "Darwin"
|
||||
info = log = dbg = None
|
||||
|
||||
|
||||
try:
|
||||
from fuse import FUSE, FuseOSError, Operations
|
||||
except:
|
||||
if WINDOWS:
|
||||
libfuse = "install https://github.com/billziss-gh/winfsp/releases/latest"
|
||||
elif MACOS:
|
||||
libfuse = "install https://osxfuse.github.io/"
|
||||
else:
|
||||
libfuse = "apt install libfuse\n modprobe fuse"
|
||||
|
||||
print(
|
||||
"\n could not import fuse; these may help:"
|
||||
+ "\n python3 -m pip install --user fusepy\n "
|
||||
+ libfuse
|
||||
+ "\n"
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
def print(*args, **kwargs):
|
||||
try:
|
||||
builtins.print(*list(args), **kwargs)
|
||||
except:
|
||||
builtins.print(termsafe(" ".join(str(x) for x in args)), **kwargs)
|
||||
|
||||
|
||||
def termsafe(txt):
|
||||
try:
|
||||
return txt.encode(sys.stdout.encoding, "backslashreplace").decode(
|
||||
sys.stdout.encoding
|
||||
)
|
||||
except:
|
||||
return txt.encode(sys.stdout.encoding, "replace").decode(sys.stdout.encoding)
|
||||
|
||||
|
||||
def threadless_log(msg):
|
||||
print(msg + "\n", end="")
|
||||
|
||||
@@ -60,27 +96,127 @@ def boring_log(msg):
|
||||
def rice_tid():
|
||||
tid = threading.current_thread().ident
|
||||
c = struct.unpack(b"B" * 5, struct.pack(b">Q", tid)[-5:])
|
||||
return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c)
|
||||
return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m"
|
||||
|
||||
|
||||
def fancy_log(msg):
|
||||
print("{}\033[0m {}\n".format(rice_tid(), msg), end="")
|
||||
print("{} {}\n".format(rice_tid(), msg), end="")
|
||||
|
||||
|
||||
def null_log(msg):
|
||||
pass
|
||||
|
||||
|
||||
log = boring_log
|
||||
log = fancy_log
|
||||
log = threadless_log
|
||||
dbg = null_log
|
||||
def hexler(binary):
|
||||
return binary.replace("\r", "\\r").replace("\n", "\\n")
|
||||
return " ".join(["{}\033[36m{:02x}\033[0m".format(b, ord(b)) for b in binary])
|
||||
return " ".join(map(lambda b: format(ord(b), "02x"), binary))
|
||||
|
||||
|
||||
def register_wtf8():
|
||||
def wtf8_enc(text):
|
||||
return str(text).encode("utf-8", "surrogateescape"), len(text)
|
||||
|
||||
def wtf8_dec(binary):
|
||||
return bytes(binary).decode("utf-8", "surrogateescape"), len(binary)
|
||||
|
||||
def wtf8_search(encoding_name):
|
||||
return codecs.CodecInfo(wtf8_enc, wtf8_dec, name="wtf-8")
|
||||
|
||||
codecs.register(wtf8_search)
|
||||
|
||||
|
||||
bad_good = {}
|
||||
good_bad = {}
|
||||
|
||||
|
||||
def enwin(txt):
|
||||
return "".join([bad_good.get(x, x) for x in txt])
|
||||
|
||||
for bad, good in bad_good.items():
|
||||
txt = txt.replace(bad, good)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
def dewin(txt):
|
||||
return "".join([good_bad.get(x, x) for x in txt])
|
||||
|
||||
for bad, good in bad_good.items():
|
||||
txt = txt.replace(good, bad)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
class RecentLog(object):
|
||||
def __init__(self):
|
||||
self.mtx = threading.Lock()
|
||||
self.f = None # open("copyparty-fuse.log", "wb")
|
||||
self.q = []
|
||||
|
||||
thr = threading.Thread(target=self.printer)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def put(self, msg):
|
||||
msg = "{} {}\n".format(rice_tid(), msg)
|
||||
if self.f:
|
||||
fmsg = " ".join([datetime.utcnow().strftime("%H%M%S.%f"), str(msg)])
|
||||
self.f.write(fmsg.encode("utf-8"))
|
||||
|
||||
with self.mtx:
|
||||
self.q.append(msg)
|
||||
if len(self.q) > 200:
|
||||
self.q = self.q[-50:]
|
||||
|
||||
def printer(self):
|
||||
while True:
|
||||
time.sleep(0.05)
|
||||
with self.mtx:
|
||||
q = self.q
|
||||
if not q:
|
||||
continue
|
||||
|
||||
self.q = []
|
||||
|
||||
print("".join(q), end="")
|
||||
|
||||
|
||||
# [windows/cmd/cpy3] python dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/
|
||||
# [windows/cmd/msys2] C:\msys64\mingw64\bin\python3 dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/
|
||||
# [windows/mty/msys2] /mingw64/bin/python3 /c/Users/ed/dev/copyparty/bin/copyparty-fuse.py q: http://192.168.1.159:1234/
|
||||
#
|
||||
# [windows] find /q/music/albums/Phant*24bit -printf '%s %p\n' | sort -n | tail -n 8 | sed -r 's/^[0-9]+ //' | while IFS= read -r x; do dd if="$x" of=/dev/null bs=4k count=8192 & done
|
||||
# [alpine] ll t; for x in t/2020_0724_16{2,3}*; do dd if="$x" of=/dev/null bs=4k count=10240 & done
|
||||
#
|
||||
# 72.4983 windows mintty msys2 fancy_log
|
||||
# 219.5781 windows cmd msys2 fancy_log
|
||||
# nope.avi windows cmd cpy3 fancy_log
|
||||
# 9.8817 windows mintty msys2 RecentLog 200 50 0.1
|
||||
# 10.2241 windows cmd cpy3 RecentLog 200 50 0.1
|
||||
# 9.8494 windows cmd msys2 RecentLog 200 50 0.1
|
||||
# 7.8061 windows mintty msys2 fancy_log <info-only>
|
||||
# 7.9961 windows mintty msys2 RecentLog <info-only>
|
||||
# 4.2603 alpine xfce4 cpy3 RecentLog
|
||||
# 4.1538 alpine xfce4 cpy3 fancy_log
|
||||
# 3.1742 alpine urxvt cpy3 fancy_log
|
||||
|
||||
|
||||
def get_tid():
|
||||
return threading.current_thread().ident
|
||||
|
||||
|
||||
def html_dec(txt):
    # undo the html-escaping copyparty applies to names in directory listings
    return (
        txt.replace("&lt;", "<")
        .replace("&gt;", ">")
        .replace("&quot;", '"')
        .replace("&#13;", "\r")
        .replace("&#10;", "\n")
        .replace("&amp;", "&")
    )
|
||||
|
||||
|
||||
class CacheNode(object):
|
||||
def __init__(self, tag, data):
|
||||
self.tag = tag
|
||||
@@ -89,10 +225,11 @@ class CacheNode(object):
|
||||
|
||||
|
||||
class Gateway(object):
|
||||
def __init__(self, base_url):
|
||||
self.base_url = base_url
|
||||
def __init__(self, ar):
|
||||
self.base_url = ar.base_url
|
||||
self.password = ar.a
|
||||
|
||||
ui = urllib.parse.urlparse(base_url)
|
||||
ui = urllib.parse.urlparse(self.base_url)
|
||||
self.web_root = ui.path.strip("/")
|
||||
try:
|
||||
self.web_host, self.web_port = ui.netloc.split(":")
|
||||
@@ -102,15 +239,25 @@ class Gateway(object):
|
||||
if ui.scheme == "http":
|
||||
self.web_port = 80
|
||||
elif ui.scheme == "https":
|
||||
raise Exception("todo")
|
||||
self.web_port = 443
|
||||
else:
|
||||
raise Exception("bad url?")
|
||||
|
||||
self.ssl_context = None
|
||||
self.use_tls = ui.scheme.lower() == "https"
|
||||
if self.use_tls:
|
||||
import ssl
|
||||
|
||||
if ar.td:
|
||||
self.ssl_context = ssl._create_unverified_context()
|
||||
elif ar.te:
|
||||
self.ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS)
|
||||
self.ssl_context.load_verify_locations(ar.te)
|
||||
|
||||
self.conns = {}
|
||||
|
||||
def quotep(self, path):
|
||||
# TODO: mojibake support
|
||||
path = path.encode("utf-8", "ignore")
|
||||
path = path.encode("wtf-8")
|
||||
return quote(path, safe="/")
|
||||
|
||||
def getconn(self, tid=None):
|
||||
@@ -118,9 +265,17 @@ class Gateway(object):
|
||||
try:
|
||||
return self.conns[tid]
|
||||
except:
|
||||
log("new conn [{}] [{}]".format(self.web_host, self.web_port))
|
||||
info("new conn [{}] [{}]".format(self.web_host, self.web_port))
|
||||
|
||||
conn = http.client.HTTPConnection(self.web_host, self.web_port, timeout=260)
|
||||
args = {}
|
||||
if not self.use_tls:
|
||||
C = http.client.HTTPConnection
|
||||
else:
|
||||
C = http.client.HTTPSConnection
|
||||
if self.ssl_context:
|
||||
args = {"context": self.ssl_context}
|
||||
|
||||
conn = C(self.web_host, self.web_port, timeout=260, **args)
|
||||
|
||||
self.conns[tid] = conn
|
||||
return conn
|
||||
@@ -133,42 +288,75 @@ class Gateway(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
def sendreq(self, *args, **kwargs):
|
||||
def sendreq(self, *args, headers={}, **kwargs):
|
||||
tid = get_tid()
|
||||
if self.password:
|
||||
headers["Cookie"] = "=".join(["cppwd", self.password])
|
||||
|
||||
try:
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
c.request(*list(args), headers=headers, **kwargs)
|
||||
return c.getresponse()
|
||||
except:
|
||||
self.closeconn(tid)
|
||||
dbg("bad conn")
|
||||
|
||||
self.closeconn(tid)
|
||||
try:
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
c.request(*list(args), headers=headers, **kwargs)
|
||||
return c.getresponse()
|
||||
except:
|
||||
info("http connection failed:\n" + traceback.format_exc())
|
||||
if self.use_tls and not self.ssl_context:
|
||||
import ssl
|
||||
|
||||
cert = ssl.get_server_certificate((self.web_host, self.web_port))
|
||||
info("server certificate probably not trusted:\n" + cert)
|
||||
|
||||
raise
|
||||
|
||||
def listdir(self, path):
|
||||
web_path = "/" + "/".join([self.web_root, path])
|
||||
if bad_good:
|
||||
path = dewin(path)
|
||||
|
||||
r = self.sendreq("GET", self.quotep(web_path))
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
|
||||
r = self.sendreq("GET", web_path)
|
||||
if r.status != 200:
|
||||
self.closeconn()
|
||||
raise Exception(
|
||||
"http error {} reading dir {} in {:x}".format(
|
||||
log(
|
||||
"http error {} reading dir {} in {}".format(
|
||||
r.status, web_path, rice_tid()
|
||||
)
|
||||
)
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
return self.parse_html(r)
|
||||
if not r.getheader("Content-Type", "").startswith("text/html"):
|
||||
log("listdir on file: {}".format(path))
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
try:
|
||||
return self.parse_html(r)
|
||||
except:
|
||||
info(repr(path) + "\n" + traceback.format_exc())
|
||||
raise
|
||||
|
||||
def download_file_range(self, path, ofs1, ofs2):
|
||||
web_path = "/" + "/".join([self.web_root, path])
|
||||
hdr_range = "bytes={}-{}".format(ofs1, ofs2)
|
||||
log("downloading {}".format(hdr_range))
|
||||
if bad_good:
|
||||
path = dewin(path)
|
||||
|
||||
r = self.sendreq("GET", self.quotep(web_path), headers={"Range": hdr_range})
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
|
||||
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
|
||||
info(
|
||||
"DL {:4.0f}K\033[36m{:>9}-{:<9}\033[0m{}".format(
|
||||
(ofs2 - ofs1) / 1024.0, ofs1, ofs2 - 1, hexler(path)
|
||||
)
|
||||
)
|
||||
|
||||
r = self.sendreq("GET", web_path, headers={"Range": hdr_range})
|
||||
if r.status != http.client.PARTIAL_CONTENT:
|
||||
self.closeconn()
|
||||
raise Exception(
|
||||
"http error {} reading file {} range {} in {:x}".format(
|
||||
"http error {} reading file {} range {} in {}".format(
|
||||
r.status, web_path, hdr_range, rice_tid()
|
||||
)
|
||||
)
|
||||
@@ -179,7 +367,7 @@ class Gateway(object):
|
||||
ret = []
|
||||
remainder = b""
|
||||
ptn = re.compile(
|
||||
r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
|
||||
r'^<tr><td>(-|DIR)</td><td><a[^>]* href="([^"]+)"[^>]*>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$'
|
||||
)
|
||||
|
||||
while True:
|
||||
@@ -201,9 +389,22 @@ class Gateway(object):
|
||||
# print(line)
|
||||
continue
|
||||
|
||||
ftype, fname, fsize, fdate = m.groups()
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
sz = int(fsize)
|
||||
ftype, furl, fname, fsize, fdate = m.groups()
|
||||
fname = furl.rstrip("/").split("/")[-1]
|
||||
fname = unquote(fname)
|
||||
fname = fname.decode("wtf-8")
|
||||
if bad_good:
|
||||
fname = enwin(fname)
|
||||
|
||||
sz = 1
|
||||
ts = 60 * 60 * 24 * 2
|
||||
try:
|
||||
sz = int(fsize)
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
except:
|
||||
info("bad HTML or OS [{}] [{}]".format(fdate, fsize))
|
||||
# python cannot strptime(1959-01-01) on windows
|
||||
|
||||
if ftype == "-":
|
||||
ret.append([fname, self.stat_file(ts, sz), 0])
|
||||
else:
|
||||
@@ -213,7 +414,7 @@ class Gateway(object):
|
||||
|
||||
def stat_dir(self, ts, sz=4096):
|
||||
return {
|
||||
"st_mode": 0o555 | stat.S_IFDIR,
|
||||
"st_mode": stat.S_IFDIR | 0o555,
|
||||
"st_uid": 1000,
|
||||
"st_gid": 1000,
|
||||
"st_size": sz,
|
||||
@@ -225,7 +426,7 @@ class Gateway(object):
|
||||
|
||||
def stat_file(self, ts, sz):
|
||||
return {
|
||||
"st_mode": 0o444 | stat.S_IFREG,
|
||||
"st_mode": stat.S_IFREG | 0o444,
|
||||
"st_uid": 1000,
|
||||
"st_gid": 1000,
|
||||
"st_size": sz,
|
||||
@@ -237,8 +438,11 @@ class Gateway(object):
|
||||
|
||||
|
||||
class CPPF(Operations):
|
||||
def __init__(self, base_url):
|
||||
self.gw = Gateway(base_url)
|
||||
def __init__(self, ar):
|
||||
self.gw = Gateway(ar)
|
||||
self.junk_fh_ctr = 3
|
||||
self.n_dircache = ar.cd
|
||||
self.n_filecache = ar.cf
|
||||
|
||||
self.dircache = []
|
||||
self.dircache_mtx = threading.Lock()
|
||||
@@ -246,14 +450,29 @@ class CPPF(Operations):
|
||||
self.filecache = []
|
||||
self.filecache_mtx = threading.Lock()
|
||||
|
||||
log("up")
|
||||
info("up")
|
||||
|
||||
def _describe(self):
|
||||
msg = ""
|
||||
with self.filecache_mtx:
|
||||
for n, cn in enumerate(self.filecache):
|
||||
cache_path, cache1 = cn.tag
|
||||
cache2 = cache1 + len(cn.data)
|
||||
msg += "\n{:<2} {:>7} {:>10}:{:<9} {}".format(
|
||||
n,
|
||||
len(cn.data),
|
||||
cache1,
|
||||
cache2,
|
||||
cache_path.replace("\r", "\\r").replace("\n", "\\n"),
|
||||
)
|
||||
return msg
|
||||
|
||||
def clean_dircache(self):
|
||||
"""not threadsafe"""
|
||||
now = time.time()
|
||||
cutoff = 0
|
||||
for cn in self.dircache:
|
||||
if cn.ts - now > 1:
|
||||
if now - cn.ts > self.n_dircache:
|
||||
cutoff += 1
|
||||
else:
|
||||
break
|
||||
@@ -262,8 +481,7 @@ class CPPF(Operations):
|
||||
self.dircache = self.dircache[cutoff:]
|
||||
|
||||
def get_cached_dir(self, dirpath):
|
||||
# with self.dircache_mtx:
|
||||
if True:
|
||||
with self.dircache_mtx:
|
||||
self.clean_dircache()
|
||||
for cn in self.dircache:
|
||||
if cn.tag == dirpath:
|
||||
@@ -300,9 +518,8 @@ class CPPF(Operations):
|
||||
car = None
|
||||
cdr = None
|
||||
ncn = -1
|
||||
# with self.filecache_mtx:
|
||||
if True:
|
||||
dbg("cache request from {} to {}, size {}".format(get1, get2, file_sz))
|
||||
dbg("cache request {}:{} |{}|".format(get1, get2, file_sz) + self._describe())
|
||||
with self.filecache_mtx:
|
||||
for cn in self.filecache:
|
||||
ncn += 1
|
||||
|
||||
@@ -312,6 +529,12 @@ class CPPF(Operations):
|
||||
|
||||
cache2 = cache1 + len(cn.data)
|
||||
if get2 <= cache1 or get1 >= cache2:
|
||||
# request does not overlap with cached area at all
|
||||
continue
|
||||
|
||||
if get1 < cache1 and get2 > cache2:
|
||||
# cached area does overlap, but must specifically contain
|
||||
# either the first or last byte in the requested range
|
||||
continue
|
||||
|
||||
if get1 >= cache1 and get2 <= cache2:
|
||||
@@ -322,7 +545,7 @@ class CPPF(Operations):
|
||||
buf_ofs = get1 - cache1
|
||||
buf_end = buf_ofs + (get2 - get1)
|
||||
dbg(
|
||||
"found all ({}, {} to {}, len {}) [{}:{}] = {}".format(
|
||||
"found all (#{} {}:{} |{}|) [{}:{}] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
@@ -334,11 +557,11 @@ class CPPF(Operations):
|
||||
)
|
||||
return cn.data[buf_ofs:buf_end]
|
||||
|
||||
if get2 < cache2:
|
||||
if get2 <= cache2:
|
||||
x = cn.data[: get2 - cache1]
|
||||
if not cdr or len(cdr) < len(x):
|
||||
dbg(
|
||||
"found car ({}, {} to {}, len {}) [:{}-{}] = [:{}] = {}".format(
|
||||
"found cdr (#{} {}:{} |{}|) [:{}-{}] = [:{}] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
@@ -353,11 +576,11 @@ class CPPF(Operations):
|
||||
|
||||
continue
|
||||
|
||||
if get1 > cache1:
|
||||
x = cn.data[-(cache2 - get1) :]
|
||||
if get1 >= cache1:
|
||||
x = cn.data[-(max(0, cache2 - get1)) :]
|
||||
if not car or len(car) < len(x):
|
||||
dbg(
|
||||
"found cdr ({}, {} to {}, len {}) [-({}-{}):] = [-{}:] = {}".format(
|
||||
"found car (#{} {}:{} |{}|) [-({}-{}):] = [-{}:] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
@@ -372,38 +595,52 @@ class CPPF(Operations):
|
||||
|
||||
continue
|
||||
|
||||
raise Exception("what")
|
||||
msg = "cache fallthrough\n{} {} {}\n{} {} {}\n{} {} --\n".format(
|
||||
get1,
|
||||
get2,
|
||||
get2 - get1,
|
||||
cache1,
|
||||
cache2,
|
||||
cache2 - cache1,
|
||||
get1 - cache1,
|
||||
get2 - cache2,
|
||||
)
|
||||
msg += self._describe()
|
||||
raise Exception(msg)
|
||||
|
||||
if car and cdr:
|
||||
if car and cdr and len(car) + len(cdr) == get2 - get1:
|
||||
dbg("<cache> have both")
|
||||
return car + cdr
|
||||
|
||||
ret = car + cdr
|
||||
if len(ret) == get2 - get1:
|
||||
return ret
|
||||
|
||||
raise Exception("{} + {} != {} - {}".format(len(car), len(cdr), get2, get1))
|
||||
|
||||
elif cdr:
|
||||
elif cdr and (not car or len(car) < len(cdr)):
|
||||
h_end = get1 + (get2 - get1) - len(cdr)
|
||||
h_ofs = h_end - 512 * 1024
|
||||
h_ofs = min(get1, h_end - 512 * 1024)
|
||||
|
||||
if h_ofs < 0:
|
||||
h_ofs = 0
|
||||
|
||||
buf_ofs = (get2 - get1) - len(cdr)
|
||||
buf_ofs = get1 - h_ofs
|
||||
|
||||
dbg(
|
||||
"<cache> cdr {}, car {}-{}={} [-{}:]".format(
|
||||
"<cache> cdr {}, car {}:{} |{}| [{}:]".format(
|
||||
len(cdr), h_ofs, h_end, h_end - h_ofs, buf_ofs
|
||||
)
|
||||
)
|
||||
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end - 1)
|
||||
ret = buf[-buf_ofs:] + cdr
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end)
|
||||
if len(buf) == h_end - h_ofs:
|
||||
ret = buf[buf_ofs:] + cdr
|
||||
else:
|
||||
ret = buf[get1 - h_ofs :]
|
||||
info(
|
||||
"remote truncated {}:{} to |{}|, will return |{}|".format(
|
||||
h_ofs, h_end, len(buf), len(ret)
|
||||
)
|
||||
)
|
||||
|
||||
elif car:
|
||||
h_ofs = get1 + len(car)
|
||||
h_end = h_ofs + 1024 * 1024
|
||||
h_end = max(get2, h_ofs + 1024 * 1024)
|
||||
|
||||
if h_end > file_sz:
|
||||
h_end = file_sz
|
||||
@@ -411,17 +648,22 @@ class CPPF(Operations):
|
||||
buf_ofs = (get2 - get1) - len(car)
|
||||
|
||||
dbg(
|
||||
"<cache> car {}, cdr {}-{}={} [:{}]".format(
|
||||
"<cache> car {}, cdr {}:{} |{}| [:{}]".format(
|
||||
len(car), h_ofs, h_end, h_end - h_ofs, buf_ofs
|
||||
)
|
||||
)
|
||||
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end - 1)
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end)
|
||||
ret = car + buf[:buf_ofs]
|
||||
|
||||
else:
|
||||
h_ofs = get1 - 256 * 1024
|
||||
h_end = get2 + 1024 * 1024
|
||||
if get2 - get1 <= 1024 * 1024:
|
||||
h_ofs = get1 - 256 * 1024
|
||||
h_end = get2 + 1024 * 1024
|
||||
else:
|
||||
# big enough, doesn't need pads
|
||||
h_ofs = get1
|
||||
h_end = get2
|
||||
|
||||
if h_ofs < 0:
|
||||
h_ofs = 0
|
||||
@@ -433,54 +675,99 @@ class CPPF(Operations):
|
||||
buf_end = buf_ofs + get2 - get1
|
||||
|
||||
dbg(
|
||||
"<cache> {}-{}={} [{}:{}]".format(
|
||||
"<cache> {}:{} |{}| [{}:{}]".format(
|
||||
h_ofs, h_end, h_end - h_ofs, buf_ofs, buf_end
|
||||
)
|
||||
)
|
||||
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end - 1)
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end)
|
||||
ret = buf[buf_ofs:buf_end]
|
||||
|
||||
cn = CacheNode([path, h_ofs], buf)
|
||||
# with self.filecache_mtx:
|
||||
if True:
|
||||
if len(self.filecache) > 6:
|
||||
with self.filecache_mtx:
|
||||
if len(self.filecache) >= self.n_filecache:
|
||||
self.filecache = self.filecache[1:] + [cn]
|
||||
else:
|
||||
self.filecache.append(cn)
|
||||
|
||||
return ret
|
||||
|
||||
def readdir(self, path, fh=None):
|
||||
def _readdir(self, path, fh=None):
|
||||
path = path.strip("/")
|
||||
log("readdir {}".format(path))
|
||||
log("readdir [{}] [{}]".format(hexler(path), fh))
|
||||
|
||||
ret = self.gw.listdir(path)
|
||||
if not self.n_dircache:
|
||||
return ret
|
||||
|
||||
# with self.dircache_mtx:
|
||||
if True:
|
||||
with self.dircache_mtx:
|
||||
cn = CacheNode(path, ret)
|
||||
self.dircache.append(cn)
|
||||
self.clean_dircache()
|
||||
|
||||
return ret
|
||||
|
||||
def readdir(self, path, fh=None):
|
||||
return [".", ".."] + self._readdir(path, fh)
|
||||
|
||||
def read(self, path, length, offset, fh=None):
|
||||
req_max = 1024 * 1024 * 8
|
||||
cache_max = 1024 * 1024 * 2
|
||||
if length > req_max:
|
||||
# windows actually doing 240 MiB read calls, sausage
|
||||
info("truncate |{}| to {}MiB".format(length, req_max >> 20))
|
||||
length = req_max
|
||||
|
||||
path = path.strip("/")
|
||||
|
||||
ofs2 = offset + length
|
||||
log("read {} @ {} len {} end {}".format(path, offset, length, ofs2))
|
||||
|
||||
file_sz = self.getattr(path)["st_size"]
|
||||
if ofs2 >= file_sz:
|
||||
ofs2 = file_sz - 1
|
||||
log("truncate to len {} end {}".format((ofs2 - offset) + 1, ofs2))
|
||||
log(
|
||||
"read {} |{}| {}:{} max {}".format(
|
||||
hexler(path), length, offset, ofs2, file_sz
|
||||
)
|
||||
)
|
||||
if ofs2 > file_sz:
|
||||
ofs2 = file_sz
|
||||
log("truncate to |{}| :{}".format(ofs2 - offset, ofs2))
|
||||
|
||||
# toggle cache here i suppose
|
||||
# return self.get_cached_file(path, offset, ofs2, file_sz)
|
||||
return self.gw.download_file_range(path, offset, ofs2 - 1)
|
||||
if file_sz == 0 or offset >= ofs2:
|
||||
return b""
|
||||
|
||||
if self.n_filecache and length <= cache_max:
|
||||
ret = self.get_cached_file(path, offset, ofs2, file_sz)
|
||||
else:
|
||||
ret = self.gw.download_file_range(path, offset, ofs2)
|
||||
|
||||
return ret
|
||||
|
||||
fn = "cppf-{}-{}-{}".format(time.time(), offset, length)
|
||||
if False:
|
||||
with open(fn, "wb", len(ret)) as f:
|
||||
f.write(ret)
|
||||
elif self.n_filecache:
|
||||
ret2 = self.gw.download_file_range(path, offset, ofs2)
|
||||
if ret != ret2:
|
||||
info(fn)
|
||||
for v in [ret, ret2]:
|
||||
try:
|
||||
info(len(v))
|
||||
except:
|
||||
info("uhh " + repr(v))
|
||||
|
||||
with open(fn + ".bad", "wb") as f:
|
||||
f.write(ret)
|
||||
with open(fn + ".good", "wb") as f:
|
||||
f.write(ret2)
|
||||
|
||||
raise Exception("cache bork")
|
||||
|
||||
return ret
|
||||
|
||||
def getattr(self, path, fh=None):
|
||||
log("getattr [{}]".format(hexler(path)))
|
||||
if WINDOWS:
|
||||
path = enwin(path) # windows occasionally decodes f0xx to xx
|
||||
|
||||
path = path.strip("/")
|
||||
try:
|
||||
dirpath, fname = path.rsplit("/", 1)
|
||||
@@ -488,23 +775,34 @@ class CPPF(Operations):
|
||||
dirpath = ""
|
||||
fname = path
|
||||
|
||||
log("getattr {}".format(path))
|
||||
|
||||
if not path:
|
||||
return self.gw.stat_dir(time.time())
|
||||
ret = self.gw.stat_dir(time.time())
|
||||
# dbg("=" + repr(ret))
|
||||
return ret
|
||||
|
||||
cn = self.get_cached_dir(dirpath)
|
||||
if cn:
|
||||
# log('cache ok')
|
||||
log("cache ok")
|
||||
dents = cn.data
|
||||
else:
|
||||
log("cache miss")
|
||||
dents = self.readdir(dirpath)
|
||||
dbg("cache miss")
|
||||
dents = self._readdir(dirpath)
|
||||
|
||||
for cache_name, cache_stat, _ in dents:
|
||||
# if "qw" in cache_name and "qw" in fname:
|
||||
# info(
|
||||
# "cmp\n [{}]\n [{}]\n\n{}\n".format(
|
||||
# hexler(cache_name),
|
||||
# hexler(fname),
|
||||
# "\n".join(traceback.format_stack()[:-1]),
|
||||
# )
|
||||
# )
|
||||
|
||||
if cache_name == fname:
|
||||
# dbg("=" + repr(cache_stat))
|
||||
return cache_stat
|
||||
|
||||
info("=ENOENT ({})".format(hexler(path)))
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
access = None
|
||||
@@ -517,17 +815,178 @@ class CPPF(Operations):
|
||||
releasedir = None
|
||||
statfs = None
|
||||
|
||||
if False:
|
||||
# incorrect semantics but good for debugging stuff like samba and msys2
|
||||
def access(self, path, mode):
|
||||
log("@@ access [{}] [{}]".format(path, mode))
|
||||
return 1 if self.getattr(path) else 0
|
||||
|
||||
def flush(self, path, fh):
|
||||
log("@@ flush [{}] [{}]".format(path, fh))
|
||||
return True
|
||||
|
||||
def getxattr(self, *args):
|
||||
log("@@ getxattr [{}]".format("] [".join(str(x) for x in args)))
|
||||
return False
|
||||
|
||||
def listxattr(self, *args):
|
||||
log("@@ listxattr [{}]".format("] [".join(str(x) for x in args)))
|
||||
return False
|
||||
|
||||
def open(self, path, flags):
|
||||
log("@@ open [{}] [{}]".format(path, flags))
|
||||
return 42
|
||||
|
||||
def opendir(self, fh):
|
||||
log("@@ opendir [{}]".format(fh))
|
||||
return 69
|
||||
|
||||
def release(self, ino, fi):
|
||||
log("@@ release [{}] [{}]".format(ino, fi))
|
||||
return True
|
||||
|
||||
def releasedir(self, ino, fi):
|
||||
log("@@ releasedir [{}] [{}]".format(ino, fi))
|
||||
return True
|
||||
|
||||
def statfs(self, path):
|
||||
log("@@ statfs [{}]".format(path))
|
||||
return {}
|
||||
|
||||
if sys.platform == "win32":
|
||||
# quick compat for /mingw64/bin/python3 (msys2)
|
||||
def _open(self, path):
|
||||
try:
|
||||
x = self.getattr(path)
|
||||
if x["st_mode"] <= 0:
|
||||
raise Exception()
|
||||
|
||||
self.junk_fh_ctr += 1
|
||||
if self.junk_fh_ctr > 32000: # TODO untested
|
||||
self.junk_fh_ctr = 4
|
||||
|
||||
return self.junk_fh_ctr
|
||||
|
||||
except Exception as ex:
|
||||
log("open ERR {}".format(repr(ex)))
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
def open(self, path, flags):
|
||||
dbg("open [{}] [{}]".format(hexler(path), flags))
|
||||
return self._open(path)
|
||||
|
||||
def opendir(self, path):
|
||||
dbg("opendir [{}]".format(hexler(path)))
|
||||
return self._open(path)
|
||||
|
||||
def flush(self, path, fh):
|
||||
dbg("flush [{}] [{}]".format(hexler(path), fh))
|
||||
|
||||
def release(self, ino, fi):
|
||||
dbg("release [{}] [{}]".format(hexler(ino), fi))
|
||||
|
||||
def releasedir(self, ino, fi):
|
||||
dbg("releasedir [{}] [{}]".format(hexler(ino), fi))
|
||||
|
||||
def access(self, path, mode):
|
||||
dbg("access [{}] [{}]".format(hexler(path), mode))
|
||||
try:
|
||||
x = self.getattr(path)
|
||||
if x["st_mode"] <= 0:
|
||||
raise Exception()
|
||||
except:
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
|
||||
class TheArgparseFormatter(
|
||||
argparse.RawTextHelpFormatter, argparse.ArgumentDefaultsHelpFormatter
|
||||
):
|
||||
pass
|
||||
|
||||
|
||||
def main():
|
||||
try:
|
||||
local, remote = sys.argv[1:]
|
||||
except:
|
||||
print("need arg 1: local directory")
|
||||
print("need arg 2: root url")
|
||||
return
|
||||
global info, log, dbg
|
||||
|
||||
FUSE(CPPF(remote), local, foreground=True, nothreads=True)
|
||||
# if nothreads=False also uncomment the `with *_mtx` things
|
||||
# filecache helps for reads that are ~64k or smaller;
|
||||
# linux generally does 128k so the cache is a slowdown,
|
||||
# windows likes to use 4k and 64k so cache is required,
|
||||
# value is numChunks (1~3M each) to keep in the cache
|
||||
nf = 24
|
||||
|
||||
# dircache is always a boost,
|
||||
# only want to disable it for tests etc,
|
||||
# value is numSec until an entry goes stale
|
||||
nd = 1
|
||||
|
||||
where = "local directory"
|
||||
if WINDOWS:
|
||||
where += " or DRIVE:"
|
||||
|
||||
ex_pre = "\n " + os.path.basename(__file__) + " "
|
||||
examples = ["http://192.168.1.69:3923/music/ ./music"]
|
||||
if WINDOWS:
|
||||
examples.append("http://192.168.1.69:3923/music/ M:")
|
||||
|
||||
ap = argparse.ArgumentParser(
|
||||
formatter_class=TheArgparseFormatter,
|
||||
epilog="example:" + ex_pre + ex_pre.join(examples),
|
||||
)
|
||||
ap.add_argument(
|
||||
"-cd", metavar="NUM_SECONDS", type=float, default=nd, help="directory cache"
|
||||
)
|
||||
ap.add_argument(
|
||||
"-cf", metavar="NUM_BLOCKS", type=int, default=nf, help="file cache"
|
||||
)
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password")
|
||||
ap.add_argument("-d", action="store_true", help="enable debug")
|
||||
ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
|
||||
ap.add_argument("-td", action="store_true", help="disable certificate check")
|
||||
ap.add_argument("base_url", type=str, help="remote copyparty URL to mount")
|
||||
ap.add_argument("local_path", type=str, help=where + " to mount it on")
|
||||
ar = ap.parse_args()
|
||||
|
||||
if ar.d:
|
||||
# windows terminals are slow (cmd.exe, mintty)
|
||||
# otoh fancy_log beats RecentLog on linux
|
||||
logger = RecentLog().put if WINDOWS else fancy_log
|
||||
|
||||
info = logger
|
||||
log = logger
|
||||
dbg = logger
|
||||
else:
|
||||
# debug=off, speed is dontcare
|
||||
info = fancy_log
|
||||
log = null_log
|
||||
dbg = null_log
|
||||
|
||||
if WINDOWS:
|
||||
os.system("")
|
||||
|
||||
for ch in '<>:"\\|?*':
|
||||
# microsoft maps illegal characters to f0xx
|
||||
# (e000 to f8ff is basic-plane private-use)
|
||||
bad_good[ch] = chr(ord(ch) + 0xF000)
|
||||
|
||||
for n in range(0, 0x100):
|
||||
# map surrogateescape to another private-use area
|
||||
bad_good[chr(n + 0xDC00)] = chr(n + 0xF100)
|
||||
|
||||
for k, v in bad_good.items():
|
||||
good_bad[v] = k
|
||||
|
||||
register_wtf8()
|
||||
|
||||
try:
|
||||
with open("/etc/fuse.conf", "rb") as f:
|
||||
allow_other = b"\nuser_allow_other" in f.read()
|
||||
except:
|
||||
allow_other = WINDOWS or MACOS
|
||||
|
||||
args = {"foreground": True, "nothreads": True, "allow_other": allow_other}
|
||||
if not MACOS:
|
||||
args["nonempty"] = True
|
||||
|
||||
FUSE(CPPF(ar), ar.local_path, encoding="wtf-8", **args)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
590
bin/copyparty-fuseb.py
Executable file
590
bin/copyparty-fuseb.py
Executable file
@@ -0,0 +1,590 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
"""copyparty-fuseb: remote copyparty as a local filesystem"""
|
||||
__author__ = "ed <copyparty@ocv.me>"
|
||||
__copyright__ = 2020
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import stat
|
||||
import errno
|
||||
import struct
|
||||
import threading
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
|
||||
try:
|
||||
import fuse
|
||||
from fuse import Fuse
|
||||
|
||||
fuse.fuse_python_api = (0, 2)
|
||||
if not hasattr(fuse, "__version__"):
|
||||
raise Exception("your fuse-python is way old")
|
||||
except:
|
||||
print(
|
||||
"\n could not import fuse; these may help:\n python3 -m pip install --user fuse-python\n apt install libfuse\n modprobe fuse\n"
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
"""
|
||||
mount a copyparty server (local or remote) as a filesystem
|
||||
|
||||
usage:
|
||||
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas
|
||||
|
||||
dependencies:
|
||||
sudo apk add fuse-dev python3-dev
|
||||
python3 -m pip install --user fuse-python
|
||||
|
||||
fork of copyparty-fuse.py based on fuse-python which
|
||||
appears to be more compliant than fusepy? since this works with samba
|
||||
(probably just my garbage code tbh)
|
||||
"""
|
||||
|
||||
|
||||
def threadless_log(msg):
|
||||
print(msg + "\n", end="")
|
||||
|
||||
|
||||
def boring_log(msg):
|
||||
msg = "\033[36m{:012x}\033[0m {}\n".format(threading.current_thread().ident, msg)
|
||||
print(msg[4:], end="")
|
||||
|
||||
|
||||
def rice_tid():
|
||||
tid = threading.current_thread().ident
|
||||
c = struct.unpack(b"B" * 5, struct.pack(b">Q", tid)[-5:])
|
||||
return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m"
|
||||
|
||||
|
||||
def fancy_log(msg):
|
||||
print("{} {}\n".format(rice_tid(), msg), end="")
|
||||
|
||||
|
||||
def null_log(msg):
|
||||
pass
|
||||
|
||||
|
||||
info = fancy_log
|
||||
log = fancy_log
|
||||
dbg = fancy_log
|
||||
log = null_log
|
||||
dbg = null_log
|
||||
|
||||
|
||||
def get_tid():
|
||||
return threading.current_thread().ident
|
||||
|
||||
|
||||
def html_dec(txt):
    # undo the html-escaping copyparty applies to names in directory listings
    return (
        txt.replace("&lt;", "<")
        .replace("&gt;", ">")
        .replace("&quot;", '"')
        .replace("&amp;", "&")
    )
|
||||
|
||||
|
||||
class CacheNode(object):
|
||||
def __init__(self, tag, data):
|
||||
self.tag = tag
|
||||
self.data = data
|
||||
self.ts = time.time()
|
||||
|
||||
|
||||
class Stat(fuse.Stat):
|
||||
def __init__(self):
|
||||
self.st_mode = 0
|
||||
self.st_ino = 0
|
||||
self.st_dev = 0
|
||||
self.st_nlink = 1
|
||||
self.st_uid = 1000
|
||||
self.st_gid = 1000
|
||||
self.st_size = 0
|
||||
self.st_atime = 0
|
||||
self.st_mtime = 0
|
||||
self.st_ctime = 0
|
||||
|
||||
|
||||
class Gateway(object):
|
||||
def __init__(self, base_url):
|
||||
self.base_url = base_url
|
||||
|
||||
ui = urllib.parse.urlparse(base_url)
|
||||
self.web_root = ui.path.strip("/")
|
||||
try:
|
||||
self.web_host, self.web_port = ui.netloc.split(":")
|
||||
self.web_port = int(self.web_port)
|
||||
except:
|
||||
self.web_host = ui.netloc
|
||||
if ui.scheme == "http":
|
||||
self.web_port = 80
|
||||
elif ui.scheme == "https":
|
||||
raise Exception("todo")
|
||||
else:
|
||||
raise Exception("bad url?")
|
||||
|
||||
self.conns = {}
|
||||
|
||||
def quotep(self, path):
|
||||
# TODO: mojibake support
|
||||
path = path.encode("utf-8", "ignore")
|
||||
return quote(path, safe="/")
|
||||
|
||||
def getconn(self, tid=None):
|
||||
tid = tid or get_tid()
|
||||
try:
|
||||
return self.conns[tid]
|
||||
except:
|
||||
info("new conn [{}] [{}]".format(self.web_host, self.web_port))
|
||||
|
||||
conn = http.client.HTTPConnection(self.web_host, self.web_port, timeout=260)
|
||||
|
||||
self.conns[tid] = conn
|
||||
return conn
|
||||
|
||||
def closeconn(self, tid=None):
|
||||
tid = tid or get_tid()
|
||||
try:
|
||||
self.conns[tid].close()
|
||||
del self.conns[tid]
|
||||
except:
|
||||
pass
|
||||
|
||||
def sendreq(self, *args, **kwargs):
|
||||
tid = get_tid()
|
||||
try:
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
return c.getresponse()
|
||||
except:
|
||||
self.closeconn(tid)
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
return c.getresponse()
|
||||
|
||||
def listdir(self, path):
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
|
||||
r = self.sendreq("GET", web_path)
|
||||
if r.status != 200:
|
||||
self.closeconn()
|
||||
raise Exception(
|
||||
"http error {} reading dir {} in {}".format(
|
||||
r.status, web_path, rice_tid()
|
||||
)
|
||||
)
|
||||
|
||||
return self.parse_html(r)
|
||||
|
||||
def download_file_range(self, path, ofs1, ofs2):
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
|
||||
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
|
||||
log("downloading {}".format(hdr_range))
|
||||
|
||||
r = self.sendreq("GET", web_path, headers={"Range": hdr_range})
|
||||
if r.status != http.client.PARTIAL_CONTENT:
|
||||
self.closeconn()
|
||||
raise Exception(
|
||||
"http error {} reading file {} range {} in {}".format(
|
||||
r.status, web_path, hdr_range, rice_tid()
|
||||
)
|
||||
)
|
||||
|
||||
return r.read()
|
||||
|
||||
def parse_html(self, datasrc):
|
||||
ret = []
|
||||
remainder = b""
|
||||
ptn = re.compile(
|
||||
r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
|
||||
)
|
||||
|
||||
while True:
|
||||
buf = remainder + datasrc.read(4096)
|
||||
# print('[{}]'.format(buf.decode('utf-8')))
|
||||
if not buf:
|
||||
break
|
||||
|
||||
remainder = b""
|
||||
endpos = buf.rfind(b"\n")
|
||||
if endpos >= 0:
|
||||
remainder = buf[endpos + 1 :]
|
||||
buf = buf[:endpos]
|
||||
|
||||
lines = buf.decode("utf-8").split("\n")
|
||||
for line in lines:
|
||||
m = ptn.match(line)
|
||||
if not m:
|
||||
# print(line)
|
||||
continue
|
||||
|
||||
ftype, fname, fsize, fdate = m.groups()
|
||||
fname = html_dec(fname)
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
sz = int(fsize)
|
||||
if ftype == "-":
|
||||
ret.append([fname, self.stat_file(ts, sz), 0])
|
||||
else:
|
||||
ret.append([fname, self.stat_dir(ts, sz), 0])
|
||||
|
||||
return ret
|
||||
|
||||
def stat_dir(self, ts, sz=4096):
|
||||
ret = Stat()
|
||||
ret.st_mode = stat.S_IFDIR | 0o555
|
||||
ret.st_nlink = 2
|
||||
ret.st_size = sz
|
||||
ret.st_atime = ts
|
||||
ret.st_mtime = ts
|
||||
ret.st_ctime = ts
|
||||
return ret
|
||||
|
||||
def stat_file(self, ts, sz):
|
||||
ret = Stat()
|
||||
ret.st_mode = stat.S_IFREG | 0o444
|
||||
ret.st_size = sz
|
||||
ret.st_atime = ts
|
||||
ret.st_mtime = ts
|
||||
ret.st_ctime = ts
|
||||
return ret
|
||||
|
||||
|
||||
class CPPF(Fuse):
|
||||
def __init__(self, *args, **kwargs):
|
||||
Fuse.__init__(self, *args, **kwargs)
|
||||
|
||||
self.url = None
|
||||
|
||||
self.dircache = []
|
||||
self.dircache_mtx = threading.Lock()
|
||||
|
||||
self.filecache = []
|
||||
self.filecache_mtx = threading.Lock()
|
||||
|
||||
def init2(self):
|
||||
# TODO figure out how python-fuse wanted this to go
|
||||
self.gw = Gateway(self.url) # .decode('utf-8'))
|
||||
info("up")
|
||||
|
||||
def clean_dircache(self):
|
||||
"""not threadsafe"""
|
||||
now = time.time()
|
||||
cutoff = 0
|
||||
for cn in self.dircache:
|
||||
if now - cn.ts > 1:
|
||||
cutoff += 1
|
||||
else:
|
||||
break
|
||||
|
||||
if cutoff > 0:
|
||||
self.dircache = self.dircache[cutoff:]
|
||||
|
||||
def get_cached_dir(self, dirpath):
|
||||
# with self.dircache_mtx:
|
||||
if True:
|
||||
self.clean_dircache()
|
||||
for cn in self.dircache:
|
||||
if cn.tag == dirpath:
|
||||
return cn
|
||||
|
||||
return None
|
||||
|
||||
"""
|
||||
,-------------------------------, g1>=c1, g2<=c2
|
||||
|cache1 cache2| buf[g1-c1:(g1-c1)+(g2-g1)]
|
||||
`-------------------------------'
|
||||
,---------------,
|
||||
|get1 get2|
|
||||
`---------------'
|
||||
__________________________________________________________________________
|
||||
|
||||
,-------------------------------, g2<=c2, (g2>=c1)
|
||||
|cache1 cache2| cdr=buf[:g2-c1]
|
||||
`-------------------------------' dl car; g1-512K:c1
|
||||
,---------------,
|
||||
|get1 get2|
|
||||
`---------------'
|
||||
__________________________________________________________________________
|
||||
|
||||
,-------------------------------, g1>=c1, (g1<=c2)
|
||||
|cache1 cache2| car=buf[c2-g1:]
|
||||
`-------------------------------' dl cdr; c2:c2+1M
|
||||
,---------------,
|
||||
|get1 get2|
|
||||
`---------------'
|
||||
"""
|
||||
|
||||
def get_cached_file(self, path, get1, get2, file_sz):
|
||||
car = None
|
||||
cdr = None
|
||||
ncn = -1
|
||||
# with self.filecache_mtx:
|
||||
if True:
|
||||
dbg("cache request from {} to {}, size {}".format(get1, get2, file_sz))
|
||||
for cn in self.filecache:
|
||||
ncn += 1
|
||||
|
||||
cache_path, cache1 = cn.tag
|
||||
if cache_path != path:
|
||||
continue
|
||||
|
||||
cache2 = cache1 + len(cn.data)
|
||||
if get2 <= cache1 or get1 >= cache2:
|
||||
continue
|
||||
|
||||
if get1 >= cache1 and get2 <= cache2:
|
||||
# keep cache entry alive by moving it to the end
|
||||
self.filecache = (
|
||||
self.filecache[:ncn] + self.filecache[ncn + 1 :] + [cn]
|
||||
)
|
||||
buf_ofs = get1 - cache1
|
||||
buf_end = buf_ofs + (get2 - get1)
|
||||
dbg(
|
||||
"found all ({}, {} to {}, len {}) [{}:{}] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
len(cn.data),
|
||||
buf_ofs,
|
||||
buf_end,
|
||||
buf_end - buf_ofs,
|
||||
)
|
||||
)
|
||||
return cn.data[buf_ofs:buf_end]
|
||||
|
||||
if get2 < cache2:
|
||||
x = cn.data[: get2 - cache1]
|
||||
if not cdr or len(cdr) < len(x):
|
||||
dbg(
|
||||
"found car ({}, {} to {}, len {}) [:{}-{}] = [:{}] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
len(cn.data),
|
||||
get2,
|
||||
cache1,
|
||||
get2 - cache1,
|
||||
len(x),
|
||||
)
|
||||
)
|
||||
cdr = x
|
||||
|
||||
continue
|
||||
|
||||
if get1 > cache1:
|
||||
x = cn.data[-(cache2 - get1) :]
|
||||
if not car or len(car) < len(x):
|
||||
dbg(
|
||||
"found cdr ({}, {} to {}, len {}) [-({}-{}):] = [-{}:] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
len(cn.data),
|
||||
cache2,
|
||||
get1,
|
||||
cache2 - get1,
|
||||
len(x),
|
||||
)
|
||||
)
|
||||
car = x
|
||||
|
||||
continue
|
||||
|
||||
raise Exception("what")
|
||||
|
||||
if car and cdr:
|
||||
dbg("<cache> have both")
|
||||
|
||||
ret = car + cdr
|
||||
if len(ret) == get2 - get1:
|
||||
return ret
|
||||
|
||||
raise Exception("{} + {} != {} - {}".format(len(car), len(cdr), get2, get1))
|
||||
|
||||
elif cdr:
|
||||
h_end = get1 + (get2 - get1) - len(cdr)
|
||||
h_ofs = h_end - 512 * 1024
|
||||
|
||||
if h_ofs < 0:
|
||||
h_ofs = 0
|
||||
|
||||
buf_ofs = (get2 - get1) - len(cdr)
|
||||
|
||||
dbg(
|
||||
"<cache> cdr {}, car {}-{}={} [-{}:]".format(
|
||||
len(cdr), h_ofs, h_end, h_end - h_ofs, buf_ofs
|
||||
)
|
||||
)
|
||||
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end)
|
||||
ret = buf[-buf_ofs:] + cdr
|
||||
|
||||
elif car:
|
||||
h_ofs = get1 + len(car)
|
||||
h_end = h_ofs + 1024 * 1024
|
||||
|
||||
if h_end > file_sz:
|
||||
h_end = file_sz
|
||||
|
||||
buf_ofs = (get2 - get1) - len(car)
|
||||
|
||||
dbg(
|
||||
"<cache> car {}, cdr {}-{}={} [:{}]".format(
|
||||
len(car), h_ofs, h_end, h_end - h_ofs, buf_ofs
|
||||
)
|
||||
)
|
||||
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end)
|
||||
ret = car + buf[:buf_ofs]
|
||||
|
||||
else:
|
||||
h_ofs = get1 - 256 * 1024
|
||||
h_end = get2 + 1024 * 1024
|
||||
|
||||
if h_ofs < 0:
|
||||
h_ofs = 0
|
||||
|
||||
if h_end > file_sz:
|
||||
h_end = file_sz
|
||||
|
||||
buf_ofs = get1 - h_ofs
|
||||
buf_end = buf_ofs + get2 - get1
|
||||
|
||||
dbg(
|
||||
"<cache> {}-{}={} [{}:{}]".format(
|
||||
h_ofs, h_end, h_end - h_ofs, buf_ofs, buf_end
|
||||
)
|
||||
)
|
||||
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end)
|
||||
ret = buf[buf_ofs:buf_end]
|
||||
|
||||
cn = CacheNode([path, h_ofs], buf)
|
||||
# with self.filecache_mtx:
|
||||
if True:
|
||||
if len(self.filecache) > 6:
|
||||
self.filecache = self.filecache[1:] + [cn]
|
||||
else:
|
||||
self.filecache.append(cn)
|
||||
|
||||
return ret
|

    def _readdir(self, path):
        path = path.strip("/")
        log("readdir {}".format(path))

        ret = self.gw.listdir(path)

        # with self.dircache_mtx:
        if True:
            cn = CacheNode(path, ret)
            self.dircache.append(cn)
            self.clean_dircache()

        return ret

    def readdir(self, path, offset):
        for e in self._readdir(path)[offset:]:
            # log("yield [{}]".format(e[0]))
            yield fuse.Direntry(e[0])

    def open(self, path, flags):
        if (flags & (os.O_RDONLY | os.O_WRONLY | os.O_RDWR)) != os.O_RDONLY:
            return -errno.EACCES

        st = self.getattr(path)
        try:
            if st.st_nlink > 0:
                return st
        except:
            return st  # -int(os.errcode)

    def read(self, path, length, offset, fh=None, *args):
        if args:
            log("unexpected args [" + "] [".join(repr(x) for x in args) + "]")
            raise Exception()

        path = path.strip("/")

        ofs2 = offset + length
        log("read {} @ {} len {} end {}".format(path, offset, length, ofs2))

        st = self.getattr(path)
        try:
            file_sz = st.st_size
        except:
            return st  # -int(os.errcode)

        if ofs2 > file_sz:
            ofs2 = file_sz
            log("truncate to len {} end {}".format(ofs2 - offset, ofs2))

        if file_sz == 0 or offset >= ofs2:
            return b""

        # toggle cache here i suppose
        # return self.get_cached_file(path, offset, ofs2, file_sz)
        return self.gw.download_file_range(path, offset, ofs2)

    def getattr(self, path):
        log("getattr [{}]".format(path))

        path = path.strip("/")
        try:
            dirpath, fname = path.rsplit("/", 1)
        except:
            dirpath = ""
            fname = path

        if not path:
            ret = self.gw.stat_dir(time.time())
            dbg("=root")
            return ret

        cn = self.get_cached_dir(dirpath)
        if cn:
            log("cache ok")
            dents = cn.data
        else:
            log("cache miss")
            dents = self._readdir(dirpath)

        for cache_name, cache_stat, _ in dents:
            if cache_name == fname:
                dbg("=file")
                return cache_stat

        log("=404")
        return -errno.ENOENT


def main():
    server = CPPF()
    server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
    server.parse(values=server, errex=1)
    if not server.url or not str(server.url).startswith("http"):
        print("\nerror:")
        print(" need argument: -o url=<...>")
        print(" need argument: mount-path")
        print("example:")
        print(
            " ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas"
        )
        sys.exit(1)

    server.init2()
    threading.Thread(target=server.main, daemon=True).start()
    while True:
        time.sleep(9001)


if __name__ == "__main__":
    main()
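`read()` above delegates to `self.gw.download_file_range(path, offset, ofs2)`, which is defined earlier in the script and not shown in this hunk. As a rough sketch of what such a call boils down to (assuming the `requests` library and a copyparty URL like the one in the usage example; the file path below is made up), the half-open range `[get1, get2)` becomes an inclusive HTTP `Range` header:

```python
# hypothetical sketch only; the real gateway class is not part of this excerpt
import requests

def download_file_range(base_url, path, get1, get2):
    # half-open [get1, get2) -> inclusive "bytes=lo-hi"
    hdrs = {"Range": "bytes={}-{}".format(get1, get2 - 1)}
    r = requests.get("{}/{}".format(base_url.rstrip("/"), path), headers=hdrs)
    r.raise_for_status()
    if r.status_code == 200:
        # server ignored the Range header and sent the whole file; trim it
        return r.content[get1:get2]
    return r.content  # 206 Partial Content

# buf = download_file_range("http://192.168.1.69:3923", "music/song.flac", 1000, 9000)
```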
@@ -118,7 +118,7 @@ printf ']}' >> /dev/shm/$salt.hs

printf '\033[36m'

#curl "http://$target:1234$posturl/handshake.php" -H "Content-Type: text/plain;charset=UTF-8" -H "Cookie: cppwd=$passwd" --data "$(cat "/dev/shm/$salt.hs")" | tee /dev/shm/$salt.res
#curl "http://$target:3923$posturl/handshake.php" -H "Content-Type: text/plain;charset=UTF-8" -H "Cookie: cppwd=$passwd" --data "$(cat "/dev/shm/$salt.hs")" | tee /dev/shm/$salt.res

{
{
@@ -135,7 +135,7 @@ EOF
cat /dev/shm/$salt.hs
} |
tee /dev/shm/$salt.hsb |
ncat $target 1234 |
ncat $target 3923 |
tee /dev/shm/$salt.hs1r

wark="$(cat /dev/shm/$salt.hs1r | getwark)"
@@ -190,7 +190,7 @@ EOF
nchunk=$((nchunk+1))

done |
ncat $target 1234 |
ncat $target 3923 |
tee /dev/shm/$salt.pr

t=$(date +%s.%N)
@@ -201,7 +201,7 @@ t=$(date +%s.%N)

printf '\033[36m'

ncat $target 1234 < /dev/shm/$salt.hsb |
ncat $target 3923 < /dev/shm/$salt.hsb |
tee /dev/shm/$salt.hs2r |
grep -E '"hash": ?\[ *\]'
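The commented-out curl lines in the hunk above spell out the handshake request: a `text/plain` POST to `handshake.php` with the account password in a `cppwd` cookie. The same request with Python's `requests`, as a rough equivalent (the payload is whatever the script assembled in `/dev/shm/$salt.hs`):

```python
# rough requests equivalent of the curl handshake shown above
import requests

def handshake(target, posturl, passwd, payload):
    url = "http://{}:3923{}/handshake.php".format(target, posturl)
    r = requests.post(
        url,
        data=payload,
        headers={"Content-Type": "text/plain;charset=UTF-8"},
        cookies={"cppwd": passwd},
    )
    r.raise_for_status()
    return r.text  # json; the script above greps its "hash" list for remaining chunks
```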
19  contrib/README.md  Normal file
@@ -0,0 +1,19 @@
### [`copyparty.bat`](copyparty.bat)
* launches copyparty with no arguments (anon read+write within same folder)
* intended for windows machines with no python.exe in PATH
* works on windows, linux and macos
* assumes `copyparty-sfx.py` was renamed to `copyparty.py` in the same folder as `copyparty.bat`

### [`index.html`](index.html)
* drop-in redirect from an httpd to copyparty
* assumes the webserver and copyparty is running on the same server/IP
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript

# OS integration
init-scripts to start copyparty as a service
* [`systemd/copyparty.service`](systemd/copyparty.service)
* [`openrc/copyparty`](openrc/copyparty)

# Reverse-proxy
copyparty has basic support for running behind another webserver
* [`nginx/copyparty.conf`](nginx/copyparty.conf)
33  contrib/copyparty.bat  Normal file
@@ -0,0 +1,33 @@
exec python "$(dirname "$0")"/copyparty.py

@rem on linux, the above will execute and the script will terminate
@rem on windows, the rest of this script will run

@echo off
cls

set py=
for /f %%i in ('where python 2^>nul') do (
    set "py=%%i"
    goto c1
)
:c1

if [%py%] == [] (
    for /f %%i in ('where /r "%localappdata%\programs\python" python 2^>nul') do (
        set "py=%%i"
        goto c2
    )
)
:c2

if [%py%] == [] set "py=c:\python27\python.exe"

if not exist "%py%" (
    echo could not find python
    echo(
    pause
    exit /b
)

start cmd /c %py% "%~dp0\copyparty.py"
43  contrib/index.html  Normal file
@@ -0,0 +1,43 @@
<!DOCTYPE html>
<html lang="en">

<head>
    <meta charset="utf-8">
    <title>⇆🎉 redirect</title>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <style>

html, body {
    font-family: sans-serif;
}
body {
    padding: 1em 2em;
    font-size: 1.5em;
}
a {
    font-size: 1.2em;
    padding: .1em;
}

    </style>
</head>
<body>
    <span id="desc">you probably want</span> <a id="redir" href="//10.13.1.1:3923/">copyparty</a>
    <script>

var a = document.getElementById('redir'),
    proto = window.location.protocol.indexOf('https') === 0 ? 'https' : 'http',
    loc = window.location.hostname || '127.0.0.1',
    port = a.getAttribute('href').split(':').pop().split('/')[0],
    url = proto + '://' + loc + ':' + port + '/';

a.setAttribute('href', url);
document.getElementById('desc').innerHTML = 'redirecting to';

setTimeout(function() {
    window.location.href = url;
}, 500);

    </script>
</body>
</html>
26  contrib/nginx/copyparty.conf  Normal file
@@ -0,0 +1,26 @@
upstream cpp {
    server 127.0.0.1:3923;
    keepalive 120;
}
server {
    listen 443 ssl;
    listen [::]:443 ssl;

    server_name fs.example.com;

    location / {
        proxy_pass http://cpp;
        proxy_redirect off;
        # disable buffering (next 4 lines)
        proxy_http_version 1.1;
        client_max_body_size 0;
        proxy_buffering off;
        proxy_request_buffering off;

        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header Connection "Keep-Alive";
    }
}
18  contrib/openrc/copyparty  Normal file
@@ -0,0 +1,18 @@
#!/sbin/openrc-run

# this will start `/usr/local/bin/copyparty-sfx.py`
# and share '/mnt' with anonymous read+write
#
# installation:
#   cp -pv copyparty /etc/init.d && rc-update add copyparty
#
# you may want to:
#   change '/usr/bin/python' to another interpreter
#   change '/mnt::a' to another location or permission-set

name="$SVCNAME"
command_background=true
pidfile="/var/run/$SVCNAME.pid"

command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
command_args="-q -v /mnt::a"
19  contrib/systemd/copyparty.service  Normal file
@@ -0,0 +1,19 @@
# this will start `/usr/local/bin/copyparty-sfx.py`
# and share '/mnt' with anonymous read+write
#
# installation:
#   cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
#
# you may want to:
#   change '/usr/bin/python' to another interpreter
#   change '/mnt::a' to another location or permission-set

[Unit]
Description=copyparty file server

[Service]
ExecStart=/usr/bin/python /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'

[Install]
WantedBy=multi-user.target
@@ -5,11 +5,19 @@ import platform
|
||||
import sys
|
||||
import os
|
||||
|
||||
WINDOWS = platform.system() == "Windows"
|
||||
PY2 = sys.version_info[0] == 2
|
||||
if PY2:
|
||||
sys.dont_write_bytecode = True
|
||||
|
||||
WINDOWS = False
|
||||
if platform.system() == "Windows":
|
||||
WINDOWS = [int(x) for x in platform.version().split(".")]
|
||||
|
||||
VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393]
|
||||
# introduced in anniversary update
|
||||
|
||||
MACOS = platform.system() == "Darwin"
|
||||
|
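The `VT100` check above works because comparing two Python lists is element-wise, so a version string parsed with `int()` can be compared straight against `[10, 0, 14393]`. A quick standalone illustration (the version strings are made up):

```python
# list comparison is lexicographic, which is what the VT100 check relies on
v = [int(x) for x in "10.0.19041".split(".")]
assert v >= [10, 0, 14393]            # win10 anniversary update or later -> VT100 ok
assert [6, 3, 9600] < [10, 0, 14393]  # e.g. win 8.1 -> fall back to plain output
```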
||||
|
||||
class EnvParams(object):
|
||||
def __init__(self):
|
||||
@@ -24,6 +32,7 @@ class EnvParams(object):
|
||||
+ "/copyparty"
|
||||
)
|
||||
|
||||
self.cfg = self.cfg.replace("\\", "/")
|
||||
try:
|
||||
os.makedirs(self.cfg)
|
||||
except:
|
||||
|
||||
@@ -15,8 +15,8 @@ import locale
|
||||
import argparse
|
||||
from textwrap import dedent
|
||||
|
||||
from .__init__ import E, WINDOWS
|
||||
from .__version__ import S_VERSION, S_BUILD_DT
|
||||
from .__init__ import E, WINDOWS, VT100
|
||||
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
|
||||
from .svchub import SvcHub
|
||||
from .util import py_desc
|
||||
|
||||
@@ -28,7 +28,7 @@ class RiceFormatter(argparse.HelpFormatter):
|
||||
except the help += [...] line now has colors
|
||||
"""
|
||||
fmt = "\033[36m (default: \033[35m%(default)s\033[36m)\033[0m"
|
||||
if WINDOWS:
|
||||
if not VT100:
|
||||
fmt = " (default: %(default)s)"
|
||||
|
||||
help = action.help
|
||||
@@ -85,8 +85,13 @@ def ensure_cert():
|
||||
|
||||
|
||||
def main():
|
||||
f = "\033[36mcopyparty v{} ({})\n python v{}\033[0m\n"
|
||||
print(f.format(S_VERSION, S_BUILD_DT, py_desc()))
|
||||
if WINDOWS:
|
||||
os.system("") # enables colors
|
||||
|
||||
desc = py_desc().replace("[", "\033[1;30m[")
|
||||
|
||||
f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0m\n'
|
||||
print(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc))
|
||||
|
||||
ensure_locale()
|
||||
ensure_cert()
|
||||
@@ -118,17 +123,20 @@ def main():
|
||||
"""
|
||||
),
|
||||
)
|
||||
ap.add_argument(
|
||||
"-c", metavar="PATH", type=str, action="append", help="add config file"
|
||||
)
|
||||
ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
|
||||
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind")
|
||||
ap.add_argument("-p", metavar="PORT", type=int, default=1234, help="port to bind")
|
||||
ap.add_argument("-nc", metavar="NUM", type=int, default=16, help="max num clients")
|
||||
ap.add_argument("-j", metavar="CORES", type=int, help="max num cpu cores")
|
||||
ap.add_argument("-p", metavar="PORT", type=int, default=3923, help="port to bind")
|
||||
ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
|
||||
ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
|
||||
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
|
||||
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
|
||||
ap.add_argument("-q", action="store_true", help="quiet")
|
||||
ap.add_argument("-nw", action="store_true", help="benchmark: disable writing")
|
||||
ap.add_argument("-ed", action="store_true", help="enable ?dots")
|
||||
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
|
||||
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
|
||||
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
|
||||
ap.add_argument("-nih", action="store_true", help="no info hostname")
|
||||
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
|
||||
al = ap.parse_args()
|
||||
|
||||
SvcHub(al).run()
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
# coding: utf-8
|
||||
|
||||
VERSION = (0, 2, 3)
|
||||
BUILD_DT = (2020, 1, 19)
|
||||
VERSION = (0, 6, 0)
|
||||
CODENAME = "CHRISTMAAAAAS"
|
||||
BUILD_DT = (2020, 12, 1)
|
||||
|
||||
S_VERSION = ".".join(map(str, VERSION))
|
||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
|
||||
@@ -97,24 +97,18 @@ class VFS(object):
|
||||
|
||||
def ls(self, rem, uname):
|
||||
"""return user-readable [fsdir,real,virt] items at vpath"""
|
||||
virt_vis = {} # nodes readable by user
|
||||
abspath = self.canonical(rem)
|
||||
real = [fsdec(x) for x in os.listdir(fsenc(abspath))]
|
||||
items = os.listdir(fsenc(abspath))
|
||||
real = [fsdec(x) for x in items]
|
||||
real.sort()
|
||||
if rem:
|
||||
virt_vis = []
|
||||
else:
|
||||
virt_all = [] # all nodes that exist
|
||||
virt_vis = [] # nodes readable by user
|
||||
if not rem:
|
||||
for name, vn2 in sorted(self.nodes.items()):
|
||||
virt_all.append(name)
|
||||
if uname in vn2.uread:
|
||||
virt_vis.append(name)
|
||||
virt_vis[name] = vn2
|
||||
|
||||
for name in virt_all:
|
||||
try:
|
||||
real.remove(name)
|
||||
except ValueError:
|
||||
pass
|
||||
# no vfs nodes in the list of real inodes
|
||||
real = [x for x in real if x not in self.nodes]
|
||||
|
||||
return [abspath, real, virt_vis]
|
||||
|
||||
@@ -141,9 +135,9 @@ class AuthSrv(object):
|
||||
self.warn_anonwrite = True
|
||||
|
||||
if WINDOWS:
|
||||
self.re_vol = re.compile(r'^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)')
|
||||
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
|
||||
else:
|
||||
self.re_vol = re.compile(r'^([^:]*):([^:]*):(.*)')
|
||||
self.re_vol = re.compile(r"^([^:]*):([^:]*):(.*)$")
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.reload()
|
||||
@@ -226,12 +220,13 @@ class AuthSrv(object):
|
||||
if self.args.v:
|
||||
# list of src:dst:permset:permset:...
|
||||
# permset is [rwa]username
|
||||
for vol_match in [self.re_vol.match(x) for x in self.args.v]:
|
||||
try:
|
||||
src, dst, perms = vol_match.groups()
|
||||
except:
|
||||
raise Exception('invalid -v argument')
|
||||
for v_str in self.args.v:
|
||||
m = self.re_vol.match(v_str)
|
||||
if not m:
|
||||
raise Exception("invalid -v argument: [{}]".format(v_str))
|
||||
|
||||
src, dst, perms = m.groups()
|
||||
# print("\n".join([src, dst, perms]))
|
||||
src = fsdec(os.path.abspath(fsenc(src)))
|
||||
dst = dst.strip("/")
|
||||
mount[dst] = src
|
||||
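The stricter `re_vol` pattern and the loop above are what split each `-v` argument into `src:dst:permset` (a permset being `[rwa]username`, per the comment). A small standalone check of the non-Windows pattern; the paths and username here are made up for the example:

```python
import re

re_vol = re.compile(r"^([^:]*):([^:]*):(.*)$")   # the non-windows pattern above

m = re_vol.match("srv/music:music:rkevin")       # hypothetical -v value
assert m and m.groups() == ("srv/music", "music", "rkevin")

assert re_vol.match("srv/music") is None         # too few fields -> "invalid -v argument"
```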
@@ -274,13 +269,28 @@ class AuthSrv(object):
|
||||
v.uread = mread[dst]
|
||||
v.uwrite = mwrite[dst]
|
||||
|
||||
missing_users = {}
|
||||
for d in [mread, mwrite]:
|
||||
for _, ul in d.items():
|
||||
for usr in ul:
|
||||
if usr != "*" and usr not in user:
|
||||
missing_users[usr] = 1
|
||||
|
||||
if missing_users:
|
||||
self.log(
|
||||
"\033[31myou must -a the following users: "
|
||||
+ ", ".join(k for k in sorted(missing_users))
|
||||
+ "\033[0m"
|
||||
)
|
||||
raise Exception("invalid config")
|
||||
|
||||
try:
|
||||
vfs.get("/", "*", False, True)
|
||||
if self.warn_anonwrite:
|
||||
v, _ = vfs.get("/", "*", False, True)
|
||||
if self.warn_anonwrite and os.getcwd() == v.realpath:
|
||||
self.warn_anonwrite = False
|
||||
self.log(
|
||||
"\033[31manyone can read/write the current directory: {}\033[0m".format(
|
||||
os.getcwd()
|
||||
v.realpath
|
||||
)
|
||||
)
|
||||
except Pebkac:
|
||||
|
||||
@@ -4,7 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import time
|
||||
import threading
|
||||
|
||||
from .__init__ import PY2, WINDOWS
|
||||
from .__init__ import PY2, WINDOWS, VT100
|
||||
from .broker_util import try_exec
|
||||
from .broker_mpw import MpWorker
|
||||
from .util import mp
|
||||
@@ -29,7 +29,7 @@ class BrokerMp(object):
|
||||
self.mutex = threading.Lock()
|
||||
|
||||
cores = self.args.j
|
||||
if cores is None:
|
||||
if not cores:
|
||||
cores = mp.cpu_count()
|
||||
|
||||
self.log("broker", "booting {} subprocesses".format(cores))
|
||||
@@ -141,7 +141,7 @@ class BrokerMp(object):
|
||||
|
||||
def debug_load_balancer(self):
|
||||
fmt = "\033[1m{}\033[0;36m{:4}\033[0m "
|
||||
if WINDOWS:
|
||||
if not VT100:
|
||||
fmt = "({}{:4})"
|
||||
|
||||
last = ""
|
||||
|
||||
@@ -73,7 +73,7 @@ class MpWorker(object):
|
||||
if PY2:
|
||||
sck = pickle.loads(sck) # nosec
|
||||
|
||||
self.log(str(addr), "-" * 4 + "C-qpop")
|
||||
self.log("%s %s" % addr, "-" * 4 + "C-qpop")
|
||||
self.httpsrv.accept(sck, addr)
|
||||
|
||||
with self.mutex:
|
||||
|
||||
@@ -28,7 +28,7 @@ class BrokerThr(object):
|
||||
def put(self, want_retval, dest, *args):
|
||||
if dest == "httpconn":
|
||||
sck, addr = args
|
||||
self.log(str(addr), "-" * 4 + "C-qpop")
|
||||
self.log("%s %s" % addr, "-" * 4 + "C-qpop")
|
||||
self.httpsrv.accept(sck, addr)
|
||||
|
||||
else:
|
||||
|
||||
@@ -4,13 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
import traceback
|
||||
|
||||
from .__init__ import PY2
|
||||
from .util import Pebkac
|
||||
|
||||
if not PY2:
|
||||
from queue import Queue
|
||||
else:
|
||||
from Queue import Queue # pylint: disable=import-error,no-name-in-module
|
||||
from .util import Pebkac, Queue
|
||||
|
||||
|
||||
class ExceptionalQueue(Queue, object):
|
||||
|
||||
@@ -6,18 +6,16 @@ import stat
|
||||
import gzip
|
||||
import time
|
||||
import json
|
||||
import socket
|
||||
import ctypes
|
||||
from datetime import datetime
|
||||
import calendar
|
||||
import mimetypes
|
||||
|
||||
from .__init__ import E, PY2
|
||||
from .__init__ import E, PY2, WINDOWS
|
||||
from .util import * # noqa # pylint: disable=unused-wildcard-import
|
||||
|
||||
if not PY2:
|
||||
unicode = str
|
||||
from html import escape as html_escape
|
||||
else:
|
||||
from cgi import escape as html_escape
|
||||
|
||||
|
||||
class HttpCli(object):
|
||||
@@ -26,6 +24,7 @@ class HttpCli(object):
|
||||
"""
|
||||
|
||||
def __init__(self, conn):
|
||||
self.t0 = time.time()
|
||||
self.conn = conn
|
||||
self.s = conn.s
|
||||
self.sr = conn.sr
|
||||
@@ -37,11 +36,19 @@ class HttpCli(object):
|
||||
|
||||
self.bufsz = 1024 * 32
|
||||
self.absolute_urls = False
|
||||
self.out_headers = {}
|
||||
self.out_headers = {"Access-Control-Allow-Origin": "*"}
|
||||
|
||||
def log(self, msg):
|
||||
self.log_func(self.log_src, msg)
|
||||
|
||||
def _check_nonfatal(self, ex):
|
||||
return ex.code in [404]
|
||||
|
||||
def _assert_safe_rem(self, rem):
|
||||
# sanity check to prevent any disasters
|
||||
if rem.startswith("/") or rem.startswith("../") or "/../" in rem:
|
||||
raise Exception("that was close")
|
||||
|
||||
def run(self):
|
||||
"""returns true if connection can be reused"""
|
||||
self.keepalive = False
|
||||
@@ -62,9 +69,13 @@ class HttpCli(object):
|
||||
raise Pebkac(400, "bad headers:\n" + "\n".join(headerlines))
|
||||
|
||||
except Pebkac as ex:
|
||||
# self.log("pebkac at httpcli.run #1: " + repr(ex))
|
||||
self.keepalive = self._check_nonfatal(ex)
|
||||
self.loud_reply(str(ex), status=ex.code)
|
||||
return False
|
||||
return self.keepalive
|
||||
|
||||
# normalize incoming headers to lowercase;
|
||||
# outgoing headers however are Correct-Case
|
||||
for header_line in headerlines[1:]:
|
||||
k, v = header_line.split(":", 1)
|
||||
self.headers[k.lower()] = v.strip()
|
||||
@@ -72,11 +83,15 @@ class HttpCli(object):
|
||||
v = self.headers.get("connection", "").lower()
|
||||
self.keepalive = not v.startswith("close")
|
||||
|
||||
v = self.headers.get("x-forwarded-for", None)
|
||||
if v is not None and self.conn.addr[0] in ["127.0.0.1", "::1"]:
|
||||
self.log_src = self.conn.set_rproxy(v.split(",")[0])
|
||||
|
||||
self.uname = "*"
|
||||
if "cookie" in self.headers:
|
||||
cookies = self.headers["cookie"].split(";")
|
||||
for k, v in [x.split("=", 1) for x in cookies]:
|
||||
if k != "cppwd":
|
||||
if k.strip() != "cppwd":
|
||||
continue
|
||||
|
||||
v = unescape_cookie(v)
|
||||
@@ -112,45 +127,76 @@ class HttpCli(object):
|
||||
self.uparam = uparam
|
||||
self.vpath = unquotep(vpath)
|
||||
|
||||
ua = self.headers.get("user-agent", "")
|
||||
if ua.startswith("rclone/"):
|
||||
uparam["raw"] = True
|
||||
uparam["dots"] = True
|
||||
|
||||
try:
|
||||
if self.mode in ["GET", "HEAD"]:
|
||||
return self.handle_get() and self.keepalive
|
||||
elif self.mode == "POST":
|
||||
return self.handle_post() and self.keepalive
|
||||
elif self.mode == "PUT":
|
||||
return self.handle_put() and self.keepalive
|
||||
elif self.mode == "OPTIONS":
|
||||
return self.handle_options() and self.keepalive
|
||||
else:
|
||||
raise Pebkac(400, 'invalid HTTP mode "{0}"'.format(self.mode))
|
||||
|
||||
except Pebkac as ex:
|
||||
try:
|
||||
self.loud_reply(str(ex), status=ex.code)
|
||||
# self.log("pebkac at httpcli.run #2: " + repr(ex))
|
||||
self.keepalive = self._check_nonfatal(ex)
|
||||
self.loud_reply("{}: {}".format(str(ex), self.vpath), status=ex.code)
|
||||
return self.keepalive
|
||||
except Pebkac:
|
||||
pass
|
||||
return False
|
||||
|
||||
return False
|
||||
def send_headers(self, length, status=200, mime=None, headers={}):
|
||||
response = ["HTTP/1.1 {} {}".format(status, HTTPCODE[status])]
|
||||
|
||||
if length is not None:
|
||||
response.append("Content-Length: " + str(length))
|
||||
|
||||
# close if unknown length, otherwise take client's preference
|
||||
response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close"))
|
||||
|
||||
# headers{} overrides anything set previously
|
||||
self.out_headers.update(headers)
|
||||
|
||||
# default to utf8 html if no content-type is set
|
||||
try:
|
||||
mime = mime or self.out_headers["Content-Type"]
|
||||
except KeyError:
|
||||
mime = "text/html; charset=UTF-8"
|
||||
|
||||
self.out_headers["Content-Type"] = mime
|
||||
|
||||
def reply(self, body, status=200, mime="text/html", headers=[]):
|
||||
# TODO something to reply with user-supplied values safely
|
||||
response = [
|
||||
"HTTP/1.1 {} {}".format(status, HTTPCODE[status]),
|
||||
"Content-Type: " + mime,
|
||||
"Content-Length: " + str(len(body)),
|
||||
"Connection: " + ("Keep-Alive" if self.keepalive else "Close"),
|
||||
]
|
||||
for k, v in self.out_headers.items():
|
||||
response.append("{}: {}".format(k, v))
|
||||
|
||||
response.extend(headers)
|
||||
response_str = "\r\n".join(response).encode("utf-8")
|
||||
try:
|
||||
self.s.sendall(response_str + b"\r\n\r\n" + body)
|
||||
# best practice to separate headers and body into different packets
|
||||
self.s.sendall("\r\n".join(response).encode("utf-8") + b"\r\n\r\n")
|
||||
except:
|
||||
raise Pebkac(400, "client disconnected before http response")
|
||||
raise Pebkac(400, "client d/c while replying headers")
|
||||
|
||||
def reply(self, body, status=200, mime=None, headers={}):
|
||||
# TODO something to reply with user-supplied values safely
|
||||
self.send_headers(len(body), status, mime, headers)
|
||||
|
||||
try:
|
||||
if self.mode != "HEAD":
|
||||
self.s.sendall(body)
|
||||
except:
|
||||
raise Pebkac(400, "client d/c while replying body")
|
||||
|
||||
return body
|
||||
|
||||
def loud_reply(self, body, *args, **kwargs):
|
||||
self.log(body.rstrip())
|
||||
self.reply(b"<pre>" + body.encode("utf-8"), *list(args), **kwargs)
|
||||
self.reply(b"<pre>" + body.encode("utf-8") + b"\r\n", *list(args), **kwargs)
|
||||
|
||||
def handle_get(self):
|
||||
logmsg = "{:4} {}".format(self.mode, self.req)
|
||||
@@ -195,17 +241,47 @@ class HttpCli(object):
|
||||
return self.tx_mounts()
|
||||
|
||||
return self.tx_browser()
|
||||
|
||||
|
||||
def handle_options(self):
|
||||
self.log("OPTIONS " + self.req)
|
||||
self.send_headers(
|
||||
None,
|
||||
204,
|
||||
headers={
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Access-Control-Allow-Methods": "*",
|
||||
"Access-Control-Allow-Headers": "*",
|
||||
},
|
||||
)
|
||||
return True
|
||||
|
||||
def handle_put(self):
|
||||
self.log("PUT " + self.req)
|
||||
|
||||
if self.headers.get("expect", "").lower() == "100-continue":
|
||||
try:
|
||||
self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n")
|
||||
except:
|
||||
raise Pebkac(400, "client d/c before 100 continue")
|
||||
|
||||
return self.handle_stash()
|
||||
|
||||
def handle_post(self):
|
||||
self.log("POST " + self.req)
|
||||
|
||||
if self.headers.get("expect", "").lower() == "100-continue":
|
||||
self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n")
|
||||
try:
|
||||
self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n")
|
||||
except:
|
||||
raise Pebkac(400, "client d/c before 100 continue")
|
||||
|
||||
ctype = self.headers.get("content-type", "").lower()
|
||||
if not ctype:
|
||||
raise Pebkac(400, "you can't post without a content-type header")
|
||||
|
||||
if "raw" in self.uparam:
|
||||
return self.handle_stash()
|
||||
|
||||
if "multipart/form-data" in ctype:
|
||||
return self.handle_post_multipart()
|
||||
|
||||
@@ -218,17 +294,58 @@ class HttpCli(object):
|
||||
|
||||
raise Pebkac(405, "don't know how to handle {} POST".format(ctype))
|
||||
|
||||
def handle_stash(self):
|
||||
remains = int(self.headers.get("content-length", None))
|
||||
if remains is None:
|
||||
reader = read_socket_unbounded(self.sr)
|
||||
self.keepalive = False
|
||||
else:
|
||||
reader = read_socket(self.sr, remains)
|
||||
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
fdir = os.path.join(vfs.realpath, rem)
|
||||
|
||||
addr = self.conn.addr[0].replace(":", ".")
|
||||
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
|
||||
path = os.path.join(fdir, fn)
|
||||
|
||||
with open(path, "wb", 512 * 1024) as f:
|
||||
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
|
||||
|
||||
spd = self._spd(post_sz)
|
||||
self.log("{} wrote {}/{} bytes to {}".format(spd, post_sz, remains, path))
|
||||
self.reply("{}\n{}\n".format(post_sz, sha_b64).encode("utf-8"))
|
||||
return True
|
||||
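`handle_put` plus `handle_stash` above mean a plain HTTP PUT to any writable path gets streamed into a `put-<timestamp>-<ip>.bin` file, and the reply carries the byte count plus a checksum. A minimal client sketch with `requests`; the host, volume name and filename are assumptions for the example:

```python
# hedged example of the PUT / stash path described above
import requests

with open("backup.tar.gz", "rb") as f:
    r = requests.put("http://127.0.0.1:3923/incoming/", data=f)

r.raise_for_status()
size, checksum = r.text.split("\n")[:2]   # reply body: "<post_sz>\n<sha_b64>\n"
print(size, checksum)
```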
|
||||
def _spd(self, nbytes, add=True):
|
||||
if add:
|
||||
self.conn.nbyte += nbytes
|
||||
|
||||
spd1 = get_spd(nbytes, self.t0)
|
||||
spd2 = get_spd(self.conn.nbyte, self.conn.t0)
|
||||
return spd1 + " " + spd2
|
||||
|
||||
def handle_post_multipart(self):
|
||||
self.parser = MultipartParser(self.log, self.sr, self.headers)
|
||||
self.parser.parse()
|
||||
|
||||
act = self.parser.require("act", 64)
|
||||
|
||||
if act == "login":
|
||||
return self.handle_login()
|
||||
|
||||
if act == "mkdir":
|
||||
return self.handle_mkdir()
|
||||
|
||||
if act == "new_md":
|
||||
# kinda silly but has the least side effects
|
||||
return self.handle_new_md()
|
||||
|
||||
if act == "bput":
|
||||
return self.handle_plain_upload()
|
||||
|
||||
if act == "login":
|
||||
return self.handle_login()
|
||||
if act == "tput":
|
||||
return self.handle_text_upload()
|
||||
|
||||
raise Pebkac(422, 'invalid action "{}"'.format(act))
|
||||
|
||||
@@ -267,8 +384,9 @@ class HttpCli(object):
|
||||
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
|
||||
body["vdir"] = os.path.join(vfs.realpath, rem)
|
||||
body["addr"] = self.conn.addr[0]
|
||||
body["vdir"] = self.vpath
|
||||
body["rdir"] = os.path.join(vfs.realpath, rem)
|
||||
body["addr"] = self.addr[0]
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
|
||||
response = x.get()
|
||||
@@ -292,7 +410,7 @@ class HttpCli(object):
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", wark, chash)
|
||||
response = x.get()
|
||||
chunksize, cstart, path = response
|
||||
chunksize, cstart, path, lastmod = response
|
||||
|
||||
if self.args.nw:
|
||||
path = os.devnull
|
||||
@@ -336,9 +454,19 @@ class HttpCli(object):
|
||||
self.log("clone {} done".format(cstart[0]))
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", wark, chash)
|
||||
response = x.get()
|
||||
num_left = x.get()
|
||||
|
||||
self.loud_reply("thank")
|
||||
if not WINDOWS and num_left == 0:
|
||||
times = (int(time.time()), int(lastmod))
|
||||
self.log("no more chunks, setting times {}".format(times))
|
||||
try:
|
||||
os.utime(path, times)
|
||||
except:
|
||||
self.log("failed to utime ({}, {})".format(path, times))
|
||||
|
||||
spd = self._spd(post_sz)
|
||||
self.log("{} thank".format(spd))
|
||||
self.reply(b"thank")
|
||||
return True
|
||||
|
||||
def handle_login(self):
|
||||
@@ -351,19 +479,85 @@ class HttpCli(object):
|
||||
msg = "naw dude"
|
||||
pwd = "x" # nosec
|
||||
|
||||
h = ["Set-Cookie: cppwd={}; Path=/".format(pwd)]
|
||||
h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)}
|
||||
html = self.conn.tpl_msg.render(h1=msg, h2='<a href="/">ack</a>', redir="/")
|
||||
self.reply(html.encode("utf-8"), headers=h)
|
||||
return True
|
||||
|
||||
def handle_mkdir(self):
|
||||
new_dir = self.parser.require("name", 512)
|
||||
self.parser.drop()
|
||||
|
||||
nullwrite = self.args.nw
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
self._assert_safe_rem(rem)
|
||||
|
||||
sanitized = sanitize_fn(new_dir)
|
||||
|
||||
if not nullwrite:
|
||||
fdir = os.path.join(vfs.realpath, rem)
|
||||
fn = os.path.join(fdir, sanitized)
|
||||
|
||||
if not os.path.isdir(fsenc(fdir)):
|
||||
raise Pebkac(500, "parent folder does not exist")
|
||||
|
||||
if os.path.isdir(fsenc(fn)):
|
||||
raise Pebkac(500, "that folder exists already")
|
||||
|
||||
try:
|
||||
os.mkdir(fsenc(fn))
|
||||
except:
|
||||
raise Pebkac(500, "mkdir failed, check the logs")
|
||||
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}">go to /{}</a>'.format(
|
||||
quotep(vpath), html_escape(vpath)
|
||||
),
|
||||
pre="aight",
|
||||
click=True,
|
||||
)
|
||||
self.reply(html.encode("utf-8", "replace"))
|
||||
return True
|
||||
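Since `handle_post_multipart` above dispatches on the `act` field, creating a folder from a script is just a multipart POST with `act=mkdir` and a `name` field (plus the `cppwd` cookie when the volume is not anonymous-write). A sketch with `requests`; the host, path, folder name and password are assumptions:

```python
# hedged example of the multipart "act=mkdir" action above
import requests

r = requests.post(
    "http://127.0.0.1:3923/incoming/",
    files={"act": (None, "mkdir"), "name": (None, "new-album")},  # (None, value) sends plain form fields
    cookies={"cppwd": "hunter2"},
)
r.raise_for_status()  # on success the reply is a small html page linking to the new folder
```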
|
||||
def handle_new_md(self):
|
||||
new_file = self.parser.require("name", 512)
|
||||
self.parser.drop()
|
||||
|
||||
nullwrite = self.args.nw
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
self._assert_safe_rem(rem)
|
||||
|
||||
if not new_file.endswith(".md"):
|
||||
new_file += ".md"
|
||||
|
||||
sanitized = sanitize_fn(new_file)
|
||||
|
||||
if not nullwrite:
|
||||
fdir = os.path.join(vfs.realpath, rem)
|
||||
fn = os.path.join(fdir, sanitized)
|
||||
|
||||
if os.path.exists(fsenc(fn)):
|
||||
raise Pebkac(500, "that file exists already")
|
||||
|
||||
with open(fsenc(fn), "wb") as f:
|
||||
f.write(b"`GRUNNUR`\n")
|
||||
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}?edit">go to /{}?edit</a>'.format(
|
||||
quotep(vpath), html_escape(vpath)
|
||||
),
|
||||
pre="aight",
|
||||
click=True,
|
||||
)
|
||||
self.reply(html.encode("utf-8", "replace"))
|
||||
return True
|
||||
|
||||
def handle_plain_upload(self):
|
||||
nullwrite = self.args.nw
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
|
||||
# rem is escaped at this point,
|
||||
# this is just a sanity check to prevent any disasters
|
||||
if rem.startswith("/") or rem.startswith("../") or "/../" in rem:
|
||||
raise Exception("that was close")
|
||||
self._assert_safe_rem(rem)
|
||||
|
||||
files = []
|
||||
errmsg = ""
|
||||
@@ -382,10 +576,12 @@ class HttpCli(object):
|
||||
if not os.path.isdir(fsenc(fdir)):
|
||||
raise Pebkac(404, "that folder does not exist")
|
||||
|
||||
# TODO broker which avoid this race
|
||||
# and provides a new filename if taken
|
||||
# TODO broker which avoid this race and
|
||||
# provides a new filename if taken (same as up2k)
|
||||
if os.path.exists(fsenc(fn)):
|
||||
fn += ".{:.6f}".format(time.time())
|
||||
fn += ".{:.6f}-{}".format(time.time(), self.addr[0])
|
||||
# using current-time instead of t0 cause clients
|
||||
# may reuse a name for multiple files in one post
|
||||
|
||||
try:
|
||||
with open(fsenc(fn), "wb") as f:
|
||||
@@ -395,6 +591,7 @@ class HttpCli(object):
|
||||
raise Pebkac(400, "empty files in post")
|
||||
|
||||
files.append([sz, sha512_hex])
|
||||
self.conn.nbyte += sz
|
||||
|
||||
except Pebkac:
|
||||
if fn != os.devnull:
|
||||
@@ -405,7 +602,7 @@ class HttpCli(object):
|
||||
except Pebkac as ex:
|
||||
errmsg = str(ex)
|
||||
|
||||
td = time.time() - t0
|
||||
td = max(0.1, time.time() - t0)
|
||||
sz_total = sum(x[0] for x in files)
|
||||
spd = (sz_total / td) / (1024 * 1024)
|
||||
|
||||
@@ -422,7 +619,9 @@ class HttpCli(object):
|
||||
# truncated SHA-512 prevents length extension attacks;
|
||||
# using SHA-512/224, optionally SHA-512/256 = :64
|
||||
|
||||
self.log(msg)
|
||||
vspd = self._spd(sz_total, False)
|
||||
self.log("{} {}".format(vspd, msg))
|
||||
|
||||
if not nullwrite:
|
||||
# TODO this is bad
|
||||
log_fn = "up.{:.6f}.txt".format(t0)
|
||||
@@ -444,7 +643,7 @@ class HttpCli(object):
|
||||
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}">return to /{}</a>'.format(
|
||||
quotep(self.vpath), html_escape(self.vpath, quote=False)
|
||||
quotep(self.vpath), html_escape(self.vpath)
|
||||
),
|
||||
pre=msg,
|
||||
)
|
||||
@@ -452,32 +651,102 @@ class HttpCli(object):
|
||||
self.parser.drop()
|
||||
return True
|
||||
|
||||
def tx_file(self, req_path):
|
||||
do_send = True
|
||||
status = 200
|
||||
extra_headers = []
|
||||
logmsg = "{:4} {} ".format("", self.req)
|
||||
logtail = ""
|
||||
|
||||
#
|
||||
# if request is for foo.js, check if we have foo.js.gz
|
||||
|
||||
is_gzip = False
|
||||
fs_path = req_path
|
||||
def handle_text_upload(self):
|
||||
try:
|
||||
file_sz = os.path.getsize(fsenc(fs_path))
|
||||
cli_lastmod3 = int(self.parser.require("lastmod", 16))
|
||||
except:
|
||||
is_gzip = True
|
||||
fs_path += ".gz"
|
||||
try:
|
||||
file_sz = os.path.getsize(fsenc(fs_path))
|
||||
except:
|
||||
raise Pebkac(404)
|
||||
raise Pebkac(400, "could not read lastmod from request")
|
||||
|
||||
nullwrite = self.args.nw
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
self._assert_safe_rem(rem)
|
||||
|
||||
# TODO:
|
||||
# the per-volume read/write permissions must be replaced with permission flags
|
||||
# which would decide how to handle uploads to filenames which are taken,
|
||||
# current behavior of creating a new name is a good default for binary files
|
||||
# but should also offer a flag to takeover the filename and rename the old one
|
||||
#
|
||||
# if-modified
|
||||
# stopgap:
|
||||
if not rem.endswith(".md"):
|
||||
raise Pebkac(400, "only markdown pls")
|
||||
|
||||
file_ts = os.path.getmtime(fsenc(fs_path))
|
||||
if nullwrite:
|
||||
response = json.dumps({"ok": True, "lastmod": 0})
|
||||
self.log(response)
|
||||
# TODO reply should parser.drop()
|
||||
self.parser.drop()
|
||||
self.reply(response.encode("utf-8"))
|
||||
return True
|
||||
|
||||
fp = os.path.join(vfs.realpath, rem)
|
||||
srv_lastmod = -1
|
||||
try:
|
||||
st = os.stat(fsenc(fp))
|
||||
srv_lastmod = st.st_mtime
|
||||
srv_lastmod3 = int(srv_lastmod * 1000)
|
||||
except OSError as ex:
|
||||
if ex.errno != 2:
|
||||
raise
|
||||
|
||||
# if file exists, chekc that timestamp matches the client's
|
||||
if srv_lastmod >= 0:
|
||||
same_lastmod = cli_lastmod3 in [-1, srv_lastmod3]
|
||||
if not same_lastmod:
|
||||
# some filesystems/transports limit precision to 1sec, hopefully floored
|
||||
same_lastmod = (
|
||||
srv_lastmod == int(srv_lastmod)
|
||||
and cli_lastmod3 > srv_lastmod3
|
||||
and cli_lastmod3 - srv_lastmod3 < 1000
|
||||
)
|
||||
|
||||
if not same_lastmod:
|
||||
response = json.dumps(
|
||||
{
|
||||
"ok": False,
|
||||
"lastmod": srv_lastmod3,
|
||||
"now": int(time.time() * 1000),
|
||||
}
|
||||
)
|
||||
self.log(
|
||||
"{} - {} = {}".format(
|
||||
srv_lastmod3, cli_lastmod3, srv_lastmod3 - cli_lastmod3
|
||||
)
|
||||
)
|
||||
self.log(response)
|
||||
self.parser.drop()
|
||||
self.reply(response.encode("utf-8"))
|
||||
return True
|
||||
|
||||
# TODO another hack re: pending permissions rework
|
||||
mdir, mfile = os.path.split(fp)
|
||||
mfile2 = "{}.{:.3f}.md".format(mfile[:-3], srv_lastmod)
|
||||
try:
|
||||
os.mkdir(os.path.join(mdir, ".hist"))
|
||||
except:
|
||||
pass
|
||||
os.rename(fp, os.path.join(mdir, ".hist", mfile2))
|
||||
|
||||
p_field, _, p_data = next(self.parser.gen)
|
||||
if p_field != "body":
|
||||
raise Pebkac(400, "expected body, got {}".format(p_field))
|
||||
|
||||
with open(fp, "wb") as f:
|
||||
sz, sha512, _ = hashcopy(self.conn, p_data, f)
|
||||
|
||||
new_lastmod = os.stat(fsenc(fp)).st_mtime
|
||||
new_lastmod3 = int(new_lastmod * 1000)
|
||||
sha512 = sha512[:56]
|
||||
|
||||
response = json.dumps(
|
||||
{"ok": True, "lastmod": new_lastmod3, "size": sz, "sha512": sha512}
|
||||
)
|
||||
self.log(response)
|
||||
self.parser.drop()
|
||||
self.reply(response.encode("utf-8"))
|
||||
return True
|
||||
|
||||
def _chk_lastmod(self, file_ts):
|
||||
file_dt = datetime.utcfromtimestamp(file_ts)
|
||||
file_lastmod = file_dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
|
||||
|
||||
@@ -486,14 +755,85 @@ class HttpCli(object):
|
||||
try:
|
||||
cli_dt = time.strptime(cli_lastmod, "%a, %d %b %Y %H:%M:%S GMT")
|
||||
cli_ts = calendar.timegm(cli_dt)
|
||||
do_send = int(file_ts) > int(cli_ts)
|
||||
except:
|
||||
self.log("bad lastmod format: {}".format(cli_lastmod))
|
||||
do_send = file_lastmod != cli_lastmod
|
||||
return file_lastmod, int(file_ts) > int(cli_ts)
|
||||
except Exception as ex:
|
||||
self.log(
|
||||
"lastmod {}\nremote: [{}]\n local: [{}]".format(
|
||||
repr(ex), cli_lastmod, file_lastmod
|
||||
)
|
||||
)
|
||||
return file_lastmod, file_lastmod != cli_lastmod
|
||||
|
||||
return file_lastmod, True
|
||||
|
||||
def tx_file(self, req_path):
|
||||
status = 200
|
||||
logmsg = "{:4} {} ".format("", self.req)
|
||||
logtail = ""
|
||||
|
||||
#
|
||||
# if request is for foo.js, check if we have foo.js.{gz,br}
|
||||
|
||||
file_ts = 0
|
||||
editions = {}
|
||||
for ext in ["", ".gz", ".br"]:
|
||||
try:
|
||||
fs_path = req_path + ext
|
||||
st = os.stat(fsenc(fs_path))
|
||||
file_ts = max(file_ts, st.st_mtime)
|
||||
editions[ext or "plain"] = [fs_path, st.st_size]
|
||||
except:
|
||||
pass
|
||||
|
||||
if not editions:
|
||||
raise Pebkac(404)
|
||||
|
||||
#
|
||||
# if-modified
|
||||
|
||||
file_lastmod, do_send = self._chk_lastmod(file_ts)
|
||||
self.out_headers["Last-Modified"] = file_lastmod
|
||||
if not do_send:
|
||||
status = 304
|
||||
|
||||
#
|
||||
# Accept-Encoding and UA decides which edition to send
|
||||
|
||||
decompress = False
|
||||
supported_editions = [
|
||||
x.strip()
|
||||
for x in self.headers.get("accept-encoding", "").lower().split(",")
|
||||
]
|
||||
if ".br" in editions and "br" in supported_editions:
|
||||
is_compressed = True
|
||||
selected_edition = ".br"
|
||||
fs_path, file_sz = editions[".br"]
|
||||
self.out_headers["Content-Encoding"] = "br"
|
||||
elif ".gz" in editions:
|
||||
is_compressed = True
|
||||
selected_edition = ".gz"
|
||||
fs_path, file_sz = editions[".gz"]
|
||||
if "gzip" not in supported_editions:
|
||||
decompress = True
|
||||
else:
|
||||
ua = self.headers.get("user-agent", "")
|
||||
if re.match(r"MSIE [4-6]\.", ua) and " SV1" not in ua:
|
||||
decompress = True
|
||||
|
||||
if not decompress:
|
||||
self.out_headers["Content-Encoding"] = "gzip"
|
||||
else:
|
||||
is_compressed = False
|
||||
selected_edition = "plain"
|
||||
|
||||
try:
|
||||
fs_path, file_sz = editions[selected_edition]
|
||||
logmsg += "{} ".format(selected_edition.lstrip("."))
|
||||
except:
|
||||
# client is old and we only have .br
|
||||
# (could make brotli a dep to fix this but it's not worth)
|
||||
raise Pebkac(404)
|
||||
|
||||
#
|
||||
# partial
|
||||
|
||||
@@ -501,7 +841,8 @@ class HttpCli(object):
|
||||
upper = file_sz
|
||||
hrange = self.headers.get("range")
|
||||
|
||||
if do_send and not is_gzip and hrange:
|
||||
# let's not support 206 with compression
|
||||
if do_send and not is_compressed and hrange:
|
||||
try:
|
||||
a, b = hrange.split("=", 1)[1].split("-")
|
||||
|
||||
@@ -515,34 +856,28 @@ class HttpCli(object):
|
||||
else:
|
||||
upper = file_sz
|
||||
|
||||
if lower < 0 or lower >= file_sz or upper < 0 or upper > file_sz:
|
||||
if upper > file_sz:
|
||||
upper = file_sz
|
||||
|
||||
if lower < 0 or lower >= upper:
|
||||
raise Exception()
|
||||
|
||||
except:
|
||||
raise Pebkac(400, "invalid range requested: " + hrange)
|
||||
err = "invalid range ({}), size={}".format(hrange, file_sz)
|
||||
self.loud_reply(
|
||||
err,
|
||||
status=416,
|
||||
headers={"Content-Range": "bytes */{}".format(file_sz)},
|
||||
)
|
||||
return True
|
||||
|
||||
status = 206
|
||||
extra_headers.append(
|
||||
"Content-Range: bytes {}-{}/{}".format(lower, upper - 1, file_sz)
|
||||
self.out_headers["Content-Range"] = "bytes {}-{}/{}".format(
|
||||
lower, upper - 1, file_sz
|
||||
)
|
||||
|
||||
logtail += " [\033[36m{}-{}\033[0m]".format(lower, upper)
|
||||
|
||||
#
|
||||
# Accept-Encoding and UA decides if we can send gzip as-is
|
||||
|
||||
decompress = False
|
||||
if is_gzip:
|
||||
if "gzip" not in self.headers.get("accept-encoding", "").lower():
|
||||
decompress = True
|
||||
else:
|
||||
ua = self.headers.get("user-agent", "")
|
||||
if re.match(r"MSIE [4-6]\.", ua) and " SV1" not in ua:
|
||||
decompress = True
|
||||
|
||||
if not decompress:
|
||||
extra_headers.append("Content-Encoding: gzip")
|
||||
|
||||
if decompress:
|
||||
open_func = gzip.open
|
||||
open_args = [fsenc(fs_path), "rb"]
|
||||
@@ -556,27 +891,23 @@ class HttpCli(object):
|
||||
#
|
||||
# send reply
|
||||
|
||||
if not is_compressed:
|
||||
self.out_headers["Cache-Control"] = "no-cache"
|
||||
|
||||
self.out_headers["Accept-Ranges"] = "bytes"
|
||||
self.send_headers(
|
||||
length=upper - lower,
|
||||
status=status,
|
||||
mime=guess_mime(req_path)[0] or "application/octet-stream",
|
||||
)
|
||||
|
||||
logmsg += str(status) + logtail
|
||||
|
||||
mime = mimetypes.guess_type(req_path)[0] or "application/octet-stream"
|
||||
|
||||
headers = [
|
||||
"HTTP/1.1 {} {}".format(status, HTTPCODE[status]),
|
||||
"Content-Type: " + mime,
|
||||
"Content-Length: " + str(upper - lower),
|
||||
"Accept-Ranges: bytes",
|
||||
"Last-Modified: " + file_lastmod,
|
||||
"Connection: " + ("Keep-Alive" if self.keepalive else "Close"),
|
||||
]
|
||||
|
||||
headers.extend(extra_headers)
|
||||
headers = "\r\n".join(headers).encode("utf-8") + b"\r\n\r\n"
|
||||
self.s.sendall(headers)
|
||||
|
||||
if self.mode == "HEAD" or not do_send:
|
||||
self.log(logmsg)
|
||||
return True
|
||||
|
||||
ret = True
|
||||
with open_func(*open_args) as f:
|
||||
remains = upper - lower
|
||||
f.seek(lower)
|
||||
@@ -589,16 +920,74 @@ class HttpCli(object):
|
||||
if remains < len(buf):
|
||||
buf = buf[:remains]
|
||||
|
||||
remains -= len(buf)
|
||||
|
||||
try:
|
||||
self.s.sendall(buf)
|
||||
remains -= len(buf)
|
||||
except:
|
||||
logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
|
||||
self.log(logmsg)
|
||||
return False
|
||||
ret = False
|
||||
break
|
||||
|
||||
self.log(logmsg)
|
||||
spd = self._spd((upper - lower) - remains)
|
||||
self.log("{}, {}".format(logmsg, spd))
|
||||
return ret
|
||||
|
||||
def tx_md(self, fs_path):
|
||||
logmsg = "{:4} {} ".format("", self.req)
|
||||
if "edit2" in self.uparam:
|
||||
html_path = "web/mde.html"
|
||||
template = self.conn.tpl_mde
|
||||
else:
|
||||
html_path = "web/md.html"
|
||||
template = self.conn.tpl_md
|
||||
|
||||
html_path = os.path.join(E.mod, html_path)
|
||||
|
||||
st = os.stat(fsenc(fs_path))
|
||||
# sz_md = st.st_size
|
||||
ts_md = st.st_mtime
|
||||
|
||||
st = os.stat(fsenc(html_path))
|
||||
ts_html = st.st_mtime
|
||||
|
||||
# TODO dont load into memory ;_;
|
||||
# (trivial fix, count the &'s)
|
||||
with open(fsenc(fs_path), "rb") as f:
|
||||
md = f.read().replace(b"&", b"&")
|
||||
sz_md = len(md)
|
||||
|
||||
file_ts = max(ts_md, ts_html)
|
||||
file_lastmod, do_send = self._chk_lastmod(file_ts)
|
||||
self.out_headers["Last-Modified"] = file_lastmod
|
||||
self.out_headers["Cache-Control"] = "no-cache"
|
||||
status = 200 if do_send else 304
|
||||
|
||||
targs = {
|
||||
"edit": "edit" in self.uparam,
|
||||
"title": html_escape(self.vpath),
|
||||
"lastmod": int(ts_md * 1000),
|
||||
"md_plug": "true" if self.args.emp else "false",
|
||||
"md_chk_rate": self.args.mcr,
|
||||
"md": "",
|
||||
}
|
||||
sz_html = len(template.render(**targs).encode("utf-8"))
|
||||
self.send_headers(sz_html + sz_md, status)
|
||||
|
||||
logmsg += str(status)
|
||||
if self.mode == "HEAD" or not do_send:
|
||||
self.log(logmsg)
|
||||
return True
|
||||
|
||||
# TODO jinja2 can stream this right?
|
||||
targs["md"] = md.decode("utf-8", "replace")
|
||||
html = template.render(**targs).encode("utf-8")
|
||||
try:
|
||||
self.s.sendall(html)
|
||||
except:
|
||||
self.log(logmsg + " \033[31md/c\033[0m")
|
||||
return False
|
||||
|
||||
self.log(logmsg + " " + str(len(html)))
|
||||
return True
|
||||
|
||||
def tx_mounts(self):
|
||||
@@ -618,9 +1007,11 @@ class HttpCli(object):
|
||||
else:
|
||||
vpath += "/" + node
|
||||
|
||||
vpnodes.append([quotep(vpath) + "/", html_escape(node, quote=False)])
|
||||
vpnodes.append([quotep(vpath) + "/", html_escape(node)])
|
||||
|
||||
vn, rem = self.auth.vfs.get(self.vpath, self.uname, self.readable, self.writable)
|
||||
vn, rem = self.auth.vfs.get(
|
||||
self.vpath, self.uname, self.readable, self.writable
|
||||
)
|
||||
abspath = vn.canonical(rem)
|
||||
|
||||
if not os.path.exists(fsenc(abspath)):
|
||||
@@ -628,29 +1019,62 @@ class HttpCli(object):
|
||||
raise Pebkac(404)
|
||||
|
||||
if not os.path.isdir(fsenc(abspath)):
|
||||
if abspath.endswith(".md") and "raw" not in self.uparam:
|
||||
return self.tx_md(abspath)
|
||||
|
||||
return self.tx_file(abspath)
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname)
|
||||
vfs_ls.extend(vfs_virt)
|
||||
vfs_ls.extend(vfs_virt.keys())
|
||||
|
||||
# check for old versions of files,
|
||||
hist = {} # [num-backups, most-recent, hist-path]
|
||||
histdir = os.path.join(fsroot, ".hist")
|
||||
ptn = re.compile(r"(.*)\.([0-9]+\.[0-9]{3})(\.[^\.]+)$")
|
||||
try:
|
||||
for hfn in os.listdir(histdir):
|
||||
m = ptn.match(hfn)
|
||||
if not m:
|
||||
continue
|
||||
|
||||
fn = m.group(1) + m.group(3)
|
||||
n, ts, _ = hist.get(fn, [0, 0, ""])
|
||||
hist[fn] = [n + 1, max(ts, float(m.group(2))), hfn]
|
||||
except:
|
||||
pass
|
||||
|
||||
# show dotfiles if permitted and requested
|
||||
if not self.args.ed or "dots" not in self.uparam:
|
||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||
|
||||
dirs = []
|
||||
files = []
|
||||
for fn in exclude_dotfiles(vfs_ls):
|
||||
for fn in vfs_ls:
|
||||
base = ""
|
||||
href = fn
|
||||
if self.absolute_urls and vpath:
|
||||
href = "/" + vpath + "/" + fn
|
||||
base = "/" + vpath + "/"
|
||||
href = base + fn
|
||||
|
||||
if fn in vfs_virt:
|
||||
fspath = vfs_virt[fn].realpath
|
||||
else:
|
||||
fspath = fsroot + "/" + fn
|
||||
|
||||
fspath = fsroot + "/" + fn
|
||||
try:
|
||||
inf = os.stat(fsenc(fspath))
|
||||
except FileNotFoundError as ex:
|
||||
self.log("broken symlink: {}".format(fspath))
|
||||
except:
|
||||
self.log("broken symlink: {}".format(repr(fspath)))
|
||||
continue
|
||||
|
||||
is_dir = stat.S_ISDIR(inf.st_mode)
|
||||
if is_dir:
|
||||
margin = "DIR"
|
||||
href += "/"
|
||||
elif fn in hist:
|
||||
margin = '<a href="{}.hist/{}">#{}</a>'.format(
|
||||
base, html_escape(hist[fn][2], quote=True), hist[fn][0]
|
||||
)
|
||||
else:
|
||||
margin = "-"
|
||||
|
||||
@@ -658,7 +1082,12 @@ class HttpCli(object):
|
||||
dt = datetime.utcfromtimestamp(inf.st_mtime)
|
||||
dt = dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
item = [margin, quotep(href), html_escape(fn, quote=False), sz, dt]
|
||||
try:
|
||||
ext = "---" if is_dir else fn.rsplit(".", 1)[1]
|
||||
except:
|
||||
ext = "%"
|
||||
|
||||
item = [margin, quotep(href), html_escape(fn), sz, ext, dt]
|
||||
if is_dir:
|
||||
dirs.append(item)
|
||||
else:
|
||||
@@ -671,6 +1100,45 @@ class HttpCli(object):
|
||||
with open(fsenc(fn), "rb") as f:
|
||||
logues[n] = f.read().decode("utf-8")
|
||||
|
||||
if False:
|
||||
# this is a mistake
|
||||
md = None
|
||||
for fn in [x[2] for x in files]:
|
||||
if fn.lower() == "readme.md":
|
||||
fn = os.path.join(abspath, fn)
|
||||
with open(fn, "rb") as f:
|
||||
md = f.read().decode("utf-8")
|
||||
|
||||
break
|
||||
|
||||
srv_info = []
|
||||
|
||||
try:
|
||||
if not self.args.nih:
|
||||
srv_info.append(str(socket.gethostname()).split(".")[0])
|
||||
except:
|
||||
self.log("#wow #whoa")
|
||||
pass
|
||||
|
||||
try:
|
||||
# some fuses misbehave
|
||||
if not self.args.nid:
|
||||
if WINDOWS:
|
||||
bfree = ctypes.c_ulonglong(0)
|
||||
ctypes.windll.kernel32.GetDiskFreeSpaceExW(
|
||||
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
|
||||
)
|
||||
srv_info.append(humansize(bfree.value) + " free")
|
||||
else:
|
||||
sv = os.statvfs(abspath)
|
||||
free = humansize(sv.f_frsize * sv.f_bfree, True)
|
||||
total = humansize(sv.f_frsize * sv.f_blocks, True)
|
||||
|
||||
srv_info.append(free + " free")
|
||||
srv_info.append(total)
|
||||
except:
|
||||
pass
|
||||
|
||||
ts = ""
|
||||
# ts = "?{}".format(time.time())
|
||||
|
||||
@@ -684,7 +1152,8 @@ class HttpCli(object):
|
||||
ts=ts,
|
||||
prologue=logues[0],
|
||||
epilogue=logues[1],
|
||||
title=html_escape(self.vpath),
|
||||
srv_info="</span> /// <span>".join(srv_info),
|
||||
)
|
||||
self.reply(html.encode("utf-8", "replace"))
|
||||
return True
|
||||
|
||||
|
||||
@@ -2,9 +2,25 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
import ssl
|
||||
import time
|
||||
import socket
|
||||
import jinja2
|
||||
|
||||
try:
|
||||
import jinja2
|
||||
except ImportError:
|
||||
print(
|
||||
"""\033[1;31m
|
||||
you do not have jinja2 installed,\033[33m
|
||||
choose one of these:\033[0m
|
||||
* apt install python-jinja2
|
||||
* python3 -m pip install --user jinja2
|
||||
* (try another python version, if you have one)
|
||||
* (try copyparty.sfx instead)
|
||||
"""
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
from .__init__ import E
|
||||
from .util import Unrecv
|
||||
@@ -26,15 +42,31 @@ class HttpConn(object):
|
||||
self.auth = hsrv.auth
|
||||
self.cert_path = hsrv.cert_path
|
||||
|
||||
self.t0 = time.time()
|
||||
self.nbyte = 0
|
||||
self.workload = 0
|
||||
self.log_func = hsrv.log
|
||||
self.log_src = "{} \033[36m{}".format(addr[0], addr[1]).ljust(26)
|
||||
self.set_rproxy()
|
||||
|
||||
env = jinja2.Environment()
|
||||
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
|
||||
self.tpl_mounts = env.get_template("splash.html")
|
||||
self.tpl_browser = env.get_template("browser.html")
|
||||
self.tpl_msg = env.get_template("msg.html")
|
||||
self.tpl_md = env.get_template("md.html")
|
||||
self.tpl_mde = env.get_template("mde.html")
|
||||
|
||||
def set_rproxy(self, ip=None):
|
||||
if ip is None:
|
||||
color = 36
|
||||
ip = self.addr[0]
|
||||
self.rproxy = None
|
||||
else:
|
||||
color = 34
|
||||
self.rproxy = ip
|
||||
|
||||
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
|
||||
return self.log_src
|
||||
|
||||
def respath(self, res_name):
|
||||
return os.path.join(E.mod, "web", res_name)
|
||||
@@ -48,6 +80,8 @@ class HttpConn(object):
|
||||
if self.cert_path:
|
||||
try:
|
||||
method = self.s.recv(4, socket.MSG_PEEK)
|
||||
except socket.timeout:
|
||||
return
|
||||
except AttributeError:
|
||||
# jython does not support msg_peek; forget about https
|
||||
method = self.s.recv(4)
|
||||
@@ -67,7 +101,7 @@ class HttpConn(object):
|
||||
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
||||
return
|
||||
|
||||
if method not in [None, b"GET ", b"HEAD", b"POST"]:
|
||||
if method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]:
|
||||
if self.sr:
|
||||
self.log("\033[1;31mTODO: cannot do https in jython\033[0m")
|
||||
return
|
||||
|
||||
@@ -6,7 +6,7 @@ import time
|
||||
import socket
|
||||
import threading
|
||||
|
||||
from .__init__ import E
|
||||
from .__init__ import E, MACOS
|
||||
from .httpconn import HttpConn
|
||||
from .authsrv import AuthSrv
|
||||
|
||||
@@ -38,7 +38,7 @@ class HttpSrv(object):
|
||||
|
||||
def accept(self, sck, addr):
|
||||
"""takes an incoming tcp connection and creates a thread to handle it"""
|
||||
self.log(str(addr), "-" * 5 + "C-cthr")
|
||||
self.log("%s %s" % addr, "-" * 5 + "C-cthr")
|
||||
thr = threading.Thread(target=self.thr_client, args=(sck, addr))
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
@@ -66,19 +66,26 @@ class HttpSrv(object):
|
||||
thr.start()
|
||||
|
||||
try:
|
||||
self.log(str(addr), "-" * 6 + "C-crun")
|
||||
self.log("%s %s" % addr, "-" * 6 + "C-crun")
|
||||
cli.run()
|
||||
|
||||
finally:
|
||||
self.log(str(addr), "-" * 7 + "C-done")
|
||||
self.log("%s %s" % addr, "-" * 7 + "C-done")
|
||||
try:
|
||||
sck.shutdown(socket.SHUT_RDWR)
|
||||
sck.close()
|
||||
except (OSError, socket.error) as ex:
|
||||
if ex.errno not in [107, 57, 9]:
|
||||
# 107 Transport endpoint not connected
|
||||
# 57 Socket is not connected
|
||||
# 9 Bad file descriptor
|
||||
if not MACOS:
|
||||
self.log(
|
||||
"%s %s" % addr,
|
||||
"shut_rdwr err:\n {}\n {}".format(repr(sck), ex),
|
||||
)
|
||||
if ex.errno not in [10038, 10054, 107, 57, 9]:
|
||||
# 10038 No longer considered a socket
|
||||
# 10054 Foribly closed by remote
|
||||
# 107 Transport endpoint not connected
|
||||
# 57 Socket is not connected
|
||||
# 9 Bad file descriptor
|
||||
raise
|
||||
finally:
|
||||
with self.mutex:
|
||||
|
||||
@@ -10,11 +10,12 @@ Original source: misc/python/surrogateescape.py in https://bitbucket.org/haypo/m
|
||||
|
||||
# This code is released under the Python license and the BSD 2-clause license
|
||||
|
||||
import platform
|
||||
import codecs
|
||||
import sys
|
||||
|
||||
PY3 = sys.version_info[0] > 2
|
||||
|
||||
WINDOWS = platform.system() == "Windows"
|
||||
FS_ERRORS = "surrogateescape"
|
||||
|
||||
|
||||
@@ -168,6 +169,11 @@ FS_ENCODING = sys.getfilesystemencoding()
|
||||
# FS_ENCODING = 'UTF-8'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]')
|
||||
|
||||
|
||||
if WINDOWS and not PY3:
|
||||
# py2 thinks win* is mbcs, probably a bug? anyways this works
|
||||
FS_ENCODING = 'utf-8'
|
||||
|
||||
|
||||
# normalize the filesystem encoding name.
|
||||
# For example, we expect "utf-8", not "UTF8".
|
||||
FS_ENCODING = codecs.lookup(FS_ENCODING).name
|
||||
|
||||
@@ -8,7 +8,7 @@ import threading
|
||||
from datetime import datetime, timedelta
|
||||
import calendar
|
||||
|
||||
from .__init__ import PY2, WINDOWS
|
||||
from .__init__ import PY2, WINDOWS, MACOS, VT100
|
||||
from .tcpsrv import TcpSrv
|
||||
from .up2k import Up2k
|
||||
from .util import mp
|
||||
@@ -84,22 +84,23 @@ class SvcHub(object):
|
||||
dt = dt.replace(hour=0, minute=0, second=0)
|
||||
self.next_day = calendar.timegm(dt.utctimetuple())
|
||||
|
||||
ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3]
|
||||
|
||||
if not WINDOWS:
|
||||
fmt = "\033[36m{} \033[33m{:21} \033[0m{}"
|
||||
else:
|
||||
fmt = "\033[36m{} \033[33m{:21} \033[0m{}"
|
||||
if not VT100:
|
||||
fmt = "{} {:21} {}"
|
||||
if "\033" in msg:
|
||||
msg = self.ansi_re.sub("", msg)
|
||||
if "\033" in src:
|
||||
src = self.ansi_re.sub("", src)
|
||||
|
||||
ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3]
|
||||
msg = fmt.format(ts, src, msg)
|
||||
try:
|
||||
print(msg)
|
||||
except UnicodeEncodeError:
|
||||
print(msg.encode("utf-8", "replace").decode())
|
||||
try:
|
||||
print(msg.encode("utf-8", "replace").decode())
|
||||
except:
|
||||
print(msg.encode("ascii", "replace").decode())
|
||||
|
||||
def check_mp_support(self):
|
||||
vmin = sys.version_info[1]
|
||||
@@ -110,6 +111,8 @@ class SvcHub(object):
|
||||
return msg
|
||||
elif vmin < 3:
|
||||
return msg
|
||||
elif MACOS:
|
||||
return "multiprocessing is wonky on mac osx;"
|
||||
else:
|
||||
msg = "need python 2.7 or 3.3+ for multiprocessing;"
|
||||
if not PY2 and vmin < 3:
|
||||
@@ -126,8 +129,11 @@ class SvcHub(object):
|
||||
return None
|
||||
|
||||
def check_mp_enable(self):
|
||||
if self.args.j == 0:
|
||||
self.log("root", "multiprocessing disabled by argument -j 0;")
|
||||
if self.args.j == 1:
|
||||
self.log("root", "multiprocessing disabled by argument -j 1;")
|
||||
return False
|
||||
|
||||
if mp.cpu_count() <= 1:
|
||||
return False
|
||||
|
||||
try:
|
||||
|
||||
@@ -24,7 +24,7 @@ class TcpSrv(object):
|
||||
ip = "127.0.0.1"
|
||||
eps = {ip: "local only"}
|
||||
if self.args.i != ip:
|
||||
eps = self.detect_interfaces(self.args.i) or eps
|
||||
eps = self.detect_interfaces(self.args.i) or {self.args.i: "external"}
|
||||
|
||||
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
|
||||
self.log(
|
||||
@@ -64,14 +64,14 @@ class TcpSrv(object):
|
||||
|
||||
self.log("tcpsrv", "-" * 2 + "C-acc1")
|
||||
sck, addr = self.srv.accept()
|
||||
self.log(str(addr), "-" * 3 + "C-acc2")
|
||||
self.log("%s %s" % addr, "-" * 3 + "C-acc2")
|
||||
self.num_clients.add()
|
||||
self.hub.broker.put(False, "httpconn", sck, addr)
|
||||
|
||||
def shutdown(self):
|
||||
self.log("tcpsrv", "ok bye")
|
||||
|
||||
def detect_interfaces(self, ext_ip):
|
||||
def detect_interfaces(self, listen_ip):
|
||||
eps = {}
|
||||
|
||||
# get all ips and their interfaces
|
||||
@@ -85,29 +85,39 @@ class TcpSrv(object):
|
||||
for ln in ip_addr.split("\n"):
|
||||
try:
|
||||
ip, dev = r.match(ln.rstrip()).groups()
|
||||
if ext_ip in ["0.0.0.0", ip]:
|
||||
if listen_ip in ["0.0.0.0", ip]:
|
||||
eps[ip] = dev
|
||||
except:
|
||||
pass
|
||||
|
||||
# get ip with default route
default_route = None
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
s.connect(("10.255.255.255", 1))
ip = s.getsockname()[0]
except (OSError, socket.error) as ex:
if ex.errno not in [101, 10065]:
raise

return None
for ip in [
"10.255.255.255",
"172.31.255.255",
"192.168.255.255",
"239.255.255.255",
# could add 1.1.1.1 as a final fallback
# but external connections is kinshi
]:
try:
s.connect((ip, 1))
# raise OSError(13, "a")
default_route = s.getsockname()[0]
break
except (OSError, socket.error) as ex:
if ex.errno == 13:
self.log("tcpsrv", "eaccess {} (trying next)".format(ip))
elif ex.errno not in [101, 10065, 10051]:
self.log("tcpsrv", "route lookup failed; err {}".format(ex.errno))

s.close()

if ext_ip in ["0.0.0.0", ip]:
if default_route and listen_ip in ["0.0.0.0", default_route]:
desc = "\033[32mexternal"
try:
eps[ip] += ", " + desc
eps[default_route] += ", " + desc
except:
eps[ip] = desc
eps[default_route] = desc

return eps

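The route lookup above relies on a quirk of UDP sockets: connect() never sends a packet, it only picks a route, so getsockname() afterwards reveals the local address of the outbound interface. A standalone sketch of the trick, using the same arbitrary destination addresses as the hunk above:

import socket

def guess_default_route_ip():
    # connect() on SOCK_DGRAM transmits nothing; it just selects a route
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        for dst in ("10.255.255.255", "192.168.255.255", "239.255.255.255"):
            try:
                s.connect((dst, 1))
                return s.getsockname()[0]
            except OSError:
                continue
        return None
    finally:
        s.close()
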
@@ -6,12 +6,14 @@ import os
|
||||
import re
|
||||
import time
|
||||
import math
|
||||
import shutil
|
||||
import base64
|
||||
import hashlib
|
||||
import threading
|
||||
from copy import deepcopy
|
||||
|
||||
from .util import Pebkac
|
||||
from .__init__ import WINDOWS
|
||||
from .util import Pebkac, Queue, fsenc, sanitize_fn
|
||||
|
||||
|
||||
class Up2k(object):
|
||||
@@ -35,30 +37,57 @@ class Up2k(object):
|
||||
self.registry = {}
|
||||
self.mutex = threading.Lock()
|
||||
|
||||
if WINDOWS:
|
||||
# usually fails to set lastmod too quickly
|
||||
self.lastmod_q = Queue()
|
||||
thr = threading.Thread(target=self._lastmodder)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
# static
|
||||
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
|
||||
|
||||
def handle_json(self, cj):
|
||||
cj["name"] = sanitize_fn(cj["name"])
|
||||
wark = self._get_wark(cj)
|
||||
now = time.time()
|
||||
with self.mutex:
|
||||
try:
|
||||
# TODO use registry persistence here to symlink any matching wark
|
||||
if wark in self.registry:
|
||||
job = self.registry[wark]
|
||||
if job["vdir"] != cj["vdir"] or job["name"] != cj["name"]:
|
||||
print("\n".join([job["vdir"], cj["vdir"], job["name"], cj["name"]]))
|
||||
raise Pebkac(400, "unexpected filepath")
|
||||
|
||||
except KeyError:
|
||||
if job["rdir"] != cj["rdir"] or job["name"] != cj["name"]:
|
||||
src = os.path.join(job["rdir"], job["name"])
|
||||
dst = os.path.join(cj["rdir"], cj["name"])
|
||||
if job["need"]:
|
||||
self.log("up2k", "unfinished:\n {0}\n {1}".format(src, dst))
|
||||
err = "partial upload exists at a different location; please resume uploading here instead:\n{0}{1} ".format(
|
||||
job["vdir"], job["name"]
|
||||
)
|
||||
raise Pebkac(400, err)
|
||||
else:
|
||||
# symlink to the client-provided name,
|
||||
# returning the previous upload info
|
||||
job = deepcopy(job)
|
||||
suffix = self._suffix(dst, now, job["addr"])
|
||||
job["name"] = cj["name"] + suffix
|
||||
self._symlink(src, dst + suffix)
|
||||
else:
|
||||
job = {
|
||||
"wark": wark,
|
||||
"t0": int(time.time()),
|
||||
"t0": now,
|
||||
"addr": cj["addr"],
|
||||
"vdir": cj["vdir"],
|
||||
"rdir": cj["rdir"],
|
||||
# client-provided, sanitized by _get_wark:
|
||||
"name": cj["name"],
|
||||
"size": cj["size"],
|
||||
"lmod": cj["lmod"],
|
||||
"hash": deepcopy(cj["hash"]),
|
||||
}
|
||||
|
||||
path = os.path.join(job["rdir"], job["name"])
|
||||
job["name"] += self._suffix(path, now, cj["addr"])
|
||||
|
||||
# one chunk may occur multiple times in a file;
# filter to unique values for the list of missing chunks
# (preserve order to reduce disk thrashing)
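Filtering the chunk list down to unique hashes while keeping the upload order is the usual seen-set walk; a small sketch (the job["need"] field name is taken from the surrounding hunks, the helper name is made up):

def unique_in_order(hashes):
    # keep the first occurrence of each chunk hash, preserve order
    seen = set()
    out = []
    for h in hashes:
        if h not in seen:
            seen.add(h)
            out.append(h)
    return out

# e.g. job["need"] = unique_in_order(cj["hash"])
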
@@ -74,10 +103,47 @@ class Up2k(object):
|
||||
return {
|
||||
"name": job["name"],
|
||||
"size": job["size"],
|
||||
"lmod": job["lmod"],
|
||||
"hash": job["need"],
|
||||
"wark": wark,
|
||||
}
|
||||
|
||||
def _suffix(self, fpath, ts, ip):
|
||||
# TODO broker which avoid this race and
|
||||
# provides a new filename if taken (same as bup)
|
||||
if not os.path.exists(fsenc(fpath)):
|
||||
return ""
|
||||
|
||||
return ".{:.6f}-{}".format(ts, ip)
|
||||
|
||||
def _symlink(self, src, dst):
# TODO store this in linktab so we never delete src if there are links to it
self.log("up2k", "linking dupe:\n {0}\n {1}".format(src, dst))
try:
lsrc = src
ldst = dst
fs1 = os.stat(fsenc(os.path.split(src)[0])).st_dev
fs2 = os.stat(fsenc(os.path.split(dst)[0])).st_dev
if fs1 == 0:
# py2 on winxp or other unsupported combination
raise OSError()
elif fs1 == fs2:
# same fs; make symlink as relative as possible
nsrc = src.replace("\\", "/").split("/")
ndst = dst.replace("\\", "/").split("/")
nc = 0
for a, b in zip(nsrc, ndst):
if a != b:
break
nc += 1
if nc > 1:
lsrc = nsrc[nc:]
lsrc = "../" * (len(lsrc) - 1) + "/".join(lsrc)
os.symlink(fsenc(lsrc), fsenc(ldst))
except (AttributeError, OSError) as ex:
self.log("up2k", "cannot symlink; creating copy")
shutil.copy2(fsenc(src), fsenc(dst))

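The loop above trims the common path prefix so the symlink target is as relative as possible; os.path.relpath computes essentially the same thing. A hedged sketch of that computation on its own:

def relative_link_target(src, dst):
    # count the leading path components src and dst have in common
    nsrc = src.replace("\\", "/").split("/")
    ndst = dst.replace("\\", "/").split("/")
    nc = 0
    for a, b in zip(nsrc, ndst):
        if a != b:
            break
        nc += 1
    if nc <= 1:
        return src  # nothing useful in common; keep the full path
    tail = nsrc[nc:]
    # climb out of dst's directory, then walk down to src
    return "../" * (len(tail) - 1) + "/".join(tail)
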
def handle_chunk(self, wark, chash):
|
||||
with self.mutex:
|
||||
job = self.registry.get(wark)
|
||||
@@ -94,13 +160,21 @@ class Up2k(object):
|
||||
chunksize = self._get_chunksize(job["size"])
|
||||
ofs = [chunksize * x for x in nchunk]
|
||||
|
||||
path = os.path.join(job["vdir"], job["name"])
|
||||
path = os.path.join(job["rdir"], job["name"])
|
||||
|
||||
return [chunksize, ofs, path]
|
||||
return [chunksize, ofs, path, job["lmod"]]
|
||||
|
||||
def confirm_chunk(self, wark, chash):
|
||||
with self.mutex:
|
||||
self.registry[wark]["need"].remove(chash)
|
||||
job = self.registry[wark]
|
||||
job["need"].remove(chash)
|
||||
ret = len(job["need"])
|
||||
|
||||
if WINDOWS and ret == 0:
|
||||
path = os.path.join(job["rdir"], job["name"])
|
||||
self.lastmod_q.put([path, (int(time.time()), int(job["lmod"]))])
|
||||
|
||||
return ret
|
||||
|
||||
def _get_chunksize(self, filesize):
|
||||
chunksize = 1024 * 1024
|
||||
@@ -115,7 +189,7 @@ class Up2k(object):
|
||||
stepsize *= mul
|
||||
|
||||
def _get_wark(self, cj):
|
||||
if len(cj["name"]) > 1024 or len(cj["hash"]) > 256:
|
||||
if len(cj["name"]) > 1024 or len(cj["hash"]) > 512 * 1024: # 16TiB
|
||||
raise Pebkac(400, "name or numchunks not according to spec")
|
||||
|
||||
for k in cj["hash"]:
|
||||
@@ -124,6 +198,12 @@ class Up2k(object):
|
||||
400, "at least one hash is not according to spec: {}".format(k)
|
||||
)
|
||||
|
||||
# try to use client-provided timestamp, don't care if it fails somehow
try:
cj["lmod"] = int(cj["lmod"])
except:
cj["lmod"] = int(time.time())

# server-reproducible file identifier, independent of name or location
ident = [self.salt, str(cj["size"])]
ident.extend(cj["hash"])
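The identifier list above gets hashed into the "wark", the server-reproducible file ID used as the registry key. The hashing step falls outside this hunk, so the digest, truncation and encoding below are assumptions, modeled on the 43-character urlsafe-base64 shape that the chunk-hash regex earlier in the hunk expects:

import base64
import hashlib

def make_wark(salt, size, chunk_hashes):
    # assumed join and truncation; copyparty's actual _get_wark may differ
    ident = [salt, str(size)] + list(chunk_hashes)
    digest = hashlib.sha512("\n".join(ident).encode("utf-8")).digest()
    # 32 digest bytes -> 43 chars of padless urlsafe base64
    return base64.urlsafe_b64encode(digest[:32]).decode("ascii").rstrip("=")
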
@@ -138,8 +218,21 @@ class Up2k(object):
|
||||
|
||||
def _new_upload(self, job):
|
||||
self.registry[job["wark"]] = job
|
||||
path = os.path.join(job["vdir"], job["name"])
|
||||
with open(path, "wb") as f:
|
||||
path = os.path.join(job["rdir"], job["name"])
|
||||
with open(fsenc(path), "wb") as f:
|
||||
f.seek(job["size"] - 1)
|
||||
f.write(b"e")
|
||||
|
||||
def _lastmodder(self):
|
||||
while True:
|
||||
ready = []
|
||||
while not self.lastmod_q.empty():
|
||||
ready.append(self.lastmod_q.get())
|
||||
|
||||
# self.log("lmod", "got {}".format(len(ready)))
|
||||
time.sleep(5)
|
||||
for path, times in ready:
|
||||
try:
|
||||
os.utime(fsenc(path), times)
|
||||
except:
|
||||
self.log("lmod", "failed to utime ({}, {})".format(path, times))
|
||||
|
||||
@@ -3,14 +3,16 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import base64
|
||||
import struct
|
||||
import hashlib
|
||||
import platform
|
||||
import threading
|
||||
import mimetypes
|
||||
import subprocess as sp # nosec
|
||||
|
||||
from .__init__ import PY2
|
||||
from .__init__ import PY2, WINDOWS
|
||||
from .stolen import surrogateescape
|
||||
|
||||
FAKE_MP = False
|
||||
@@ -27,16 +29,21 @@ except ImportError:
|
||||
if not PY2:
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from queue import Queue
|
||||
else:
|
||||
from urllib import unquote # pylint: disable=no-name-in-module
|
||||
from urllib import quote # pylint: disable=no-name-in-module
|
||||
from Queue import Queue # pylint: disable=import-error,no-name-in-module
|
||||
|
||||
surrogateescape.register_surrogateescape()
|
||||
FS_ENCODING = sys.getfilesystemencoding()
|
||||
if WINDOWS and PY2:
|
||||
FS_ENCODING = "utf-8"
|
||||
|
||||
|
||||
HTTPCODE = {
|
||||
200: "OK",
|
||||
204: "No Content",
|
||||
206: "Partial Content",
|
||||
304: "Not Modified",
|
||||
400: "Bad Request",
|
||||
@@ -44,6 +51,7 @@ HTTPCODE = {
|
||||
404: "Not Found",
|
||||
405: "Method Not Allowed",
|
||||
413: "Payload Too Large",
|
||||
416: "Requested Range Not Satisfiable",
|
||||
422: "Unprocessable Entity",
|
||||
500: "Internal Server Error",
|
||||
501: "Not Implemented",
|
||||
@@ -186,7 +194,7 @@ class MultipartParser(object):
|
||||
buf = self.sr.recv(bufsz)
|
||||
if not buf:
|
||||
# abort: client disconnected
|
||||
raise Pebkac(400, "client disconnected during multipart post")
|
||||
raise Pebkac(400, "client d/c during multipart post")
|
||||
|
||||
while True:
|
||||
ofs = buf.find(self.boundary)
|
||||
@@ -220,7 +228,7 @@ class MultipartParser(object):
|
||||
buf2 = self.sr.recv(bufsz)
|
||||
if not buf2:
|
||||
# abort: client disconnected
|
||||
raise Pebkac(400, "client disconnected during multipart post")
|
||||
raise Pebkac(400, "client d/c during multipart post")
|
||||
|
||||
buf += buf2
|
||||
|
||||
@@ -304,18 +312,7 @@ def get_boundary(headers):
|
||||
def read_header(sr):
|
||||
ret = b""
|
||||
while True:
|
||||
if ret.endswith(b"\r\n\r\n"):
|
||||
break
|
||||
elif ret.endswith(b"\r\n\r"):
|
||||
n = 1
|
||||
elif ret.endswith(b"\r\n"):
|
||||
n = 2
|
||||
elif ret.endswith(b"\r"):
|
||||
n = 3
|
||||
else:
|
||||
n = 4
|
||||
|
||||
buf = sr.recv(n)
|
||||
buf = sr.recv(1024)
|
||||
if not buf:
|
||||
if not ret:
|
||||
return None
|
||||
@@ -327,11 +324,40 @@ def read_header(sr):
|
||||
)
|
||||
|
||||
ret += buf
|
||||
ofs = ret.find(b"\r\n\r\n")
|
||||
if ofs < 0:
|
||||
if len(ret) > 1024 * 64:
|
||||
raise Pebkac(400, "header 2big")
|
||||
else:
|
||||
continue
|
||||
|
||||
if len(ret) > 1024 * 64:
|
||||
raise Pebkac(400, "header 2big")
|
||||
sr.unrecv(ret[ofs + 4 :])
|
||||
return ret[:ofs].decode("utf-8", "surrogateescape").split("\r\n")
|
||||
|
||||
return ret[:-4].decode("utf-8", "surrogateescape").split("\r\n")
|
||||
|
||||
def humansize(sz, terse=False):
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
if sz < 1024:
break

sz /= 1024.0

ret = " ".join([str(sz)[:4].rstrip("."), unit])

if not terse:
return ret

return ret.replace("iB", "").replace(" ", "")

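For reference, a few outputs of the function above, worked out by hand:

>>> humansize(999)
'999 B'
>>> humansize(1024)
'1.0 KiB'
>>> humansize(1234567)
'1.17 MiB'
>>> humansize(1234567, terse=True)
'1.17M'
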
def get_spd(nbyte, t0, t=None):
|
||||
if t is None:
|
||||
t = time.time()
|
||||
|
||||
bps = nbyte / ((t - t0) + 0.001)
|
||||
s1 = humansize(nbyte).replace(" ", "\033[33m").replace("iB", "")
|
||||
s2 = humansize(bps).replace(" ", "\033[35m").replace("iB", "")
|
||||
return "{} \033[0m{}/s\033[0m".format(s1, s2)
|
||||
|
||||
|
||||
def undot(path):
|
||||
@@ -351,7 +377,30 @@ def undot(path):
|
||||
|
||||
|
||||
def sanitize_fn(fn):
return fn.replace("\\", "/").split("/")[-1].strip()
fn = fn.replace("\\", "/").split("/")[-1]

if WINDOWS:
for bad, good in [
["<", "＜"],
[">", "＞"],
[":", "："],
['"', "＂"],
["/", "／"],
["\\", "＼"],
["|", "｜"],
["?", "？"],
["*", "＊"],
]:
fn = fn.replace(bad, good)

bad = ["con", "prn", "aux", "nul"]
for n in range(1, 10):
bad += "com{0} lpt{0}".format(n).split(" ")

if fn.lower() in bad:
fn = "_" + fn

return fn.strip()
|
||||
|
||||
|
||||
def exclude_dotfiles(filepaths):
|
||||
@@ -360,26 +409,41 @@ def exclude_dotfiles(filepaths):
|
||||
yield fpath
|
||||
|
||||
|
||||
def html_escape(s, quote=False):
"""html.escape but also newlines"""
s = (
s.replace("&", "&amp;")
.replace("<", "&lt;")
.replace(">", "&gt;")
.replace("\r", " ")
.replace("\n", " ")
)
if quote:
s = s.replace('"', "&quot;").replace("'", "&#x27;")

return s
|
||||
|
||||
|
||||
def quotep(txt):
|
||||
"""url quoter which deals with bytes correctly"""
|
||||
btxt = fsenc(txt)
|
||||
btxt = w8enc(txt)
|
||||
quot1 = quote(btxt, safe=b"/")
|
||||
if not PY2:
|
||||
quot1 = quot1.encode("ascii")
|
||||
|
||||
quot2 = quot1.replace(b" ", b"+")
|
||||
return fsdec(quot2)
|
||||
return w8dec(quot2)
|
||||
|
||||
|
||||
def unquotep(txt):
|
||||
"""url unquoter which deals with bytes correctly"""
|
||||
btxt = fsenc(txt)
|
||||
unq1 = btxt.replace(b"+", b" ")
|
||||
unq2 = unquote(unq1)
|
||||
return fsdec(unq2)
|
||||
btxt = w8enc(txt)
|
||||
# btxt = btxt.replace(b"+", b" ")
|
||||
unq2 = unquote(btxt)
|
||||
return w8dec(unq2)
|
||||
|
||||
|
||||
def fsdec(txt):
|
||||
def w8dec(txt):
|
||||
"""decodes filesystem-bytes to wtf8"""
|
||||
if PY2:
|
||||
return surrogateescape.decodefilename(txt)
|
||||
@@ -387,7 +451,7 @@ def fsdec(txt):
|
||||
return txt.decode(FS_ENCODING, "surrogateescape")
|
||||
|
||||
|
||||
def fsenc(txt):
|
||||
def w8enc(txt):
|
||||
"""encodes wtf8 to filesystem-bytes"""
|
||||
if PY2:
|
||||
return surrogateescape.encodefilename(txt)
|
||||
@@ -395,6 +459,19 @@ def fsenc(txt):
|
||||
return txt.encode(FS_ENCODING, "surrogateescape")
|
||||
|
||||
|
||||
if PY2 and WINDOWS:
# moonrunes become \x3f with bytestrings,
# losing mojibake support is worth
def _not_actually_mbcs(txt):
return txt

fsenc = _not_actually_mbcs
fsdec = _not_actually_mbcs
else:
fsenc = w8enc
fsdec = w8dec

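The w8enc/w8dec pair exists so filenames whose bytes are not valid UTF-8 still survive a decode/encode round trip: PEP 383 surrogateescape maps each offending byte to a lone surrogate and back. A small python3 illustration:

raw = b"caf\xe9.txt"  # latin-1 bytes, not valid utf-8
txt = raw.decode("utf-8", "surrogateescape")  # 'caf\udce9.txt'
assert txt.encode("utf-8", "surrogateescape") == raw  # lossless round trip
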
def read_socket(sr, total_size):
|
||||
remains = total_size
|
||||
while remains > 0:
|
||||
@@ -404,12 +481,21 @@ def read_socket(sr, total_size):
|
||||
|
||||
buf = sr.recv(bufsz)
|
||||
if not buf:
|
||||
raise Pebkac(400, "client disconnected during binary post")
|
||||
raise Pebkac(400, "client d/c during binary post")
|
||||
|
||||
remains -= len(buf)
|
||||
yield buf
|
||||
|
||||
|
||||
def read_socket_unbounded(sr):
|
||||
while True:
|
||||
buf = sr.recv(32 * 1024)
|
||||
if not buf:
|
||||
return
|
||||
|
||||
yield buf
|
||||
|
||||
|
||||
def hashcopy(actor, fin, fout):
|
||||
u32_lim = int((2 ** 31) * 0.9)
|
||||
hashobj = hashlib.sha512()
|
||||
@@ -457,6 +543,13 @@ def unescape_cookie(orig):
|
||||
return ret
|
||||
|
||||
|
||||
def guess_mime(url):
|
||||
if url.endswith(".md"):
|
||||
return ["text/plain; charset=UTF-8"]
|
||||
|
||||
return mimetypes.guess_type(url)
|
||||
|
||||
|
||||
def runcmd(*argv):
|
||||
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
stdout, stderr = p.communicate()
|
||||
@@ -480,6 +573,7 @@ def gzip_orig_sz(fn):
|
||||
|
||||
|
||||
def py_desc():
|
||||
interp = platform.python_implementation()
|
||||
py_ver = ".".join([str(x) for x in sys.version_info])
|
||||
ofs = py_ver.find(".final.")
|
||||
if ofs > 0:
|
||||
@@ -487,8 +581,14 @@ def py_desc():
|
||||
|
||||
bitness = struct.calcsize(b"P") * 8
|
||||
host_os = platform.system()
|
||||
compiler = platform.python_compiler()
|
||||
|
||||
return "{0} on {1}{2}".format(py_ver, host_os, bitness)
|
||||
os_ver = re.search(r"([0-9]+\.[0-9\.]+)", platform.version())
|
||||
os_ver = os_ver.group(1) if os_ver else ""
|
||||
|
||||
return "{:>9} v{} on {}{} {} [{}]".format(
|
||||
interp, py_ver, host_os, bitness, os_ver, compiler
|
||||
)
|
||||
|
||||
|
||||
class Pebkac(Exception):
|
||||
|
||||
12
copyparty/web/Makefile
Normal file
@@ -0,0 +1,12 @@
# run me to zopfli all the static files
# which should help on really slow connections
# but then why are you using copyparty in the first place

pk: $(addsuffix .gz, $(wildcard *.js *.css))
un: $(addsuffix .un, $(wildcard *.gz))

%.gz: %
	pigz -11 -J 34 -I 5730 $<

%.un: %
	pigz -d $<
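These .gz files can later be served with their original size read back from the gzip trailer (compare the gzip_orig_sz helper referenced in the util.py hunk above): the last four bytes of a gzip member hold the uncompressed length modulo 2**32, little-endian. A sketch of that lookup, not necessarily how copyparty implements it:

import struct

def gz_orig_size(path):
    # ISIZE field: final 4 bytes, uncompressed size mod 2**32
    with open(path, "rb") as f:
        f.seek(-4, 2)
        return struct.unpack("<I", f.read(4))[0]
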
@@ -36,12 +36,12 @@ body {
|
||||
padding: .35em .5em .2em .5em;
|
||||
border-radius: 0 .3em .3em 0;
|
||||
box-shadow: .1em .1em .4em #222;
|
||||
margin: 2em 0 1em 0;
|
||||
margin: 1.3em 0 0 0;
|
||||
font-size: 1.4em;
|
||||
}
|
||||
#files {
|
||||
border-collapse: collapse;
|
||||
margin-top: 1em;
|
||||
margin-top: 2em;
|
||||
}
|
||||
#files tbody a {
|
||||
display: block;
|
||||
@@ -68,7 +68,7 @@ a {
|
||||
}
|
||||
#files thead th:last-child {
|
||||
background: #444;
|
||||
border-radius: .7em 0 0 0;
|
||||
border-radius: .7em .7em 0 0;
|
||||
}
|
||||
#files thead th:first-child {
|
||||
background: #222;
|
||||
@@ -131,6 +131,17 @@ a {
|
||||
.logue {
|
||||
padding: .2em 1.5em;
|
||||
}
|
||||
#srv_info {
|
||||
opacity: .5;
|
||||
font-size: .8em;
|
||||
color: #fc5;
|
||||
position: absolute;
|
||||
top: .5em;
|
||||
left: 2em;
|
||||
}
|
||||
#srv_info span {
|
||||
color: #fff;
|
||||
}
|
||||
a.play {
|
||||
color: #e70;
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>copyparty</title>
|
||||
<title>⇆🎉 {{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css{{ ts }}">
|
||||
@@ -13,16 +13,16 @@
|
||||
</head>
|
||||
|
||||
<body>
|
||||
{%- if can_upload %}
|
||||
{%- include 'upload.html' %}
|
||||
{%- endif %}
|
||||
|
||||
<h1 id="path">
|
||||
{%- for n in vpnodes %}
|
||||
<a href="/{{ n[0] }}">{{ n[1] }}</a>
|
||||
{%- endfor %}
|
||||
</h1>
|
||||
|
||||
{%- if can_upload %}
|
||||
{%- include 'upload.html' %}
|
||||
{%- endif %}
|
||||
|
||||
{%- if can_read %}
|
||||
{%- if prologue %}
|
||||
<div id="pro" class="logue">{{ prologue }}</div>
|
||||
@@ -33,14 +33,15 @@
|
||||
<tr>
|
||||
<th></th>
|
||||
<th>File Name</th>
|
||||
<th>File Size</th>
|
||||
<th sort="int">File Size</th>
|
||||
<th>T</th>
|
||||
<th>Date</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
|
||||
{%- for f in files %}
|
||||
<tr><td>{{ f[0] }}</td><td><a href="{{ f[1] }}">{{ f[2] }}</a></td><td>{{ f[3] }}</td><td>{{ f[4] }}</td></tr>
|
||||
<tr><td>{{ f[0] }}</td><td><a href="{{ f[1] }}">{{ f[2] }}</a></td><td>{{ f[3] }}</td><td>{{ f[4] }}</td><td>{{ f[5] }}</td></tr>
|
||||
{%- endfor %}
|
||||
|
||||
</tbody>
|
||||
@@ -53,6 +54,10 @@
|
||||
|
||||
<h2><a href="?h">control-panel</a></h2>
|
||||
|
||||
{%- if srv_info %}
|
||||
<div id="srv_info"><span>{{ srv_info }}</span></div>
|
||||
{%- endif %}
|
||||
|
||||
<div id="widget">
|
||||
<div id="wtoggle">♫</div>
|
||||
<div id="widgeti">
|
||||
@@ -63,6 +68,8 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script src="/.cpr/util.js{{ ts }}"></script>
|
||||
|
||||
{%- if can_read %}
|
||||
<script src="/.cpr/browser.js{{ ts }}"></script>
|
||||
{%- endif %}
|
||||
|
||||
@@ -1,74 +1,9 @@
|
||||
"use strict";
|
||||
|
||||
// error handler for mobile devices
|
||||
function hcroak(msg) {
|
||||
document.body.innerHTML = msg;
|
||||
window.onerror = undefined;
|
||||
throw 'fatal_err';
|
||||
}
|
||||
function croak(msg) {
|
||||
document.body.textContent = msg;
|
||||
window.onerror = undefined;
|
||||
throw msg;
|
||||
}
|
||||
function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) {
return {
'&': '&amp;',
'"': '&quot;',
'<': '&lt;',
'>': '&gt;'
}[c];
});
}
|
||||
window.onerror = function (msg, url, lineNo, columnNo, error) {
|
||||
window.onerror = undefined;
|
||||
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
|
||||
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
|
||||
|
||||
if (error) {
|
||||
var find = ['desc', 'stack', 'trace'];
|
||||
for (var a = 0; a < find.length; a++)
|
||||
if (String(error[find[a]]) !== 'undefined')
|
||||
html.push('<h2>' + find[a] + '</h2>' +
|
||||
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
|
||||
}
|
||||
document.body.style.fontSize = '0.8em';
|
||||
hcroak(html.join('\n'));
|
||||
};
|
||||
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
||||
if (!String.prototype.endsWith) {
|
||||
String.prototype.endsWith = function (search, this_len) {
|
||||
if (this_len === undefined || this_len > this.length) {
|
||||
this_len = this.length;
|
||||
}
|
||||
return this.substring(this_len - search.length, this_len) === search;
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var script = document.createElement('script');
|
||||
script.type = 'text/javascript';
|
||||
script.src = url;
|
||||
|
||||
script.onreadystatechange = cb;
|
||||
script.onload = cb;
|
||||
|
||||
head.appendChild(script);
|
||||
}
|
||||
|
||||
|
||||
function o(id) {
|
||||
return document.getElementById(id);
|
||||
}
|
||||
window.onerror = vis_exh;
|
||||
|
||||
function dbg(msg) {
|
||||
o('path').innerHTML = msg;
|
||||
ebi('path').innerHTML = msg;
|
||||
}
|
||||
|
||||
function ev(e) {
|
||||
@@ -77,6 +12,8 @@ function ev(e) {
|
||||
return e;
|
||||
}
|
||||
|
||||
makeSortable(ebi('files'));
|
||||
|
||||
|
||||
// extract songs + add play column
|
||||
var mp = (function () {
|
||||
@@ -88,10 +25,9 @@ var mp = (function () {
|
||||
'tracks': tracks,
|
||||
'cover_url': ''
|
||||
};
|
||||
var re_audio = new RegExp('\.(opus|ogg|m4a|aac|mp3|wav|flac)$', 'i');
|
||||
var re_cover = new RegExp('^(cover|folder|cd|front|back)\.(jpe?g|png|gif)$', 'i');
|
||||
var re_audio = /\.(opus|ogg|m4a|aac|mp3|wav|flac)$/i;
|
||||
|
||||
var trs = document.getElementById('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
|
||||
var trs = ebi('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
|
||||
for (var a = 0, aa = trs.length; a < aa; a++) {
|
||||
var tds = trs[a].getElementsByTagName('td');
|
||||
var link = tds[1].getElementsByTagName('a')[0];
|
||||
@@ -107,7 +43,7 @@ var mp = (function () {
|
||||
}
|
||||
|
||||
for (var a = 0, aa = tracks.length; a < aa; a++)
|
||||
o('trk' + a).onclick = ev_play;
|
||||
ebi('trk' + a).onclick = ev_play;
|
||||
|
||||
ret.vol = localStorage.getItem('vol');
|
||||
if (ret.vol !== null)
|
||||
@@ -134,8 +70,8 @@ var mp = (function () {
|
||||
// toggle player widget
|
||||
var widget = (function () {
|
||||
var ret = {};
|
||||
var widget = document.getElementById('widget');
|
||||
var wtoggle = document.getElementById('wtoggle');
|
||||
var widget = ebi('widget');
|
||||
var wtoggle = ebi('wtoggle');
|
||||
var touchmode = false;
|
||||
var side_open = false;
|
||||
var was_paused = true;
|
||||
@@ -164,7 +100,7 @@ var widget = (function () {
|
||||
ret.paused = function (paused) {
|
||||
if (was_paused != paused) {
|
||||
was_paused = paused;
|
||||
o('bplay').innerHTML = paused ? '▶' : '⏸';
|
||||
ebi('bplay').innerHTML = paused ? '▶' : '⏸';
|
||||
}
|
||||
};
|
||||
var click_handler = function (e) {
|
||||
@@ -188,8 +124,8 @@ var widget = (function () {
|
||||
// buffer/position bar
|
||||
var pbar = (function () {
|
||||
var r = {};
|
||||
r.bcan = o('barbuf');
|
||||
r.pcan = o('barpos');
|
||||
r.bcan = ebi('barbuf');
|
||||
r.pcan = ebi('barpos');
|
||||
r.bctx = r.bcan.getContext('2d');
|
||||
r.pctx = r.pcan.getContext('2d');
|
||||
|
||||
@@ -254,7 +190,7 @@ var pbar = (function () {
|
||||
// volume bar
|
||||
var vbar = (function () {
|
||||
var r = {};
|
||||
r.can = o('pvol');
|
||||
r.can = ebi('pvol');
|
||||
r.ctx = r.can.getContext('2d');
|
||||
|
||||
var bctx = r.ctx;
|
||||
@@ -351,7 +287,7 @@ var vbar = (function () {
|
||||
else
|
||||
play(0);
|
||||
};
|
||||
o('bplay').onclick = function (e) {
|
||||
ebi('bplay').onclick = function (e) {
|
||||
ev(e);
|
||||
if (mp.au) {
|
||||
if (mp.au.paused)
|
||||
@@ -362,15 +298,15 @@ var vbar = (function () {
|
||||
else
|
||||
play(0);
|
||||
};
|
||||
o('bprev').onclick = function (e) {
|
||||
ebi('bprev').onclick = function (e) {
|
||||
ev(e);
|
||||
bskip(-1);
|
||||
};
|
||||
o('bnext').onclick = function (e) {
|
||||
ebi('bnext').onclick = function (e) {
|
||||
ev(e);
|
||||
bskip(1);
|
||||
};
|
||||
o('barpos').onclick = function (e) {
|
||||
ebi('barpos').onclick = function (e) {
|
||||
if (!mp.au) {
|
||||
//dbg((new Date()).getTime());
|
||||
return play(0);
|
||||
@@ -380,15 +316,6 @@ var vbar = (function () {
|
||||
var x = e.clientX - rect.left;
|
||||
var mul = x * 1.0 / rect.width;
|
||||
|
||||
/*
|
||||
dbg(//Math.round(rect.width) + 'x' + Math.round(rect.height) + '+' +
|
||||
//Math.round(rect.left) + '+' + Math.round(rect.top) + ', ' +
|
||||
//Math.round(e.clientX) + 'x' + Math.round(e.clientY) + ', ' +
|
||||
Math.round(mp.au.currentTime * 10) / 10 + ', ' +
|
||||
Math.round(mp.au.duration * 10) / 10 + '*' +
|
||||
Math.round(mul * 1000) / 1000);
|
||||
*/
|
||||
|
||||
mp.au.currentTime = mp.au.duration * mul;
|
||||
|
||||
if (mp.au === mp.au_native)
|
||||
@@ -445,12 +372,18 @@ function ev_play(e) {
|
||||
|
||||
|
||||
function setclass(id, clas) {
|
||||
o(id).setAttribute('class', clas);
|
||||
ebi(id).setAttribute('class', clas);
|
||||
}
|
||||
|
||||
|
||||
var iOS = !!navigator.platform &&
|
||||
/iPad|iPhone|iPod/.test(navigator.platform);
|
||||
var need_ogv = true;
|
||||
try {
|
||||
need_ogv = new Audio().canPlayType('audio/ogg; codecs=opus') !== 'probably';
|
||||
|
||||
if (/ Edge\//.exec(navigator.userAgent + ''))
|
||||
need_ogv = true;
|
||||
}
|
||||
catch (ex) { }
|
||||
|
||||
|
||||
// plays the tid'th audio file on the page
|
||||
@@ -473,7 +406,7 @@ function play(tid, call_depth) {
|
||||
var hack_attempt_play = true;
|
||||
|
||||
var url = mp.tracks[tid];
|
||||
if (iOS && /\.(ogg|opus)$/i.test(url)) {
|
||||
if (need_ogv && /\.(ogg|opus)$/i.test(url)) {
|
||||
if (mp.au_ogvjs) {
|
||||
mp.au = mp.au_ogvjs;
|
||||
}
|
||||
@@ -535,7 +468,6 @@ function play(tid, call_depth) {
|
||||
function evau_error(e) {
|
||||
var err = '';
|
||||
var eplaya = (e && e.target) || (window.event && window.event.srcElement);
|
||||
var url = eplaya.src;
|
||||
|
||||
switch (eplaya.error.code) {
|
||||
case eplaya.error.MEDIA_ERR_ABORTED:
|
||||
@@ -560,7 +492,6 @@ function evau_error(e) {
|
||||
err += '\n\nFile: «' + decodeURIComponent(eplaya.src.split('/').slice(-1)[0]) + '»';
|
||||
|
||||
alert(err);
|
||||
play(eplaya.tid + 1);
|
||||
}
|
||||
|
||||
|
||||
@@ -577,26 +508,27 @@ function show_modal(html) {
|
||||
|
||||
// hide fullscreen message
|
||||
function unblocked() {
|
||||
var dom = o('blocked');
|
||||
var dom = ebi('blocked');
|
||||
if (dom)
|
||||
dom.remove();
|
||||
dom.parentNode.removeChild(dom);
|
||||
}
|
||||
|
||||
|
||||
// show ui to manually start playback of a linked song
|
||||
function autoplay_blocked(tid) {
|
||||
function autoplay_blocked() {
|
||||
show_modal(
|
||||
'<div id="blk_play"><a id="blk_go"></a></div>' +
|
||||
'<div id="blk_abrt"><a id="blk_na">Cancel<br />(show file list)</a></div>');
|
||||
'<div id="blk_play"><a href="#" id="blk_go"></a></div>' +
|
||||
'<div id="blk_abrt"><a href="#" id="blk_na">Cancel<br />(show file list)</a></div>');
|
||||
|
||||
var go = o('blk_go');
|
||||
var na = o('blk_na');
|
||||
var go = ebi('blk_go');
|
||||
var na = ebi('blk_na');
|
||||
|
||||
var fn = mp.tracks[mp.au.tid].split(/\//).pop();
|
||||
fn = decodeURIComponent(fn.replace(/\+/g, ' '));
|
||||
|
||||
go.textContent = 'Play "' + fn + '"';
|
||||
go.onclick = function () {
|
||||
go.onclick = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
unblocked();
|
||||
mp.au.play();
|
||||
};
|
||||
|
||||
602
copyparty/web/md.css
Normal file
@@ -0,0 +1,602 @@
|
||||
@font-face {
|
||||
font-family: 'scp';
|
||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
|
||||
}
|
||||
html, body {
|
||||
color: #333;
|
||||
background: #eee;
|
||||
font-family: sans-serif;
|
||||
line-height: 1.5em;
|
||||
}
|
||||
#mtw {
|
||||
display: none;
|
||||
}
|
||||
#mw {
|
||||
margin: 0 auto;
|
||||
padding: 0 1.5em;
|
||||
}
|
||||
pre, code, a {
|
||||
color: #480;
|
||||
background: #f7f7f7;
|
||||
border: .07em solid #ddd;
|
||||
border-radius: .2em;
|
||||
padding: .1em .3em;
|
||||
margin: 0 .1em;
|
||||
}
|
||||
code {
|
||||
font-size: .96em;
|
||||
}
|
||||
pre, code {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
pre {
|
||||
counter-reset: precode;
|
||||
}
|
||||
pre code {
|
||||
counter-increment: precode;
|
||||
display: inline-block;
|
||||
margin: 0 -.3em;
|
||||
padding: .4em .5em;
|
||||
border: none;
|
||||
border-bottom: 1px solid #cdc;
|
||||
min-width: calc(100% - .6em);
|
||||
line-height: 1.1em;
|
||||
}
|
||||
pre code:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
pre code::before {
|
||||
content: counter(precode);
|
||||
-webkit-user-select: none;
|
||||
display: inline-block;
|
||||
text-align: right;
|
||||
font-size: .75em;
|
||||
color: #48a;
|
||||
width: 4em;
|
||||
padding-right: 1.5em;
|
||||
margin-left: -5.5em;
|
||||
}
|
||||
pre code:hover {
|
||||
background: #fec;
|
||||
color: #360;
|
||||
}
|
||||
h1, h2 {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
h1 {
|
||||
font-size: 1.7em;
|
||||
text-align: center;
|
||||
border: 1em solid #777;
|
||||
border-width: .05em 0;
|
||||
margin: 3em 0;
|
||||
}
|
||||
h2 {
|
||||
font-size: 1.5em;
|
||||
font-weight: normal;
|
||||
background: #f7f7f7;
|
||||
border-top: .07em solid #fff;
|
||||
border-bottom: .07em solid #bbb;
|
||||
border-radius: .5em .5em 0 0;
|
||||
padding-left: .4em;
|
||||
margin-top: 3em;
|
||||
}
|
||||
h3 {
|
||||
border-bottom: .1em solid #999;
|
||||
}
|
||||
h1 a, h3 a, h5 a,
|
||||
h2 a, h4 a, h6 a {
|
||||
color: inherit;
|
||||
display: block;
|
||||
background: none;
|
||||
border: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
#mp ul,
|
||||
#mp ol {
|
||||
border-left: .3em solid #ddd;
|
||||
}
|
||||
#m>ul,
|
||||
#m>ol {
|
||||
border-color: #bbb;
|
||||
}
|
||||
#mp ul>li {
|
||||
list-style-type: disc;
|
||||
}
|
||||
#mp ul>li,
|
||||
#mp ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
}
|
||||
blockquote {
|
||||
font-family: serif;
|
||||
background: #f7f7f7;
|
||||
border: .07em dashed #ccc;
|
||||
padding: 0 2em;
|
||||
margin: 1em 0;
|
||||
}
|
||||
small {
|
||||
opacity: .8;
|
||||
}
|
||||
#toc {
|
||||
margin: 0 1em;
|
||||
-ms-scroll-chaining: none;
|
||||
overscroll-behavior-y: none;
|
||||
}
|
||||
#toc ul {
|
||||
padding-left: 1em;
|
||||
}
|
||||
#toc>ul {
|
||||
text-align: left;
|
||||
padding-left: .5em;
|
||||
}
|
||||
#toc li {
|
||||
list-style-type: none;
|
||||
line-height: 1.2em;
|
||||
margin: .5em 0;
|
||||
}
|
||||
#toc a {
|
||||
color: #057;
|
||||
border: none;
|
||||
background: none;
|
||||
display: block;
|
||||
margin-left: -.3em;
|
||||
padding: .2em .3em;
|
||||
}
|
||||
#toc a.act {
|
||||
color: #fff;
|
||||
background: #07a;
|
||||
}
|
||||
.todo_pend,
|
||||
.todo_done {
|
||||
z-index: 99;
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
font-family: monospace, monospace;
|
||||
font-weight: bold;
|
||||
font-size: 1.3em;
|
||||
line-height: .1em;
|
||||
margin: -.5em 0 -.5em -.85em;
|
||||
top: .1em;
|
||||
color: #b29;
|
||||
}
|
||||
.todo_done {
|
||||
color: #6b3;
|
||||
text-shadow: .02em 0 0 #6b3;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
margin: 1em 0;
|
||||
}
|
||||
th, td {
|
||||
padding: .2em .5em;
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
blink {
|
||||
animation: blinker .7s cubic-bezier(.9, 0, .1, 1) infinite;
|
||||
}
|
||||
@keyframes blinker {
|
||||
10% {
|
||||
opacity: 0;
|
||||
}
|
||||
60% {
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
|
||||
@media screen {
|
||||
html, body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
outline: 0;
|
||||
border: none;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
#mw {
|
||||
margin: 0 auto;
|
||||
right: 0;
|
||||
}
|
||||
#mp {
|
||||
max-width: 52em;
|
||||
margin-bottom: 6em;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
word-wrap: break-word; /*ie*/
|
||||
}
|
||||
a {
|
||||
color: #fff;
|
||||
background: #39b;
|
||||
text-decoration: none;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
border-bottom: .07em solid #079;
|
||||
}
|
||||
h2 {
|
||||
color: #fff;
|
||||
background: #555;
|
||||
margin-top: 2em;
|
||||
border-bottom: .22em solid #999;
|
||||
border-top: none;
|
||||
}
|
||||
h1 {
|
||||
color: #fff;
|
||||
background: #444;
|
||||
font-weight: normal;
|
||||
border-top: .4em solid #fb0;
|
||||
border-bottom: .4em solid #777;
|
||||
border-radius: 0 1em 0 1em;
|
||||
margin: 3em 0 1em 0;
|
||||
padding: .5em 0;
|
||||
}
|
||||
#mn {
|
||||
padding: 1.3em 0 .7em 1em;
|
||||
border-bottom: 1px solid #ccc;
|
||||
background: #eee;
|
||||
z-index: 10;
|
||||
width: calc(100% - 1em);
|
||||
}
|
||||
#mn a {
|
||||
color: #444;
|
||||
background: none;
|
||||
margin: 0 0 0 -.2em;
|
||||
padding: .3em 0 .3em .4em;
|
||||
text-decoration: none;
|
||||
border: none;
|
||||
/* ie: */
|
||||
border-bottom: .1em solid #777\9;
|
||||
margin-right: 1em\9;
|
||||
}
|
||||
#mn a:first-child {
|
||||
padding-left: .5em;
|
||||
}
|
||||
#mn a:last-child {
|
||||
padding-right: .5em;
|
||||
}
|
||||
#mn a:not(:last-child)::after {
|
||||
content: '';
|
||||
width: 1.05em;
|
||||
height: 1.05em;
|
||||
margin: -.2em .3em -.2em -.4em;
|
||||
display: inline-block;
|
||||
border: 1px solid rgba(0,0,0,0.2);
|
||||
border-width: .2em .2em 0 0;
|
||||
transform: rotate(45deg);
|
||||
}
|
||||
#mn a:hover {
|
||||
color: #000;
|
||||
text-decoration: underline;
|
||||
}
|
||||
#mh {
|
||||
padding: .4em 1em;
|
||||
position: relative;
|
||||
width: 100%;
|
||||
width: calc(100% - 3em);
|
||||
background: #eee;
|
||||
z-index: 9;
|
||||
top: 0;
|
||||
}
|
||||
#mh a {
|
||||
color: #444;
|
||||
background: none;
|
||||
text-decoration: underline;
|
||||
border: none;
|
||||
}
|
||||
#mh a:hover {
|
||||
color: #000;
|
||||
background: #ddd;
|
||||
}
|
||||
#toolsbox {
|
||||
overflow: hidden;
|
||||
display: inline-block;
|
||||
background: #eee;
|
||||
height: 1.5em;
|
||||
padding: 0 .2em;
|
||||
margin: 0 .2em;
|
||||
position: absolute;
|
||||
}
|
||||
#toolsbox.open {
|
||||
height: auto;
|
||||
overflow: visible;
|
||||
background: #eee;
|
||||
box-shadow: 0 .2em .2em #ccc;
|
||||
padding-bottom: .2em;
|
||||
}
|
||||
#toolsbox a {
|
||||
display: block;
|
||||
}
|
||||
#toolsbox a+a {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark,
|
||||
html.dark body {
|
||||
background: #222;
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark #toc a {
|
||||
color: #ccc;
|
||||
border-left: .4em solid #444;
|
||||
border-bottom: .1em solid #333;
|
||||
}
|
||||
html.dark #toc a.act {
|
||||
color: #fff;
|
||||
border-left: .4em solid #3ad;
|
||||
}
|
||||
html.dark #toc li {
|
||||
border-width: 0;
|
||||
}
|
||||
html.dark #mp a {
|
||||
background: #057;
|
||||
}
|
||||
html.dark #mp h1 a, html.dark #mp h4 a,
|
||||
html.dark #mp h2 a, html.dark #mp h5 a,
|
||||
html.dark #mp h3 a, html.dark #mp h6 a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
color: #8c0;
|
||||
background: #1a1a1a;
|
||||
border: .07em solid #333;
|
||||
}
|
||||
html.dark #mp ul,
|
||||
html.dark #mp ol {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark #m>ul,
|
||||
html.dark #m>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
html.dark h1 {
|
||||
background: #383838;
|
||||
border-top: .4em solid #b80;
|
||||
border-bottom: .4em solid #4c4c4c;
|
||||
}
|
||||
html.dark h2 {
|
||||
background: #444;
|
||||
border-bottom: .22em solid #555;
|
||||
}
|
||||
html.dark td,
|
||||
html.dark th {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark blockquote {
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
html.dark #mn a:not(:last-child)::after {
|
||||
border-color: rgba(255,255,255,0.3);
|
||||
}
|
||||
html.dark #mn a {
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark #mn {
|
||||
border-bottom: 1px solid #333;
|
||||
}
|
||||
html.dark #mn,
|
||||
html.dark #mh {
|
||||
background: #222;
|
||||
}
|
||||
html.dark #mh a {
|
||||
color: #ccc;
|
||||
background: none;
|
||||
}
|
||||
html.dark #mh a:hover {
|
||||
background: #333;
|
||||
color: #fff;
|
||||
}
|
||||
html.dark #toolsbox {
|
||||
background: #222;
|
||||
}
|
||||
html.dark #toolsbox.open {
|
||||
box-shadow: 0 .2em .2em #069;
|
||||
border-radius: 0 0 .4em .4em;
|
||||
}
|
||||
}
|
||||
|
||||
@media screen and (min-width: 66em) {
|
||||
#mw {
|
||||
position: fixed;
|
||||
overflow-y: auto;
|
||||
left: 14em;
|
||||
left: calc(100% - 55em);
|
||||
max-width: none;
|
||||
bottom: 0;
|
||||
scrollbar-color: #eb0 #f7f7f7;
|
||||
}
|
||||
#toc {
|
||||
width: 13em;
|
||||
width: calc(100% - 55.3em);
|
||||
max-width: 30em;
|
||||
background: #eee;
|
||||
position: fixed;
|
||||
overflow-y: auto;
|
||||
top: 0;
|
||||
left: 0;
|
||||
bottom: 0;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
scrollbar-color: #eb0 #f7f7f7;
|
||||
box-shadow: 0 0 1em rgba(0,0,0,0.1);
|
||||
border-top: 1px solid #d7d7d7;
|
||||
}
|
||||
#toc li {
|
||||
border-left: .3em solid #ccc;
|
||||
}
|
||||
#toc::-webkit-scrollbar-track {
|
||||
background: #f7f7f7;
|
||||
}
|
||||
#toc::-webkit-scrollbar {
|
||||
background: #f7f7f7;
|
||||
width: .8em;
|
||||
}
|
||||
#toc::-webkit-scrollbar-thumb {
|
||||
background: #eb0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark #toc {
|
||||
background: #282828;
|
||||
border-top: 1px solid #2c2c2c;
|
||||
box-shadow: 0 0 1em #181818;
|
||||
}
|
||||
html.dark #toc,
|
||||
html.dark #mw {
|
||||
scrollbar-color: #b80 #282828;
|
||||
}
|
||||
html.dark #toc::-webkit-scrollbar-track {
|
||||
background: #282828;
|
||||
}
|
||||
html.dark #toc::-webkit-scrollbar {
|
||||
background: #282828;
|
||||
width: .8em;
|
||||
}
|
||||
html.dark #toc::-webkit-scrollbar-thumb {
|
||||
background: #b80;
|
||||
}
|
||||
}
|
||||
@media screen and (min-width: 85.5em) {
|
||||
#toc { width: 30em }
|
||||
#mw { left: 30.5em }
|
||||
}
|
||||
@media print {
|
||||
@page {
|
||||
size: A4;
|
||||
padding: 0;
|
||||
margin: .5in .6in;
|
||||
mso-header-margin: .6in;
|
||||
mso-footer-margin: .6in;
|
||||
mso-paper-source: 0;
|
||||
}
|
||||
a {
|
||||
color: #079;
|
||||
text-decoration: none;
|
||||
border-bottom: .07em solid #4ac;
|
||||
padding: 0 .3em;
|
||||
}
|
||||
#toc>ul {
|
||||
border-left: .1em solid #84c4dd;
|
||||
}
|
||||
#mn, #mh {
|
||||
display: none;
|
||||
}
|
||||
html, body, #toc, #mw {
|
||||
margin: 0 !important;
|
||||
word-break: break-word;
|
||||
width: 52em;
|
||||
}
|
||||
#toc {
|
||||
margin-left: 1em !important;
|
||||
}
|
||||
#toc a {
|
||||
color: #000 !important;
|
||||
}
|
||||
#toc a::after {
|
||||
/* hopefully supported by browsers eventually */
|
||||
content: leader('.') target-counter(attr(href), page);
|
||||
}
|
||||
a[ctr]::before {
|
||||
content: attr(ctr) '. ';
|
||||
}
|
||||
h1 {
|
||||
margin: 2em 0;
|
||||
}
|
||||
h2 {
|
||||
margin: 2em 0 0 0;
|
||||
}
|
||||
h1, h2, h3 {
|
||||
page-break-inside: avoid;
|
||||
}
|
||||
h1::after,
|
||||
h2::after,
|
||||
h3::after {
|
||||
content: 'orz';
|
||||
color: transparent;
|
||||
display: block;
|
||||
line-height: 1em;
|
||||
padding: 4em 0 0 0;
|
||||
margin: 0 0 -5em 0;
|
||||
}
|
||||
p {
|
||||
page-break-inside: avoid;
|
||||
}
|
||||
table {
|
||||
page-break-inside: auto;
|
||||
}
|
||||
tr {
|
||||
page-break-inside: avoid;
|
||||
page-break-after: auto;
|
||||
}
|
||||
thead {
|
||||
display: table-header-group;
|
||||
}
|
||||
tfoot {
|
||||
display: table-footer-group;
|
||||
}
|
||||
#mp a.vis::after {
|
||||
content: ' (' attr(href) ')';
|
||||
border-bottom: 1px solid #bbb;
|
||||
color: #444;
|
||||
}
|
||||
blockquote {
|
||||
border-color: #555;
|
||||
}
|
||||
code {
|
||||
border-color: #bbb;
|
||||
}
|
||||
pre, pre code {
|
||||
border-color: #999;
|
||||
}
|
||||
pre code::before {
|
||||
color: #058;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark a {
|
||||
color: #000;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
color: #240;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #940;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
*[data-ln]:before {
|
||||
content: attr(data-ln);
|
||||
font-size: .8em;
|
||||
margin: 0 .4em;
|
||||
color: #f0c;
|
||||
}
|
||||
*/
|
||||
155
copyparty/web/md.html
Normal file
@@ -0,0 +1,155 @@
|
||||
<!DOCTYPE html><html><head>
|
||||
<meta charset="utf-8">
|
||||
<title>📝🎉 {{ title }}</title> <!-- 📜 -->
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||
<link href="/.cpr/md.css" rel="stylesheet">
|
||||
{%- if edit %}
|
||||
<link href="/.cpr/md2.css" rel="stylesheet">
|
||||
{%- endif %}
|
||||
</head>
|
||||
<body>
|
||||
<div id="mn">navbar</div>
|
||||
<div id="mh">
|
||||
<a id="lightswitch" href="#">go dark</a>
|
||||
<a id="navtoggle" href="#">hide nav</a>
|
||||
{%- if edit %}
|
||||
<a id="save" href="?edit">save</a>
|
||||
<a id="sbs" href="#">sbs</a>
|
||||
<a id="nsbs" href="#">editor</a>
|
||||
<div id="toolsbox">
|
||||
<a id="tools" href="#">tools</a>
|
||||
<a id="fmt_table" href="#">prettify table (ctrl-k)</a>
|
||||
<a id="iter_uni" href="#">non-ascii: iterate (ctrl-u)</a>
|
||||
<a id="mark_uni" href="#">non-ascii: markup</a>
|
||||
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
|
||||
<a id="help" href="#">help</a>
|
||||
</div>
|
||||
{%- else %}
|
||||
<a href="?edit">edit (basic)</a>
|
||||
<a href="?edit2">edit (fancy)</a>
|
||||
<a href="?raw">view raw</a>
|
||||
{%- endif %}
|
||||
</div>
|
||||
<div id="toc"></div>
|
||||
<div id="mtw">
|
||||
<textarea id="mt" autocomplete="off">{{ md }}</textarea>
|
||||
</div>
|
||||
<div id="mw">
|
||||
<div id="ml">
|
||||
<div style="text-align:center;margin:5em 0">
|
||||
<div style="font-size:2em;margin:1em 0">Loading</div>
|
||||
if you're still reading this, check that javascript is allowed
|
||||
</div>
|
||||
</div>
|
||||
<div id="mp"></div>
|
||||
</div>
|
||||
|
||||
{%- if edit %}
|
||||
<div id="helpbox">
|
||||
<textarea autocomplete="off">
|
||||
|
||||
write markdown (most html is 🙆 too)
|
||||
|
||||
## hotkey list
|
||||
* `Ctrl-S` to save
|
||||
* `Ctrl-E` to toggle mode
|
||||
* `Ctrl-K` to prettyprint a table
|
||||
* `Ctrl-U` to iterate non-ascii chars
|
||||
* `Ctrl-H` / `Ctrl-Shift-H` to create a header
|
||||
* `TAB` / `Shift-TAB` to indent/dedent a selection
|
||||
|
||||
## toolbar
|
||||
1. toggle dark mode
|
||||
2. show/hide navigation bar
|
||||
3. save changes on server
|
||||
4. side-by-side editing
|
||||
5. toggle editor/preview
|
||||
6. this thing :^)
|
||||
|
||||
## markdown
|
||||
|||
|
||||
|--|--|
|
||||
|`**bold**`|**bold**|
|
||||
|`_italic_`|_italic_|
|
||||
|`~~strike~~`|~~strike~~|
|
||||
|`` `code` ``|`code`|
|
||||
|`[](#hotkey-list)`|[](#hotkey-list)|
|
||||
|`[](/foo/bar.md#header)`|[](/foo/bar.md#header)|
|
||||
|`<blink>💯</blink>`|<blink>💯</blink>|
|
||||
|
||||
## tables
|
||||
|left-aligned|centered|right-aligned
|
||||
| ---------- | :----: | ----------:
|
||||
|one |two |three
|
||||
|
||||
|left-aligned|centered|right-aligned
|
||||
| ---------- | :----: | ----------:
|
||||
|one |two |three
|
||||
|
||||
## lists
|
||||
* one
|
||||
* two
|
||||
1. one
|
||||
1. two
|
||||
* one
|
||||
* two
|
||||
1. one
|
||||
1. two
|
||||
|
||||
## headers
|
||||
# level 1
|
||||
## level 2
|
||||
### level 3
|
||||
|
||||
## quote
|
||||
> hello
|
||||
> hello
|
||||
|
||||
## codeblock
|
||||
four spaces (no tab pls)
|
||||
|
||||
## code in lists
|
||||
* foo
|
||||
bar
|
||||
six spaces total
|
||||
* foo
|
||||
bar
|
||||
six spaces total
|
||||
.
|
||||
</textarea>
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
<script>
|
||||
|
||||
var last_modified = {{ lastmod }};
|
||||
var md_opt = {
|
||||
link_md_as_html: false,
|
||||
allow_plugins: {{ md_plug }},
|
||||
modpoll_freq: {{ md_chk_rate }}
|
||||
};
|
||||
|
||||
(function () {
|
||||
var btn = document.getElementById("lightswitch");
|
||||
var toggle = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
var dark = !document.documentElement.getAttribute("class");
|
||||
document.documentElement.setAttribute("class", dark ? "dark" : "");
|
||||
btn.innerHTML = "go " + (dark ? "light" : "dark");
|
||||
if (window.localStorage)
|
||||
localStorage.setItem('darkmode', dark ? 1 : 0);
|
||||
};
|
||||
btn.onclick = toggle;
|
||||
if (window.localStorage && localStorage.getItem('darkmode') == 1)
|
||||
toggle();
|
||||
})();
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js"></script>
|
||||
<script src="/.cpr/deps/marked.full.js"></script>
|
||||
<script src="/.cpr/md.js"></script>
|
||||
{%- if edit %}
|
||||
<script src="/.cpr/md2.js"></script>
|
||||
{%- endif %}
|
||||
</body></html>
|
||||
534
copyparty/web/md.js
Normal file
@@ -0,0 +1,534 @@
|
||||
"use strict";
|
||||
|
||||
var dom_toc = ebi('toc');
|
||||
var dom_wrap = ebi('mw');
|
||||
var dom_hbar = ebi('mh');
|
||||
var dom_nav = ebi('mn');
|
||||
var dom_pre = ebi('mp');
|
||||
var dom_src = ebi('mt');
|
||||
var dom_navtgl = ebi('navtoggle');
|
||||
|
||||
|
||||
// chrome 49 needs this
|
||||
var chromedbg = function () { console.log(arguments); }
|
||||
|
||||
// null-logger
|
||||
var dbg = function () { };
|
||||
|
||||
// replace dbg with the real deal here or in the console:
|
||||
// dbg = chromedbg
|
||||
// dbg = console.log
|
||||
|
||||
|
||||
// plugins
|
||||
var md_plug = {};
|
||||
|
||||
|
||||
function hesc(txt) {
return txt.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
}
|
||||
|
||||
|
||||
function cls(dom, name, add) {
|
||||
var re = new RegExp('(^| )' + name + '( |$)');
|
||||
var lst = (dom.getAttribute('class') + '').replace(re, "$1$2").replace(/ /, "");
|
||||
dom.setAttribute('class', lst + (add ? ' ' + name : ''));
|
||||
}
|
||||
|
||||
|
||||
function statify(obj) {
|
||||
return JSON.parse(JSON.stringify(obj));
|
||||
}
|
||||
|
||||
|
||||
// dodge browser issues
|
||||
(function () {
|
||||
var ua = navigator.userAgent;
|
||||
if (ua.indexOf(') Gecko/') !== -1 && /Linux| Mac /.exec(ua)) {
|
||||
// necessary on ff-68.7 at least
|
||||
var s = document.createElement('style');
|
||||
s.innerHTML = '@page { margin: .5in .6in .8in .6in; }';
|
||||
console.log(s.innerHTML);
|
||||
document.head.appendChild(s);
|
||||
}
|
||||
})();
|
||||
|
||||
|
||||
// add navbar
|
||||
(function () {
|
||||
var n = document.location + '';
|
||||
n = n.substr(n.indexOf('//') + 2).split('?')[0].split('/');
|
||||
n[0] = 'top';
|
||||
var loc = [];
|
||||
var nav = [];
|
||||
for (var a = 0; a < n.length; a++) {
|
||||
if (a > 0)
|
||||
loc.push(n[a]);
|
||||
|
||||
var dec = hesc(decodeURIComponent(n[a]));
|
||||
|
||||
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
||||
}
|
||||
dom_nav.innerHTML = nav.join('');
|
||||
})();
|
||||
|
||||
|
||||
// faster than replacing the entire html (chrome 1.8x, firefox 1.6x)
|
||||
function copydom(src, dst, lv) {
|
||||
var sc = src.childNodes,
|
||||
dc = dst.childNodes;
|
||||
|
||||
if (sc.length !== dc.length) {
|
||||
dbg("replace L%d (%d/%d) |%d|",
|
||||
lv, sc.length, dc.length, src.innerHTML.length);
|
||||
|
||||
dst.innerHTML = src.innerHTML;
|
||||
return;
|
||||
}
|
||||
|
||||
var rpl = [];
|
||||
for (var a = sc.length - 1; a >= 0; a--) {
|
||||
var st = sc[a].tagName,
|
||||
dt = dc[a].tagName;
|
||||
|
||||
if (st !== dt) {
|
||||
dbg("replace L%d (%d/%d) type %s/%s", lv, a, sc.length, st, dt);
|
||||
rpl.push(a);
|
||||
continue;
|
||||
}
|
||||
|
||||
var sa = sc[a].attributes || [],
|
||||
da = dc[a].attributes || [];
|
||||
|
||||
if (sa.length !== da.length) {
|
||||
dbg("replace L%d (%d/%d) attr# %d/%d",
|
||||
lv, a, sc.length, sa.length, da.length);
|
||||
|
||||
rpl.push(a);
|
||||
continue;
|
||||
}
|
||||
|
||||
var dirty = false;
|
||||
for (var b = sa.length - 1; b >= 0; b--) {
|
||||
var name = sa[b].name,
|
||||
sv = sa[b].value,
|
||||
dv = dc[a].getAttribute(name);
|
||||
|
||||
if (name == "data-ln" && sv !== dv) {
|
||||
dc[a].setAttribute(name, sv);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (sv !== dv) {
|
||||
dbg("replace L%d (%d/%d) attr %s [%s] [%s]",
|
||||
lv, a, sc.length, name, sv, dv);
|
||||
|
||||
dirty = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (dirty)
|
||||
rpl.push(a);
|
||||
}
|
||||
|
||||
// TODO pure guessing
|
||||
if (rpl.length > sc.length / 3) {
|
||||
dbg("replace L%d fully, %s (%d/%d) |%d|",
|
||||
lv, rpl.length, sc.length, src.innerHTML.length);
|
||||
|
||||
dst.innerHTML = src.innerHTML;
|
||||
return;
|
||||
}
|
||||
|
||||
// repl is reversed; build top-down
|
||||
var nbytes = 0;
|
||||
for (var a = rpl.length - 1; a >= 0; a--) {
|
||||
var html = sc[rpl[a]].outerHTML;
|
||||
dc[rpl[a]].outerHTML = html;
|
||||
nbytes += html.length;
|
||||
}
|
||||
if (nbytes > 0)
|
||||
dbg("replaced %d bytes L%d", nbytes, lv);
|
||||
|
||||
for (var a = 0; a < sc.length; a++)
|
||||
copydom(sc[a], dc[a], lv + 1);
|
||||
|
||||
if (src.innerHTML !== dst.innerHTML) {
|
||||
dbg("setting %d bytes L%d", src.innerHTML.length, lv);
|
||||
dst.innerHTML = src.innerHTML;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function md_plug_err(ex, js) {
|
||||
var errbox = ebi('md_errbox');
|
||||
if (errbox)
|
||||
errbox.parentNode.removeChild(errbox);
|
||||
|
||||
if (!ex)
|
||||
return;
|
||||
|
||||
var msg = (ex + '').split('\n')[0];
|
||||
var ln = ex.lineNumber;
|
||||
var o = null;
|
||||
if (ln) {
|
||||
msg = "Line " + ln + ", " + msg;
|
||||
var lns = js.split('\n');
|
||||
if (ln < lns.length) {
|
||||
o = document.createElement('span');
|
||||
o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block';
|
||||
o.textContent = lns[ln - 1];
|
||||
}
|
||||
}
|
||||
errbox = document.createElement('div');
|
||||
errbox.setAttribute('id', 'md_errbox');
|
||||
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
|
||||
errbox.textContent = msg;
|
||||
errbox.onclick = function () {
|
||||
alert('' + ex.stack);
|
||||
};
|
||||
if (o) {
|
||||
errbox.appendChild(o);
|
||||
errbox.style.padding = '.25em .5em';
|
||||
}
|
||||
dom_nav.appendChild(errbox);
|
||||
|
||||
try {
|
||||
console.trace();
|
||||
}
|
||||
catch (ex2) { }
|
||||
}
|
||||
|
||||
|
||||
function load_plug(md_text, plug_type) {
|
||||
if (!md_opt.allow_plugins)
|
||||
return md_text;
|
||||
|
||||
var find = '\n```copyparty_' + plug_type + '\n';
|
||||
var ofs = md_text.indexOf(find);
|
||||
if (ofs === -1)
|
||||
return md_text;
|
||||
|
||||
var ofs2 = md_text.indexOf('\n```', ofs + 1);
|
||||
if (ofs2 == -1)
|
||||
return md_text;
|
||||
|
||||
var js = md_text.slice(ofs + find.length, ofs2 + 1);
|
||||
var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
|
||||
|
||||
var old_plug = md_plug[plug_type];
|
||||
if (!old_plug || old_plug[1] != js) {
|
||||
js = 'const x = { ' + js + ' }; x;';
|
||||
try {
|
||||
var x = eval(js);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug[plug_type] = null;
|
||||
md_plug_err(ex, js);
|
||||
return md;
|
||||
}
|
||||
if (x['ctor']) {
|
||||
x['ctor']();
|
||||
delete x['ctor'];
|
||||
}
|
||||
md_plug[plug_type] = [x, js];
|
||||
}
|
||||
|
||||
return md;
|
||||
}
|
||||
|
||||
|
||||
function convert_markdown(md_text, dest_dom) {
|
||||
md_text = md_text.replace(/\r/g, '');
|
||||
|
||||
md_plug_err(null);
|
||||
md_text = load_plug(md_text, 'pre');
|
||||
md_text = load_plug(md_text, 'post');
|
||||
|
||||
var marked_opts = {
|
||||
//headerPrefix: 'h-',
|
||||
breaks: true,
|
||||
gfm: true
|
||||
};
|
||||
|
||||
var ext = md_plug['pre'];
|
||||
if (ext)
|
||||
Object.assign(marked_opts, ext[0]);
|
||||
|
||||
try {
|
||||
var md_html = marked(md_text, marked_opts);
|
||||
}
|
||||
catch (ex) {
|
||||
if (ext)
|
||||
md_plug_err(ex, ext[1]);
|
||||
|
||||
throw ex;
|
||||
}
|
||||
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
||||
|
||||
var nodes = md_dom.getElementsByTagName('a');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
var href = nodes[a].getAttribute('href');
|
||||
var txt = nodes[a].textContent;
|
||||
|
||||
if (!txt)
|
||||
nodes[a].textContent = href;
|
||||
else if (href !== txt)
|
||||
nodes[a].setAttribute('class', 'vis');
|
||||
}
|
||||
|
||||
// todo-lists (should probably be a marked extension)
|
||||
nodes = md_dom.getElementsByTagName('input');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
var dom_box = nodes[a];
|
||||
if (dom_box.getAttribute('type') !== 'checkbox')
|
||||
continue;
|
||||
|
||||
var dom_li = dom_box.parentNode;
|
||||
var done = dom_box.getAttribute('checked');
|
||||
done = done !== null;
|
||||
var clas = done ? 'done' : 'pend';
|
||||
var char = done ? 'Y' : 'N';
|
||||
|
||||
dom_li.setAttribute('class', 'task-list-item');
|
||||
dom_li.style.listStyleType = 'none';
|
||||
var html = dom_li.innerHTML;
|
||||
dom_li.innerHTML =
|
||||
'<span class="todo_' + clas + '">' + char + '</span>' +
|
||||
html.substr(html.indexOf('>') + 1);
|
||||
}
|
||||
|
||||
// separate <code> for each line in <pre>
|
||||
nodes = md_dom.getElementsByTagName('pre');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
var el = nodes[a];
|
||||
|
||||
var is_precode =
|
||||
el.tagName == 'PRE' &&
|
||||
el.childNodes.length === 1 &&
|
||||
el.childNodes[0].tagName == 'CODE';
|
||||
|
||||
if (!is_precode)
|
||||
continue;
|
||||
|
||||
var nline = parseInt(el.getAttribute('data-ln')) + 1;
|
||||
var lines = el.innerHTML.replace(/\n<\/code>$/i, '</code>').split(/\n/g);
|
||||
for (var b = 0; b < lines.length - 1; b++)
|
||||
lines[b] += '</code>\n<code data-ln="' + (nline + b) + '">';
|
||||
|
||||
el.innerHTML = lines.join('');
|
||||
}
|
||||
|
||||
// self-link headers
|
||||
var id_seen = {},
|
||||
dyn = md_dom.getElementsByTagName('*');
|
||||
|
||||
nodes = [];
|
||||
for (var a = 0, aa = dyn.length; a < aa; a++)
|
||||
if (/^[Hh]([1-6])/.exec(dyn[a].tagName) !== null)
|
||||
nodes.push(dyn[a]);
|
||||
|
||||
for (var a = 0; a < nodes.length; a++) {
|
||||
el = nodes[a];
|
||||
var id = el.getAttribute('id'),
|
||||
orig_id = id;
|
||||
|
||||
if (id_seen[id]) {
|
||||
for (var n = 1; n < 4096; n++) {
|
||||
id = orig_id + '-' + n;
|
||||
if (!id_seen[id])
|
||||
break;
|
||||
}
|
||||
el.setAttribute('id', id);
|
||||
}
|
||||
id_seen[id] = 1;
|
||||
el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>';
|
||||
}
|
||||
|
||||
ext = md_plug['post'];
|
||||
if (ext && ext[0].render)
|
||||
try {
|
||||
ext[0].render(md_dom);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug_err(ex, ext[1]);
|
||||
}
|
||||
|
||||
copydom(md_dom, dest_dom, 0);
|
||||
|
||||
if (ext && ext[0].render2)
|
||||
try {
|
||||
ext[0].render2(dest_dom);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug_err(ex, ext[1]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function init_toc() {
|
||||
var loader = ebi('ml');
|
||||
loader.parentNode.removeChild(loader);
|
||||
|
||||
var anchors = []; // list of toc entries, complex objects
|
||||
var anchor = null; // current toc node
|
||||
var html = []; // generated toc html
|
||||
var lv = 0; // current indentation level in the toc html
|
||||
var ctr = [0, 0, 0, 0, 0, 0];
|
||||
|
||||
var manip_nodes_dyn = dom_pre.getElementsByTagName('*');
|
||||
var manip_nodes = [];
|
||||
for (var a = 0, aa = manip_nodes_dyn.length; a < aa; a++)
|
||||
manip_nodes.push(manip_nodes_dyn[a]);
|
||||
|
||||
for (var a = 0, aa = manip_nodes.length; a < aa; a++) {
|
||||
var elm = manip_nodes[a];
|
||||
var m = /^[Hh]([1-6])/.exec(elm.tagName);
|
||||
var is_header = m !== null;
|
||||
if (is_header) {
|
||||
var nlv = m[1];
|
||||
while (lv < nlv) {
|
||||
html.push('<ul>');
|
||||
lv++;
|
||||
}
|
||||
while (lv > nlv) {
|
||||
html.push('</ul>');
|
||||
lv--;
|
||||
}
|
||||
ctr[lv - 1]++;
|
||||
for (var b = lv; b < 6; b++)
|
||||
ctr[b] = 0;
|
||||
|
||||
elm.childNodes[0].setAttribute('ctr', ctr.slice(0, lv).join('.'));
|
||||
|
||||
var elm2 = elm.cloneNode(true);
|
||||
elm2.childNodes[0].textContent = elm.textContent;
|
||||
while (elm2.childNodes.length > 1)
|
||||
elm2.removeChild(elm2.childNodes[1]);
|
||||
|
||||
html.push('<li>' + elm2.innerHTML + '</li>');
|
||||
|
||||
if (anchor != null)
|
||||
anchors.push(anchor);
|
||||
|
||||
anchor = {
|
||||
elm: elm,
|
||||
kids: [],
|
||||
y: null
|
||||
};
|
||||
}
|
||||
if (!is_header && anchor)
|
||||
anchor.kids.push(elm);
|
||||
}
|
||||
dom_toc.innerHTML = html.join('\n');
|
||||
if (anchor != null)
|
||||
anchors.push(anchor);
|
||||
|
||||
// copy toc links into the toc list
|
||||
var atoc = dom_toc.getElementsByTagName('a');
|
||||
for (var a = 0, aa = anchors.length; a < aa; a++)
|
||||
anchors[a].lnk = atoc[a];
|
||||
|
||||
// collect vertical position of all toc items (headers in document)
|
||||
function freshen_offsets() {
|
||||
var top = window.pageYOffset || document.documentElement.scrollTop;
|
||||
for (var a = anchors.length - 1; a >= 0; a--) {
|
||||
var y = top + anchors[a].elm.getBoundingClientRect().top;
|
||||
y = Math.round(y * 10.0) / 10;
|
||||
if (anchors[a].y === y)
|
||||
break;
|
||||
|
||||
anchors[a].y = y;
|
||||
}
|
||||
}
|
||||
|
||||
// hilight the correct toc items + scroll into view
|
||||
function freshen_toclist() {
|
||||
if (anchors.length == 0)
|
||||
return;
|
||||
|
||||
var ptop = window.pageYOffset || document.documentElement.scrollTop;
|
||||
var hit = anchors.length - 1;
|
||||
for (var a = 0; a < anchors.length; a++) {
|
||||
if (anchors[a].y >= ptop - 8) { //???
|
||||
hit = a;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
var links = dom_toc.getElementsByTagName('a');
|
||||
if (!anchors[hit].active) {
|
||||
for (var a = 0; a < anchors.length; a++) {
|
||||
if (anchors[a].active) {
|
||||
anchors[a].active = false;
|
||||
links[a].setAttribute('class', '');
|
||||
}
|
||||
}
|
||||
anchors[hit].active = true;
|
||||
links[hit].setAttribute('class', 'act');
|
||||
}
|
||||
|
||||
var pane_height = parseInt(getComputedStyle(dom_toc).height);
|
||||
var link_bounds = links[hit].getBoundingClientRect();
|
||||
var top = link_bounds.top - (pane_height / 6);
|
||||
var btm = link_bounds.bottom + (pane_height / 6);
|
||||
if (top < 0)
|
||||
dom_toc.scrollTop -= -top;
|
||||
else if (btm > pane_height)
|
||||
dom_toc.scrollTop += btm - pane_height;
|
||||
}
|
||||
|
||||
function refresh() {
|
||||
freshen_offsets();
|
||||
freshen_toclist();
|
||||
}
|
||||
|
||||
return { "refresh": refresh }
|
||||
}
|
||||
|
||||
|
||||
// "main" :p
|
||||
convert_markdown(dom_src.value, dom_pre);
|
||||
var toc = init_toc();
|
||||
|
||||
|
||||
// scroll handler
|
||||
var redraw = (function () {
|
||||
var sbs = false;
|
||||
function onresize() {
|
||||
sbs = window.matchMedia('(min-width: 64em)').matches;
|
||||
var y = (dom_hbar.offsetTop + dom_hbar.offsetHeight) + 'px';
|
||||
if (sbs) {
|
||||
dom_toc.style.top = y;
|
||||
dom_wrap.style.top = y;
|
||||
dom_toc.style.marginTop = '0';
|
||||
}
|
||||
onscroll();
|
||||
}
|
||||
|
||||
function onscroll() {
|
||||
toc.refresh();
|
||||
}
|
||||
|
||||
window.onresize = onresize;
|
||||
window.onscroll = onscroll;
|
||||
dom_wrap.onscroll = onscroll;
|
||||
|
||||
onresize();
|
||||
return onresize;
|
||||
})();
|
||||
|
||||
|
||||
dom_navtgl.onclick = function () {
|
||||
var hidden = dom_navtgl.innerHTML == 'hide nav';
|
||||
dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav';
|
||||
dom_nav.style.display = hidden ? 'none' : 'block';
|
||||
|
||||
if (window.localStorage)
|
||||
localStorage.setItem('hidenav', hidden ? 1 : 0);
|
||||
|
||||
redraw();
|
||||
};
|
||||
|
||||
if (window.localStorage && localStorage.getItem('hidenav') == 1)
|
||||
dom_navtgl.onclick();
|
||||
128
copyparty/web/md2.css
Normal file
@@ -0,0 +1,128 @@
|
||||
#toc {
|
||||
display: none;
|
||||
}
|
||||
#mtw {
|
||||
display: block;
|
||||
position: fixed;
|
||||
left: .5em;
|
||||
bottom: 0;
|
||||
width: calc(100% - 56em);
|
||||
}
|
||||
#mw {
|
||||
left: calc(100% - 55em);
|
||||
overflow-y: auto;
|
||||
position: fixed;
|
||||
bottom: 0;
|
||||
}
|
||||
|
||||
|
||||
/* single-screen */
|
||||
#mtw.preview,
|
||||
#mw.editor {
|
||||
opacity: 0;
|
||||
z-index: 1;
|
||||
}
|
||||
#mw.preview,
|
||||
#mtw.editor {
|
||||
z-index: 5;
|
||||
}
|
||||
#mtw.single,
|
||||
#mw.single {
|
||||
margin: 0;
|
||||
left: 1em;
|
||||
left: max(1em, calc((100% - 56em) / 2));
|
||||
}
|
||||
#mtw.single {
|
||||
width: 55em;
|
||||
width: min(55em, calc(100% - 2em));
|
||||
}
|
||||
|
||||
|
||||
#mp {
|
||||
position: relative;
|
||||
}
|
||||
#mt, #mtr {
|
||||
width: 100%;
|
||||
height: calc(100% - 1px);
|
||||
color: #444;
|
||||
background: #f7f7f7;
|
||||
border: 1px solid #999;
|
||||
outline: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
font-family: 'consolas', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
word-wrap: break-word; /*ie*/
|
||||
overflow-y: scroll;
|
||||
line-height: 1.3em;
|
||||
font-size: .9em;
|
||||
position: relative;
|
||||
scrollbar-color: #eb0 #f7f7f7;
|
||||
}
|
||||
html.dark #mt {
|
||||
color: #eee;
|
||||
background: #222;
|
||||
border: 1px solid #777;
|
||||
scrollbar-color: #b80 #282828;
|
||||
}
|
||||
#mtr {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
}
|
||||
#save.force-save {
|
||||
color: #400;
|
||||
background: #f97;
|
||||
border-radius: .15em;
|
||||
}
|
||||
html.dark #save.force-save {
|
||||
color: #fca;
|
||||
background: #720;
|
||||
}
|
||||
#save.disabled {
|
||||
opacity: .4;
|
||||
}
|
||||
#helpbox,
|
||||
#toast {
|
||||
background: #f7f7f7;
|
||||
border-radius: .4em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#helpbox {
|
||||
display: none;
|
||||
position: fixed;
|
||||
padding: 2em;
|
||||
top: 4em;
|
||||
overflow-y: auto;
|
||||
box-shadow: 0 .5em 2em #777;
|
||||
height: calc(100% - 12em);
|
||||
left: calc(50% - 15em);
|
||||
right: 0;
|
||||
width: 30em;
|
||||
}
|
||||
#helpclose {
|
||||
display: block;
|
||||
}
|
||||
html.dark #helpbox {
|
||||
box-shadow: 0 .5em 2em #444;
|
||||
}
|
||||
html.dark #helpbox,
|
||||
html.dark #toast {
|
||||
background: #222;
|
||||
border: 1px solid #079;
|
||||
border-width: 1px 0;
|
||||
}
|
||||
#toast {
|
||||
font-weight: bold;
|
||||
text-align: center;
|
||||
padding: .6em 0;
|
||||
position: fixed;
|
||||
z-index: 9001;
|
||||
top: 30%;
|
||||
transition: opacity 0.2s ease-in-out;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
# mt {opacity: .5;top:1px}
|
||||
1151
copyparty/web/md2.js
Normal file
File diff suppressed because it is too large
321
copyparty/web/mde.css
Normal file
@@ -0,0 +1,321 @@
|
||||
html .editor-toolbar>button { margin-left: -1px; border: 1px solid rgba(0,0,0,0.1) }
|
||||
html .editor-toolbar>button+button { border-left: 1px solid rgba(0,0,0,0) }
|
||||
html .editor-toolbar>button:hover,
|
||||
html .editor-toolbar>button:active { box-shadow: 0 .1em .3em #999; z-index: 9 }
|
||||
html .editor-toolbar>button:active,
|
||||
html .editor-toolbar>button.active { border-color: rgba(0,0,0,0.4); background: #fc0 }
|
||||
html .editor-toolbar>i.separator { border-left: 1px solid #ccc; }
|
||||
html .editor-toolbar.disabled-for-preview>button:not(.no-disable) { opacity: .35 }
|
||||
|
||||
html {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
html, body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
min-height: 100%;
|
||||
font-family: sans-serif;
|
||||
background: #f7f7f7;
|
||||
color: #333;
|
||||
}
|
||||
#mn {
|
||||
font-weight: normal;
|
||||
margin: 1.3em 0 .7em 1em;
|
||||
}
|
||||
#mn a {
|
||||
color: #444;
|
||||
margin: 0 0 0 -.2em;
|
||||
padding: 0 0 0 .4em;
|
||||
text-decoration: none;
|
||||
/* ie: */
|
||||
border-bottom: .1em solid #777\9;
|
||||
margin-right: 1em\9;
|
||||
}
|
||||
#mn a:first-child {
|
||||
padding-left: .5em;
|
||||
}
|
||||
#mn a:last-child {
|
||||
padding-right: .5em;
|
||||
}
|
||||
#mn a:not(:last-child):after {
|
||||
content: '';
|
||||
width: 1.05em;
|
||||
height: 1.05em;
|
||||
margin: -.2em .3em -.2em -.4em;
|
||||
display: inline-block;
|
||||
border: 1px solid rgba(0,0,0,0.2);
|
||||
border-width: .2em .2em 0 0;
|
||||
transform: rotate(45deg);
|
||||
}
|
||||
#mn a:hover {
|
||||
color: #000;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
html .editor-toolbar>button.disabled {
|
||||
opacity: .35;
|
||||
pointer-events: none;
|
||||
}
|
||||
html .editor-toolbar>button.save.force-save {
|
||||
background: #f97;
|
||||
}
|
||||
|
||||
/*
|
||||
*[data-ln]:before {
|
||||
content: attr(data-ln);
|
||||
font-size: .8em;
|
||||
margin: 0 .4em;
|
||||
color: #f0c;
|
||||
}
|
||||
.cm-header { font-size: .4em !important }
|
||||
*/
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* copied from md.css for now */
|
||||
.mdo pre,
|
||||
.mdo code,
|
||||
.mdo a {
|
||||
color: #480;
|
||||
background: #f7f7f7;
|
||||
border: .07em solid #ddd;
|
||||
border-radius: .2em;
|
||||
padding: .1em .3em;
|
||||
margin: 0 .1em;
|
||||
}
|
||||
.mdo code {
|
||||
font-size: .96em;
|
||||
}
|
||||
.mdo pre,
|
||||
.mdo code {
|
||||
font-family: monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
.mdo pre code {
|
||||
display: block;
|
||||
margin: 0 -.3em;
|
||||
padding: .4em .5em;
|
||||
line-height: 1.1em;
|
||||
}
|
||||
.mdo a {
|
||||
color: #fff;
|
||||
background: #39b;
|
||||
text-decoration: none;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
border-bottom: .07em solid #079;
|
||||
}
|
||||
.mdo h2 {
|
||||
color: #fff;
|
||||
background: #555;
|
||||
margin-top: 2em;
|
||||
border-bottom: .22em solid #999;
|
||||
border-top: none;
|
||||
}
|
||||
.mdo h1 {
|
||||
color: #fff;
|
||||
background: #444;
|
||||
font-weight: normal;
|
||||
border-top: .4em solid #fb0;
|
||||
border-bottom: .4em solid #777;
|
||||
border-radius: 0 1em 0 1em;
|
||||
margin: 3em 0 1em 0;
|
||||
padding: .5em 0;
|
||||
}
|
||||
h1, h2 {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
h1 {
|
||||
font-size: 1.7em;
|
||||
text-align: center;
|
||||
border: 1em solid #777;
|
||||
border-width: .05em 0;
|
||||
margin: 3em 0;
|
||||
}
|
||||
h2 {
|
||||
font-size: 1.5em;
|
||||
font-weight: normal;
|
||||
background: #f7f7f7;
|
||||
border-top: .07em solid #fff;
|
||||
border-bottom: .07em solid #bbb;
|
||||
border-radius: .5em .5em 0 0;
|
||||
padding-left: .4em;
|
||||
margin-top: 3em;
|
||||
}
|
||||
.mdo ul,
|
||||
.mdo ol {
|
||||
border-left: .3em solid #ddd;
|
||||
}
|
||||
.mdo>ul,
|
||||
.mdo>ol {
|
||||
border-color: #bbb;
|
||||
}
|
||||
.mdo ul>li {
|
||||
list-style-type: disc;
|
||||
}
|
||||
.mdo ul>li,
|
||||
.mdo ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
}
|
||||
blockquote {
|
||||
font-family: serif;
|
||||
background: #f7f7f7;
|
||||
border: .07em dashed #ccc;
|
||||
padding: 0 2em;
|
||||
margin: 1em 0;
|
||||
}
|
||||
small {
|
||||
opacity: .8;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
td {
|
||||
padding: .2em .5em;
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
th {
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* mde support */
|
||||
.mdo {
|
||||
padding: 1em;
|
||||
background: #f7f7f7;
|
||||
}
|
||||
html.dark .mdo {
|
||||
background: #1c1c1c;
|
||||
}
|
||||
.CodeMirror {
|
||||
background: #f7f7f7;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* darkmode */
|
||||
html.dark .mdo,
|
||||
html.dark .CodeMirror {
|
||||
border-color: #222;
|
||||
}
|
||||
html.dark,
|
||||
html.dark body,
|
||||
html.dark .CodeMirror {
|
||||
background: #222;
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark .CodeMirror-cursor {
|
||||
border-color: #fff;
|
||||
}
|
||||
html.dark .CodeMirror-selected {
|
||||
box-shadow: 0 0 1px #0cf inset;
|
||||
}
|
||||
html.dark .CodeMirror-selected,
|
||||
html.dark .CodeMirror-selectedtext {
|
||||
border-radius: .1em;
|
||||
background: #246;
|
||||
color: #fff;
|
||||
}
|
||||
html.dark .mdo a {
|
||||
background: #057;
|
||||
}
|
||||
html.dark .mdo h1 a, html.dark .mdo h4 a,
|
||||
html.dark .mdo h2 a, html.dark .mdo h5 a,
|
||||
html.dark .mdo h3 a, html.dark .mdo h6 a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
color: #8c0;
|
||||
background: #1a1a1a;
|
||||
border: .07em solid #333;
|
||||
}
|
||||
html.dark .mdo ul,
|
||||
html.dark .mdo ol {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark .mdo>ul,
|
||||
html.dark .mdo>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
html.dark h1 {
|
||||
background: #383838;
|
||||
border-top: .4em solid #b80;
|
||||
border-bottom: .4em solid #4c4c4c;
|
||||
}
|
||||
html.dark h2 {
|
||||
background: #444;
|
||||
border-bottom: .22em solid #555;
|
||||
}
|
||||
html.dark td,
|
||||
html.dark th {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark blockquote {
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark #mn a {
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark #mn a:not(:last-child):after {
|
||||
border-color: rgba(255,255,255,0.3);
|
||||
}
|
||||
html.dark .editor-toolbar {
|
||||
border-color: #2c2c2c;
|
||||
background: #1c1c1c;
|
||||
}
|
||||
html.dark .editor-toolbar>i.separator {
|
||||
border-left: 1px solid #444;
|
||||
border-right: 1px solid #111;
|
||||
}
|
||||
html.dark .editor-toolbar>button {
|
||||
margin-left: -1px; border: 1px solid rgba(255,255,255,0.1);
|
||||
color: #aaa;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark .editor-toolbar>button:hover {
|
||||
color: #333;
|
||||
}
|
||||
html.dark .editor-toolbar>button.active {
|
||||
color: #333;
|
||||
border-color: #ec1;
|
||||
background: #c90;
|
||||
}
|
||||
html.dark .editor-toolbar::after,
|
||||
html.dark .editor-toolbar::before {
|
||||
background: none;
|
||||
}
|
||||
49
copyparty/web/mde.html
Normal file
@@ -0,0 +1,49 @@
<!DOCTYPE html><html><head>
<meta charset="utf-8">
<title>📝🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7">
<link href="/.cpr/mde.css" rel="stylesheet">
<link href="/.cpr/deps/mini-fa.css" rel="stylesheet">
<link href="/.cpr/deps/easymde.css" rel="stylesheet">
</head>
<body>
<div id="mw">
<div id="mn"></div>
<div id="ml">
<div style="text-align:center;margin:5em 0">
<div style="font-size:2em;margin:1em 0">Loading</div>
if you're still reading this, check that javascript is allowed
</div>
</div>
<div id="m">
<textarea id="mt" style="display:none" autocomplete="off">{{ md }}</textarea>
</div>
</div>
<script>

var last_modified = {{ lastmod }};
var md_opt = {
link_md_as_html: false,
allow_plugins: {{ md_plug }},
modpoll_freq: {{ md_chk_rate }}
};

var lightswitch = (function () {
var fun = function () {
var dark = !!!document.documentElement.getAttribute("class");
document.documentElement.setAttribute("class", dark ? "dark" : "");
if (window.localStorage)
localStorage.setItem('darkmode', dark ? 1 : 0);
};
if (window.localStorage && localStorage.getItem('darkmode') == 1)
fun();

return fun;
})();

</script>
<script src="/.cpr/util.js"></script>
<script src="/.cpr/deps/easymde.js"></script>
<script src="/.cpr/mde.js"></script>
</body></html>
226
copyparty/web/mde.js
Normal file
@@ -0,0 +1,226 @@
|
||||
"use strict";
|
||||
|
||||
var dom_wrap = ebi('mw');
|
||||
var dom_nav = ebi('mn');
|
||||
var dom_doc = ebi('m');
|
||||
var dom_md = ebi('mt');
|
||||
|
||||
(function () {
|
||||
var n = document.location + '';
|
||||
n = n.substr(n.indexOf('//') + 2).split('?')[0].split('/');
|
||||
n[0] = 'top';
|
||||
var loc = [];
|
||||
var nav = [];
|
||||
for (var a = 0; a < n.length; a++) {
|
||||
if (a > 0)
|
||||
loc.push(n[a]);
|
||||
|
||||
var dec = decodeURIComponent(n[a]).replace(/&/g, "&").replace(/</g, "<").replace(/>/g, ">");
|
||||
|
||||
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
||||
}
|
||||
dom_nav.innerHTML = nav.join('');
|
||||
})();
|
||||
|
||||
var mde = (function () {
|
||||
var tbar = [
|
||||
{
|
||||
name: "light",
|
||||
title: "light",
|
||||
className: "fa fa-lightbulb",
|
||||
action: lightswitch
|
||||
}, {
|
||||
name: "save",
|
||||
title: "save",
|
||||
className: "fa fa-save",
|
||||
action: save
|
||||
}, '|',
|
||||
'bold', 'italic', 'strikethrough', 'heading', '|',
|
||||
'code', 'quote', 'unordered-list', 'ordered-list', 'clean-block', '|',
|
||||
'link', 'image', 'table', 'horizontal-rule', '|',
|
||||
'preview', 'side-by-side', 'fullscreen', '|',
|
||||
'undo', 'redo'];
|
||||
|
||||
var mde = new EasyMDE({
|
||||
autoDownloadFontAwesome: false,
|
||||
autofocus: true,
|
||||
spellChecker: false,
|
||||
renderingConfig: {
|
||||
markedOptions: {
|
||||
breaks: true,
|
||||
gfm: true
|
||||
}
|
||||
},
|
||||
shortcuts: {
|
||||
"save": "Ctrl-S"
|
||||
},
|
||||
insertTexts: ["[](", ")"],
|
||||
indentWithTabs: false,
|
||||
tabSize: 2,
|
||||
toolbar: tbar,
|
||||
previewClass: 'mdo',
|
||||
onToggleFullScreen: set_jumpto,
|
||||
});
|
||||
md_changed(mde, true);
|
||||
mde.codemirror.on("change", function () {
|
||||
md_changed(mde);
|
||||
});
|
||||
var loader = ebi('ml');
|
||||
loader.parentNode.removeChild(loader);
|
||||
return mde;
|
||||
})();
|
||||
|
||||
function set_jumpto() {
|
||||
document.querySelector('.editor-preview-side').onclick = jumpto;
|
||||
}
|
||||
|
||||
function jumpto(ev) {
|
||||
var tgt = ev.target || ev.srcElement;
|
||||
var ln = null;
|
||||
while (tgt && !ln) {
|
||||
ln = tgt.getAttribute('data-ln');
|
||||
tgt = tgt.parentElement;
|
||||
}
|
||||
var ln = parseInt(ln);
|
||||
console.log(ln);
|
||||
var cm = mde.codemirror;
|
||||
var y = cm.heightAtLine(ln - 1, 'local');
|
||||
var y2 = cm.heightAtLine(ln, 'local');
|
||||
cm.scrollTo(null, y + (y2 - y) - cm.getScrollInfo().clientHeight / 2);
|
||||
}
|
||||
|
||||
function md_changed(mde, on_srv) {
|
||||
if (on_srv)
|
||||
window.md_saved = mde.value();
|
||||
|
||||
var md_now = mde.value();
|
||||
var save_btn = document.querySelector('.editor-toolbar button.save');
|
||||
|
||||
if (md_now == window.md_saved)
|
||||
save_btn.classList.add('disabled');
|
||||
else
|
||||
save_btn.classList.remove('disabled');
|
||||
|
||||
set_jumpto();
|
||||
}
|
||||
|
||||
function save(mde) {
|
||||
var save_btn = document.querySelector('.editor-toolbar button.save');
|
||||
if (save_btn.classList.contains('disabled')) {
|
||||
alert('there is nothing to save');
|
||||
return;
|
||||
}
|
||||
var force = save_btn.classList.contains('force-save');
|
||||
if (force && !confirm('confirm that you wish to lose the changes made on the server since you opened this document')) {
|
||||
alert('ok, aborted');
|
||||
return;
|
||||
}
|
||||
|
||||
var txt = mde.value();
|
||||
|
||||
var fd = new FormData();
|
||||
fd.append("act", "tput");
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_cb;
|
||||
xhr.btn = save_btn;
|
||||
xhr.mde = mde;
|
||||
xhr.txt = txt;
|
||||
xhr.send(fd);
|
||||
}
|
||||
|
||||
function save_cb() {
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200) {
|
||||
alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return;
|
||||
}
|
||||
|
||||
var r;
|
||||
try {
|
||||
r = JSON.parse(this.responseText);
|
||||
}
|
||||
catch (ex) {
|
||||
alert('Failed to parse reply from server:\n\n' + this.responseText);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!r.ok) {
|
||||
if (!this.btn.classList.contains('force-save')) {
|
||||
this.btn.classList.add('force-save');
|
||||
var msg = [
|
||||
'This file has been modified since you started editing it!\n',
|
||||
'if you really want to overwrite, press save again.\n',
|
||||
'modified ' + ((r.now - r.lastmod) / 1000) + ' seconds ago,',
|
||||
((r.lastmod - last_modified) / 1000) + ' sec after you opened it\n',
|
||||
last_modified + ' lastmod when you opened it,',
|
||||
r.lastmod + ' lastmod on the server now,',
|
||||
r.now + ' server time now,\n',
|
||||
];
|
||||
alert(msg.join('\n'));
|
||||
}
|
||||
else {
|
||||
alert('Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
this.btn.classList.remove('force-save');
|
||||
//alert('save OK -- wrote ' + r.size + ' bytes.\n\nsha512: ' + r.sha512);
|
||||
|
||||
// download the saved doc from the server and compare
|
||||
var url = (document.location + '').split('?')[0] + '?raw';
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_chk;
|
||||
xhr.btn = this.save_btn;
|
||||
xhr.mde = this.mde;
|
||||
xhr.txt = this.txt;
|
||||
xhr.lastmod = r.lastmod;
|
||||
xhr.send();
|
||||
}
|
||||
|
||||
function save_chk() {
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200) {
|
||||
alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return;
|
||||
}
|
||||
|
||||
var doc1 = this.txt.replace(/\r\n/g, "\n");
|
||||
var doc2 = this.responseText.replace(/\r\n/g, "\n");
|
||||
if (doc1 != doc2) {
|
||||
alert(
|
||||
'Error! The document on the server does not appear to have saved correctly (your editor contents and the server copy are not identical). Copy the document to your clipboard for now and check the server logs for hints\n\n' +
|
||||
'Length: yours=' + doc1.length + ', server=' + doc2.length
|
||||
);
|
||||
alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
|
||||
alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
|
||||
return;
|
||||
}
|
||||
|
||||
last_modified = this.lastmod;
|
||||
md_changed(this.mde, true);
|
||||
|
||||
var ok = document.createElement('div');
|
||||
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
|
||||
ok.innerHTML = 'OK✔️';
|
||||
var parent = ebi('m');
|
||||
document.documentElement.appendChild(ok);
|
||||
setTimeout(function () {
|
||||
ok.style.opacity = 0;
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
ok.parentNode.removeChild(ok);
|
||||
}, 750);
|
||||
}
|
||||
@@ -31,6 +31,10 @@
|
||||
{%- if html %}
|
||||
{{ html }}
|
||||
{%- endif %}
|
||||
|
||||
{%- if click %}
|
||||
<script>document.getElementsByTagName("a")[0].click()</script>
|
||||
{%- endif %}
|
||||
</div>
|
||||
|
||||
{%- if redir %}
|
||||
|
||||
@@ -13,6 +13,7 @@ h1 {
|
||||
border-bottom: 1px solid #ccc;
|
||||
margin: 2em 0 .4em 0;
|
||||
padding: 0 0 .2em 0;
|
||||
font-weight: normal;
|
||||
}
|
||||
li {
|
||||
margin: 1em 0;
|
||||
@@ -24,4 +25,29 @@ a {
|
||||
border-bottom: 1px solid #aaa;
|
||||
border-radius: .2em;
|
||||
padding: .2em .8em;
|
||||
}
|
||||
|
||||
|
||||
html.dark,
|
||||
html.dark body,
|
||||
html.dark #wrap {
|
||||
background: #222;
|
||||
color: #ccc;
|
||||
}
|
||||
html.dark h1 {
|
||||
border-color: #777;
|
||||
}
|
||||
html.dark a {
|
||||
color: #fff;
|
||||
background: #057;
|
||||
border-color: #37a;
|
||||
}
|
||||
html.dark input {
|
||||
color: #fff;
|
||||
background: #624;
|
||||
border: 1px solid #c27;
|
||||
border-width: 1px 0 0 0;
|
||||
border-radius: .5em;
|
||||
padding: .5em .7em;
|
||||
margin: 0 .5em 0 0;
|
||||
}
|
||||
@@ -36,7 +36,11 @@
|
||||
</form>
|
||||
</ul>
|
||||
</div>
|
||||
<!-- script src="/.cpr/splash.js"></script -->
|
||||
</body>
|
||||
<script>
|
||||
|
||||
if (window.localStorage && localStorage.getItem('darkmode') == 1)
|
||||
document.documentElement.setAttribute("class", "dark");
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,100 +1,115 @@
|
||||
"use strict";
|
||||
|
||||
// error handler for mobile devices
|
||||
function hcroak(msg) {
|
||||
document.body.innerHTML = msg;
|
||||
window.onerror = undefined;
|
||||
throw 'fatal_err';
|
||||
}
|
||||
function croak(msg) {
|
||||
document.body.textContent = msg;
|
||||
window.onerror = undefined;
|
||||
throw msg;
|
||||
}
|
||||
function esc(txt) {
|
||||
return txt.replace(/[&"<>]/g, function (c) {
|
||||
return {
|
||||
'&': '&',
|
||||
'"': '"',
|
||||
'<': '<',
|
||||
'>': '>'
|
||||
}[c];
|
||||
});
|
||||
}
|
||||
window.onerror = function (msg, url, lineNo, columnNo, error) {
|
||||
window.onerror = undefined;
|
||||
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
|
||||
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
|
||||
window.onerror = vis_exh;
|
||||
|
||||
if (error) {
|
||||
var find = ['desc', 'stack', 'trace'];
|
||||
for (var a = 0; a < find.length; a++)
|
||||
if (String(error[find[a]]) !== 'undefined')
|
||||
html.push('<h2>' + find[a] + '</h2>' +
|
||||
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
|
||||
|
||||
(function () {
|
||||
var ops = document.querySelectorAll('#ops>a');
|
||||
for (var a = 0; a < ops.length; a++) {
|
||||
ops[a].onclick = opclick;
|
||||
}
|
||||
document.body.style.fontSize = '0.8em';
|
||||
hcroak(html.join('\n'));
|
||||
};
|
||||
})();
|
||||
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var script = document.createElement('script');
|
||||
script.type = 'text/javascript';
|
||||
script.src = url;
|
||||
function opclick(ev) {
|
||||
if (ev) //ie
|
||||
ev.preventDefault();
|
||||
|
||||
script.onreadystatechange = cb;
|
||||
script.onload = cb;
|
||||
var dest = this.getAttribute('data-dest');
|
||||
goto(dest);
|
||||
|
||||
head.appendChild(script);
|
||||
// writing a blank value makes ie8 segfault w
|
||||
if (window.localStorage)
|
||||
localStorage.setItem('opmode', dest || '.');
|
||||
|
||||
var input = document.querySelector('.opview.act input:not([type="hidden"])')
|
||||
if (input)
|
||||
input.focus();
|
||||
}
|
||||
|
||||
|
||||
function o(id) {
|
||||
return document.getElementById(id);
|
||||
function goto(dest) {
|
||||
var obj = document.querySelectorAll('.opview.act');
|
||||
for (var a = obj.length - 1; a >= 0; a--)
|
||||
obj[a].classList.remove('act');
|
||||
|
||||
obj = document.querySelectorAll('#ops>a');
|
||||
for (var a = obj.length - 1; a >= 0; a--)
|
||||
obj[a].classList.remove('act');
|
||||
|
||||
if (dest) {
|
||||
ebi('op_' + dest).classList.add('act');
|
||||
document.querySelector('#ops>a[data-dest=' + dest + ']').classList.add('act');
|
||||
|
||||
var fn = window['goto_' + dest];
|
||||
if (fn)
|
||||
fn();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function goto_up2k() {
|
||||
if (up2k === false)
|
||||
return goto('bup');
|
||||
|
||||
if (!up2k)
|
||||
return setTimeout(goto_up2k, 100);
|
||||
|
||||
up2k.init_deps();
|
||||
}
|
||||
|
||||
|
||||
(function () {
|
||||
// chrome requires https to use crypto.subtle,
|
||||
// usually it's undefined but some chromes throw on invoke
|
||||
try {
|
||||
crypto.subtle.digest(
|
||||
'SHA-512', new Uint8Array(1)
|
||||
).then(
|
||||
function (x) { up2k_init(true) },
|
||||
function (x) { up2k_init(false) }
|
||||
);
|
||||
}
|
||||
catch (ex) {
|
||||
up2k_init(false);
|
||||
goto();
|
||||
if (window.localStorage) {
|
||||
var op = localStorage.getItem('opmode');
|
||||
if (op !== null && op !== '.')
|
||||
goto(op);
|
||||
}
|
||||
ebi('ops').style.display = 'block';
|
||||
})();
|
||||
|
||||
|
||||
// chrome requires https to use crypto.subtle,
|
||||
// usually it's undefined but some chromes throw on invoke
|
||||
var up2k = null;
|
||||
try {
|
||||
crypto.subtle.digest(
|
||||
'SHA-512', new Uint8Array(1)
|
||||
).then(
|
||||
function (x) { up2k = up2k_init(true) },
|
||||
function (x) { up2k = up2k_init(false) }
|
||||
);
|
||||
}
|
||||
catch (ex) {
|
||||
try {
|
||||
up2k = up2k_init(false);
|
||||
}
|
||||
catch (ex) { }
|
||||
}
|
||||
|
||||
|
||||
function up2k_init(have_crypto) {
|
||||
//have_crypto = false;
|
||||
var need_filereader_cache = undefined;
|
||||
|
||||
// show modal message
|
||||
function showmodal(msg) {
|
||||
o('u2notbtn').innerHTML = msg;
|
||||
o('u2btn').style.display = 'none';
|
||||
o('u2notbtn').style.display = 'block';
|
||||
o('u2conf').style.opacity = '0.5';
|
||||
ebi('u2notbtn').innerHTML = msg;
|
||||
ebi('u2btn').style.display = 'none';
|
||||
ebi('u2notbtn').style.display = 'block';
|
||||
ebi('u2conf').style.opacity = '0.5';
|
||||
}
|
||||
|
||||
// hide modal message
|
||||
function unmodal() {
|
||||
o('u2notbtn').style.display = 'none';
|
||||
o('u2btn').style.display = 'block';
|
||||
o('u2conf').style.opacity = '1';
|
||||
o('u2notbtn').innerHTML = '';
|
||||
ebi('u2notbtn').style.display = 'none';
|
||||
ebi('u2btn').style.display = 'block';
|
||||
ebi('u2conf').style.opacity = '1';
|
||||
ebi('u2notbtn').innerHTML = '';
|
||||
}
|
||||
|
||||
var post_url = o('bup').getElementsByTagName('form')[0].getAttribute('action');
|
||||
var post_url = ebi('op_bup').getElementsByTagName('form')[0].getAttribute('action');
|
||||
if (post_url && post_url.charAt(post_url.length - 1) !== '/')
|
||||
post_url += '/';
|
||||
|
||||
@@ -105,52 +120,45 @@ function up2k_init(have_crypto) {
|
||||
shame = 'your browser is impressively ancient';
|
||||
|
||||
// upload ui hidden by default, clicking the header shows it
|
||||
function toggle_upload_visible(ev) {
|
||||
if (ev)
|
||||
ev.preventDefault();
|
||||
|
||||
o('u2tgl').style.display = 'none';
|
||||
o('u2body').style.display = 'block';
|
||||
|
||||
function init_deps() {
|
||||
if (!have_crypto && !window.asmCrypto) {
|
||||
showmodal('<h1>loading sha512.js</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
|
||||
import_js('/.cpr/deps/sha512.js', unmodal);
|
||||
|
||||
if (is_https)
|
||||
o('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best';
|
||||
ebi('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best';
|
||||
else
|
||||
o('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance';
|
||||
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance';
|
||||
}
|
||||
};
|
||||
o('u2tgl').onclick = toggle_upload_visible;
|
||||
|
||||
}
|
||||
|
||||
// show uploader if the user only has write-access
|
||||
if (!o('files'))
|
||||
toggle_upload_visible();
|
||||
if (!ebi('files'))
|
||||
goto('up2k');
|
||||
|
||||
// shows or clears an error message in the basic uploader ui
|
||||
function setmsg(msg) {
|
||||
if (msg !== undefined) {
|
||||
o('u2err').setAttribute('class', 'err');
|
||||
o('u2err').innerHTML = msg;
|
||||
ebi('u2err').setAttribute('class', 'err');
|
||||
ebi('u2err').innerHTML = msg;
|
||||
}
|
||||
else {
|
||||
o('u2err').setAttribute('class', '');
|
||||
o('u2err').innerHTML = '';
|
||||
ebi('u2err').setAttribute('class', '');
|
||||
ebi('u2err').innerHTML = '';
|
||||
}
|
||||
}
|
||||
|
||||
// switches to the basic uploader with msg as error message
|
||||
function un2k(msg) {
|
||||
o('up2k').style.display = 'none';
|
||||
o('bup').style.display = 'block';
|
||||
setmsg(msg);
|
||||
return false;
|
||||
}
|
||||
|
||||
// handle user intent to use the basic uploader instead
|
||||
o('u2nope').onclick = function (e) {
|
||||
ebi('u2nope').onclick = function (e) {
|
||||
e.preventDefault();
|
||||
un2k();
|
||||
setmsg('');
|
||||
goto('bup');
|
||||
};
|
||||
|
||||
if (!String.prototype.format) {
|
||||
@@ -166,9 +174,9 @@ function up2k_init(have_crypto) {
|
||||
function cfg_get(name) {
|
||||
var val = localStorage.getItem(name);
|
||||
if (val === null)
|
||||
return parseInt(o(name).value);
|
||||
return parseInt(ebi(name).value);
|
||||
|
||||
o(name).value = val
|
||||
ebi(name).value = val;
|
||||
return val;
|
||||
}
|
||||
|
||||
@@ -179,7 +187,7 @@ function up2k_init(have_crypto) {
|
||||
else
|
||||
val = (val == '1');
|
||||
|
||||
o(name).checked = val;
|
||||
ebi(name).checked = val;
|
||||
return val;
|
||||
}
|
||||
|
||||
@@ -187,7 +195,7 @@ function up2k_init(have_crypto) {
|
||||
localStorage.setItem(
|
||||
name, val ? '1' : '0');
|
||||
|
||||
o(name).checked = val;
|
||||
ebi(name).checked = val;
|
||||
return val;
|
||||
}
|
||||
|
||||
@@ -213,19 +221,17 @@ function up2k_init(have_crypto) {
|
||||
}
|
||||
};
|
||||
|
||||
var bobslice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
|
||||
var bobslice = null;
|
||||
if (window.File)
|
||||
bobslice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
|
||||
|
||||
if (!bobslice || !window.FileReader || !window.FileList)
|
||||
return un2k("this is the basic uploader; up2k needs at least<br />chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1");
|
||||
|
||||
// probably safe now
|
||||
o('up2k').style.display = 'block';
|
||||
o('bup').style.display = 'none';
|
||||
|
||||
function nav() {
|
||||
o('file' + fdom_ctr).click();
|
||||
ebi('file' + fdom_ctr).click();
|
||||
}
|
||||
o('u2btn').addEventListener('click', nav, false);
|
||||
ebi('u2btn').addEventListener('click', nav, false);
|
||||
|
||||
function ondrag(ev) {
|
||||
ev.stopPropagation();
|
||||
@@ -233,8 +239,8 @@ function up2k_init(have_crypto) {
|
||||
ev.dataTransfer.dropEffect = 'copy';
|
||||
ev.dataTransfer.effectAllowed = 'copy';
|
||||
}
|
||||
o('u2btn').addEventListener('dragover', ondrag, false);
|
||||
o('u2btn').addEventListener('dragenter', ondrag, false);
|
||||
ebi('u2btn').addEventListener('dragover', ondrag, false);
|
||||
ebi('u2btn').addEventListener('dragenter', ondrag, false);
|
||||
|
||||
function gotfile(ev) {
|
||||
ev.stopPropagation();
|
||||
@@ -272,12 +278,15 @@ function up2k_init(have_crypto) {
|
||||
bad_files.push([a, fobj.name]);
|
||||
continue;
|
||||
}
|
||||
var now = new Date().getTime();
|
||||
var lmod = fobj.lastModified || now;
|
||||
var entry = {
|
||||
"n": parseInt(st.files.length.toString()),
|
||||
"t0": new Date().getTime(), // TODO remove probably
|
||||
"t0": now, // TODO remove probably
|
||||
"fobj": fobj,
|
||||
"name": fobj.name,
|
||||
"size": fobj.size,
|
||||
"lmod": lmod / 1000,
|
||||
"hash": []
|
||||
};
|
||||
|
||||
@@ -291,9 +300,9 @@ function up2k_init(have_crypto) {
|
||||
continue;
|
||||
|
||||
var tr = document.createElement('tr');
|
||||
tr.innerHTML = '<td></td><td id="f{0}t">hashing</td><td id="f{0}p" class="prog"></td>'.format(st.files.length);
|
||||
tr.innerHTML = '<td id="f{0}n"></td><td id="f{0}t">hashing</td><td id="f{0}p" class="prog"></td>'.format(st.files.length);
|
||||
tr.getElementsByTagName('td')[0].textContent = entry.name;
|
||||
o('u2tab').appendChild(tr);
|
||||
ebi('u2tab').appendChild(tr);
|
||||
|
||||
st.files.push(entry);
|
||||
st.todo.hash.push(entry);
|
||||
@@ -310,14 +319,14 @@ function up2k_init(have_crypto) {
|
||||
alert(msg);
|
||||
}
|
||||
}
|
||||
o('u2btn').addEventListener('drop', gotfile, false);
|
||||
ebi('u2btn').addEventListener('drop', gotfile, false);
|
||||
|
||||
function more_one_file() {
|
||||
fdom_ctr++;
|
||||
var elm = document.createElement('div')
|
||||
elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" />'.format(fdom_ctr);
|
||||
o('u2form').appendChild(elm);
|
||||
o('file' + fdom_ctr).addEventListener('change', gotfile, false);
|
||||
ebi('u2form').appendChild(elm);
|
||||
ebi('file' + fdom_ctr).addEventListener('change', gotfile, false);
|
||||
}
|
||||
more_one_file();
|
||||
|
||||
@@ -387,17 +396,6 @@ function up2k_init(have_crypto) {
|
||||
/// hashing
|
||||
//
|
||||
|
||||
// https://gist.github.com/jonleighton/958841
|
||||
function buf2b64_maybe_fucky(buffer) {
|
||||
var ret = '';
|
||||
var view = new DataView(buffer);
|
||||
for (var i = 0; i < view.byteLength; i++) {
|
||||
ret += String.fromCharCode(view.getUint8(i));
|
||||
}
|
||||
return window.btoa(ret).replace(
|
||||
/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
|
||||
}
|
||||
|
||||
// https://gist.github.com/jonleighton/958841
|
||||
function buf2b64(arrayBuffer) {
|
||||
var base64 = '';
|
||||
@@ -438,20 +436,6 @@ function up2k_init(have_crypto) {
|
||||
return base64;
|
||||
}
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/digest
|
||||
function buf2hex(buffer) {
|
||||
var hexCodes = [];
|
||||
var view = new DataView(buffer);
|
||||
for (var i = 0; i < view.byteLength; i += 4) {
|
||||
var value = view.getUint32(i) // 4 bytes per iter
|
||||
var stringValue = value.toString(16) // doesn't pad
|
||||
var padding = '00000000'
|
||||
var paddedValue = (padding + stringValue).slice(-padding.length)
|
||||
hexCodes.push(paddedValue);
|
||||
}
|
||||
return hexCodes.join("");
|
||||
}
|
||||
|
||||
function get_chunksize(filesize) {
|
||||
var chunksize = 1024 * 1024;
|
||||
var stepsize = 512 * 1024;
|
||||
@@ -538,7 +522,7 @@ function up2k_init(have_crypto) {
|
||||
pb_html += '<div id="f{0}p{1}" style="width:{2}%"><div></div></div>'.format(
|
||||
t.n, a, pb_perc);
|
||||
|
||||
o('f{0}p'.format(t.n)).innerHTML = pb_html;
|
||||
ebi('f{0}p'.format(t.n)).innerHTML = pb_html;
|
||||
|
||||
var reader = new FileReader();
|
||||
|
||||
@@ -613,7 +597,7 @@ function up2k_init(have_crypto) {
|
||||
alert('{0} ms, {1} MB/s\n'.format(t.t2 - t.t1, spd.toFixed(3)) + t.hash.join('\n'));
|
||||
}
|
||||
|
||||
o('f{0}t'.format(t.n)).innerHTML = 'connecting';
|
||||
ebi('f{0}t'.format(t.n)).innerHTML = 'connecting';
|
||||
st.busy.hash.splice(st.busy.hash.indexOf(t), 1);
|
||||
st.todo.handshake.push(t);
|
||||
};
|
||||
@@ -639,6 +623,12 @@ function up2k_init(have_crypto) {
|
||||
if (xhr.status == 200) {
|
||||
var response = JSON.parse(xhr.responseText);
|
||||
|
||||
if (response.name !== t.name) {
|
||||
// file exists; server renamed us
|
||||
t.name = response.name;
|
||||
ebi('f{0}n'.format(t.n)).textContent = t.name;
|
||||
}
|
||||
|
||||
t.postlist = [];
|
||||
t.wark = response.wark;
|
||||
var missing = response.hash;
|
||||
@@ -666,13 +656,13 @@ function up2k_init(have_crypto) {
|
||||
msg = 'uploading';
|
||||
done = false;
|
||||
}
|
||||
o('f{0}t'.format(t.n)).innerHTML = msg;
|
||||
ebi('f{0}t'.format(t.n)).innerHTML = msg;
|
||||
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
|
||||
|
||||
if (done) {
|
||||
var spd1 = (t.size / ((t.t2 - t.t1) / 1000.)) / (1024 * 1024.);
|
||||
var spd2 = (t.size / ((t.t3 - t.t2) / 1000.)) / (1024 * 1024.);
|
||||
o('f{0}p'.format(t.n)).innerHTML = 'hash {0}, up {1} MB/s'.format(
|
||||
ebi('f{0}p'.format(t.n)).innerHTML = 'hash {0}, up {1} MB/s'.format(
|
||||
spd1.toFixed(2), spd2.toFixed(2));
|
||||
}
|
||||
tasker();
|
||||
@@ -689,6 +679,7 @@ function up2k_init(have_crypto) {
|
||||
xhr.send(JSON.stringify({
|
||||
"name": t.name,
|
||||
"size": t.size,
|
||||
"lmod": t.lmod,
|
||||
"hash": t.hash
|
||||
}));
|
||||
}
|
||||
@@ -732,7 +723,7 @@ function up2k_init(have_crypto) {
|
||||
t.postlist.splice(t.postlist.indexOf(npart), 1);
|
||||
if (t.postlist.length == 0) {
|
||||
t.t3 = new Date().getTime();
|
||||
o('f{0}t'.format(t.n)).innerHTML = 'verifying';
|
||||
ebi('f{0}t'.format(t.n)).innerHTML = 'verifying';
|
||||
st.todo.handshake.push(t);
|
||||
}
|
||||
tasker();
|
||||
@@ -763,7 +754,7 @@ function up2k_init(have_crypto) {
|
||||
//
|
||||
|
||||
function prog(nfile, nchunk, color, percent) {
|
||||
var n1 = o('f{0}p{1}'.format(nfile, nchunk));
|
||||
var n1 = ebi('f{0}p{1}'.format(nfile, nchunk));
|
||||
var n2 = n1.getElementsByTagName('div')[0];
|
||||
if (percent === undefined) {
|
||||
n1.style.background = color;
|
||||
@@ -786,7 +777,7 @@ function up2k_init(have_crypto) {
|
||||
dir.preventDefault();
|
||||
} catch (ex) { }
|
||||
|
||||
var obj = o('nthread');
|
||||
var obj = ebi('nthread');
|
||||
if (dir.target) {
|
||||
obj.style.background = '#922';
|
||||
var v = Math.floor(parseInt(obj.value));
|
||||
@@ -821,21 +812,23 @@ function up2k_init(have_crypto) {
|
||||
this.click();
|
||||
}
|
||||
|
||||
o('nthread_add').onclick = function (ev) {
|
||||
ebi('nthread_add').onclick = function (ev) {
|
||||
ev.preventDefault();
|
||||
bumpthread(1);
|
||||
};
|
||||
o('nthread_sub').onclick = function (ev) {
|
||||
ebi('nthread_sub').onclick = function (ev) {
|
||||
ev.preventDefault();
|
||||
bumpthread(-1);
|
||||
};
|
||||
|
||||
o('nthread').addEventListener('input', bumpthread, false);
|
||||
o('multitask').addEventListener('click', tgl_multitask, false);
|
||||
ebi('nthread').addEventListener('input', bumpthread, false);
|
||||
ebi('multitask').addEventListener('click', tgl_multitask, false);
|
||||
|
||||
var nodes = o('u2conf').getElementsByTagName('a');
|
||||
var nodes = ebi('u2conf').getElementsByTagName('a');
|
||||
for (var a = nodes.length - 1; a >= 0; a--)
|
||||
nodes[a].addEventListener('touchend', nop, false);
|
||||
|
||||
bumpthread({ "target": 1 })
|
||||
|
||||
return { "init_deps": init_deps }
|
||||
}
|
||||
|
||||
@@ -1,19 +1,94 @@
|
||||
#bup {
|
||||
padding: .5em .5em .5em .3em;
|
||||
margin: 1em 0 2em 0;
|
||||
background: #2d2d2d;
|
||||
border-radius: 0 1em 1em 0;
|
||||
.opview {
|
||||
display: none;
|
||||
}
|
||||
.opview.act {
|
||||
display: block;
|
||||
}
|
||||
#ops a {
|
||||
color: #fc5;
|
||||
font-size: 1.5em;
|
||||
padding: 0 .3em;
|
||||
margin: 0;
|
||||
outline: none;
|
||||
}
|
||||
#ops a.act {
|
||||
text-decoration: underline;
|
||||
}
|
||||
/*
|
||||
#ops a+a:after,
|
||||
#ops a:first-child:after {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #01a7e1;
|
||||
margin-left: .3em;
|
||||
position: relative;
|
||||
}
|
||||
#ops a+a:before {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #ff3f1a;
|
||||
margin-right: .3em;
|
||||
margin-left: -.3em;
|
||||
}
|
||||
#ops a:last-child:after {
|
||||
content: '';
|
||||
}
|
||||
#ops a.act:before,
|
||||
#ops a.act:after {
|
||||
text-decoration: none !important;
|
||||
}
|
||||
*/
|
||||
#ops i {
|
||||
font-size: 1.5em;
|
||||
}
|
||||
#ops i:before {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #01a7e1;
|
||||
position: relative;
|
||||
}
|
||||
#ops i:after {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #ff3f1a;
|
||||
margin-left: -.35em;
|
||||
font-size: 1.05em;
|
||||
}
|
||||
#ops,
|
||||
.opbox {
|
||||
border: 1px solid #3a3a3a;
|
||||
border-width: 0 .3em .3em 0;
|
||||
box-shadow: 0 0 1em #222 inset;
|
||||
}
|
||||
#ops {
|
||||
display: none;
|
||||
background: #333;
|
||||
margin: 1.7em 1.5em 0 1.5em;
|
||||
padding: .3em .6em;
|
||||
border-radius: .3em;
|
||||
border-width: .15em 0;
|
||||
}
|
||||
.opbox {
|
||||
background: #2d2d2d;
|
||||
margin: 1.5em 0 0 0;
|
||||
padding: .5em;
|
||||
border-radius: 0 1em 1em 0;
|
||||
border-width: .15em .3em .3em 0;
|
||||
max-width: 40em;
|
||||
}
|
||||
#bup input {
|
||||
.opbox input {
|
||||
margin: .5em;
|
||||
}
|
||||
#up2k {
|
||||
display: none;
|
||||
padding: 0 1em;
|
||||
.opbox input[type=text] {
|
||||
color: #fff;
|
||||
background: #383838;
|
||||
border: none;
|
||||
box-shadow: 0 0 .3em #222;
|
||||
border-bottom: 1px solid #fc5;
|
||||
border-radius: .2em;
|
||||
padding: .2em .3em;
|
||||
}
|
||||
#op_up2k {
|
||||
padding: 0 1em 1em 1em;
|
||||
}
|
||||
#u2form {
|
||||
position: absolute;
|
||||
@@ -29,16 +104,6 @@
|
||||
color: #f87;
|
||||
padding: .5em;
|
||||
}
|
||||
#u2tgl {
|
||||
color: #fc5;
|
||||
font-size: 1.5em;
|
||||
margin: .5em 0 1em 0;
|
||||
display: block;
|
||||
}
|
||||
#u2body {
|
||||
display: none;
|
||||
padding-bottom: 1em;
|
||||
}
|
||||
#u2form {
|
||||
width: 2px;
|
||||
height: 2px;
|
||||
|
||||
@@ -1,4 +1,11 @@
|
||||
<div id="bup">
|
||||
<div id="ops"><a
|
||||
href="#" data-dest="">---</a><i></i><a
|
||||
href="#" data-dest="up2k">up2k</a><i></i><a
|
||||
href="#" data-dest="bup">bup</a><i></i><a
|
||||
href="#" data-dest="mkdir">mkdir</a><i></i><a
|
||||
href="#" data-dest="new_md">new.md</a></div>
|
||||
|
||||
<div id="op_bup" class="opview opbox act">
|
||||
<div id="u2err"></div>
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
|
||||
<input type="hidden" name="act" value="bput" />
|
||||
@@ -7,10 +14,24 @@
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="up2k">
|
||||
<a href="#" id="u2tgl">you can upload here</a>
|
||||
<form id="u2form" method="POST" enctype="multipart/form-data" onsubmit="return false;"></form>
|
||||
<div id="u2body">
|
||||
<div id="op_mkdir" class="opview opbox act">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
|
||||
<input type="hidden" name="act" value="mkdir" />
|
||||
<input type="text" name="name" size="30">
|
||||
<input type="submit" value="mkdir">
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="op_new_md" class="opview opbox">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
|
||||
<input type="hidden" name="act" value="new_md" />
|
||||
<input type="text" name="name" size="30">
|
||||
<input type="submit" value="create doc">
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="op_up2k" class="opview">
|
||||
<form id="u2form" method="post" enctype="multipart/form-data" onsubmit="return false;"></form>
|
||||
|
||||
<table id="u2conf">
|
||||
<tr>
|
||||
@@ -45,6 +66,5 @@
|
||||
</table>
|
||||
|
||||
<p id="u2foot"></p>
|
||||
<p>( if you don't need resumable uploads and progress bars just use the <a href="#" id="u2nope" onclick="javascript:un2k();">basic uploader</a>)</p>
|
||||
</div>
|
||||
<p>( if you don't need lastmod timestamps, resumable uploads or progress bars just use the <a href="#" id="u2nope">basic uploader</a>)</p>
|
||||
</div>
|
||||
|
||||
109
copyparty/web/util.js
Normal file
@@ -0,0 +1,109 @@
|
||||
"use strict";
|
||||
|
||||
// error handler for mobile devices
|
||||
function hcroak(msg) {
|
||||
document.body.innerHTML = msg;
|
||||
window.onerror = undefined;
|
||||
throw 'fatal_err';
|
||||
}
|
||||
function croak(msg) {
|
||||
document.body.textContent = msg;
|
||||
window.onerror = undefined;
|
||||
throw msg;
|
||||
}
|
||||
function esc(txt) {
|
||||
return txt.replace(/[&"<>]/g, function (c) {
|
||||
return {
|
||||
'&': '&',
|
||||
'"': '"',
|
||||
'<': '<',
|
||||
'>': '>'
|
||||
}[c];
|
||||
});
|
||||
}
|
||||
function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
window.onerror = undefined;
|
||||
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
|
||||
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
|
||||
|
||||
if (error) {
|
||||
var find = ['desc', 'stack', 'trace'];
|
||||
for (var a = 0; a < find.length; a++)
|
||||
if (String(error[find[a]]) !== 'undefined')
|
||||
html.push('<h2>' + find[a] + '</h2>' +
|
||||
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
|
||||
}
|
||||
document.body.style.fontSize = '0.8em';
|
||||
document.body.style.padding = '0 1em 1em 1em';
|
||||
hcroak(html.join('\n'));
|
||||
}
|
||||
|
||||
|
||||
function ebi(id) {
|
||||
return document.getElementById(id);
|
||||
}
|
||||
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
||||
if (!String.prototype.endsWith) {
|
||||
String.prototype.endsWith = function (search, this_len) {
|
||||
if (this_len === undefined || this_len > this.length) {
|
||||
this_len = this.length;
|
||||
}
|
||||
return this.substring(this_len - search.length, this_len) === search;
|
||||
};
|
||||
}
|
||||
if (!String.prototype.startsWith) {
|
||||
String.prototype.startsWith = function (s, i) {
|
||||
i = i > 0 ? i | 0 : 0;
|
||||
return this.substring(i, i + s.length) === s;
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var script = document.createElement('script');
|
||||
script.type = 'text/javascript';
|
||||
script.src = url;
|
||||
|
||||
script.onreadystatechange = cb;
|
||||
script.onload = cb;
|
||||
|
||||
head.appendChild(script);
|
||||
}
|
||||
|
||||
|
||||
function sortTable(table, col) {
|
||||
var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows
|
||||
th = table.tHead.rows[0].cells,
|
||||
tr = Array.prototype.slice.call(tb.rows, 0),
|
||||
i, reverse = th[col].className == 'sort1' ? -1 : 1;
|
||||
for (var a = 0, thl = th.length; a < thl; a++)
|
||||
th[a].className = '';
|
||||
th[col].className = 'sort' + reverse;
|
||||
var stype = th[col].getAttribute('sort');
|
||||
tr = tr.sort(function (a, b) {
|
||||
var v1 = a.cells[col].textContent.trim();
|
||||
var v2 = b.cells[col].textContent.trim();
|
||||
if (stype == 'int') {
|
||||
v1 = parseInt(v1.replace(/,/g, ''));
|
||||
v2 = parseInt(v2.replace(/,/g, ''));
|
||||
return reverse * (v1 - v2);
|
||||
}
|
||||
return reverse * (v1.localeCompare(v2));
|
||||
});
|
||||
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]);
|
||||
}
|
||||
function makeSortable(table) {
|
||||
var th = table.tHead, i;
|
||||
th && (th = th.rows[0]) && (th = th.cells);
|
||||
if (th) i = th.length;
|
||||
else return; // if no `<thead>` then do nothing
|
||||
while (--i >= 0) (function (i) {
|
||||
th[i].onclick = function () {
|
||||
sortTable(table, i);
|
||||
};
|
||||
}(i));
|
||||
}
|
||||
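a usage sketch, not taken from the repo: assuming the page has a `<table id="files">` with a `<thead>` row, wiring it up is a one-liner:

```js
// every <th> click now re-sorts the <tbody> rows by that column;
// columns whose <th> has sort="int" are compared numerically
makeSortable(ebi('files'));
```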
@@ -13,7 +13,7 @@ head -c $((2*1024*1024*1024)) /dev/zero | openssl enc -aes-256-ctr -pass pass:hu
|
||||
## testing multiple parallel uploads
|
||||
## usage: para | tee log
|
||||
|
||||
para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}; do for ((n=0;n<s;n++)); do curl -sF "act=bput" -F "f=@garbage.file" http://127.0.0.1:1234/ 2>&1 & done; wait; echo; done; done; }
|
||||
para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}; do for ((n=0;n<s;n++)); do curl -sF "act=bput" -F "f=@garbage.file" http://127.0.0.1:3923/ 2>&1 & done; wait; echo; done; done; }
|
||||
|
||||
|
||||
##
|
||||
@@ -36,13 +36,13 @@ for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qwe,rty;asd
|
||||
|
||||
fn=$(printf '\xba\xdc\xab.cab')
|
||||
echo asdf > "$fn"
|
||||
curl --cookie cppwd=wark -sF "act=bput" -F "f=@$fn" http://127.0.0.1:1234/moji/%ED%91/
|
||||
curl --cookie cppwd=wark -sF "act=bput" -F "f=@$fn" http://127.0.0.1:3923/moji/%ED%91/
|
||||
|
||||
|
||||
##
|
||||
## test compression
|
||||
|
||||
wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:1234/.cpr/deps/ogv.js -O- | md5sum; p=~ed/dev/copyparty/copyparty/web/deps/ogv.js.gz; md5sum $p; gzip -d < $p | md5sum
|
||||
wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:3923/.cpr/deps/ogv.js -O- | md5sum; p=~ed/dev/copyparty/copyparty/web/deps/ogv.js.gz; md5sum $p; gzip -d < $p | md5sum
|
||||
|
||||
|
||||
##
|
||||
@@ -80,3 +80,45 @@ for d in /usr /var; do find $d -type f -size +30M 2>/dev/null; done | while IFS=
|
||||
# py2 on osx
|
||||
brew install python@2
|
||||
pip install virtualenv
|
||||
|
||||
|
||||
##
|
||||
## http 206
|
||||
|
||||
# az = abcdefghijklmnopqrstuvwxyz
|
||||
|
||||
printf '%s\r\n' 'GET /az HTTP/1.1' 'Host: ocv.me' 'Range: bytes=5-10' '' | ncat ocv.me 80
|
||||
# Content-Range: bytes 5-10/26
|
||||
# Content-Length: 6
|
||||
# fghijk
|
||||
|
||||
Range: bytes=0-1 "ab" Content-Range: bytes 0-1/26
|
||||
Range: bytes=24-24 "y" Content-Range: bytes 24-24/26
|
||||
Range: bytes=24-25 "yz" Content-Range: bytes 24-25/26
|
||||
Range: bytes=24- "yz" Content-Range: bytes 24-25/26
|
||||
Range: bytes=25-29 "z" Content-Range: bytes 25-25/26
|
||||
Range: bytes=26- Content-Range: bytes */26
|
||||
HTTP/1.1 416 Requested Range Not Satisfiable
|
||||
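the same checks can be run from a browser console with `fetch`; a rough sketch mirroring the `ncat` example above (assuming the request is same-origin, or CORS exposes the headers):

```js
// request bytes 5-10 of /az and print the pieces of the 206 response
fetch('/az', { headers: { 'Range': 'bytes=5-10' } }).then(function (r) {
    console.log(r.status);                       // expect 206
    console.log(r.headers.get('Content-Range')); // expect "bytes 5-10/26"
    return r.text();
}).then(function (body) {
    console.log(body);                           // expect "fghijk"
});
```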
|
||||
|
||||
##
## md perf

var tsh = [];
function convert_markdown(md_text, dest_dom) {
    tsh.push(new Date().getTime());
    while (tsh.length > 10)
        tsh.shift();
    if (tsh.length > 1) {
        var end = tsh.slice(-2);
        console.log("render", end.pop() - end.pop(), (tsh[tsh.length - 1] - tsh[0]) / (tsh.length - 1));
    }


##
## tmpfiles.d meme

mk() { rm -rf /tmp/foo; sudo -u ed bash -c 'mkdir /tmp/foo; echo hi > /tmp/foo/bar'; }
mk && t0="$(date)" && while true; do date -s "$(date '+ 1 hour')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
mk && sudo -u ed flock /tmp/foo sleep 40 & sleep 1; ps aux | grep -E 'sleep 40$' && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
mk && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; tar -cf/dev/null /tmp/foo; done; echo "$t0"

35
docs/pretend-youre-qnap.patch
Normal file
@@ -0,0 +1,35 @@
|
||||
diff --git a/copyparty/httpcli.py b/copyparty/httpcli.py
|
||||
index 2d3c1ad..e1e85a0 100644
|
||||
--- a/copyparty/httpcli.py
|
||||
+++ b/copyparty/httpcli.py
|
||||
@@ -864,6 +864,30 @@ class HttpCli(object):
|
||||
#
|
||||
# send reply
|
||||
|
||||
+ try:
|
||||
+ fakefn = self.conn.hsrv.fakefn
|
||||
+ fakectr = self.conn.hsrv.fakectr
|
||||
+ fakedata = self.conn.hsrv.fakedata
|
||||
+ except:
|
||||
+ fakefn = b''
|
||||
+ fakectr = 0
|
||||
+ fakedata = b''
|
||||
+
|
||||
+ self.log('\n{} {}\n{}'.format(fakefn, fakectr, open_args[0]))
|
||||
+ if fakefn == open_args[0] and fakectr > 0:
|
||||
+ self.reply(fakedata, mime=guess_mime(req_path)[0])
|
||||
+ self.conn.hsrv.fakectr = fakectr - 1
|
||||
+ else:
|
||||
+ with open_func(*open_args) as f:
|
||||
+ fakedata = f.read()
|
||||
+
|
||||
+ self.conn.hsrv.fakefn = open_args[0]
|
||||
+ self.conn.hsrv.fakedata = fakedata
|
||||
+ self.conn.hsrv.fakectr = 15
|
||||
+ self.reply(fakedata, mime=guess_mime(req_path)[0])
|
||||
+
|
||||
+ return True
|
||||
+
|
||||
self.out_headers["Accept-Ranges"] = "bytes"
|
||||
self.send_headers(
|
||||
length=upper - lower,
|
||||
62
docs/rclone.md
Normal file
@@ -0,0 +1,62 @@
# using rclone to mount a remote copyparty server as a local filesystem

speed estimates with server and client on the same win10 machine:
* `1070 MiB/s` with rclone as both server and client
* `570 MiB/s` with rclone-client and `copyparty -ed -j16` as server
* `220 MiB/s` with rclone-client and `copyparty -ed` as server
* `100 MiB/s` with [../bin/copyparty-fuse.py](../bin/copyparty-fuse.py) as client

when the server is on another machine (1gbit LAN):
* `75 MiB/s` with [../bin/copyparty-fuse.py](../bin/copyparty-fuse.py) as client
* `92 MiB/s` with rclone-client and `copyparty -ed` as server
* `103 MiB/s` (connection max) with `copyparty -ed -j16` and all the others


# creating the config file

if you want to use password auth, add `headers = Cookie,cppwd=fgsfds` below


### on windows clients:
```
(
echo [cpp]
echo type = http
echo url = http://127.0.0.1:3923/
) > %userprofile%\.config\rclone\rclone.conf
```

also install the windows dependencies: [winfsp](https://github.com/billziss-gh/winfsp/releases/latest)


### on unix clients:
```
cat > ~/.config/rclone/rclone.conf <<'EOF'
[cpp]
type = http
url = http://127.0.0.1:3923/
EOF
```


# mounting the copyparty server locally
```
rclone.exe mount --vfs-cache-max-age 5s --attr-timeout 5s --dir-cache-time 5s cpp: Z:
```


# use rclone as server too, replacing copyparty

feels out of place but is too good not to mention

```
rclone.exe serve http --read-only .
```

* `webdav` gives write-access but `http` is twice as fast
* `ftp` is buggy, avoid


# bugs

* rclone-client throws an exception if you try to read an empty file (should return zero bytes)
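
a minimal repro sketch in python; the volume root (`./srv` on the server box) and the mountpoint (`Z:` on windows, `~/mnt/cpp` on unix) are assumptions, adjust to your setup:

```
import os

# create a 0-byte file inside the served volume
open("srv/empty.bin", "wb").close()

# read it back through the rclone mount; expected b"", observed: an exception
mnt = "Z:\\" if os.name == "nt" else os.path.expanduser("~/mnt/cpp")
with open(os.path.join(mnt, "empty.bin"), "rb") as f:
    print(repr(f.read()))
```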
10
docs/unirange.py
Normal file
@@ -0,0 +1,10 @@
v = "U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+2000-206F, U+2074, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD"
for v in v.split(","):
    if "+" in v:
        v = v.split("+")[1]
    if "-" in v:
        lo, hi = v.split("-")
    else:
        lo = hi = v
    for v in range(int(lo, 16), int(hi, 16) + 1):
        print("{:4x} [{}]".format(v, chr(v)))
@@ -5,8 +5,6 @@ _msg() { printf "$2"'\033[1;30m>\033[0;33m>\033[1m>\033[0m %s\n' "$1" >&2; }
|
||||
imsg() { _msg "$1" ''; }
|
||||
msg() { _msg "$1" \\n; }
|
||||
|
||||
mkdir -p ~/src
|
||||
|
||||
##
|
||||
## helper which installs termux packages
|
||||
|
||||
@@ -24,22 +22,6 @@ addpkg() {
|
||||
apt install -y $1
|
||||
}
|
||||
|
||||
##
|
||||
## ensure git and copyparty is available
|
||||
|
||||
[ -e ~/src/copyparty/.ok ] || {
|
||||
command -v git >/dev/null ||
|
||||
addpkg git
|
||||
|
||||
msg "downloading copyparty from github"
|
||||
(
|
||||
cd ~/src
|
||||
rm -rf copyparty
|
||||
git clone https://github.com/9001/copyparty
|
||||
touch copyparty/.ok
|
||||
)
|
||||
}
|
||||
|
||||
##
|
||||
## ensure python is available
|
||||
|
||||
@@ -49,20 +31,21 @@ command -v python3 >/dev/null ||
|
||||
##
|
||||
## ensure virtualenv and dependencies are available
|
||||
|
||||
[ -e ~/src/copyparty/.env/.ok ] || { (
|
||||
cd ~/src/copyparty
|
||||
rm -rf .env
|
||||
ve=$HOME/ve.copyparty
|
||||
|
||||
[ -e $ve/.ok ] || (
|
||||
rm -rf $ve
|
||||
|
||||
msg "creating python3 virtualenv"
|
||||
python3 -m venv .env
|
||||
python3 -m venv $ve
|
||||
|
||||
msg "installing python dependencies"
|
||||
. .env/bin/activate
|
||||
pip install jinja2
|
||||
msg "installing copyparty"
|
||||
. $ve/bin/activate
|
||||
pip install copyparty
|
||||
|
||||
deactivate
|
||||
touch .env/.ok
|
||||
) }
|
||||
touch $ve/.ok
|
||||
)
|
||||
|
||||
##
|
||||
## add copyparty alias to bashrc
|
||||
@@ -75,9 +58,5 @@ grep -qE '^alias copyparty=' ~/.bashrc 2>/dev/null || {
|
||||
##
|
||||
## start copyparty
|
||||
|
||||
imsg "activating virtualenv"
|
||||
. ~/src/copyparty/.env/bin/activate
|
||||
|
||||
imsg "starting copyparty"
|
||||
PYTHONPATH=~/src/copyparty python3 -m copyparty "$@"
|
||||
|
||||
$ve/bin/python -m copyparty "$@"
|
||||
|
||||
104
scripts/copyparty-repack.sh
Executable file
@@ -0,0 +1,104 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# -- download latest copyparty (source.tgz and sfx),
|
||||
# -- build minimal sfx versions,
|
||||
# -- create a .tar.gz bundle
|
||||
#
|
||||
# convenient for deploying updates to inconvenient locations
|
||||
# (and those are usually linux so bash is good inaff)
|
||||
# (but that said this even has macos support)
|
||||
#
|
||||
# bundle will look like:
|
||||
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty
|
||||
# -rw-r--r-- 0 ed ed 491318 Nov 19 00:40 copyparty-extras/copyparty-0.5.4.tar.gz
|
||||
# -rwxr-xr-x 0 ed ed 30254 Nov 17 23:58 copyparty-extras/copyparty-fuse.py
|
||||
# -rwxr-xr-x 0 ed ed 481403 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.sh
|
||||
# -rwxr-xr-x 0 ed ed 506043 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.py
|
||||
# -rwxr-xr-x 0 ed ed 167699 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.sh
|
||||
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py
|
||||
|
||||
|
||||
command -v gtar && tar() { gtar "$@"; }
|
||||
command -v gsed && sed() { gsed "$@"; }
|
||||
td="$(mktemp -d)"
|
||||
od="$(pwd)"
|
||||
cd "$td"
|
||||
pwd
|
||||
|
||||
|
||||
# debug: if cache exists, use that instead of bothering github
|
||||
cache="$od/.copyparty-repack.cache"
|
||||
[ -e "$cache" ] &&
|
||||
tar -xvf "$cache" ||
|
||||
{
|
||||
# get download links from github
|
||||
curl https://api.github.com/repos/9001/copyparty/releases/latest |
|
||||
(
|
||||
# prefer jq if available
|
||||
jq -r '.assets[]|select(.name|test("-sfx|tar.gz")).browser_download_url' ||
|
||||
|
||||
# fallback to awk (sorry)
|
||||
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
tr -d '\r' | tr '\n' '\0' | xargs -0 curl -L --remote-name-all
|
||||
|
||||
# debug: create cache
|
||||
#tar -czvf "$cache" *
|
||||
}
|
||||
|
||||
|
||||
# move src into copyparty-extras/,
|
||||
# move sfx into copyparty-extras/sfx-full/
|
||||
mkdir -p copyparty-extras/sfx-{full,lite}
|
||||
mv copyparty-sfx.* copyparty-extras/sfx-full/
|
||||
mv copyparty-*.tar.gz copyparty-extras/
|
||||
|
||||
|
||||
# unpack the source code
|
||||
( cd copyparty-extras/
|
||||
tar -xvf *.tar.gz
|
||||
)
|
||||
|
||||
|
||||
# fix permissions
|
||||
chmod 755 \
|
||||
copyparty-extras/sfx-full/* \
|
||||
copyparty-extras/copyparty-*/{scripts,bin}/*
|
||||
|
||||
|
||||
# extract and repack the sfx with less features enabled
|
||||
( cd copyparty-extras/sfx-full/
|
||||
./copyparty-sfx.py -h
|
||||
cd ../copyparty-*/
|
||||
./scripts/make-sfx.sh re no-ogv no-cm
|
||||
)
|
||||
|
||||
|
||||
# put new sfx into copyparty-extras/sfx-lite/,
|
||||
# fuse client into copyparty-extras/,
|
||||
# copy lite-sfx.py to ./copyparty,
|
||||
# delete extracted source code
|
||||
( cd copyparty-extras/
|
||||
mv copyparty-*/dist/* sfx-lite/
|
||||
mv copyparty-*/bin/copyparty-fuse.py .
|
||||
cp -pv sfx-lite/copyparty-sfx.py ../copyparty
|
||||
rm -rf copyparty-{0..9}*.*.*{0..9}
|
||||
)
|
||||
|
||||
|
||||
# and include the repacker itself too
|
||||
cp -pv "$od/$0" copyparty-extras/
|
||||
|
||||
|
||||
# create the bundle
|
||||
fn=copyparty-$(date +%Y-%m%d-%H%M%S).tgz
|
||||
tar -czvf "$od/$fn" *
|
||||
cd "$od"
|
||||
rm -rf "$td"
|
||||
|
||||
|
||||
echo
|
||||
echo "done, here's your bundle:"
|
||||
ls -al "$fn"
|
||||
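
for reference, the release-asset discovery done by the jq/awk pipeline in copyparty-repack.sh above can also be written as a short python sketch (stdlib only; same API endpoint and the same sfx/tar.gz selection):

# sketch; prints download urls for the latest copyparty release assets
import json
import urllib.request

url = "https://api.github.com/repos/9001/copyparty/releases/latest"
with urllib.request.urlopen(url) as r:
    rel = json.load(r)

for asset in rel["assets"]:
    name = asset["name"]
    if "-sfx" in name or name.endswith(".tar.gz"):
        print(asset["browser_download_url"])
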
@@ -1,17 +1,59 @@
|
||||
FROM alpine:3.10
|
||||
FROM alpine:3.11
|
||||
WORKDIR /z
|
||||
ENV ver_asmcrypto=2821dd1dedd1196c378f5854037dda5c869313f3 \
|
||||
ver_ogvjs=1.6.1
|
||||
ver_markdownit=10.0.0 \
|
||||
ver_showdown=1.9.1 \
|
||||
ver_marked=1.1.0 \
|
||||
ver_ogvjs=1.6.1 \
|
||||
ver_mde=2.10.1 \
|
||||
ver_codemirror=5.53.2 \
|
||||
ver_fontawesome=5.13.0 \
|
||||
ver_zopfli=1.0.3
|
||||
|
||||
|
||||
# download;
|
||||
# the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
|
||||
RUN mkdir -p /z/dist/no-pk \
|
||||
&& wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \
|
||||
&& apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \
|
||||
&& wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \
|
||||
&& wget https://github.com/asmcrypto/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
|
||||
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
|
||||
&& wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \
|
||||
&& wget https://github.com/codemirror/CodeMirror/archive/$ver_codemirror.tar.gz -O codemirror.tgz \
|
||||
&& wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
|
||||
&& wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
|
||||
&& unzip ogvjs.zip \
|
||||
&& (tar -xf asmcrypto.tgz \
|
||||
&& cd asmcrypto.js-$ver_asmcrypto \
|
||||
&& npm install ) \
|
||||
&& (tar -xf marked.tgz \
|
||||
&& cd marked-$ver_marked \
|
||||
&& npm install \
|
||||
&& npm i grunt uglify-js -g ) \
|
||||
&& (tar -xf codemirror.tgz \
|
||||
&& cd CodeMirror-$ver_codemirror \
|
||||
&& npm install ) \
|
||||
&& (tar -xf mde.tgz \
|
||||
&& cd easy-markdown-editor* \
|
||||
&& npm install \
|
||||
&& npm i gulp-cli -g ) \
|
||||
&& unzip fontawesome.zip \
|
||||
&& tar -xf zopfli.tgz
|
||||
|
||||
|
||||
# build fonttools (which needs zopfli)
|
||||
RUN tar -xf zopfli.tgz \
|
||||
&& cd zopfli* \
|
||||
&& cmake \
|
||||
-DCMAKE_INSTALL_PREFIX=/usr \
|
||||
-DZOPFLI_BUILD_SHARED=ON \
|
||||
-B build \
|
||||
-S . \
|
||||
&& make -C build \
|
||||
&& make -C build install \
|
||||
&& python3 -m pip install fonttools zopfli
|
||||
|
||||
# download
|
||||
RUN apk add make g++ git bash npm patch wget tar pigz gzip unzip \
|
||||
&& wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip \
|
||||
&& wget https://github.com/asmcrypto/asmcrypto.js/archive/$ver_asmcrypto.tar.gz \
|
||||
&& unzip ogvjs-$ver_ogvjs.zip \
|
||||
&& tar -xf $ver_asmcrypto.tar.gz \
|
||||
&& cd asmcrypto.js-$ver_asmcrypto \
|
||||
&& npm install \
|
||||
&& mkdir /z/dist
|
||||
|
||||
# build asmcrypto
|
||||
RUN cd asmcrypto.js-$ver_asmcrypto \
|
||||
@@ -19,28 +61,84 @@ RUN cd asmcrypto.js-$ver_asmcrypto \
|
||||
&& node -r esm build.js \
|
||||
&& mv asmcrypto.all.es5.js /z/dist/sha512.js
|
||||
|
||||
|
||||
# build ogvjs
|
||||
RUN cd ogvjs-$ver_ogvjs \
|
||||
&& cp -pv \
|
||||
ogv.js \
|
||||
ogv-worker-audio.js \
|
||||
ogv-demuxer-ogg.js \
|
||||
ogv-demuxer-ogg-wasm.js \
|
||||
ogv-demuxer-ogg-wasm.wasm \
|
||||
ogv-demuxer-webm.js \
|
||||
ogv-demuxer-webm-wasm.js \
|
||||
ogv-demuxer-webm-wasm.wasm \
|
||||
ogv-decoder-audio-opus.js \
|
||||
ogv-decoder-audio-opus-wasm.js \
|
||||
ogv-decoder-audio-opus-wasm.wasm \
|
||||
ogv-decoder-audio-vorbis.js \
|
||||
ogv-decoder-audio-vorbis-wasm.js \
|
||||
ogv-decoder-audio-vorbis-wasm.wasm \
|
||||
dynamicaudio.swf \
|
||||
/z/dist
|
||||
|
||||
# ogv-demuxer-ogg.js \
|
||||
# ogv-demuxer-webm.js \
|
||||
# ogv-decoder-audio-opus.js \
|
||||
# ogv-decoder-audio-vorbis.js \
|
||||
# dynamicaudio.swf \
|
||||
|
||||
|
||||
# build marked
|
||||
COPY marked.patch /z/
|
||||
COPY marked-ln.patch /z/
|
||||
RUN cd marked-$ver_marked \
|
||||
&& patch -p1 < /z/marked-ln.patch \
|
||||
&& patch -p1 < /z/marked.patch \
|
||||
&& npm run build \
|
||||
&& cp -pv marked.min.js /z/dist/marked.js \
|
||||
&& cp -pv lib/marked.js /z/dist/marked.full.js \
|
||||
&& mkdir -p /z/nodepkgs \
|
||||
&& ln -s $(pwd) /z/nodepkgs/marked
|
||||
# && npm run test \
|
||||
|
||||
|
||||
# build codemirror
|
||||
COPY codemirror.patch /z/
|
||||
RUN cd CodeMirror-$ver_codemirror \
|
||||
&& patch -p1 < /z/codemirror.patch \
|
||||
&& sed -ri '/^var urlRE = /d' mode/gfm/gfm.js \
|
||||
&& npm run build \
|
||||
&& ln -s $(pwd) /z/nodepkgs/codemirror
|
||||
|
||||
|
||||
# build easymde
|
||||
COPY easymde.patch /z/
|
||||
RUN cd easy-markdown-editor-$ver_mde \
|
||||
&& patch -p1 < /z/easymde.patch \
|
||||
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-0.8.2.tgz`file:/z/nodepkgs/marked`' package-lock.json \
|
||||
&& sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \
|
||||
&& sed -ri 's`("codemirror": ")[^"]+`\1file:/z/nodepkgs/codemirror`' ./package.json \
|
||||
&& npm install
|
||||
|
||||
COPY easymde-ln.patch /z/
|
||||
RUN cd easy-markdown-editor-$ver_mde \
|
||||
&& patch -p1 < /z/easymde-ln.patch \
|
||||
&& gulp \
|
||||
&& cp -pv dist/easymde.min.css /z/dist/easymde.css \
|
||||
&& cp -pv dist/easymde.min.js /z/dist/easymde.js
|
||||
|
||||
|
||||
# build fontawesome and scp
|
||||
COPY mini-fa.sh /z
|
||||
COPY mini-fa.css /z
|
||||
RUN /bin/ash /z/mini-fa.sh
|
||||
|
||||
|
||||
# compress
|
||||
COPY zopfli.makefile /z/dist/Makefile
|
||||
RUN cd /z/dist \
|
||||
&& make -j$(nproc) \
|
||||
&& rm Makefile
|
||||
&& rm Makefile \
|
||||
&& mv no-pk/* . \
|
||||
&& rmdir no-pk
|
||||
|
||||
|
||||
# git diff -U2 --no-index marked-1.1.0-orig/ marked-1.1.0-edit/ -U2 | sed -r '/^index /d;s`^(diff --git a/)[^/]+/(.* b/)[^/]+/`\1\2`; s`^(---|\+\+\+) ([ab]/)[^/]+/`\1 \2`' > ../dev/copyparty/scripts/deps-docker/marked-ln.patch
|
||||
# d=/home/ed/dev/copyparty/scripts/deps-docker/; tar -cf ../x . && ssh root@$bip "cd $d && tar -xv >&2 && make >&2 && tar -cC ../../copyparty/web deps" <../x | (cd ../../copyparty/web/; cat > the.tgz; tar -xvf the.tgz; rm the.tgz)
|
||||
# gzip -dkf ../dev/copyparty/copyparty/web/deps/deps/marked.full.js.gz && diff -NarU2 ../dev/copyparty/copyparty/web/deps/{,deps/}marked.full.js
|
||||
|
||||
277
scripts/deps-docker/codemirror.patch
Normal file
@@ -0,0 +1,277 @@
|
||||
diff -NarU2 CodeMirror-orig/mode/gfm/gfm.js CodeMirror-edit/mode/gfm/gfm.js
|
||||
--- CodeMirror-orig/mode/gfm/gfm.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/mode/gfm/gfm.js 2020-05-02 02:13:32.142131800 +0200
|
||||
@@ -97,5 +97,5 @@
|
||||
}
|
||||
}
|
||||
- if (stream.match(urlRE) &&
|
||||
+ /*if (stream.match(urlRE) &&
|
||||
stream.string.slice(stream.start - 2, stream.start) != "](" &&
|
||||
(stream.start == 0 || /\W/.test(stream.string.charAt(stream.start - 1)))) {
|
||||
@@ -106,5 +106,5 @@
|
||||
state.combineTokens = true;
|
||||
return "link";
|
||||
- }
|
||||
+ }*/
|
||||
stream.next();
|
||||
return null;
|
||||
diff -NarU2 CodeMirror-orig/mode/meta.js CodeMirror-edit/mode/meta.js
|
||||
--- CodeMirror-orig/mode/meta.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/mode/meta.js 2020-05-02 03:56:58.852408400 +0200
|
||||
@@ -13,4 +13,5 @@
|
||||
|
||||
CodeMirror.modeInfo = [
|
||||
+ /*
|
||||
{name: "APL", mime: "text/apl", mode: "apl", ext: ["dyalog", "apl"]},
|
||||
{name: "PGP", mimes: ["application/pgp", "application/pgp-encrypted", "application/pgp-keys", "application/pgp-signature"], mode: "asciiarmor", ext: ["asc", "pgp", "sig"]},
|
||||
@@ -56,5 +57,7 @@
|
||||
{name: "Gas", mime: "text/x-gas", mode: "gas", ext: ["s"]},
|
||||
{name: "Gherkin", mime: "text/x-feature", mode: "gherkin", ext: ["feature"]},
|
||||
+ */
|
||||
{name: "GitHub Flavored Markdown", mime: "text/x-gfm", mode: "gfm", file: /^(readme|contributing|history).md$/i},
|
||||
+ /*
|
||||
{name: "Go", mime: "text/x-go", mode: "go", ext: ["go"]},
|
||||
{name: "Groovy", mime: "text/x-groovy", mode: "groovy", ext: ["groovy", "gradle"], file: /^Jenkinsfile$/},
|
||||
@@ -65,5 +68,7 @@
|
||||
{name: "HXML", mime: "text/x-hxml", mode: "haxe", ext: ["hxml"]},
|
||||
{name: "ASP.NET", mime: "application/x-aspx", mode: "htmlembedded", ext: ["aspx"], alias: ["asp", "aspx"]},
|
||||
+ */
|
||||
{name: "HTML", mime: "text/html", mode: "htmlmixed", ext: ["html", "htm", "handlebars", "hbs"], alias: ["xhtml"]},
|
||||
+ /*
|
||||
{name: "HTTP", mime: "message/http", mode: "http"},
|
||||
{name: "IDL", mime: "text/x-idl", mode: "idl", ext: ["pro"]},
|
||||
@@ -82,5 +87,7 @@
|
||||
{name: "LiveScript", mime: "text/x-livescript", mode: "livescript", ext: ["ls"], alias: ["ls"]},
|
||||
{name: "Lua", mime: "text/x-lua", mode: "lua", ext: ["lua"]},
|
||||
+ */
|
||||
{name: "Markdown", mime: "text/x-markdown", mode: "markdown", ext: ["markdown", "md", "mkd"]},
|
||||
+ /*
|
||||
{name: "mIRC", mime: "text/mirc", mode: "mirc"},
|
||||
{name: "MariaDB SQL", mime: "text/x-mariadb", mode: "sql"},
|
||||
@@ -163,5 +170,7 @@
|
||||
{name: "VHDL", mime: "text/x-vhdl", mode: "vhdl", ext: ["vhd", "vhdl"]},
|
||||
{name: "Vue.js Component", mimes: ["script/x-vue", "text/x-vue"], mode: "vue", ext: ["vue"]},
|
||||
+ */
|
||||
{name: "XML", mimes: ["application/xml", "text/xml"], mode: "xml", ext: ["xml", "xsl", "xsd", "svg"], alias: ["rss", "wsdl", "xsd"]},
|
||||
+ /*
|
||||
{name: "XQuery", mime: "application/xquery", mode: "xquery", ext: ["xy", "xquery"]},
|
||||
{name: "Yacas", mime: "text/x-yacas", mode: "yacas", ext: ["ys"]},
|
||||
@@ -171,4 +180,5 @@
|
||||
{name: "xu", mime: "text/x-xu", mode: "mscgen", ext: ["xu"]},
|
||||
{name: "msgenny", mime: "text/x-msgenny", mode: "mscgen", ext: ["msgenny"]}
|
||||
+ */
|
||||
];
|
||||
// Ensure all modes have a mime property for backwards compatibility
|
||||
diff -NarU2 CodeMirror-orig/src/display/selection.js CodeMirror-edit/src/display/selection.js
|
||||
--- CodeMirror-orig/src/display/selection.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/display/selection.js 2020-05-02 03:27:30.144662800 +0200
|
||||
@@ -83,29 +83,21 @@
|
||||
let order = getOrder(lineObj, doc.direction)
|
||||
iterateBidiSections(order, fromArg || 0, toArg == null ? lineLen : toArg, (from, to, dir, i) => {
|
||||
- let ltr = dir == "ltr"
|
||||
- let fromPos = coords(from, ltr ? "left" : "right")
|
||||
- let toPos = coords(to - 1, ltr ? "right" : "left")
|
||||
+ let fromPos = coords(from, "left")
|
||||
+ let toPos = coords(to - 1, "right")
|
||||
|
||||
let openStart = fromArg == null && from == 0, openEnd = toArg == null && to == lineLen
|
||||
let first = i == 0, last = !order || i == order.length - 1
|
||||
if (toPos.top - fromPos.top <= 3) { // Single line
|
||||
- let openLeft = (docLTR ? openStart : openEnd) && first
|
||||
- let openRight = (docLTR ? openEnd : openStart) && last
|
||||
- let left = openLeft ? leftSide : (ltr ? fromPos : toPos).left
|
||||
- let right = openRight ? rightSide : (ltr ? toPos : fromPos).right
|
||||
+ let openLeft = openStart && first
|
||||
+ let openRight = openEnd && last
|
||||
+ let left = openLeft ? leftSide : fromPos.left
|
||||
+ let right = openRight ? rightSide : toPos.right
|
||||
add(left, fromPos.top, right - left, fromPos.bottom)
|
||||
} else { // Multiple lines
|
||||
let topLeft, topRight, botLeft, botRight
|
||||
- if (ltr) {
|
||||
- topLeft = docLTR && openStart && first ? leftSide : fromPos.left
|
||||
- topRight = docLTR ? rightSide : wrapX(from, dir, "before")
|
||||
- botLeft = docLTR ? leftSide : wrapX(to, dir, "after")
|
||||
- botRight = docLTR && openEnd && last ? rightSide : toPos.right
|
||||
- } else {
|
||||
- topLeft = !docLTR ? leftSide : wrapX(from, dir, "before")
|
||||
- topRight = !docLTR && openStart && first ? rightSide : fromPos.right
|
||||
- botLeft = !docLTR && openEnd && last ? leftSide : toPos.left
|
||||
- botRight = !docLTR ? rightSide : wrapX(to, dir, "after")
|
||||
- }
|
||||
+ topLeft = openStart && first ? leftSide : fromPos.left
|
||||
+ topRight = rightSide
|
||||
+ botLeft = leftSide
|
||||
+ botRight = openEnd && last ? rightSide : toPos.right
|
||||
add(topLeft, fromPos.top, topRight - topLeft, fromPos.bottom)
|
||||
if (fromPos.bottom < toPos.top) add(leftSide, fromPos.bottom, null, toPos.top)
|
||||
diff -NarU2 CodeMirror-orig/src/input/ContentEditableInput.js CodeMirror-edit/src/input/ContentEditableInput.js
|
||||
--- CodeMirror-orig/src/input/ContentEditableInput.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/input/ContentEditableInput.js 2020-05-02 03:33:05.707995500 +0200
|
||||
@@ -391,4 +391,5 @@
|
||||
let info = mapFromLineView(view, line, pos.line)
|
||||
|
||||
+ /*
|
||||
let order = getOrder(line, cm.doc.direction), side = "left"
|
||||
if (order) {
|
||||
@@ -396,4 +397,5 @@
|
||||
side = partPos % 2 ? "right" : "left"
|
||||
}
|
||||
+ */
|
||||
let result = nodeAndOffsetInLineMap(info.map, pos.ch, side)
|
||||
result.offset = result.collapse == "right" ? result.end : result.start
|
||||
diff -NarU2 CodeMirror-orig/src/input/movement.js CodeMirror-edit/src/input/movement.js
|
||||
--- CodeMirror-orig/src/input/movement.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/input/movement.js 2020-05-02 03:31:19.710773500 +0200
|
||||
@@ -15,4 +15,5 @@
|
||||
|
||||
export function endOfLine(visually, cm, lineObj, lineNo, dir) {
|
||||
+ /*
|
||||
if (visually) {
|
||||
if (cm.doc.direction == "rtl") dir = -dir
|
||||
@@ -39,8 +40,11 @@
|
||||
}
|
||||
}
|
||||
+ */
|
||||
return new Pos(lineNo, dir < 0 ? lineObj.text.length : 0, dir < 0 ? "before" : "after")
|
||||
}
|
||||
|
||||
export function moveVisually(cm, line, start, dir) {
|
||||
+ return moveLogically(line, start, dir)
|
||||
+ /*
|
||||
let bidi = getOrder(line, cm.doc.direction)
|
||||
if (!bidi) return moveLogically(line, start, dir)
|
||||
@@ -109,3 +113,4 @@
|
||||
// Case 4: Nowhere to move
|
||||
return null
|
||||
+ */
|
||||
}
|
||||
diff -NarU2 CodeMirror-orig/src/line/line_data.js CodeMirror-edit/src/line/line_data.js
|
||||
--- CodeMirror-orig/src/line/line_data.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/line/line_data.js 2020-05-02 03:17:02.785065000 +0200
|
||||
@@ -79,6 +79,6 @@
|
||||
// Optionally wire in some hacks into the token-rendering
|
||||
// algorithm, to deal with browser quirks.
|
||||
- if (hasBadBidiRects(cm.display.measure) && (order = getOrder(line, cm.doc.direction)))
|
||||
- builder.addToken = buildTokenBadBidi(builder.addToken, order)
|
||||
+ //if (hasBadBidiRects(cm.display.measure) && (order = getOrder(line, cm.doc.direction)))
|
||||
+ // builder.addToken = buildTokenBadBidi(builder.addToken, order)
|
||||
builder.map = []
|
||||
let allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line)
|
||||
diff -NarU2 CodeMirror-orig/src/measurement/position_measurement.js CodeMirror-edit/src/measurement/position_measurement.js
|
||||
--- CodeMirror-orig/src/measurement/position_measurement.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/measurement/position_measurement.js 2020-05-02 03:35:20.674159600 +0200
|
||||
@@ -380,5 +380,6 @@
|
||||
sticky = "after"
|
||||
}
|
||||
- if (!order) return get(sticky == "before" ? ch - 1 : ch, sticky == "before")
|
||||
+ /*if (!order)*/ return get(sticky == "before" ? ch - 1 : ch, sticky == "before")
|
||||
+ /*
|
||||
|
||||
function getBidi(ch, partPos, invert) {
|
||||
@@ -391,4 +392,5 @@
|
||||
if (other != null) val.other = getBidi(ch, other, sticky != "before")
|
||||
return val
|
||||
+ */
|
||||
}
|
||||
|
||||
@@ -468,4 +470,5 @@
|
||||
let begin = 0, end = lineObj.text.length, ltr = true
|
||||
|
||||
+ /*
|
||||
let order = getOrder(lineObj, cm.doc.direction)
|
||||
// If the line isn't plain left-to-right text, first figure out
|
||||
@@ -482,4 +485,5 @@
|
||||
end = ltr ? part.to : part.from - 1
|
||||
}
|
||||
+ */
|
||||
|
||||
// A binary search to find the first character whose bounding box
|
||||
@@ -526,4 +530,5 @@
|
||||
}
|
||||
|
||||
+/*
|
||||
function coordsBidiPart(cm, lineObj, lineNo, preparedMeasure, order, x, y) {
|
||||
// Bidi parts are sorted left-to-right, and in a non-line-wrapping
|
||||
@@ -580,4 +585,5 @@
|
||||
return part
|
||||
}
|
||||
+*/
|
||||
|
||||
let measureText
|
||||
diff -NarU2 CodeMirror-orig/src/util/bidi.js CodeMirror-edit/src/util/bidi.js
|
||||
--- CodeMirror-orig/src/util/bidi.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/util/bidi.js 2020-05-02 03:12:44.418649800 +0200
|
||||
@@ -4,5 +4,5 @@
|
||||
|
||||
export function iterateBidiSections(order, from, to, f) {
|
||||
- if (!order) return f(from, to, "ltr", 0)
|
||||
+ /*if (!order)*/ return f(from, to, "ltr", 0) /*
|
||||
let found = false
|
||||
for (let i = 0; i < order.length; ++i) {
|
||||
@@ -14,4 +14,5 @@
|
||||
}
|
||||
if (!found) f(from, to, "ltr")
|
||||
+*/
|
||||
}
|
||||
|
||||
@@ -32,5 +33,7 @@
|
||||
}
|
||||
}
|
||||
- return found != null ? found : bidiOther
|
||||
+ var ret = found != null ? found : bidiOther
|
||||
+ console.log("getBidiPartAt(%s,%s,%s) => [%s]", order, ch, sticky, ret)
|
||||
+ return ret
|
||||
}
|
||||
|
||||
@@ -55,4 +58,7 @@
|
||||
// N (ON): Other Neutrals
|
||||
|
||||
+let bidiOrdering = (function() { return function(str, direction) { return false; }})();
|
||||
+/*
|
||||
+
|
||||
// Returns null if characters are ordered as they appear
|
||||
// (left-to-right), or an array of sections ({from, to, level}
|
||||
@@ -81,5 +87,5 @@
|
||||
}
|
||||
|
||||
- return function(str, direction) {
|
||||
+ var fun = function(str, direction) {
|
||||
let outerType = direction == "ltr" ? "L" : "R"
|
||||
|
||||
@@ -204,12 +210,16 @@
|
||||
return direction == "rtl" ? order.reverse() : order
|
||||
}
|
||||
-})()
|
||||
|
||||
+ return function(str, direction) {
|
||||
+ var ret = fun(str, direction);
|
||||
+ console.log("bidiOrdering inner ([%s], %s) => [%s]", str, direction, ret);
|
||||
+ return ret;
|
||||
+ }
|
||||
+})()
|
||||
+*/
|
||||
// Get the bidi ordering for the given line (and cache it). Returns
|
||||
// false for lines that are fully left-to-right, and an array of
|
||||
// BidiSpan objects otherwise.
|
||||
export function getOrder(line, direction) {
|
||||
- let order = line.order
|
||||
- if (order == null) order = line.order = bidiOrdering(line.text, direction)
|
||||
- return order
|
||||
+ return false;
|
||||
}
|
||||
diff -NarU2 CodeMirror-orig/src/util/feature_detection.js CodeMirror-edit/src/util/feature_detection.js
|
||||
--- CodeMirror-orig/src/util/feature_detection.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/util/feature_detection.js 2020-05-02 03:16:21.085621400 +0200
|
||||
@@ -25,4 +25,5 @@
|
||||
}
|
||||
|
||||
+/*
|
||||
// Feature-detect IE's crummy client rect reporting for bidi text
|
||||
let badBidiRects
|
||||
@@ -36,4 +37,5 @@
|
||||
return badBidiRects = (r1.right - r0.right < 3)
|
||||
}
|
||||
+*/
|
||||
|
||||
// See if "".split is the broken IE version, if so, provide an
|
||||
95
scripts/deps-docker/easymde-ln.patch
Normal file
@@ -0,0 +1,95 @@
|
||||
diff -NarU2 easymde-mod1/src/js/easymde.js easymde-edit/src/js/easymde.js
|
||||
--- easymde-mod1/src/js/easymde.js 2020-05-01 14:34:19.878774400 +0200
|
||||
+++ easymde-edit/src/js/easymde.js 2020-05-01 21:24:44.142611200 +0200
|
||||
@@ -2189,4 +2189,5 @@
|
||||
};
|
||||
|
||||
+
|
||||
EasyMDE.prototype.createSideBySide = function () {
|
||||
var cm = this.codemirror;
|
||||
@@ -2223,12 +2224,80 @@
|
||||
}
|
||||
pScroll = true;
|
||||
- var height = v.getScrollInfo().height - v.getScrollInfo().clientHeight;
|
||||
- var ratio = parseFloat(v.getScrollInfo().top) / height;
|
||||
- var move = (preview.scrollHeight - preview.clientHeight) * ratio;
|
||||
- preview.scrollTop = move;
|
||||
+ var md_vp = v.getScrollInfo();
|
||||
+ // viewport top: top
|
||||
+ // viewport size: clientHeight
|
||||
+ // document size: height
|
||||
+ var md_scroll_y = md_vp.top + md_vp.clientHeight / 2;
|
||||
+ var md_center_n = cm.lineAtHeight(md_scroll_y, 'local') + 1;
|
||||
+ var md_next_n = md_center_n;
|
||||
+ var md_top = cm.heightAtLine(md_center_n - 1, 'local');
|
||||
+ while (md_next_n < cm.lineCount())
|
||||
+ if (cm.getLine(md_next_n++).replace(/\s+/g, '').length > 0)
|
||||
+ break;
|
||||
+ var md_end = cm.heightAtLine(md_next_n - 1, 'local');
|
||||
+ var md_frac = (md_scroll_y - md_top) * 1.0 / (md_end - md_top);
|
||||
+ var get_pre_line = function(line_n, increase) {
|
||||
+ var end = 0;
|
||||
+ var step = -1;
|
||||
+ if (increase) {
|
||||
+ step = 1;
|
||||
+ end = line_n + 1000;
|
||||
+ }
|
||||
+ // there might be multiple elements in the marked.js output,
|
||||
+ // take the element with the biggest height
|
||||
+ var biggest = -1;
|
||||
+ var line_dom = null;
|
||||
+ for (; line_n != end; line_n += step) {
|
||||
+ var hits = document.querySelectorAll('.editor-preview-side *[data-ln=\'' + line_n + '\']');
|
||||
+ for (var i = 0; i < hits.length; i++) {
|
||||
+ var hit_size = hits[i].offsetHeight;
|
||||
+ if (biggest < hit_size) {
|
||||
+ biggest = hit_size;
|
||||
+ line_dom = hits[i];
|
||||
+ }
|
||||
+ }
|
||||
+ if (line_dom) {
|
||||
+ var ret_y = 0;
|
||||
+ var el = line_dom;
|
||||
+ while (el && (el.getAttribute('class') + '').indexOf('editor-preview-side') < 0) {
|
||||
+ ret_y += el.offsetTop;
|
||||
+ el = el.offsetParent;
|
||||
+ }
|
||||
+ return [line_n, line_dom, ret_y];
|
||||
+ }
|
||||
+ }
|
||||
+ return null;
|
||||
+ };
|
||||
+ var pre1 = get_pre_line(md_center_n, false);
|
||||
+ var pre2 = get_pre_line(pre1[0] + 1, true) ||
|
||||
+ [cm.lineCount(), null, preview.scrollHeight];
|
||||
+
|
||||
+ //console.log('code-center %d, frac %.2f, pre [%d,%d] [%d,%d]',
|
||||
+ // md_center_n, md_frac, pre1[0], pre1[2], pre2[0], pre2[2]);
|
||||
+
|
||||
+ // [0] is the markdown line which matches that preview y-pos
|
||||
+ // and since not all preview lines are tagged with a line-number
|
||||
+ // take the lineno diff and divide it by the distance
|
||||
+ var pre_frac = md_frac / ((pre2[0] - pre1[0]) / (md_next_n - md_center_n));
|
||||
+
|
||||
+ // then use that fraction for the scroll offset
|
||||
+ var pre_y = pre1[2] + (pre2[2] - pre1[2]) * pre_frac;
|
||||
+
|
||||
+ // unless we couldn't match the markdown line exactly to any preview line
|
||||
+ if (md_center_n > pre1[0] && md_center_n < pre2[0])
|
||||
+ pre_y = pre2[2];
|
||||
+
|
||||
+ // except jump to the top or bottom if we're close enough
|
||||
+ if (md_vp.top < 32)
|
||||
+ pre_y = 0;
|
||||
+ else if (md_vp.top + 32 >= md_vp.height - md_vp.clientHeight)
|
||||
+ pre_y = preview.scrollHeight;
|
||||
+
|
||||
+ preview.scrollTop = pre_y - preview.clientHeight / 2;
|
||||
});
|
||||
|
||||
// Syncs scroll preview -> editor
|
||||
- preview.onscroll = function () {
|
||||
+ // disabled since it should be possible to intentionally unsync
|
||||
+ preview.onscroll_fgsfds = function () {
|
||||
if (pScroll) {
|
||||
pScroll = false;
|
||||
52
scripts/deps-docker/easymde.patch
Normal file
@@ -0,0 +1,52 @@
|
||||
diff -NarU2 easymde-orig/gulpfile.js easymde-mod1/gulpfile.js
|
||||
--- easymde-orig/gulpfile.js 2020-04-06 14:09:36.000000000 +0200
|
||||
+++ easymde-mod1/gulpfile.js 2020-05-01 14:33:52.260175200 +0200
|
||||
@@ -25,5 +25,4 @@
|
||||
'./node_modules/codemirror/lib/codemirror.css',
|
||||
'./src/css/*.css',
|
||||
- './node_modules/codemirror-spell-checker/src/css/spell-checker.css',
|
||||
];
|
||||
|
||||
diff -NarU2 easymde-orig/package.json easymde-mod1/package.json
|
||||
--- easymde-orig/package.json 2020-04-06 14:09:36.000000000 +0200
|
||||
+++ easymde-mod1/package.json 2020-05-01 14:33:57.189975800 +0200
|
||||
@@ -21,5 +21,4 @@
|
||||
"dependencies": {
|
||||
"codemirror": "^5.52.2",
|
||||
- "codemirror-spell-checker": "1.1.2",
|
||||
"marked": "^0.8.2"
|
||||
},
|
||||
diff -NarU2 easymde-orig/src/js/easymde.js easymde-mod1/src/js/easymde.js
|
||||
--- easymde-orig/src/js/easymde.js 2020-04-06 14:09:36.000000000 +0200
|
||||
+++ easymde-mod1/src/js/easymde.js 2020-05-01 14:34:19.878774400 +0200
|
||||
@@ -11,5 +11,4 @@
|
||||
require('codemirror/mode/gfm/gfm.js');
|
||||
require('codemirror/mode/xml/xml.js');
|
||||
-var CodeMirrorSpellChecker = require('codemirror-spell-checker');
|
||||
var marked = require('marked/lib/marked');
|
||||
|
||||
@@ -1889,18 +1888,7 @@
|
||||
|
||||
var mode, backdrop;
|
||||
- if (options.spellChecker !== false) {
|
||||
- mode = 'spell-checker';
|
||||
- backdrop = options.parsingConfig;
|
||||
- backdrop.name = 'gfm';
|
||||
- backdrop.gitHubSpice = false;
|
||||
-
|
||||
- CodeMirrorSpellChecker({
|
||||
- codeMirrorInstance: CodeMirror,
|
||||
- });
|
||||
- } else {
|
||||
mode = options.parsingConfig;
|
||||
mode.name = 'gfm';
|
||||
mode.gitHubSpice = false;
|
||||
- }
|
||||
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
@@ -1927,5 +1915,4 @@
|
||||
configureMouse: configureMouse,
|
||||
inputStyle: (options.inputStyle != undefined) ? options.inputStyle : isMobile() ? 'contenteditable' : 'textarea',
|
||||
- spellcheck: (options.nativeSpellcheck != undefined) ? options.nativeSpellcheck : true,
|
||||
});
|
||||
|
||||
10
scripts/deps-docker/markdown-it.patch
Normal file
@@ -0,0 +1,10 @@
|
||||
diff -NarU1 markdown-it-10.0.0-orig/lib/common/entities.js markdown-it-10.0.0-edit/lib/common/entities.js
|
||||
--- markdown-it-10.0.0-orig/lib/common/entities.js 2019-09-10 21:39:58.000000000 +0000
|
||||
+++ markdown-it-10.0.0-edit/lib/common/entities.js 2020-04-26 10:24:33.043023331 +0000
|
||||
@@ -5,2 +5,5 @@
|
||||
/*eslint quotes:0*/
|
||||
-module.exports = require('entities/lib/maps/entities.json');
|
||||
+//module.exports = require('entities/lib/maps/entities.json');
|
||||
+module.exports = {
|
||||
+ "amp": "&", "quot": "\"", "gt": ">", "lt": "<"
|
||||
+}
|
||||
298
scripts/deps-docker/marked-ln.patch
Normal file
@@ -0,0 +1,298 @@
|
||||
diff --git a/src/Lexer.js b/src/Lexer.js
|
||||
adds linetracking to marked.js v1.0.0 +git;
|
||||
add data-ln="%d" to most tags, %d is the source markdown line
|
||||
--- a/src/Lexer.js
|
||||
+++ b/src/Lexer.js
|
||||
@@ -49,4 +49,5 @@ function mangle(text) {
|
||||
module.exports = class Lexer {
|
||||
constructor(options) {
|
||||
+ this.ln = 1; // like most editors, start couting from 1
|
||||
this.tokens = [];
|
||||
this.tokens.links = Object.create(null);
|
||||
@@ -108,4 +109,15 @@ module.exports = class Lexer {
|
||||
}
|
||||
|
||||
+ set_ln(token, ln = this.ln) {
|
||||
+ // assigns ln (the current line numer) to the token,
|
||||
+ // then bump this.ln by the number of newlines in the contents
|
||||
+ //
|
||||
+ // if ln is set, also assigns the line counter to a new value
|
||||
+ // (usually a backup value from before a call into a subparser
|
||||
+ // which bumped the linecounter by a subset of the newlines)
|
||||
+ token.ln = ln;
|
||||
+ this.ln = ln + (token.raw.match(/\n/g) || []).length;
|
||||
+ }
|
||||
+
|
||||
/**
|
||||
* Lexing
|
||||
@@ -113,10 +125,15 @@ module.exports = class Lexer {
|
||||
blockTokens(src, tokens = [], top = true) {
|
||||
src = src.replace(/^ +$/gm, '');
|
||||
- let token, i, l, lastToken;
|
||||
+ let token, i, l, lastToken, ln;
|
||||
|
||||
while (src) {
|
||||
+ // this.ln will be bumped by recursive calls into this func;
|
||||
+ // reset the count and rely on the outermost token's raw only
|
||||
+ ln = this.ln;
|
||||
+
|
||||
// newline
|
||||
if (token = this.tokenizer.space(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token); // is \n if not type
|
||||
if (token.type) {
|
||||
tokens.push(token);
|
||||
@@ -128,4 +145,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.code(src, tokens)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
if (token.type) {
|
||||
tokens.push(token);
|
||||
@@ -141,4 +159,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.fences(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -148,4 +167,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.heading(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -155,4 +175,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.nptable(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -162,4 +183,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.hr(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -170,4 +192,7 @@ module.exports = class Lexer {
|
||||
src = src.substring(token.raw.length);
|
||||
token.tokens = this.blockTokens(token.text, [], top);
|
||||
+ // recursive call to blockTokens probably bumped this.ln,
|
||||
+ // token.raw is more reliable so reset this.ln and use that
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -180,5 +205,9 @@ module.exports = class Lexer {
|
||||
for (i = 0; i < l; i++) {
|
||||
token.items[i].tokens = this.blockTokens(token.items[i].text, [], false);
|
||||
+ // list entries don't bump the linecounter, so let's
|
||||
+ this.ln++;
|
||||
}
|
||||
+ // then reset like blockquote
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -188,4 +217,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.html(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -195,4 +225,5 @@ module.exports = class Lexer {
|
||||
if (top && (token = this.tokenizer.def(src))) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
if (!this.tokens.links[token.tag]) {
|
||||
this.tokens.links[token.tag] = {
|
||||
@@ -207,4 +238,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.table(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -214,4 +246,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.lheading(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -221,4 +254,5 @@ module.exports = class Lexer {
|
||||
if (top && (token = this.tokenizer.paragraph(src))) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -228,4 +262,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.text(src, tokens)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
if (token.type) {
|
||||
tokens.push(token);
|
||||
@@ -263,4 +298,7 @@ module.exports = class Lexer {
|
||||
for (i = 0; i < l; i++) {
|
||||
token = tokens[i];
|
||||
+ // this.ln is at EOF when inline() is invoked;
|
||||
+ // all this affects <br> tags only so no biggie if it breaks
|
||||
+ this.ln = token.ln || this.ln;
|
||||
switch (token.type) {
|
||||
case 'paragraph':
|
||||
@@ -386,4 +424,6 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.br(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ // no need to reset (no more blockTokens anyways)
|
||||
+ token.ln = this.ln++;
|
||||
tokens.push(token);
|
||||
continue;
|
||||
diff --git a/src/Parser.js b/src/Parser.js
|
||||
--- a/src/Parser.js
|
||||
+++ b/src/Parser.js
|
||||
@@ -18,4 +18,5 @@ module.exports = class Parser {
|
||||
this.textRenderer = new TextRenderer();
|
||||
this.slugger = new Slugger();
|
||||
+ this.ln = 0; // error indicator; should always be set >=1 from tokens
|
||||
}
|
||||
|
||||
@@ -55,4 +56,9 @@ module.exports = class Parser {
|
||||
for (i = 0; i < l; i++) {
|
||||
token = tokens[i];
|
||||
+ // take line-numbers from tokens whenever possible
|
||||
+ // and update the renderer's html attribute with the new value
|
||||
+ this.ln = token.ln || this.ln;
|
||||
+ this.renderer.tag_ln(this.ln);
|
||||
+
|
||||
switch (token.type) {
|
||||
case 'space': {
|
||||
@@ -105,7 +111,10 @@ module.exports = class Parser {
|
||||
}
|
||||
|
||||
- body += this.renderer.tablerow(cell);
|
||||
+ // the +2 is to skip the table header
|
||||
+ body += this.renderer.tag_ln(token.ln + j + 2).tablerow(cell);
|
||||
}
|
||||
- out += this.renderer.table(header, body);
|
||||
+ // the html attribute is now at the end of the table,
|
||||
+ // reset it before writing the <table> tag now
|
||||
+ out += this.renderer.tag_ln(token.ln).table(header, body);
|
||||
continue;
|
||||
}
|
||||
@@ -148,8 +157,12 @@ module.exports = class Parser {
|
||||
|
||||
itemBody += this.parse(item.tokens, loose);
|
||||
- body += this.renderer.listitem(itemBody, task, checked);
|
||||
+ // similar to tables, writing contents before the <ul> tag
|
||||
+ // so update the tag attribute as we go
|
||||
+ // (assuming all list entries got tagged with a source-line, probably safe w)
|
||||
+ body += this.renderer.tag_ln((item.tokens[0] || token).ln).listitem(itemBody, task, checked);
|
||||
}
|
||||
|
||||
- out += this.renderer.list(body, ordered, start);
|
||||
+ // then reset to the <ul>'s correct line number and write it
|
||||
+ out += this.renderer.tag_ln(token.ln).list(body, ordered, start);
|
||||
continue;
|
||||
}
|
||||
@@ -160,5 +173,6 @@ module.exports = class Parser {
|
||||
}
|
||||
case 'paragraph': {
|
||||
- out += this.renderer.paragraph(this.parseInline(token.tokens));
|
||||
+ let t = this.parseInline(token.tokens);
|
||||
+ out += this.renderer.tag_ln(token.ln).paragraph(t);
|
||||
continue;
|
||||
}
|
||||
@@ -199,4 +213,6 @@ module.exports = class Parser {
|
||||
for (i = 0; i < l; i++) {
|
||||
token = tokens[i];
|
||||
+ // another thing that only affects <br/> and other inlines
|
||||
+ this.ln = token.ln || this.ln;
|
||||
switch (token.type) {
|
||||
case 'escape': {
|
||||
@@ -229,5 +245,7 @@ module.exports = class Parser {
|
||||
}
|
||||
case 'br': {
|
||||
- out += renderer.br();
|
||||
+ // update the html attribute before writing each <br/>,
|
||||
+ // don't care about the others
|
||||
+ out += renderer.tag_ln(this.ln).br();
|
||||
break;
|
||||
}
|
||||
diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
--- a/src/Renderer.js
|
||||
+++ b/src/Renderer.js
|
||||
@@ -11,6 +11,12 @@ module.exports = class Renderer {
|
||||
constructor(options) {
|
||||
this.options = options || defaults;
|
||||
+ this.ln = "";
|
||||
}
|
||||
|
||||
+ tag_ln(n) {
|
||||
+ this.ln = ' data-ln="' + n + '"';
|
||||
+ return this;
|
||||
+ };
|
||||
+
|
||||
code(code, infostring, escaped) {
|
||||
const lang = (infostring || '').match(/\S*/)[0];
|
||||
@@ -24,10 +30,10 @@ module.exports = class Renderer {
|
||||
|
||||
if (!lang) {
|
||||
- return '<pre><code>'
|
||||
+ return '<pre' + this.ln + '><code>'
|
||||
+ (escaped ? code : escape(code, true))
|
||||
+ '</code></pre>\n';
|
||||
}
|
||||
|
||||
- return '<pre><code class="'
|
||||
+ return '<pre' + this.ln + '><code class="'
|
||||
+ this.options.langPrefix
|
||||
+ escape(lang, true)
|
||||
@@ -38,5 +44,5 @@ module.exports = class Renderer {
|
||||
|
||||
blockquote(quote) {
|
||||
- return '<blockquote>\n' + quote + '</blockquote>\n';
|
||||
+ return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n';
|
||||
}
|
||||
|
||||
@@ -49,4 +55,5 @@ module.exports = class Renderer {
|
||||
return '<h'
|
||||
+ level
|
||||
+ + this.ln
|
||||
+ ' id="'
|
||||
+ this.options.headerPrefix
|
||||
@@ -59,5 +66,5 @@ module.exports = class Renderer {
|
||||
}
|
||||
// ignore IDs
|
||||
- return '<h' + level + '>' + text + '</h' + level + '>\n';
|
||||
+ return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n';
|
||||
}
|
||||
|
||||
@@ -73,5 +80,5 @@ module.exports = class Renderer {
|
||||
|
||||
listitem(text) {
|
||||
- return '<li>' + text + '</li>\n';
|
||||
+ return '<li' + this.ln + '>' + text + '</li>\n';
|
||||
}
|
||||
|
||||
@@ -85,5 +92,5 @@ module.exports = class Renderer {
|
||||
|
||||
paragraph(text) {
|
||||
- return '<p>' + text + '</p>\n';
|
||||
+ return '<p' + this.ln + '>' + text + '</p>\n';
|
||||
}
|
||||
|
||||
@@ -100,5 +107,5 @@ module.exports = class Renderer {
|
||||
|
||||
tablerow(content) {
|
||||
- return '<tr>\n' + content + '</tr>\n';
|
||||
+ return '<tr' + this.ln + '>\n' + content + '</tr>\n';
|
||||
}
|
||||
|
||||
@@ -125,5 +132,5 @@ module.exports = class Renderer {
|
||||
|
||||
br() {
|
||||
- return this.options.xhtml ? '<br/>' : '<br>';
|
||||
+ return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>';
|
||||
}
|
||||
|
||||
@@ -151,5 +158,5 @@ module.exports = class Renderer {
|
||||
}
|
||||
|
||||
- let out = '<img src="' + href + '" alt="' + text + '"';
|
||||
+ let out = '<img' + this.ln + ' src="' + href + '" alt="' + text + '"';
|
||||
if (title) {
|
||||
out += ' title="' + title + '"';
|
||||
340
scripts/deps-docker/marked.patch
Normal file
@@ -0,0 +1,340 @@
|
||||
diff --git a/src/Lexer.js b/src/Lexer.js
|
||||
--- a/src/Lexer.js
|
||||
+++ b/src/Lexer.js
|
||||
@@ -5,5 +5,5 @@ const { block, inline } = require('./rules.js');
|
||||
/**
|
||||
* smartypants text replacement
|
||||
- */
|
||||
+ *
|
||||
function smartypants(text) {
|
||||
return text
|
||||
@@ -26,5 +26,5 @@ function smartypants(text) {
|
||||
/**
|
||||
* mangle email addresses
|
||||
- */
|
||||
+ *
|
||||
function mangle(text) {
|
||||
let out = '',
|
||||
@@ -439,5 +439,5 @@ module.exports = class Lexer {
|
||||
|
||||
// autolink
|
||||
- if (token = this.tokenizer.autolink(src, mangle)) {
|
||||
+ if (token = this.tokenizer.autolink(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
@@ -446,5 +446,5 @@ module.exports = class Lexer {
|
||||
|
||||
// url (gfm)
|
||||
- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
|
||||
+ if (!inLink && (token = this.tokenizer.url(src))) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
@@ -453,5 +453,5 @@ module.exports = class Lexer {
|
||||
|
||||
// text
|
||||
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
|
||||
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
--- a/src/Renderer.js
|
||||
+++ b/src/Renderer.js
|
||||
@@ -140,5 +140,5 @@ module.exports = class Renderer {
|
||||
|
||||
link(href, title, text) {
|
||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
+ href = cleanUrl(this.options.baseUrl, href);
|
||||
if (href === null) {
|
||||
return text;
|
||||
@@ -153,5 +153,5 @@ module.exports = class Renderer {
|
||||
|
||||
image(href, title, text) {
|
||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
+ href = cleanUrl(this.options.baseUrl, href);
|
||||
if (href === null) {
|
||||
return text;
|
||||
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
--- a/src/Tokenizer.js
|
||||
+++ b/src/Tokenizer.js
|
||||
@@ -287,11 +287,8 @@ module.exports = class Tokenizer {
|
||||
if (cap) {
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
- ? 'paragraph'
|
||||
- : 'html',
|
||||
+ type: 'html',
|
||||
raw: cap[0],
|
||||
- pre: !this.options.sanitizer
|
||||
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
|
||||
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
|
||||
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
|
||||
+ text: cap[0]
|
||||
};
|
||||
}
|
||||
@@ -421,15 +418,9 @@ module.exports = class Tokenizer {
|
||||
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
- ? 'text'
|
||||
- : 'html',
|
||||
+ type: 'html',
|
||||
raw: cap[0],
|
||||
inLink,
|
||||
inRawBlock,
|
||||
- text: this.options.sanitize
|
||||
- ? (this.options.sanitizer
|
||||
- ? this.options.sanitizer(cap[0])
|
||||
- : escape(cap[0]))
|
||||
- : cap[0]
|
||||
+ text: cap[0]
|
||||
};
|
||||
}
|
||||
@@ -550,10 +541,10 @@ module.exports = class Tokenizer {
|
||||
}
|
||||
|
||||
- autolink(src, mangle) {
|
||||
+ autolink(src) {
|
||||
const cap = this.rules.inline.autolink.exec(src);
|
||||
if (cap) {
|
||||
let text, href;
|
||||
if (cap[2] === '@') {
|
||||
- text = escape(this.options.mangle ? mangle(cap[1]) : cap[1]);
|
||||
+ text = escape(cap[1]);
|
||||
href = 'mailto:' + text;
|
||||
} else {
|
||||
@@ -578,10 +569,10 @@ module.exports = class Tokenizer {
|
||||
}
|
||||
|
||||
- url(src, mangle) {
|
||||
+ url(src) {
|
||||
let cap;
|
||||
if (cap = this.rules.inline.url.exec(src)) {
|
||||
let text, href;
|
||||
if (cap[2] === '@') {
|
||||
- text = escape(this.options.mangle ? mangle(cap[0]) : cap[0]);
|
||||
+ text = escape(cap[0]);
|
||||
href = 'mailto:' + text;
|
||||
} else {
|
||||
@@ -615,12 +606,12 @@ module.exports = class Tokenizer {
|
||||
}
|
||||
|
||||
- inlineText(src, inRawBlock, smartypants) {
|
||||
+ inlineText(src, inRawBlock) {
|
||||
const cap = this.rules.inline.text.exec(src);
|
||||
if (cap) {
|
||||
let text;
|
||||
if (inRawBlock) {
|
||||
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
|
||||
+ text = cap[0];
|
||||
} else {
|
||||
- text = escape(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
|
||||
+ text = escape(cap[0]);
|
||||
}
|
||||
return {
|
||||
diff --git a/src/defaults.js b/src/defaults.js
|
||||
--- a/src/defaults.js
|
||||
+++ b/src/defaults.js
|
||||
@@ -8,12 +8,8 @@ function getDefaults() {
|
||||
highlight: null,
|
||||
langPrefix: 'language-',
|
||||
- mangle: true,
|
||||
pedantic: false,
|
||||
renderer: null,
|
||||
- sanitize: false,
|
||||
- sanitizer: null,
|
||||
silent: false,
|
||||
smartLists: false,
|
||||
- smartypants: false,
|
||||
tokenizer: null,
|
||||
walkTokens: null,
|
||||
diff --git a/src/helpers.js b/src/helpers.js
|
||||
--- a/src/helpers.js
|
||||
+++ b/src/helpers.js
|
||||
@@ -64,18 +64,5 @@ function edit(regex, opt) {
|
||||
const nonWordAndColonTest = /[^\w:]/g;
|
||||
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
|
||||
-function cleanUrl(sanitize, base, href) {
|
||||
- if (sanitize) {
|
||||
- let prot;
|
||||
- try {
|
||||
- prot = decodeURIComponent(unescape(href))
|
||||
- .replace(nonWordAndColonTest, '')
|
||||
- .toLowerCase();
|
||||
- } catch (e) {
|
||||
- return null;
|
||||
- }
|
||||
- if (prot.indexOf('javascript:') === 0 || prot.indexOf('vbscript:') === 0 || prot.indexOf('data:') === 0) {
|
||||
- return null;
|
||||
- }
|
||||
- }
|
||||
+function cleanUrl(base, href) {
|
||||
if (base && !originIndependentUrl.test(href)) {
|
||||
href = resolveUrl(base, href);
|
||||
@@ -223,10 +210,4 @@ function findClosingBracket(str, b) {
|
||||
}
|
||||
|
||||
-function checkSanitizeDeprecation(opt) {
|
||||
- if (opt && opt.sanitize && !opt.silent) {
|
||||
- console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options');
|
||||
- }
|
||||
-}
|
||||
-
|
||||
module.exports = {
|
||||
escape,
|
||||
@@ -239,5 +220,4 @@ module.exports = {
|
||||
splitCells,
|
||||
rtrim,
|
||||
- findClosingBracket,
|
||||
- checkSanitizeDeprecation
|
||||
+ findClosingBracket
|
||||
};
|
||||
diff --git a/src/marked.js b/src/marked.js
|
||||
--- a/src/marked.js
|
||||
+++ b/src/marked.js
|
||||
@@ -7,5 +7,4 @@ const Slugger = require('./Slugger.js');
|
||||
const {
|
||||
merge,
|
||||
- checkSanitizeDeprecation,
|
||||
escape
|
||||
} = require('./helpers.js');
|
||||
@@ -35,5 +34,4 @@ function marked(src, opt, callback) {
|
||||
|
||||
opt = merge({}, marked.defaults, opt || {});
|
||||
- checkSanitizeDeprecation(opt);
|
||||
|
||||
if (callback) {
|
||||
@@ -108,5 +106,5 @@ function marked(src, opt, callback) {
|
||||
return Parser.parse(tokens, opt);
|
||||
} catch (e) {
|
||||
- e.message += '\nPlease report this to https://github.com/markedjs/marked.';
|
||||
+ e.message += '\nmake issue @ https://github.com/9001/copyparty';
|
||||
if (opt.silent) {
|
||||
return '<p>An error occurred:</p><pre>'
|
||||
diff --git a/test/bench.js b/test/bench.js
|
||||
--- a/test/bench.js
|
||||
+++ b/test/bench.js
|
||||
@@ -33,5 +33,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
});
|
||||
@@ -45,5 +44,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
});
|
||||
@@ -58,5 +56,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
});
|
||||
@@ -70,5 +67,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
});
|
||||
@@ -83,5 +79,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: true,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
});
|
||||
@@ -95,5 +90,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: true,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
});
|
||||
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
|
||||
--- a/test/specs/run-spec.js
|
||||
+++ b/test/specs/run-spec.js
|
||||
@@ -22,8 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
|
||||
}
|
||||
|
||||
- if (spec.options.sanitizer) {
|
||||
- // eslint-disable-next-line no-eval
|
||||
- spec.options.sanitizer = eval(spec.options.sanitizer);
|
||||
- }
|
||||
|
||||
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
|
||||
@@ -53,3 +49,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
|
||||
runSpecs('New', './new');
|
||||
runSpecs('ReDOS', './redos');
|
||||
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
|
||||
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
|
||||
--- a/test/unit/Lexer-spec.js
|
||||
+++ b/test/unit/Lexer-spec.js
|
||||
@@ -465,5 +465,5 @@ a | b
|
||||
});
|
||||
|
||||
- it('sanitize', () => {
|
||||
+ /*it('sanitize', () => {
|
||||
expectTokens({
|
||||
md: '<div>html</div>',
|
||||
@@ -483,5 +483,5 @@ a | b
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
});
|
||||
|
||||
@@ -587,5 +587,5 @@ a | b
|
||||
});
|
||||
|
||||
- it('html sanitize', () => {
|
||||
+ /*it('html sanitize', () => {
|
||||
expectInlineTokens({
|
||||
md: '<div>html</div>',
|
||||
@@ -597,5 +597,5 @@ a | b
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
|
||||
it('link', () => {
|
||||
@@ -909,5 +909,5 @@ a | b
|
||||
});
|
||||
|
||||
- it('autolink mangle email', () => {
|
||||
+ /*it('autolink mangle email', () => {
|
||||
expectInlineTokens({
|
||||
md: '<test@example.com>',
|
||||
@@ -929,5 +929,5 @@ a | b
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
|
||||
it('url', () => {
|
||||
@@ -966,5 +966,5 @@ a | b
|
||||
});
|
||||
|
||||
- it('url mangle email', () => {
|
||||
+ /*it('url mangle email', () => {
|
||||
expectInlineTokens({
|
||||
md: 'test@example.com',
|
||||
@@ -986,5 +986,5 @@ a | b
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
});
|
||||
|
||||
@@ -1002,5 +1002,5 @@ a | b
|
||||
});
|
||||
|
||||
- describe('smartypants', () => {
|
||||
+ /*describe('smartypants', () => {
|
||||
it('single quotes', () => {
|
||||
expectInlineTokens({
|
||||
@@ -1072,5 +1072,5 @@ a | b
|
||||
});
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
});
|
||||
});
|
||||
53
scripts/deps-docker/mini-fa.css
Normal file
@@ -0,0 +1,53 @@
|
||||
|
||||
/*
|
||||
that was the original copyright ^
|
||||
now here's a tiny subset of fontawesome
|
||||
*/
|
||||
|
||||
@font-face {
|
||||
font-family: 'fa';
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
font-display: block;
|
||||
src: url("/.cpr/deps/mini-fa.woff") format("woff");
|
||||
}
|
||||
|
||||
.fa,
|
||||
.fas,
|
||||
.far,
|
||||
.fal,
|
||||
.fad,
|
||||
.fab {
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
display: inline-block;
|
||||
font-style: normal;
|
||||
font-variant: normal;
|
||||
text-rendering: auto;
|
||||
line-height: 1;
|
||||
font-family: 'fa';
|
||||
font-weight: 400;
|
||||
}
|
||||
|
||||
:add
|
||||
arrows-alt
|
||||
bold
|
||||
code
|
||||
columns
|
||||
eraser
|
||||
eye
|
||||
heading
|
||||
image
|
||||
italic
|
||||
lightbulb
|
||||
link
|
||||
list-ol
|
||||
list-ul
|
||||
minus
|
||||
question-circle
|
||||
quote-left
|
||||
redo
|
||||
save
|
||||
strikethrough
|
||||
table
|
||||
undo
|
||||
31
scripts/deps-docker/mini-fa.sh
Normal file
@@ -0,0 +1,31 @@
|
||||
#!/bin/ash
set -e

orig_css="$(find /z/fontawesome-fre* -name fontawesome.css | head -n 1)"
orig_woff="$(find /z/fontawesome-fre* -name fa-solid-900.woff | head -n 1)"

# first grab the copyright meme
awk '1; / *\*\// {exit}' <"$orig_css" >/z/dist/mini-fa.css

# then add the static part of our css template
awk '/^:add/ {exit} 1' </z/mini-fa.css >>/z/dist/mini-fa.css

# then take the list of icons to include
awk 'o; /^:add/ {o=1}' </z/mini-fa.css |
while IFS= read -r g; do
	# and grab them from the upstream css
	awk 'o{gsub(/[ ;]+/,"");print;exit} /^\.fa-'$g':before/ {o=1;printf "%s",$0}' <"$orig_css"
done >>/z/dist/mini-fa.css

# expecting this input btw:
# .fa-python:before {
# content: "\f3e2"; }

# get the codepoints (should produce lines like "f3e2")
awk '/:before .content:"\\/ {sub(/[^"]+"./,""); sub(/".*/,""); print}' </z/dist/mini-fa.css >/z/icon.list

# and finally create a woff with just our icons
pyftsubset "$orig_woff" --unicodes-file=/z/icon.list --no-ignore-missing-unicodes --flavor=woff --with-zopfli --output-file=/z/dist/no-pk/mini-fa.woff --verbose

# scp is easier, just want basic latin
pyftsubset /z/scp.woff2 --unicodes="20-7e,ab,b7,bb,2022" --no-ignore-missing-unicodes --flavor=woff2 --output-file=/z/dist/no-pk/scp.woff2 --verbose
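
the codepoint-extraction step above is the fragile part (it scrapes `.fa-NAME:before { content: "\fXXXX"; }` rules out of the upstream css with awk); here is a rough python sketch of the same idea, handy for sanity-checking the generated icon.list -- the filenames are assumptions for illustration, not part of the build:

```
#!/usr/bin/env python3
# sketch only: mirrors the awk codepoint grab in mini-fa.sh,
# assuming rules shaped like  .fa-python:before { content: "\f3e2"; }
import re

def icon_codepoints(css_text):
    # pull the hex escape out of every :before rule with a content value
    ptn = re.compile(r'\.fa-[\w-]+:before\s*\{\s*content:\s*"\\([0-9a-fA-F]+)"')
    return [m.group(1) for m in ptn.finditer(css_text)]

if __name__ == "__main__":
    with open("mini-fa.css", "r", encoding="utf-8") as f:
        for cp in icon_codepoints(f.read()):
            print(cp)  # one codepoint per line, e.g. "f3e2", like icon.list
```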
|
||||
214
scripts/deps-docker/showdown.patch
Normal file
@@ -0,0 +1,214 @@
|
||||
diff -NarU1 showdown-orig/Gruntfile.js showdown-mod/Gruntfile.js
|
||||
--- showdown-orig/Gruntfile.js 2020-04-23 06:22:01.486676149 +0000
|
||||
+++ showdown-mod/Gruntfile.js 2020-04-23 08:03:56.700219788 +0000
|
||||
@@ -27,3 +27,2 @@
|
||||
'src/subParsers/*.js',
|
||||
- 'src/subParsers/makeMarkdown/*.js',
|
||||
'src/loader.js'
|
||||
diff -NarU1 showdown-orig/src/converter.js showdown-mod/src/converter.js
|
||||
--- showdown-orig/src/converter.js 2020-04-23 06:22:01.496676150 +0000
|
||||
+++ showdown-mod/src/converter.js 2020-04-23 08:20:11.056920123 +0000
|
||||
@@ -84,5 +84,5 @@
|
||||
|
||||
- if (options.extensions) {
|
||||
+ /*if (options.extensions) {
|
||||
showdown.helper.forEach(options.extensions, _parseExtension);
|
||||
- }
|
||||
+ }*/
|
||||
}
|
||||
@@ -95,3 +95,3 @@
|
||||
*/
|
||||
- function _parseExtension (ext, name) {
|
||||
+ /*function _parseExtension (ext, name) {
|
||||
|
||||
@@ -159,3 +159,3 @@
|
||||
*/
|
||||
- function legacyExtensionLoading (ext, name) {
|
||||
+ /*function legacyExtensionLoading (ext, name) {
|
||||
if (typeof ext === 'function') {
|
||||
@@ -351,3 +351,3 @@
|
||||
*/
|
||||
- this.makeMarkdown = this.makeMd = function (src, HTMLParser) {
|
||||
+ /*this.makeMarkdown = this.makeMd = function (src, HTMLParser) {
|
||||
|
||||
@@ -482,3 +482,3 @@
|
||||
*/
|
||||
- this.addExtension = function (extension, name) {
|
||||
+ /*this.addExtension = function (extension, name) {
|
||||
name = name || null;
|
||||
@@ -491,3 +491,3 @@
|
||||
*/
|
||||
- this.useExtension = function (extensionName) {
|
||||
+ /*this.useExtension = function (extensionName) {
|
||||
_parseExtension(extensionName);
|
||||
@@ -526,3 +526,3 @@
|
||||
*/
|
||||
- this.removeExtension = function (extension) {
|
||||
+ /*this.removeExtension = function (extension) {
|
||||
if (!showdown.helper.isArray(extension)) {
|
||||
@@ -549,3 +549,3 @@
|
||||
*/
|
||||
- this.getAllExtensions = function () {
|
||||
+ /*this.getAllExtensions = function () {
|
||||
return {
|
||||
diff -NarU1 showdown-orig/src/options.js showdown-mod/src/options.js
|
||||
--- showdown-orig/src/options.js 2020-04-23 06:22:01.496676150 +0000
|
||||
+++ showdown-mod/src/options.js 2020-04-23 08:24:29.176929018 +0000
|
||||
@@ -118,3 +118,3 @@
|
||||
},
|
||||
- ghMentions: {
|
||||
+ /*ghMentions: {
|
||||
defaultValue: false,
|
||||
@@ -127,3 +127,3 @@
|
||||
type: 'string'
|
||||
- },
|
||||
+ },*/
|
||||
encodeEmails: {
|
||||
diff -NarU1 showdown-orig/src/showdown.js showdown-mod/src/showdown.js
|
||||
--- showdown-orig/src/showdown.js 2020-04-23 06:22:01.496676150 +0000
|
||||
+++ showdown-mod/src/showdown.js 2020-04-23 08:25:01.976930148 +0000
|
||||
@@ -7,3 +7,2 @@
|
||||
parsers = {},
|
||||
- extensions = {},
|
||||
globalOptions = getDefaultOpts(true),
|
||||
@@ -25,5 +24,4 @@
|
||||
ghCompatibleHeaderId: true,
|
||||
- ghMentions: true,
|
||||
+ //ghMentions: true,
|
||||
backslashEscapesHTMLTags: true,
|
||||
- emoji: true,
|
||||
splitAdjacentBlockquotes: true
|
||||
@@ -48,3 +46,3 @@
|
||||
requireSpaceBeforeHeadingText: true,
|
||||
- ghMentions: false,
|
||||
+ //ghMentions: false,
|
||||
encodeEmails: true
|
||||
@@ -65,3 +63,2 @@
|
||||
*/
|
||||
-showdown.extensions = {};
|
||||
|
||||
@@ -193,3 +190,3 @@
|
||||
*/
|
||||
-showdown.extension = function (name, ext) {
|
||||
+/*showdown.extension = function (name, ext) {
|
||||
'use strict';
|
||||
@@ -235,3 +232,3 @@
|
||||
*/
|
||||
-showdown.getAllExtensions = function () {
|
||||
+/*showdown.getAllExtensions = function () {
|
||||
'use strict';
|
||||
@@ -244,3 +241,3 @@
|
||||
*/
|
||||
-showdown.removeExtension = function (name) {
|
||||
+/*showdown.removeExtension = function (name) {
|
||||
'use strict';
|
||||
@@ -252,3 +249,3 @@
|
||||
*/
|
||||
-showdown.resetExtensions = function () {
|
||||
+/*showdown.resetExtensions = function () {
|
||||
'use strict';
|
||||
@@ -263,3 +260,3 @@
|
||||
*/
|
||||
-function validate (extension, name) {
|
||||
+/*function validate (extension, name) {
|
||||
'use strict';
|
||||
@@ -370,3 +367,3 @@
|
||||
*/
|
||||
-showdown.validateExtension = function (ext) {
|
||||
+/*showdown.validateExtension = function (ext) {
|
||||
'use strict';
|
||||
@@ -380 +377,2 @@
|
||||
};
|
||||
+*/
|
||||
diff -NarU1 showdown-orig/src/subParsers/anchors.js showdown-mod/src/subParsers/anchors.js
|
||||
--- showdown-orig/src/subParsers/anchors.js 2020-04-23 06:22:01.496676150 +0000
|
||||
+++ showdown-mod/src/subParsers/anchors.js 2020-04-23 08:25:26.880264347 +0000
|
||||
@@ -76,3 +76,3 @@
|
||||
// Lastly handle GithubMentions if option is enabled
|
||||
- if (options.ghMentions) {
|
||||
+ /*if (options.ghMentions) {
|
||||
text = text.replace(/(^|\s)(\\)?(@([a-z\d]+(?:[a-z\d.-]+?[a-z\d]+)*))/gmi, function (wm, st, escape, mentions, username) {
|
||||
@@ -93,3 +93,3 @@
|
||||
});
|
||||
- }
|
||||
+ }*/
|
||||
|
||||
diff -NarU1 showdown-orig/src/subParsers/spanGamut.js showdown-mod/src/subParsers/spanGamut.js
|
||||
--- showdown-orig/src/subParsers/spanGamut.js 2020-04-23 06:22:01.496676150 +0000
|
||||
+++ showdown-mod/src/subParsers/spanGamut.js 2020-04-23 08:07:50.460227880 +0000
|
||||
@@ -22,3 +22,2 @@
|
||||
text = showdown.subParser('simplifiedAutoLinks')(text, options, globals);
|
||||
- text = showdown.subParser('emoji')(text, options, globals);
|
||||
text = showdown.subParser('underline')(text, options, globals);
|
||||
@@ -26,3 +25,2 @@
|
||||
text = showdown.subParser('strikethrough')(text, options, globals);
|
||||
- text = showdown.subParser('ellipsis')(text, options, globals);
|
||||
|
||||
diff -NarU1 showdown-orig/test/node/showdown.Converter.js showdown-mod/test/node/showdown.Converter.js
|
||||
--- showdown-orig/test/node/showdown.Converter.js 2020-04-23 06:22:01.520009484 +0000
|
||||
+++ showdown-mod/test/node/showdown.Converter.js 2020-04-23 08:14:58.086909318 +0000
|
||||
@@ -29,3 +29,3 @@
|
||||
|
||||
- describe('Converter.options extensions', function () {
|
||||
+ /*describe('Converter.options extensions', function () {
|
||||
var runCount;
|
||||
@@ -48,3 +48,3 @@
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
|
||||
@@ -115,3 +115,3 @@
|
||||
|
||||
- describe('extension methods', function () {
|
||||
+ /*describe('extension methods', function () {
|
||||
var extObjMock = {
|
||||
@@ -145,3 +145,3 @@
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
|
||||
diff -NarU1 showdown-orig/test/node/showdown.js showdown-mod/test/node/showdown.js
|
||||
--- showdown-orig/test/node/showdown.js 2020-04-23 06:22:01.523342816 +0000
|
||||
+++ showdown-mod/test/node/showdown.js 2020-04-23 08:14:31.733575073 +0000
|
||||
@@ -25,3 +25,3 @@
|
||||
|
||||
-describe('showdown.extension()', function () {
|
||||
+/*describe('showdown.extension()', function () {
|
||||
'use strict';
|
||||
@@ -110,3 +110,3 @@
|
||||
});
|
||||
-});
|
||||
+});*/
|
||||
|
||||
diff -NarU1 showdown-orig/test/node/testsuite.features.js showdown-mod/test/node/testsuite.features.js
|
||||
--- showdown-orig/test/node/testsuite.features.js 2020-04-23 06:22:01.523342816 +0000
|
||||
+++ showdown-mod/test/node/testsuite.features.js 2020-04-23 08:25:48.880265106 +0000
|
||||
@@ -13,3 +13,2 @@
|
||||
rawPrefixHeaderIdSuite = bootstrap.getTestSuite('test/features/rawPrefixHeaderId/'),
|
||||
- emojisSuite = bootstrap.getTestSuite('test/features/emojis/'),
|
||||
underlineSuite = bootstrap.getTestSuite('test/features/underline/'),
|
||||
@@ -69,4 +68,4 @@
|
||||
converter = new showdown.Converter({ghCompatibleHeaderId: true});
|
||||
- } else if (testsuite[i].name === 'ghMentions') {
|
||||
- converter = new showdown.Converter({ghMentions: true});
|
||||
+ //} else if (testsuite[i].name === 'ghMentions') {
|
||||
+ // converter = new showdown.Converter({ghMentions: true});
|
||||
} else if (testsuite[i].name === 'disable-email-encoding') {
|
||||
@@ -185,17 +184,2 @@
|
||||
it(suite[i].name.replace(/-/g, ' '), assertion(suite[i], converter));
|
||||
- }
|
||||
- });
|
||||
-
|
||||
- /** test emojis support **/
|
||||
- describe('emojis support', function () {
|
||||
- var converter,
|
||||
- suite = emojisSuite;
|
||||
- for (var i = 0; i < suite.length; ++i) {
|
||||
- if (suite[i].name === 'simplifiedautolinks') {
|
||||
- converter = new showdown.Converter({emoji: true, simplifiedAutoLink: true});
|
||||
- } else {
|
||||
- converter = new showdown.Converter({emoji: true});
|
||||
- }
|
||||
-
|
||||
- it(suite[i].name.replace(/-/g, ' '), assertion(suite[i], converter));
|
||||
}
|
||||
@@ -1,6 +1,10 @@
|
||||
all: $(addsuffix .gz, $(wildcard *.*))
|
||||
|
||||
%.gz: %
|
||||
#brotli -q 11 $<
|
||||
pigz -11 -J 34 -I 573 $<
|
||||
|
||||
# pigz -11 -J 34 -I 100 -F < $< > $@.first
|
||||
|
||||
# disabling brotli after all since the gain is meh
|
||||
# and it bloats sfx and wheels by like 70%
|
||||
|
||||
100
scripts/fusefuzz.py
Executable file
@@ -0,0 +1,100 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
"""
|
||||
mkdir -p /dev/shm/fusefuzz/{r,v}
|
||||
PYTHONPATH=.. python3 -m copyparty -v /dev/shm/fusefuzz/r::r -i 127.0.0.1
|
||||
../bin/copyparty-fuse.py /dev/shm/fusefuzz/v http://127.0.0.1:3923/ 2 0
|
||||
(d="$PWD"; cd /dev/shm/fusefuzz && "$d"/fusefuzz.py)
|
||||
"""
|
||||
|
||||
|
||||
def chk(fsz, rsz, ofs0, shift, ofs, rf, vf):
|
||||
if ofs != rf.tell():
|
||||
rf.seek(ofs)
|
||||
vf.seek(ofs)
|
||||
|
||||
rb = rf.read(rsz)
|
||||
vb = vf.read(rsz)
|
||||
|
||||
print(f"fsz {fsz} rsz {rsz} ofs {ofs0} shift {shift} ofs {ofs} = {len(rb)}")
|
||||
|
||||
if rb != vb:
|
||||
for n, buf in enumerate([rb, vb]):
|
||||
with open("buf." + str(n), "wb") as f:
|
||||
f.write(buf)
|
||||
|
||||
raise Exception(f"{len(rb)} != {len(vb)}")
|
||||
|
||||
return rb, vb
|
||||
|
||||
|
||||
def main():
|
||||
v = "v"
|
||||
for n in range(5):
|
||||
with open(f"r/{n}", "wb") as f:
|
||||
f.write(b"h" * n)
|
||||
|
||||
rand = os.urandom(7919) # prime
|
||||
for fsz in range(1024 * 1024 * 2 - 3, 1024 * 1024 * 2 + 3):
|
||||
with open("r/f", "wb", fsz) as f:
|
||||
f.write((rand * int(fsz / len(rand) + 1))[:fsz])
|
||||
|
||||
for rsz in range(64 * 1024 - 2, 64 * 1024 + 2):
|
||||
ofslist = [0, 1, 2]
|
||||
for n in range(3):
|
||||
ofslist.append(fsz - n)
|
||||
ofslist.append(fsz - (rsz * 1 + n))
|
||||
ofslist.append(fsz - (rsz * 2 + n))
|
||||
|
||||
for ofs0 in ofslist:
|
||||
for shift in range(-3, 3):
|
||||
print(f"fsz {fsz} rsz {rsz} ofs {ofs0} shift {shift}")
|
||||
ofs = ofs0
|
||||
if ofs < 0 or ofs >= fsz:
|
||||
continue
|
||||
|
||||
for n in range(1, 3):
|
||||
with open(f"{v}/{n}", "rb") as f:
|
||||
f.read()
|
||||
|
||||
prev_ofs = -99
|
||||
with open("r/f", "rb", rsz) as rf:
|
||||
with open(f"{v}/f", "rb", rsz) as vf:
|
||||
while True:
|
||||
ofs += shift
|
||||
if ofs < 0 or ofs > fsz or ofs == prev_ofs:
|
||||
break
|
||||
|
||||
prev_ofs = ofs
|
||||
|
||||
rb, vb = chk(fsz, rsz, ofs0, shift, ofs, rf, vf)
|
||||
|
||||
if not rb:
|
||||
break
|
||||
|
||||
ofs += len(rb)
|
||||
|
||||
for n in range(1, 3):
|
||||
with open(f"{v}/{n}", "rb") as f:
|
||||
f.read()
|
||||
|
||||
with open("r/f", "rb", rsz) as rf:
|
||||
with open(f"{v}/f", "rb", rsz) as vf:
|
||||
for n in range(2):
|
||||
ofs += shift
|
||||
if ofs < 0 or ofs > fsz:
|
||||
break
|
||||
|
||||
rb, vb = chk(fsz, rsz, ofs0, shift, ofs, rf, vf)
|
||||
|
||||
ofs -= rsz
|
||||
|
||||
# bumping fsz, sleep away the dentry cache in cppf
|
||||
time.sleep(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -3,9 +3,13 @@ set -e
|
||||
echo
|
||||
|
||||
# osx support
|
||||
sed=$( which gsed 2>/dev/null || which sed)
|
||||
find=$(which gfind 2>/dev/null || which find)
|
||||
sort=$(which gsort 2>/dev/null || which sort)
|
||||
command -v gtar >/dev/null &&
|
||||
command -v gfind >/dev/null && {
|
||||
tar() { gtar "$@"; }
|
||||
sed() { gsed "$@"; }
|
||||
find() { gfind "$@"; }
|
||||
sort() { gsort "$@"; }
|
||||
}
|
||||
|
||||
which md5sum 2>/dev/null >/dev/null &&
|
||||
md5sum=md5sum ||
|
||||
|
||||
202
scripts/make-sfx.sh
Executable file
@@ -0,0 +1,202 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
echo
|
||||
|
||||
|
||||
# optional args:
|
||||
#
|
||||
# `clean` uses files from git (everything except web/deps),
|
||||
# so local changes won't affect the produced sfx
|
||||
#
|
||||
# `re` does a repack of an sfx which you already executed once
|
||||
# (grabs files from the sfx-created tempdir), overrides `clean`
|
||||
#
|
||||
# `no-ogv` saves ~500k by removing the opus/vorbis audio codecs
|
||||
# (only affects apple devices; everything else has native support)
|
||||
#
|
||||
# `no-cm` saves ~90k by removing easymde/codemirror
|
||||
# (the fancy markdown editor)
|
||||
|
||||
|
||||
command -v gtar >/dev/null &&
|
||||
command -v gfind >/dev/null && {
|
||||
tar() { gtar "$@"; }
|
||||
sed() { gsed "$@"; }
|
||||
find() { gfind "$@"; }
|
||||
sort() { gsort "$@"; }
|
||||
unexpand() { gunexpand "$@"; }
|
||||
}
|
||||
|
||||
[ -e copyparty/__main__.py ] || cd ..
|
||||
[ -e copyparty/__main__.py ] ||
|
||||
{
|
||||
echo "run me from within the project root folder"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
while [ ! -z "$1" ]; do
|
||||
[ "$1" = clean ] && clean=1 && shift && continue
|
||||
[ "$1" = re ] && repack=1 && shift && continue
|
||||
[ "$1" = no-ogv ] && no_ogv=1 && shift && continue
|
||||
[ "$1" = no-cm ] && no_cm=1 && shift && continue
|
||||
break
|
||||
done
|
||||
|
||||
tmv() {
|
||||
touch -r "$1" t
|
||||
mv t "$1"
|
||||
}
|
||||
|
||||
rm -rf sfx/*
|
||||
mkdir -p sfx build
|
||||
cd sfx
|
||||
|
||||
[ $repack ] && {
|
||||
old="$(
|
||||
printf '%s\n' "$TMPDIR" /tmp |
|
||||
awk '/./ {print; exit}'
|
||||
)/pe-copyparty"
|
||||
|
||||
echo "repack of files in $old"
|
||||
cp -pR "$old/"*{jinja2,copyparty} .
|
||||
mv {x.,}jinja2 2>/dev/null || true
|
||||
}
|
||||
|
||||
[ $repack ] || {
|
||||
echo collecting jinja2
|
||||
f="../build/Jinja2-2.6.tar.gz"
|
||||
[ -e "$f" ] ||
|
||||
(url=https://files.pythonhosted.org/packages/25/c8/212b1c2fd6df9eaf536384b6c6619c4e70a3afd2dffdd00e5296ffbae940/Jinja2-2.6.tar.gz;
|
||||
wget -O$f "$url" || curl -L "$url" >$f)
|
||||
|
||||
tar -zxf $f
|
||||
mv Jinja2-*/jinja2 .
|
||||
rm -rf Jinja2-* jinja2/testsuite jinja2/_markupsafe/tests.py jinja2/_stringdefs.py
|
||||
|
||||
f=jinja2/lexer.py
|
||||
sed -r '/.*föö.*/ raise SyntaxError/' <$f >t
|
||||
tmv $f
|
||||
|
||||
f=jinja2/_markupsafe/_constants.py
|
||||
awk '!/: [0-9]+,?$/ || /(amp|gt|lt|quot|apos|nbsp).:/' <$f >t
|
||||
tmv $f
|
||||
|
||||
# msys2 tar is bad, make the best of it
|
||||
echo collecting source
|
||||
[ $clean ] && {
|
||||
(cd .. && git archive master >tar) && tar -xf ../tar copyparty
|
||||
(cd .. && tar -cf tar copyparty/web/deps) && tar -xf ../tar
|
||||
}
|
||||
[ $clean ] || {
|
||||
(cd .. && tar -cf tar copyparty) && tar -xf ../tar
|
||||
}
|
||||
rm -f ../tar
|
||||
}
|
||||
|
||||
ver=
|
||||
git describe --tags >/dev/null 2>/dev/null && {
|
||||
git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
|
||||
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//; s/-g?/./g')";
|
||||
t_ver=
|
||||
|
||||
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+$' && {
|
||||
# short format (exact version number)
|
||||
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g')";
|
||||
}
|
||||
|
||||
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+-[0-9]+-g[0-9a-f]+$' && {
|
||||
# long format (unreleased commit)
|
||||
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g; s/(.*) (.*)/\1 "\2"/')"
|
||||
}
|
||||
|
||||
[ -z "$t_ver" ] && {
|
||||
printf 'unexpected git version format: [%s]\n' "$git_ver"
|
||||
exit 1
|
||||
}
|
||||
|
||||
dt="$(git log -1 --format=%cd --date=format:'%Y, %m, %d')"
|
||||
printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt"
|
||||
sed -ri '
|
||||
s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/;
|
||||
s/^(S_VERSION =)(.*)/#\1\2\n\1 "'"$ver"'"/;
|
||||
s/^(BUILD_DT =)(.*)/#\1\2\n\1 ('"$dt"')/;
|
||||
' copyparty/__version__.py
|
||||
}
|
||||
|
||||
[ -z "$ver" ] &&
|
||||
ver="$(awk '/^VERSION *= \(/ {
|
||||
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"
|
||||
|
||||
ts=$(date -u +%s)
|
||||
hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)
|
||||
|
||||
mkdir -p ../dist
|
||||
sfx_out=../dist/copyparty-sfx
|
||||
|
||||
echo cleanup
|
||||
find .. -name '*.pyc' -delete
|
||||
find .. -name __pycache__ -delete
|
||||
|
||||
# especially prevent osx from leaking your lan ip (wtf apple)
|
||||
find .. -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
|
||||
find .. -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
|
||||
|
||||
echo use smol web deps
|
||||
rm -f copyparty/web/deps/*.full.*
|
||||
|
||||
# it's fine dw
|
||||
grep -lE '\.full\.(js|css)' copyparty/web/* |
|
||||
while IFS= read -r x; do
|
||||
sed -r 's/\.full\.(js|css)/.\1/g' <"$x" >t
|
||||
tmv "$x"
|
||||
done
|
||||
|
||||
[ $no_ogv ] &&
|
||||
rm -rf copyparty/web/deps/{dynamicaudio,ogv}*
|
||||
|
||||
[ $no_cm ] && {
|
||||
rm -rf copyparty/web/mde.* copyparty/web/deps/easymde*
|
||||
echo h > copyparty/web/mde.html
|
||||
f=copyparty/web/md.html
|
||||
sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f"
|
||||
}
|
||||
|
||||
# up2k goes from 28k to 22k laff
|
||||
echo entabbening
|
||||
find | grep -E '\.(js|css|html|py)$' | while IFS= read -r f; do
|
||||
unexpand -t 4 --first-only <"$f" >t
|
||||
tmv "$f"
|
||||
done
|
||||
|
||||
echo creating tar
|
||||
args=(--owner=1000 --group=1000)
|
||||
[ "$OSTYPE" = msys ] &&
|
||||
args=()
|
||||
|
||||
tar -cf tar "${args[@]}" --numeric-owner copyparty jinja2
|
||||
|
||||
echo compressing tar
|
||||
# detect best level; bzip2 -7 is usually better than -9
|
||||
for n in {2..9}; do cp tar t.$n; bzip2 -$n t.$n & done; wait; mv -v $(ls -1S t.*.bz2 | tail -n 1) tar.bz2
|
||||
for n in {2..9}; do cp tar t.$n; xz -ze$n t.$n & done; wait; mv -v $(ls -1S t.*.xz | tail -n 1) tar.xz
|
||||
rm t.*
|
||||
|
||||
echo creating unix sfx
|
||||
(
|
||||
sed "s/PACK_TS/$ts/; s/PACK_HTS/$hts/; s/CPP_VER/$ver/" <../scripts/sfx.sh |
|
||||
grep -E '^sfx_eof$' -B 9001;
|
||||
cat tar.xz
|
||||
) >$sfx_out.sh
|
||||
|
||||
echo creating generic sfx
|
||||
python ../scripts/sfx.py --sfx-make tar.bz2 $ver $ts
|
||||
mv sfx.out $sfx_out.py
|
||||
chmod 755 $sfx_out.*
|
||||
|
||||
printf "done:\n"
|
||||
printf " %s\n" "$(realpath $sfx_out)."{sh,py}
|
||||
# rm -rf *
|
||||
|
||||
# tar -tvf ../sfx/tar | sed -r 's/(.* ....-..-.. ..:.. )(.*)/\2 `` \1/' | sort | sed -r 's/(.*) `` (.*)/\2 \1/'| less
|
||||
# for n in {1..9}; do tar -tf tar | grep -vE '/$' | sed -r 's/(.*)\.(.*)/\2.\1/' | sort | sed -r 's/([^\.]+)\.(.*)/\2.\1/' | tar -cT- | bzip2 -c$n | wc -c; done
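
the sed block above derives three values from `git describe` and patches them into copyparty/__version__.py; a minimal python sketch of the same string mangling (it only handles the two formats named in the script comments, and the function name is made up for illustration):

```
# sketch of the version mangling done by make-sfx.sh, not the script itself
import re

def parse_git_ver(git_ver):
    """'v0.5.5-2-gb164aa0' -> ('0.5.5.2.b164aa0', '0, 5, 5, 2, "b164aa0"')"""
    ver = re.sub(r"-g?", ".", git_ver.lstrip("v"))  # becomes S_VERSION
    if re.match(r"^v[0-9.]+$", git_ver):
        t_ver = ver.replace(".", ", ")  # short format (exact version number)
    elif re.match(r"^v[0-9.]+-[0-9]+-g[0-9a-f]+$", git_ver):
        nums, sha = ver.rsplit(".", 1)  # long format (unreleased commit)
        t_ver = nums.replace(".", ", ") + ', "' + sha + '"'
    else:
        raise ValueError("unexpected git version format: " + git_ver)
    return ver, t_ver  # t_ver is spliced into the VERSION tuple literal

print(parse_git_ver("v0.5.5-2-gb164aa0"))
```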
|
||||
@@ -2,9 +2,13 @@
|
||||
set -e
|
||||
echo
|
||||
|
||||
sed=$( which gsed 2>/dev/null || which sed)
|
||||
find=$(which gfind 2>/dev/null || which find)
|
||||
sort=$(which gsort 2>/dev/null || which sort)
|
||||
command -v gtar >/dev/null &&
|
||||
command -v gfind >/dev/null && {
|
||||
tar() { gtar "$@"; }
|
||||
sed() { gsed "$@"; }
|
||||
find() { gfind "$@"; }
|
||||
sort() { gsort "$@"; }
|
||||
}
|
||||
|
||||
which md5sum 2>/dev/null >/dev/null &&
|
||||
md5sum=md5sum ||
|
||||
@@ -22,14 +26,14 @@ ver="$1"
|
||||
[[ -e copyparty/__main__.py ]] || cd ..
|
||||
[[ -e copyparty/__main__.py ]] ||
|
||||
{
|
||||
echo "run me from within the copyparty folder"
|
||||
echo "run me from within the project root folder"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
out_dir="$(pwd | $sed -r 's@/[^/]+$@@')"
|
||||
zip_path="$out_dir/copyparty-$ver.zip"
|
||||
tgz_path="$out_dir/copyparty-$ver.tar.gz"
|
||||
mkdir -p dist
|
||||
zip_path="$(pwd)/dist/copyparty-$ver.zip"
|
||||
tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz"
|
||||
|
||||
[[ -e "$zip_path" ]] ||
|
||||
[[ -e "$tgz_path" ]] &&
|
||||
@@ -44,23 +48,25 @@ tgz_path="$out_dir/copyparty-$ver.tar.gz"
|
||||
rm "$zip_path" 2>/dev/null || true
|
||||
rm "$tgz_path" 2>/dev/null || true
|
||||
|
||||
#$sed -ri "s/^(ADMIN_PWD *= *u).*/\1'hunter2'/" copyparty/config.py
|
||||
#sed -ri "s/^(ADMIN_PWD *= *u).*/\1'hunter2'/" copyparty/config.py
|
||||
|
||||
tmp="$(mktemp -d)"
|
||||
rls_dir="$tmp/copyparty-$ver"
|
||||
mkdir "$rls_dir"
|
||||
|
||||
echo ">>> export"
|
||||
git archive master |
|
||||
tar -x -C "$rls_dir"
|
||||
echo ">>> export from git"
|
||||
git archive master | tar -xC "$rls_dir"
|
||||
|
||||
echo ">>> export untracked deps"
|
||||
tar -c copyparty/web/deps | tar -xC "$rls_dir"
|
||||
|
||||
cd "$rls_dir"
|
||||
$find -type d -exec chmod 755 '{}' \+
|
||||
$find -type f -exec chmod 644 '{}' \+
|
||||
find -type d -exec chmod 755 '{}' \+
|
||||
find -type f -exec chmod 644 '{}' \+
|
||||
|
||||
commaver="$(
|
||||
printf '%s\n' "$ver" |
|
||||
sed -r 's/\./,/g'
|
||||
sed -r 's/\./, /g'
|
||||
)"
|
||||
|
||||
grep -qE "^VERSION *= \(${commaver}\)$" copyparty/__version__.py ||
|
||||
@@ -85,16 +91,23 @@ rm \
|
||||
|
||||
mv LICENSE LICENSE.txt
|
||||
|
||||
# messy because osx support
|
||||
$find -type f -exec $md5sum '{}' \+ |
|
||||
$sed -r 's/(.{32})(.*)/\2\1/' | LC_COLLATE=c $sort |
|
||||
$sed -r 's/(.*)(.{32})/\2\1/' |
|
||||
$sed -r 's/^(.{32}) \./\1 ./' > ../.sums.md5
|
||||
# the regular cleanup memes
|
||||
find -name '*.pyc' -delete
|
||||
find -name __pycache__ -delete
|
||||
find -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
|
||||
find -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
|
||||
|
||||
# also messy because osx support
|
||||
find -type f -exec $md5sum '{}' \+ |
|
||||
sed -r 's/(.{32})(.*)/\2\1/' | LC_COLLATE=c sort |
|
||||
sed -r 's/(.*)(.{32})/\2\1/' |
|
||||
sed -r 's/^(.{32}) \./\1 ./' > ../.sums.md5
|
||||
mv ../.sums.md5 .
|
||||
|
||||
cd ..
|
||||
echo ">>> tar"; tar -czf "$tgz_path" "copyparty-$ver"
|
||||
echo ">>> zip"; zip -qr "$zip_path" "copyparty-$ver"
|
||||
pwd
|
||||
echo ">>> tar"; tar -czf "$tgz_path" --owner=1000 --group=1000 --numeric-owner "copyparty-$ver"
|
||||
echo ">>> zip"; zip -qr "$zip_path" "copyparty-$ver"
|
||||
|
||||
rm -rf "$tmp"
|
||||
echo
|
||||
@@ -103,5 +116,5 @@ echo " $zip_path"
|
||||
echo " $tgz_path"
|
||||
echo
|
||||
|
||||
# function alr() { ls -alR copyparty-$1 | $sed -r "s/copyparty-$1/copyparty/" | $sed -r 's/[A-Z][a-z]{2} [0-9 ]{2} [0-9]{2}:[0-9]{2}//' > $1; }; for x in master rls src ; do alr $x; done
|
||||
# function alr() { ls -alR copyparty-$1 | sed -r "s/copyparty-$1/copyparty/" | sed -r 's/[A-Z][a-z]{2} [0-9 ]{2} [0-9]{2}:[0-9]{2}//' > $1; }; for x in master rls src ; do alr $x; done
|
||||
|
||||
|
||||
564
scripts/sfx.py
Normal file
@@ -0,0 +1,564 @@
|
||||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re, os, sys, time, shutil, signal, tarfile, hashlib, platform, tempfile
|
||||
import subprocess as sp
|
||||
|
||||
"""
|
||||
run me with any version of python, i will unpack and run copyparty
|
||||
|
||||
(but please don't edit this file with a text editor
|
||||
since that would probably corrupt the binary stuff at the end)
|
||||
|
||||
there's zero binaries! just plaintext python scripts all the way down
|
||||
so you can easily unpack the archive and inspect it for shady stuff
|
||||
|
||||
the archive data is attached after the b"\n# eof\n" archive marker,
|
||||
b"\n#n" decodes to b"\n"
|
||||
b"\n#r" decodes to b"\r"
|
||||
b"\n# " decodes to b""
|
||||
"""
|
||||
|
||||
# set by make-sfx.sh
|
||||
VER = None
|
||||
SIZE = None
|
||||
CKSUM = None
|
||||
STAMP = None
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
sys.dont_write_bytecode = True
|
||||
me = os.path.abspath(os.path.realpath(__file__))
|
||||
cpp = None
|
||||
|
||||
|
||||
def eprint(*args, **kwargs):
|
||||
kwargs["file"] = sys.stderr
|
||||
print(*args, **kwargs)
|
||||
|
||||
|
||||
def msg(*args, **kwargs):
|
||||
if args:
|
||||
args = ["[SFX]", args[0]] + list(args[1:])
|
||||
|
||||
eprint(*args, **kwargs)
|
||||
|
||||
|
||||
# skip 1
|
||||
|
||||
|
||||
def testptn1():
|
||||
"""test: creates a test-pattern for encode()"""
|
||||
import struct
|
||||
|
||||
buf = b""
|
||||
for c in range(256):
|
||||
buf += struct.pack("B", c)
|
||||
|
||||
yield buf
|
||||
|
||||
|
||||
def testptn2():
|
||||
import struct
|
||||
|
||||
for a in range(256):
|
||||
if a % 16 == 0:
|
||||
msg(a)
|
||||
|
||||
for b in range(256):
|
||||
buf = b""
|
||||
for c in range(256):
|
||||
buf += struct.pack("BBBB", a, b, c, b)
|
||||
yield buf
|
||||
|
||||
|
||||
def testptn3():
|
||||
with open("C:/Users/ed/Downloads/python-3.8.1-amd64.exe", "rb", 512 * 1024) as f:
|
||||
while True:
|
||||
buf = f.read(512 * 1024)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
yield buf
|
||||
|
||||
|
||||
testptn = testptn2
|
||||
|
||||
|
||||
def testchk(cdata):
|
||||
"""test: verifies that `data` yields testptn"""
|
||||
import struct
|
||||
|
||||
cbuf = b""
|
||||
mbuf = b""
|
||||
checked = 0
|
||||
t0 = time.time()
|
||||
mdata = testptn()
|
||||
while True:
|
||||
if not mbuf:
|
||||
try:
|
||||
mbuf += next(mdata)
|
||||
except:
|
||||
break
|
||||
|
||||
if not cbuf:
|
||||
try:
|
||||
cbuf += next(cdata)
|
||||
except:
|
||||
expect = mbuf[:8]
|
||||
expect = "".join(
|
||||
" {:02x}".format(x)
|
||||
for x in struct.unpack("B" * len(expect), expect)
|
||||
)
|
||||
raise Exception(
|
||||
"truncated at {}, expected{}".format(checked + len(cbuf), expect)
|
||||
)
|
||||
|
||||
ncmp = min(len(cbuf), len(mbuf))
|
||||
# msg("checking {:x}H bytes, {:x}H ok so far".format(ncmp, checked))
|
||||
for n in range(ncmp):
|
||||
checked += 1
|
||||
if cbuf[n] != mbuf[n]:
|
||||
expect = mbuf[n : n + 8]
|
||||
expect = "".join(
|
||||
" {:02x}".format(x)
|
||||
for x in struct.unpack("B" * len(expect), expect)
|
||||
)
|
||||
cc = struct.unpack(b"B", cbuf[n : n + 1])[0]
|
||||
raise Exception(
|
||||
"byte {:x}H bad, got {:02x}, expected{}".format(checked, cc, expect)
|
||||
)
|
||||
|
||||
cbuf = cbuf[ncmp:]
|
||||
mbuf = mbuf[ncmp:]
|
||||
|
||||
td = time.time() - t0
|
||||
txt = "all {}d bytes OK in {:.3f} sec, {:.3f} MB/s".format(
|
||||
checked, td, (checked / (1024 * 1024.0)) / td
|
||||
)
|
||||
msg(txt)
|
||||
|
||||
|
||||
def encode(data, size, cksum, ver, ts):
|
||||
"""creates a new sfx; `data` should yield bufs to attach"""
|
||||
nin = 0
|
||||
nout = 0
|
||||
skip = False
|
||||
with open(me, "rb") as fi:
|
||||
unpk = ""
|
||||
src = fi.read().replace(b"\r", b"").rstrip(b"\n").decode("utf-8")
|
||||
for ln in src.split("\n"):
|
||||
if ln.endswith("# skip 0"):
|
||||
skip = False
|
||||
continue
|
||||
|
||||
if ln.endswith("# skip 1") or skip:
|
||||
skip = True
|
||||
continue
|
||||
|
||||
unpk += ln + "\n"
|
||||
|
||||
for k, v in [
|
||||
["VER", '"' + ver + '"'],
|
||||
["SIZE", size],
|
||||
["CKSUM", '"' + cksum + '"'],
|
||||
["STAMP", ts],
|
||||
]:
|
||||
v1 = "\n{} = None\n".format(k)
|
||||
v2 = "\n{} = {}\n".format(k, v)
|
||||
unpk = unpk.replace(v1, v2)
|
||||
|
||||
unpk = unpk.replace("\n ", "\n\t")
|
||||
for _ in range(16):
|
||||
unpk = unpk.replace("\t ", "\t\t")
|
||||
|
||||
with open("sfx.out", "wb") as f:
|
||||
f.write(unpk.encode("utf-8") + b"\n\n# eof\n# ")
|
||||
for buf in data:
|
||||
ebuf = buf.replace(b"\n", b"\n#n").replace(b"\r", b"\n#r")
|
||||
f.write(ebuf)
|
||||
nin += len(buf)
|
||||
nout += len(ebuf)
|
||||
|
||||
msg("wrote {:x}H bytes ({:x}H after encode)".format(nin, nout))
|
||||
|
||||
|
||||
def makesfx(tar_src, ver, ts):
|
||||
sz = os.path.getsize(tar_src)
|
||||
cksum = hashfile(tar_src)
|
||||
encode(yieldfile(tar_src), sz, cksum, ver, ts)
|
||||
|
||||
|
||||
# skip 0
|
||||
|
||||
|
||||
def u8(gen):
|
||||
try:
|
||||
for s in gen:
|
||||
yield s.decode("utf-8", "ignore")
|
||||
except:
|
||||
yield s
|
||||
for s in gen:
|
||||
yield s
|
||||
|
||||
|
||||
def get_py_win(ret):
|
||||
tops = []
|
||||
p = str(os.getenv("LocalAppdata"))
|
||||
if p:
|
||||
tops.append(os.path.join(p, "Programs", "Python"))
|
||||
|
||||
progfiles = {}
|
||||
for p in ["ProgramFiles", "ProgramFiles(x86)"]:
|
||||
p = str(os.getenv(p))
|
||||
if p:
|
||||
progfiles[p] = 1
|
||||
# 32bit apps get x86 for both
|
||||
if p.endswith(" (x86)"):
|
||||
progfiles[p[:-6]] = 1
|
||||
|
||||
tops += list(progfiles.keys())
|
||||
|
||||
for sysroot in [me, sys.executable]:
|
||||
sysroot = sysroot[:3].upper()
|
||||
if sysroot[1] == ":" and sysroot not in tops:
|
||||
tops.append(sysroot)
|
||||
|
||||
# $WIRESHARK_SLOGAN
|
||||
for top in tops:
|
||||
try:
|
||||
for name1 in u8(sorted(os.listdir(top), reverse=True)):
|
||||
if name1.lower().startswith("python"):
|
||||
path1 = os.path.join(top, name1)
|
||||
try:
|
||||
for name2 in u8(os.listdir(path1)):
|
||||
if name2.lower() == "python.exe":
|
||||
path2 = os.path.join(path1, name2)
|
||||
ret[path2.lower()] = path2
|
||||
except:
|
||||
pass
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def get_py_nix(ret):
|
||||
ptn = re.compile(r"^(python|pypy)[0-9\.-]*$")
|
||||
for bindir in os.getenv("PATH").split(":"):
|
||||
if not bindir:
|
||||
next
|
||||
|
||||
try:
|
||||
for fn in u8(os.listdir(bindir)):
|
||||
if ptn.match(fn):
|
||||
fn = os.path.join(bindir, fn)
|
||||
ret[fn.lower()] = fn
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def read_py(binp):
|
||||
cmd = [
|
||||
binp,
|
||||
"-c",
|
||||
"import sys; sys.stdout.write(' '.join(str(x) for x in sys.version_info)); import jinja2",
|
||||
]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
ver, _ = p.communicate()
|
||||
ver = ver.decode("utf-8").split(" ")[:3]
|
||||
ver = [int(x) if x.isdigit() else 0 for x in ver]
|
||||
return ver, p.returncode == 0
|
||||
|
||||
|
||||
def get_pys():
|
||||
ver, chk = read_py(sys.executable)
|
||||
if chk or PY2:
|
||||
return [[chk, ver, sys.executable]]
|
||||
|
||||
hits = {sys.executable.lower(): sys.executable}
|
||||
if platform.system() == "Windows":
|
||||
get_py_win(hits)
|
||||
else:
|
||||
get_py_nix(hits)
|
||||
|
||||
ret = []
|
||||
for binp in hits.values():
|
||||
ver, chk = read_py(binp)
|
||||
ret.append([chk, ver, binp])
|
||||
msg("\t".join(str(x) for x in ret[-1]))
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def yieldfile(fn):
|
||||
with open(fn, "rb") as f:
|
||||
for block in iter(lambda: f.read(64 * 1024), b""):
|
||||
yield block
|
||||
|
||||
|
||||
def hashfile(fn):
|
||||
hasher = hashlib.md5()
|
||||
for block in yieldfile(fn):
|
||||
hasher.update(block)
|
||||
|
||||
return hasher.hexdigest()
|
||||
|
||||
|
||||
def unpack():
|
||||
"""unpacks the tar yielded by `data`"""
|
||||
name = "pe-copyparty"
|
||||
tag = "v" + str(STAMP)
|
||||
withpid = "{}.{}".format(name, os.getpid())
|
||||
top = tempfile.gettempdir()
|
||||
final = os.path.join(top, name)
|
||||
mine = os.path.join(top, withpid)
|
||||
tar = os.path.join(mine, "tar")
|
||||
|
||||
try:
|
||||
if tag in os.listdir(final):
|
||||
msg("found early")
|
||||
return final
|
||||
except:
|
||||
pass
|
||||
|
||||
nwrite = 0
|
||||
os.mkdir(mine)
|
||||
with open(tar, "wb") as f:
|
||||
for buf in get_payload():
|
||||
nwrite += len(buf)
|
||||
f.write(buf)
|
||||
|
||||
if nwrite != SIZE:
|
||||
t = "\n\n bad file:\n expected {} bytes, got {}\n".format(SIZE, nwrite)
|
||||
raise Exception(t)
|
||||
|
||||
cksum = hashfile(tar)
|
||||
if cksum != CKSUM:
|
||||
t = "\n\n bad file:\n {} expected,\n {} obtained\n".format(CKSUM, cksum)
|
||||
raise Exception(t)
|
||||
|
||||
with tarfile.open(tar, "r:bz2") as tf:
|
||||
tf.extractall(mine)
|
||||
|
||||
os.remove(tar)
|
||||
|
||||
with open(os.path.join(mine, tag), "wb") as f:
|
||||
f.write(b"h\n")
|
||||
|
||||
try:
|
||||
if tag in os.listdir(final):
|
||||
msg("found late")
|
||||
return final
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.path.islink(final):
|
||||
os.remove(final)
|
||||
else:
|
||||
shutil.rmtree(final)
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
os.symlink(mine, final)
|
||||
except:
|
||||
try:
|
||||
os.rename(mine, final)
|
||||
except:
|
||||
msg("reloc fail,", mine)
|
||||
return mine
|
||||
|
||||
for fn in u8(os.listdir(top)):
|
||||
if fn.startswith(name) and fn not in [name, withpid]:
|
||||
try:
|
||||
old = os.path.join(top, fn)
|
||||
if time.time() - os.path.getmtime(old) > 10:
|
||||
shutil.rmtree(old)
|
||||
except:
|
||||
pass
|
||||
|
||||
return final
|
||||
|
||||
|
||||
def get_payload():
|
||||
"""yields the binary data attached to script"""
|
||||
with open(me, "rb") as f:
|
||||
ptn = b"\n# eof\n# "
|
||||
buf = b""
|
||||
for n in range(64):
|
||||
buf += f.read(4096)
|
||||
ofs = buf.find(ptn)
|
||||
if ofs >= 0:
|
||||
break
|
||||
|
||||
if ofs < 0:
|
||||
raise Exception("could not find archive marker")
|
||||
|
||||
# start reading from the final b"\n"
|
||||
fpos = ofs + len(ptn) - 3
|
||||
# msg("tar found at", fpos)
|
||||
f.seek(fpos)
|
||||
dpos = 0
|
||||
leftovers = b""
|
||||
while True:
|
||||
rbuf = f.read(1024 * 32)
|
||||
if rbuf:
|
||||
buf = leftovers + rbuf
|
||||
ofs = buf.rfind(b"\n")
|
||||
if len(buf) <= 4:
|
||||
leftovers = buf
|
||||
continue
|
||||
|
||||
if ofs >= len(buf) - 4:
|
||||
leftovers = buf[ofs:]
|
||||
buf = buf[:ofs]
|
||||
else:
|
||||
leftovers = b"\n# "
|
||||
else:
|
||||
buf = leftovers
|
||||
|
||||
fpos += len(buf) + 1
|
||||
buf = (
|
||||
buf.replace(b"\n# ", b"")
|
||||
.replace(b"\n#r", b"\r")
|
||||
.replace(b"\n#n", b"\n")
|
||||
)
|
||||
dpos += len(buf) - 1
|
||||
|
||||
yield buf
|
||||
|
||||
if not rbuf:
|
||||
break
|
||||
|
||||
|
||||
def confirm():
|
||||
msg()
|
||||
msg("*** hit enter to exit ***")
|
||||
try:
|
||||
raw_input() if PY2 else input()
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def run(tmp, py):
|
||||
global cpp
|
||||
|
||||
msg("OK")
|
||||
msg("will use:", py)
|
||||
msg("bound to:", tmp)
|
||||
|
||||
# "systemd-tmpfiles-clean.timer"?? HOW do you even come up with this shit
|
||||
try:
|
||||
import fcntl
|
||||
|
||||
fd = os.open(tmp, os.O_RDONLY)
|
||||
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
|
||||
tmp = os.readlink(tmp) # can't flock a symlink, even with O_NOFOLLOW
|
||||
except:
|
||||
pass
|
||||
|
||||
fp_py = os.path.join(tmp, "py")
|
||||
try:
|
||||
with open(fp_py, "wb") as f:
|
||||
f.write(py.encode("utf-8") + b"\n")
|
||||
except:
|
||||
pass
|
||||
|
||||
# avoid loading ./copyparty.py
|
||||
cmd = [
|
||||
py,
|
||||
"-c",
|
||||
'import sys, runpy; sys.path.insert(0, r"'
|
||||
+ tmp
|
||||
+ '"); runpy.run_module("copyparty", run_name="__main__")',
|
||||
] + list(sys.argv[1:])
|
||||
|
||||
msg("\n", cmd, "\n")
|
||||
cpp = sp.Popen(str(x) for x in cmd)
|
||||
try:
|
||||
cpp.wait()
|
||||
except:
|
||||
cpp.wait()
|
||||
|
||||
if cpp.returncode != 0:
|
||||
confirm()
|
||||
|
||||
sys.exit(cpp.returncode)
|
||||
|
||||
|
||||
def bye(sig, frame):
|
||||
if cpp is not None:
|
||||
cpp.terminate()
|
||||
|
||||
|
||||
def main():
|
||||
sysver = str(sys.version).replace("\n", "\n" + " " * 18)
|
||||
pktime = time.strftime("%Y-%m-%d, %H:%M:%S", time.gmtime(STAMP))
|
||||
os.system("")
|
||||
msg()
|
||||
msg(" this is: copyparty", VER)
|
||||
msg(" packed at:", pktime, "UTC,", STAMP)
|
||||
msg("archive is:", me)
|
||||
msg("python bin:", sys.executable)
|
||||
msg("python ver:", platform.python_implementation(), sysver)
|
||||
msg()
|
||||
|
||||
arg = ""
|
||||
try:
|
||||
arg = sys.argv[1]
|
||||
except:
|
||||
pass
|
||||
|
||||
# skip 1
|
||||
|
||||
if arg == "--sfx-testgen":
|
||||
return encode(testptn(), 1, "x", "x", 1)
|
||||
|
||||
if arg == "--sfx-testchk":
|
||||
return testchk(get_payload())
|
||||
|
||||
if arg == "--sfx-make":
|
||||
tar, ver, ts = sys.argv[2:]
|
||||
return makesfx(tar, ver, ts)
|
||||
|
||||
# skip 0
|
||||
|
||||
signal.signal(signal.SIGTERM, bye)
|
||||
|
||||
tmp = unpack()
|
||||
fp_py = os.path.join(tmp, "py")
|
||||
if os.path.exists(fp_py):
|
||||
with open(fp_py, "rb") as f:
|
||||
py = f.read().decode("utf-8").rstrip()
|
||||
|
||||
return run(tmp, py)
|
||||
|
||||
pys = get_pys()
|
||||
pys.sort(reverse=True)
|
||||
j2, ver, py = pys[0]
|
||||
if j2:
|
||||
try:
|
||||
os.rename(os.path.join(tmp, "jinja2"), os.path.join(tmp, "x.jinja2"))
|
||||
except:
|
||||
pass
|
||||
|
||||
return run(tmp, py)
|
||||
|
||||
msg("\n could not find jinja2; will use py2 + the bundled version\n")
|
||||
for _, ver, py in pys:
|
||||
if ver > [2, 7] and ver < [3, 0]:
|
||||
return run(tmp, py)
|
||||
|
||||
m = "\033[1;31m\n\n\ncould not find a python with jinja2 installed; please do one of these:\n\n pip install --user jinja2\n\n install python2\n\n\033[0m"
|
||||
msg(m)
|
||||
confirm()
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
|
||||
# skip 1
|
||||
# python sfx.py --sfx-testgen && python test.py --sfx-testchk
|
||||
# c:\Python27\python.exe sfx.py --sfx-testgen && c:\Python27\python.exe test.py --sfx-testchk
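
for reference, the escaping scheme described in the sfx.py docstring above (`\n#n` -> `\n`, `\n#r` -> `\r`, `\n# ` -> nothing) round-trips like this; a standalone sketch, not part of the repo:

```
# sketch of the sfx payload escaping; esc() mirrors the replace-chain in
# sfx.py's encode(), unesc() mirrors the one in get_payload()
def esc(buf):
    return buf.replace(b"\n", b"\n#n").replace(b"\r", b"\n#r")

def unesc(buf):
    return buf.replace(b"\n# ", b"").replace(b"\n#r", b"\r").replace(b"\n#n", b"\n")

payload = b"hello\r\nworld\x00"
assert unesc(esc(payload)) == payload
```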
|
||||
76
scripts/sfx.sh
Normal file
@@ -0,0 +1,76 @@
|
||||
# use current/default shell
|
||||
set -e
|
||||
|
||||
dir="$(
|
||||
printf '%s\n' "$TMPDIR" /tmp |
|
||||
awk '/./ {print; exit}'
|
||||
)/pe-copyparty"
|
||||
|
||||
[ -e "$dir/vPACK_TS" ] || (
|
||||
printf '\033[36munpacking copyparty vCPP_VER (sfx-PACK_HTS)\033[1;30m\n\n'
|
||||
mkdir -p "$dir.$$"
|
||||
ofs=$(awk '$0=="sfx_eof" {print NR+1; exit}' < "$0")
|
||||
|
||||
[ -z "$ofs" ] && {
|
||||
printf '\033[31mabort: could not find SFX boundary\033[0m\n'
|
||||
exit 1
|
||||
}
|
||||
tail -n +$ofs "$0" | tar -JxC "$dir.$$"
|
||||
ln -nsf "$dir.$$" "$dir"
|
||||
printf '\033[0m'
|
||||
|
||||
now=$(date -u +%s)
|
||||
for d in "$dir".*; do
|
||||
ts=$(stat -c%Y -- "$d" 2>/dev/null) ||
|
||||
ts=$(stat -f %m%n -- "$d" 2>/dev/null)
|
||||
|
||||
[ $((now-ts)) -gt 300 ] &&
|
||||
rm -rf "$d"
|
||||
done
|
||||
echo h > "$dir/vPACK_TS"
|
||||
) >&2 || exit 1
|
||||
|
||||
# detect available pythons
|
||||
(IFS=:; for d in $PATH; do
|
||||
printf '%s\n' "$d"/python* "$d"/pypy*;
|
||||
done) |
|
||||
(sed -E 's/(.*\/[^/0-9]+)([0-9]?[^/]*)$/\2 \1/' || cat) |
|
||||
(sort -nr || cat) |
|
||||
(sed -E 's/([^ ]*) (.*)/\2\1/' || cat) |
|
||||
grep -E '/(python|pypy)[0-9\.-]*$' >$dir/pys || true
|
||||
|
||||
# see if we made a choice before
|
||||
[ -z "$pybin" ] && pybin="$(cat $dir/py 2>/dev/null || true)"
|
||||
|
||||
# otherwise find a python with jinja2
|
||||
[ -z "$pybin" ] && pybin="$(cat $dir/pys | while IFS= read -r _py; do
|
||||
printf '\033[1;30mlooking for jinja2 in [%s]\033[0m\n' "$_py" >&2
|
||||
$_py -c 'import jinja2' 2>/dev/null || continue
|
||||
printf '%s\n' "$_py"
|
||||
mv $dir/{,x.}jinja2
|
||||
break
|
||||
done)"
|
||||
|
||||
# otherwise find python2 (bundled jinja2 is way old)
|
||||
[ -z "$pybin" ] && {
|
||||
printf '\033[0;33mcould not find jinja2; will use py2 + the bundled version\033[0m\n' >&2
|
||||
pybin="$(cat $dir/pys | while IFS= read -r _py; do
|
||||
printf '\033[1;30mtesting if py2 [%s]\033[0m\n' "$_py" >&2
|
||||
_ver=$($_py -c 'import sys; sys.stdout.write(str(sys.version_info[0]))' 2>/dev/null) || continue
|
||||
[ $_ver = 2 ] || continue
|
||||
printf '%s\n' "$_py"
|
||||
break
|
||||
done)"
|
||||
}
|
||||
|
||||
[ -z "$pybin" ] && {
|
||||
printf '\033[1;31m\ncould not find a python with jinja2 installed; please do one of these:\n\n pip install --user jinja2\n\n install python2\033[0m\n\n' >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
printf '\033[1;30musing [%s]. you can reset with this:\n rm -rf %s*\033[0m\n\n' "$pybin" "$dir"
|
||||
printf '%s\n' "$pybin" > $dir/py
|
||||
|
||||
PYTHONPATH=$dir exec "$pybin" -m copyparty "$@"
|
||||
|
||||
sfx_eof
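
the unix sfx built by make-sfx.sh is just this script, the `sfx_eof` marker line, and an xz-compressed tar appended right after it; a hedged python sketch of pulling the tar back out (paths are assumptions; the script itself does the same thing with awk + tail):

```
# sketch: extract the tar that make-sfx.sh appends after the sfx_eof line
import io, tarfile

def extract_sfx(sfx_path, dest_dir):
    with open(sfx_path, "rb") as f:
        _script, sep, payload = f.read().partition(b"\nsfx_eof\n")
    if not sep:
        raise SystemExit("could not find SFX boundary")
    with tarfile.open(fileobj=io.BytesIO(payload), mode="r:xz") as tf:
        tf.extractall(dest_dir)

# extract_sfx("dist/copyparty-sfx.sh", "/tmp/pe-copyparty")  # example paths
```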
|
||||
164
scripts/speedtest-fs.py
Normal file
@@ -0,0 +1,164 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
import time
|
||||
import signal
|
||||
import traceback
|
||||
import threading
|
||||
from queue import Queue
|
||||
|
||||
|
||||
"""speedtest-fs: filesystem performance estimate"""
|
||||
__author__ = "ed <copyparty@ocv.me>"
|
||||
__copyright__ = 2020
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
|
||||
def get_spd(nbyte, nsec):
|
||||
if not nsec:
|
||||
return "0.000 MB 0.000 sec 0.000 MB/s"
|
||||
|
||||
mb = nbyte / (1024 * 1024.0)
|
||||
spd = mb / nsec
|
||||
|
||||
return f"{mb:.3f} MB {nsec:.3f} sec {spd:.3f} MB/s"
|
||||
|
||||
|
||||
class Inf(object):
|
||||
def __init__(self, t0):
|
||||
self.msgs = []
|
||||
self.errors = []
|
||||
self.reports = []
|
||||
self.mtx_msgs = threading.Lock()
|
||||
self.mtx_reports = threading.Lock()
|
||||
|
||||
self.n_byte = 0
|
||||
self.n_sec = 0
|
||||
self.n_done = 0
|
||||
self.t0 = t0
|
||||
|
||||
thr = threading.Thread(target=self.print_msgs)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def msg(self, fn, n_read):
|
||||
with self.mtx_msgs:
|
||||
self.msgs.append(f"{fn} {n_read}")
|
||||
|
||||
def err(self, fn):
|
||||
with self.mtx_reports:
|
||||
self.errors.append(f"{fn}\n{traceback.format_exc()}")
|
||||
|
||||
def print_msgs(self):
|
||||
while True:
|
||||
time.sleep(0.02)
|
||||
with self.mtx_msgs:
|
||||
msgs = self.msgs
|
||||
self.msgs = []
|
||||
|
||||
if not msgs:
|
||||
continue
|
||||
|
||||
msgs = msgs[-64:]
|
||||
msgs = [f"{get_spd(self.n_byte, self.n_sec)} {x}" for x in msgs]
|
||||
print("\n".join(msgs))
|
||||
|
||||
def report(self, fn, n_byte, n_sec):
|
||||
with self.mtx_reports:
|
||||
self.reports.append([n_byte, n_sec, fn])
|
||||
self.n_byte += n_byte
|
||||
self.n_sec += n_sec
|
||||
|
||||
def done(self):
|
||||
with self.mtx_reports:
|
||||
self.n_done += 1
|
||||
|
||||
|
||||
def get_files(dir_path):
|
||||
for fn in os.listdir(dir_path):
|
||||
fn = os.path.join(dir_path, fn)
|
||||
st = os.stat(fn).st_mode
|
||||
|
||||
if stat.S_ISDIR(st):
|
||||
yield from get_files(fn)
|
||||
|
||||
if stat.S_ISREG(st):
|
||||
yield fn
|
||||
|
||||
|
||||
def worker(q, inf, read_sz):
|
||||
while True:
|
||||
fn = q.get()
|
||||
if not fn:
|
||||
break
|
||||
|
||||
n_read = 0
|
||||
try:
|
||||
t0 = time.time()
|
||||
with open(fn, "rb") as f:
|
||||
while True:
|
||||
buf = f.read(read_sz)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
n_read += len(buf)
|
||||
inf.msg(fn, n_read)
|
||||
|
||||
inf.report(fn, n_read, time.time() - t0)
|
||||
except:
|
||||
inf.err(fn)
|
||||
|
||||
inf.done()
|
||||
|
||||
|
||||
def sighandler(signo, frame):
|
||||
os._exit(0)
|
||||
|
||||
|
||||
def main():
|
||||
signal.signal(signal.SIGINT, sighandler)
|
||||
|
||||
root = "."
|
||||
if len(sys.argv) > 1:
|
||||
root = sys.argv[1]
|
||||
|
||||
t0 = time.time()
|
||||
q = Queue(256)
|
||||
inf = Inf(t0)
|
||||
|
||||
num_threads = 8
|
||||
read_sz = 32 * 1024
|
||||
for _ in range(num_threads):
|
||||
thr = threading.Thread(target=worker, args=(q, inf, read_sz,))
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
for fn in get_files(root):
|
||||
q.put(fn)
|
||||
|
||||
for _ in range(num_threads):
|
||||
q.put(None)
|
||||
|
||||
while inf.n_done < num_threads:
|
||||
time.sleep(0.1)
|
||||
|
||||
t2 = time.time()
|
||||
print("\n")
|
||||
|
||||
log = inf.reports
|
||||
log.sort()
|
||||
for nbyte, nsec, fn in log[-64:]:
|
||||
print(f"{get_spd(nbyte, nsec)} {fn}")
|
||||
|
||||
print()
|
||||
print("\n".join(inf.errors))
|
||||
|
||||
print(get_spd(inf.n_byte, t2 - t0))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
6
setup.py
@@ -137,16 +137,14 @@ if setuptools_available:
|
||||
"entry_points": {
|
||||
"console_scripts": ["copyparty = copyparty.__main__:main"]
|
||||
},
|
||||
"scripts": [
|
||||
"bin/copyparty-fuse.py"
|
||||
]
|
||||
"scripts": ["bin/copyparty-fuse.py"],
|
||||
}
|
||||
)
|
||||
else:
|
||||
args.update(
|
||||
{
|
||||
"packages": ["copyparty", "copyparty.stolen"],
|
||||
"scripts": ["bin/copyparty", "bin/copyparty-fuse.py"]
|
||||
"scripts": ["bin/copyparty-fuse.py"],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
84
srv/ceditable.html
Normal file
@@ -0,0 +1,84 @@
|
||||
<!DOCTYPE html><html><head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<style>
|
||||
|
||||
* {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
outline: 0;
|
||||
border: none;
|
||||
font-size: 1em;
|
||||
line-height: 1em;
|
||||
font-family: monospace, monospace;
|
||||
color: #333;
|
||||
}
|
||||
html, body {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background: #ddd;
|
||||
}
|
||||
html {
|
||||
font-size: 1.3em;
|
||||
}
|
||||
li, #edit {
|
||||
list-style-type: none;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
overflow-wrap: break-word;
|
||||
word-wrap: break-word; /*ie*/
|
||||
}
|
||||
li:nth-child(even) {
|
||||
background: #ddd;
|
||||
}
|
||||
#edit, #html, #txt1, #txt2 {
|
||||
background: #eee;
|
||||
position: fixed;
|
||||
width: calc(50% - .8em);
|
||||
height: calc(50% - .8em);
|
||||
}
|
||||
#txt1 { top: .5em; left: .5em }
|
||||
#edit { top: .5em; right: .5em }
|
||||
#html { bottom: .5em; left: .5em }
|
||||
#txt2 { bottom: .5em; right: .5em }
|
||||
|
||||
</style></head><body>
|
||||
<pre id="edit" contenteditable="true"></pre>
|
||||
<textarea id="html"></textarea>
|
||||
<ul id="txt1"></ul>
|
||||
<ul id="txt2"></ul>
|
||||
<script>
|
||||
|
||||
var edit = document.getElementById('edit'),
|
||||
html = document.getElementById('html'),
|
||||
txt1 = document.getElementById('txt1'),
|
||||
txt2 = document.getElementById('txt2');
|
||||
|
||||
var oh = null;
|
||||
function fun() {
|
||||
var h = edit.innerHTML;
|
||||
if (oh != h) {
|
||||
oh = h;
|
||||
html.value = h;
|
||||
var t = edit.innerText;
|
||||
if (h.indexOf('<div><br></div>') >= 0)
|
||||
t = t.replace(/\n\n/g, "\n");
|
||||
|
||||
t = '<li>' + t.
|
||||
replace(/&/g, "&").
|
||||
replace(/</g, "<").
|
||||
replace(/>/g, ">").
|
||||
split('\n').join('</li>\n<li>') + '</li>';
|
||||
|
||||
t = t.replace(/<li><\/li>/g, '<li> </li>');
|
||||
txt1.innerHTML = t;
|
||||
txt2.innerHTML = t;
|
||||
}
|
||||
setTimeout(fun, 100);
|
||||
}
|
||||
fun();
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
141
srv/extend.md
Normal file
@@ -0,0 +1,141 @@
|
||||
# hi
this showcases my worst idea yet; *extending markdown with inline javascript*

due to obvious reasons it's disabled by default, and can be enabled with `-emp`

the examples are by no means correct, they're as much of a joke as this feature itself


### sub-header
nothing special about this one


## except/
this one becomes a hyperlink to ./except/ thanks to
* the `copyparty_pre` plugin at the end of this file
* which is invoked as a markdown filter every time the document is modified
* which looks for headers ending with a `/` and rewrites all headers below that

it is a passthrough to the markdown extension api, see https://marked.js.org/using_pro

in addition to the markdown extension functions, `ctor` will be called on document init


### these/
and this one becomes ./except/these/


#### ones.md
finally ./except/these/ones.md


### also-this.md
which should be ./except/also-this.md




# ok
now for another extension type, `copyparty_post`, which is called to manipulate the generated dom instead

`copyparty_post` can have the following functions, all optional
* `ctor` is called on document init
* `render` is called when the dom is done but still in-memory
* `render2` is called with the live browser dom as-displayed

## post example

the values in the `ex:` columns are linkified to `example.com/$value`

| ex:foo | bar | ex:baz |
| ------------ | -------- | ------ |
| asdf | nice | fgsfds |
| more one row | hi hello | aaa |

and the table can be sorted by clicking the headers

the difference is that with `copyparty_pre` you'll probably break various copyparty features, but if you use `copyparty_post` then future copyparty versions will probably break you




# here's the plugins
if there is anything below this line in the preview then the plugin feature is disabled (good)
|
||||
|
||||
|
||||
|
||||
|
||||
```copyparty_pre
|
||||
ctor() {
|
||||
md_plug['h'] = {
|
||||
on: false,
|
||||
lv: -1,
|
||||
path: []
|
||||
}
|
||||
},
|
||||
walkTokens(token) {
|
||||
if (token.type == 'heading') {
|
||||
var h = md_plug['h'],
|
||||
is_dir = token.text.endsWith('/');
|
||||
|
||||
if (h.lv >= token.depth) {
|
||||
h.on = false;
|
||||
}
|
||||
if (!h.on && is_dir) {
|
||||
h.on = true;
|
||||
h.lv = token.depth;
|
||||
h.path = [token.text];
|
||||
}
|
||||
else if (h.on && h.lv < token.depth) {
|
||||
h.path = h.path.slice(0, token.depth - h.lv);
|
||||
h.path.push(token.text);
|
||||
}
|
||||
if (!h.on)
|
||||
return false;
|
||||
|
||||
var path = h.path.join('');
|
||||
var emoji = is_dir ? '📂' : '📜';
|
||||
token.tokens[0].text = '<a href="' + path + '">' + emoji + ' ' + path + '</a>';
|
||||
}
|
||||
if (token.type == 'paragraph') {
|
||||
//console.log(JSON.parse(JSON.stringify(token.tokens)));
|
||||
for (var a = 0; a < token.tokens.length; a++) {
|
||||
var t = token.tokens[a];
|
||||
if (t.type == 'text' || t.type == 'strong' || t.type == 'em') {
|
||||
var ret = '', text = t.text;
|
||||
for (var b = 0; b < text.length; b++)
|
||||
ret += (Math.random() > 0.5) ? text[b] : text[b].toUpperCase();
|
||||
|
||||
t.text = ret;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
|
||||
```copyparty_post
|
||||
render(dom) {
|
||||
var ths = dom.querySelectorAll('th');
|
||||
for (var a = 0; a < ths.length; a++) {
|
||||
var th = ths[a];
|
||||
if (th.textContent.indexOf('ex:') === 0) {
|
||||
th.textContent = th.textContent.slice(3);
|
||||
var nrow = 0;
|
||||
while ((th = th.previousSibling) != null)
|
||||
nrow++;
|
||||
|
||||
var trs = ths[a].parentNode.parentNode.parentNode.querySelectorAll('tr');
|
||||
for (var b = 1; b < trs.length; b++) {
|
||||
var td = trs[b].childNodes[nrow];
|
||||
td.innerHTML = '<a href="//example.com/' + td.innerHTML + '">' + td.innerHTML + '</a>';
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
render2(dom) {
|
||||
window.makeSortable(dom.getElementsByTagName('table')[0]);
|
||||
}
|
||||
```
|
||||
237
srv/test.md
Normal file
@@ -0,0 +1,237 @@
### hello world

* qwe
* asd
* zxc
* 573
* one
* two

* |||
|--|--|
|listed|table|

```
[72....................................................................]
[80............................................................................]
```

* foo
```
[72....................................................................]
[80............................................................................]
```

* bar
```
[72....................................................................]
[80............................................................................]
```

```
l[i]=1I;(){}o0O</> var foo = "$(`bar`)"; a's'd
```

🔍🌽.📕.🍙🔎

[](#s1)
[s1](#s1)
[#s1](#s1)

a123456789b123456789c123456789d123456789e123456789f123456789g123456789h123456789i123456789j123456789k123456789l123456789m123456789n123456789o123456789p123456789q123456789r123456789s123456789t123456789u123456789v123456789w123456789x123456789y123456789z123456789

<foo> bar & <span>baz</span>
<a href="?foo=bar&baz=qwe&rty">?foo=bar&baz=qwe&rty</a>
<!-- hidden -->
```
<foo> bar & <span>baz</span>
<a href="?foo=bar&baz=qwe&rty">?foo=bar&baz=qwe&rty</a>
<!-- visible -->
```

*fails marked/showdown/tui/simplemde (just italics), **OK: markdown-it/simplemde:***
testing just google.com and underscored _google.com_ also with _google.com,_ trailing comma and _google.com_, comma after

*fails tui (just italics), **OK: marked/showdown/markdown-it/simplemde:***
testing just https://google.com and underscored _https://google.com_ links like that

*fails marked (no markup) and showdown/tui/simplemde (no links at all), **OK: markdown-it:***
let's try <google.com> bracketed and _<google.com>_ underscored bracketed

*fails marked (literal underscore), **OK: showdown/markdown-it/simplemde:***
let's try <https://google.com> bracketed and _<https://google.com>_ underscored bracketed

*fails none:*
and then [google](google.com) verbose and _[google](google.com)_ underscored

*fails none:*
and then [google](https://google.com/) verbose and _[google](https://google.com/)_ underscored

*all behave similarly (only verbose ones):*
and then <local> or maybe <./local> fsgfds </absolute> fsgfds
and then [local] or maybe [./local] fsgfds [/absolute] fsgfds
and then (local) or maybe (./local) fsgfds (/absolute) fsgfds
and then [](local) or maybe [](./local) fsgfds [](/absolute) fsgfds
and then [.](local) or maybe [.](./local) fsgfds [.](/absolute) fsgfds
and then [asdf](local) or maybe [asdf](./local) fsgfds [asdf](/absolute) fsgfds

*`ng/OK/OK/OK markdown-it`
`ng/OK/ng/OK marked`
`ng/OK/OK/OK showdown`
`OK/OK/OK/OK simplemde`*
[with spaces](/with spaces) plain, [with spaces](/with%20spaces) %20, [with spaces](</with spaces>) brackets, [with spaces](/with%20spaces) %20

*this fails marked, **OK: markdown-it, simplemde:***

* testing a list with:
`some code after a newline`

* testing a list with:
just a newline

and here is really just
a newline toplevel

*this fails showdown/hypermd, **OK: marked/markdown-it/simplemde:***

* testing a list with

code here
and a newline
this should have two leading spaces

* second list level

more code here
and a newline
this should have two leading spaces

.

* testing a list with

code here
and a newline
this should have two leading spaces

* second list level

more code here
and a newline
this should have two leading spaces

*this fails stackedit, **OK: showdown/marked/markdown-it/simplemde:***

|||
|--|--|
| a table | with no header |
| second row | foo bar |

*this fails showdown/stackedit, **OK: marked/markdown-it/simplemde:***

|||
|--|--:|
| a table | on the right |
| second row | foo bar |

||
--|:-:|-:
a table | big text in this | aaakbfddd
second row | centred | bbb

||
--|--|--
foo

* list entry
* [x] yes
* [ ] no
* another entry

# s1
## ep1
## ep2
# s2
## ep1
## ep2
# s3
## ep1
## ep2


#######################################################################



marked:
works in last ff/chrome for xp
bug: config{breaks:true} does nothing in 1.0
use whitespace, no tabs
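
Since the note above flags marked 1.0's `breaks` option as doing nothing, a quick sanity check is to enable it and render a two-line string; a minimal sketch assuming the marked ~1.x browser API (not part of the original test file):

```js
// with breaks:true working, the single newline should come back as a <br>
marked.setOptions({ breaks: true, gfm: true });
console.log(marked('line one\nline two'));
```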

showdown:
ie6 and ie8 broken, works in last ff/chrome for xp

markdown-it:
works in last ff/chrome for xp
use whitespace, no tabs
no header anchors

tui wysiwyg:
requires links to be <http://> or [title](location)



links:
http://demo.showdownjs.com/
https://marked.js.org/demo/
https://markdown-it.github.io/
https://simplemde.com/



all-pass:

https://github.com/joemccann/dillinger
https://dillinger.io/
uses markdown-it

https://github.com/markdown-it/markdown-it
https://markdown-it.github.io/



almost-all-pass:

https://github.com/Ionaru/easy-markdown-editor
https://easymde.tk/
simplemde fork (the most active)

https://github.com/Inscryb/inscryb-markdown-editor
simplemde fork

other simplemde forks:
pulkitmittal

https://simplemde.com/
(dead)

https://github.com/nhn/tui.editor
https://nhn.github.io/tui.editor/latest/tutorial-example01-editor-basic
ie10 and up



unrelated neat stuff:
https://github.com/gnab/remark


```sh
# append the line number after every non-blank line of test.md
awk '/./ {printf "%s %d\n", $0, NR; next} 1' <test.md >ln.md
# alternative: insert the line number before the first letter/dot on each line
gawk '{print gensub(/([a-zA-Z\.])/,NR" \\1","1")}' <test.md >ln.md
```

a|b|c
--|--|--
foo

@@ -3,6 +3,7 @@
 from __future__ import print_function, unicode_literals
 
 import os
+import time
 import json
 import shutil
 import unittest
@@ -59,11 +60,21 @@ class TestVFS(unittest.TestCase):
 
         if os.path.exists("/Volumes"):
             devname, _ = self.chkcmd("hdiutil", "attach", "-nomount", "ram://8192")
-            _, _ = self.chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
-            return "/Volumes/cptd"
+            for _ in range(10):
+                try:
+                    _, _ = self.chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
+                    return "/Volumes/cptd"
+                except:
+                    print('lol macos')
+                    time.sleep(0.25)
+
+            raise Exception("ramdisk creation failed")
 
         raise Exception("TODO support windows")
 
+    def log(self, src, msg):
+        pass
+
     def test(self):
         td = self.get_ramdisk() + "/vfs"
         try:
@@ -85,7 +96,7 @@ class TestVFS(unittest.TestCase):
                 f.write(fn)
 
         # defaults
-        vfs = AuthSrv(Namespace(c=None, a=[], v=[]), None).vfs
+        vfs = AuthSrv(Namespace(c=None, a=[], v=[]), self.log).vfs
         self.assertEqual(vfs.nodes, {})
         self.assertEqual(vfs.vpath, "")
         self.assertEqual(vfs.realpath, td)
@@ -93,7 +104,7 @@ class TestVFS(unittest.TestCase):
         self.assertEqual(vfs.uwrite, ["*"])
 
         # single read-only rootfs (relative path)
-        vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), None).vfs
+        vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), self.log).vfs
         self.assertEqual(vfs.nodes, {})
         self.assertEqual(vfs.vpath, "")
         self.assertEqual(vfs.realpath, td + "/a/ab")
@@ -101,7 +112,9 @@ class TestVFS(unittest.TestCase):
         self.assertEqual(vfs.uwrite, [])
 
         # single read-only rootfs (absolute path)
-        vfs = AuthSrv(Namespace(c=None, a=[], v=[td + "//a/ac/../aa//::r"]), None).vfs
+        vfs = AuthSrv(
+            Namespace(c=None, a=[], v=[td + "//a/ac/../aa//::r"]), self.log
+        ).vfs
         self.assertEqual(vfs.nodes, {})
         self.assertEqual(vfs.vpath, "")
         self.assertEqual(vfs.realpath, td + "/a/aa")
@@ -110,7 +123,8 @@ class TestVFS(unittest.TestCase):
 
         # read-only rootfs with write-only subdirectory (read-write for k)
         vfs = AuthSrv(
-            Namespace(c=None, a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]), None
+            Namespace(c=None, a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]),
+            self.log,
         ).vfs
         self.assertEqual(len(vfs.nodes), 1)
         self.assertEqual(vfs.vpath, "")
@@ -139,34 +153,34 @@ class TestVFS(unittest.TestCase):
         fsdir, real, virt = self.ls(vfs, "/", "*")
         self.assertEqual(fsdir, td)
         self.assertEqual(real, ["b", "c"])
-        self.assertEqual(virt, ["a"])
+        self.assertEqual(list(virt), ["a"])
 
         fsdir, real, virt = self.ls(vfs, "a", "*")
         self.assertEqual(fsdir, td + "/a")
         self.assertEqual(real, ["aa", "ab"])
-        self.assertEqual(virt, ["ac"])
+        self.assertEqual(list(virt), ["ac"])
 
         fsdir, real, virt = self.ls(vfs, "a/ab", "*")
         self.assertEqual(fsdir, td + "/a/ab")
         self.assertEqual(real, ["aba", "abb", "abc"])
-        self.assertEqual(virt, [])
+        self.assertEqual(list(virt), [])
 
         fsdir, real, virt = self.ls(vfs, "a/ac", "*")
         self.assertEqual(fsdir, td + "/a/ac")
         self.assertEqual(real, ["aca", "acc"])
-        self.assertEqual(virt, [])
+        self.assertEqual(list(virt), [])
 
         fsdir, real, virt = self.ls(vfs, "a/ac", "k")
         self.assertEqual(fsdir, td + "/a/ac")
         self.assertEqual(real, ["aca", "acc"])
-        self.assertEqual(virt, ["acb"])
+        self.assertEqual(list(virt), ["acb"])
 
         self.assertRaises(util.Pebkac, vfs.get, "a/ac/acb", "*", True, False)
 
         fsdir, real, virt = self.ls(vfs, "a/ac/acb", "k")
         self.assertEqual(fsdir, td + "/a/ac/acb")
         self.assertEqual(real, ["acba", "acbb", "acbc"])
-        self.assertEqual(virt, [])
+        self.assertEqual(list(virt), [])
 
         # breadth-first construction
         vfs = AuthSrv(
@@ -181,7 +195,7 @@ class TestVFS(unittest.TestCase):
                     "a/ac:a/ac:w",
                 ],
             ),
-            None,
+            self.log,
         ).vfs
 
         # sanitizing relative paths
@@ -193,17 +207,17 @@ class TestVFS(unittest.TestCase):
         self.undot(vfs, "./.././foo/..", "")
 
         # shadowing
-        vfs = AuthSrv(Namespace(c=None, a=[], v=[".::r", "b:a/ac:r"]), None).vfs
+        vfs = AuthSrv(Namespace(c=None, a=[], v=[".::r", "b:a/ac:r"]), self.log).vfs
 
         fsp, r1, v1 = self.ls(vfs, "", "*")
         self.assertEqual(fsp, td)
         self.assertEqual(r1, ["b", "c"])
-        self.assertEqual(v1, ["a"])
+        self.assertEqual(list(v1), ["a"])
 
         fsp, r1, v1 = self.ls(vfs, "a", "*")
         self.assertEqual(fsp, td + "/a")
         self.assertEqual(r1, ["aa", "ab"])
-        self.assertEqual(v1, ["ac"])
+        self.assertEqual(list(v1), ["ac"])
 
         fsp1, r1, v1 = self.ls(vfs, "a/ac", "*")
         fsp2, r2, v2 = self.ls(vfs, "b", "*")
@@ -211,7 +225,7 @@ class TestVFS(unittest.TestCase):
         self.assertEqual(fsp2, td + "/b")
         self.assertEqual(r1, ["ba", "bb", "bc"])
         self.assertEqual(r1, r2)
-        self.assertEqual(v1, v2)
+        self.assertEqual(list(v1), list(v2))
 
         # config file parser
         cfg_path = self.get_ramdisk() + "/test.cfg"
@@ -230,7 +244,7 @@ class TestVFS(unittest.TestCase):
                 ).encode("utf-8")
             )
 
-        au = AuthSrv(Namespace(c=[cfg_path], a=[], v=[]), None)
+        au = AuthSrv(Namespace(c=[cfg_path], a=[], v=[]), self.log)
         self.assertEqual(au.user["a"], "123")
         self.assertEqual(au.user["asd"], "fgh:jkl")
         n = au.vfs