mirror of https://github.com/9001/copyparty.git
synced 2025-10-23 16:14:10 +00:00
Compare commits
127 Commits
SHA1:
58f9e05d93 1ec981aea7 2a90286a7c 12d25d09b2 a039fae1a4 322b9abadc 0aaf954cea c2d22aa3d1 6934c75bba c58cf78f86
7f0de790ab d4bb4e3a73 d25612d038 116b2351b0 69b83dfdc4 3b1839c2ce 13742ebdf8 634657bea1 46e70d50b7 d64e9b85a7
fb853edbe3 cc076c1be1 98cc9a6755 7bd2b9c23a de724a1ff3 2163055dae 93ed0fc10b 0d98cefd40 d58988a033 2acfab1e3f
b915dfe9a6 25bd5a823e 1c35de4716 4c00435a0a 844e3079a8 4778cb5b2c ec5d60b919 e1f4b960e8 669e46da54 ba94cc5df7
d08245c3df 5c18d12cbf 580a42dec7 29286e159b 19bcf90e9f dae9c00742 35324ceb7c 5aadd47199 7d9057cc62 c4b322b883
19b09c898a eafe2098b6 2bc6a20d71 8b502a7235 37567844af 2f6c4e0e34 1c7cc4cb2b f83db3648e b164aa00d4 a2d866d0c2
2dfe4ac4c6 db65d05cb5 300c0194c7 37a0d2b087 a4959300ea 223657e5f8 0c53de6767 9c309b1498 1aa1b34c80 755a2ee023
69d3359e47 a90c49b8fb b1222edb27 b967a92f69 90a5cb5e59 7aba9cb76b f550a8171d 82e568d4c9 7b2a4a3d59 0265455cd1
afafc886a4 8a959f6ac4 1c3aa0d2c5 79b7d3316a fa7768583a faf49f6c15 765af31b83 b6a3c52d67 b025c2f660 e559a7c878
5c8855aafd b5fc537b89 14899d3a7c 0ea7881652 ec29b59d1e 9405597c15 82441978c6 e0e6291bdb b2b083fd0a f8a51b68e7
e0a19108e5 770ea68ca8 ce36c52baf a7da1dd233 678ef296b4 9e5627d805 5958ee4439 7127e57f0e ee9c6dc8aa 92779b3f48
2f1baf17d4 583da3d4a9 bf9ff78bcc 2cb07792cc 47bc8bb466 94ad1f5732 09557fbe83 1c0f44fa4e fc4d59d2d7 12345fbacc
2e33c8d222 db5f07f164 e050e69a43 27cb1d4fc7 5d6a740947 da3f68c363 d7d1c3685c
`.eslintrc.json` (Normal file, 12 lines)
@@ -0,0 +1,12 @@
{
    "env": {
        "browser": true,
        "es2021": true
    },
    "extends": "eslint:recommended",
    "parserOptions": {
        "ecmaVersion": 12
    },
    "rules": {
    }
}
`.gitattributes` (vendored, 2 lines changed)
@@ -1,4 +1,6 @@
* text eol=lf

*.reg text eol=crlf

*.png binary
*.gif binary
`.vscode/launch.json` (vendored, 8 lines changed)
@@ -9,13 +9,15 @@
            "console": "integratedTerminal",
            "cwd": "${workspaceFolder}",
            "args": [
                "-j",
                "0",
                //"-nw",
                "-ed",
                "-emp",
                "-e2d",
                "-e2s",
                "-a",
                "ed:wark",
                "-v",
                "srv::r:aed"
                "srv::r:aed:cnodupe"
            ]
        },
        {
`.vscode/settings.json` (vendored, 4 lines changed)
@@ -37,7 +37,7 @@
    "python.linting.banditEnabled": true,
    "python.linting.flake8Args": [
        "--max-line-length=120",
        "--ignore=E722,F405,E203,W503,W293",
        "--ignore=E722,F405,E203,W503,W293,E402",
    ],
    "python.linting.banditArgs": [
        "--ignore=B104"
@@ -55,6 +55,6 @@
    //
    // things you may wanna edit:
    //
    "python.pythonPath": ".venv/bin/python",
    "python.pythonPath": "/usr/bin/python3",
    //"python.linting.enabled": true,
}
`.vscode/tasks.json` (vendored, Normal file, 10 lines)
@@ -0,0 +1,10 @@
{
    "version": "2.0.0",
    "tasks": [
        {
            "label": "pre",
            "command": "true;rm -rf inc/* inc/.hist/;mkdir -p inc;",
            "type": "shell"
        }
    ]
}
`README.md` (59 lines changed)
@@ -13,12 +13,25 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* code standard: `black`


## quickstart

download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!

running the sfx without arguments (for example doubleclicking it on Windows) will let anyone access the current folder; see `-h` for help if you want accounts and volumes etc

you may also want these, especially on servers:
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for legit https)


## notes

* iPhone/iPad: use Firefox to download files
* Android-Chrome: set max "parallel uploads" for 200% upload speed (android bug)
* Android-Firefox: takes a while to select files (in order to avoid the above android-chrome issue)
* Desktop-Firefox: may use gigabytes of RAM if your connection is great and your files are massive
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
  * because no browsers currently implement the media-query to do this properly orz


## status
@@ -36,10 +49,22 @@ turn your phone or raspi into a portable file server with resumable uploads/down

* [x] accounts
* [x] markdown viewer
* [x] markdown editor
* [x] FUSE client (read-only)

summary: it works! you can use it! (but technically not even close to beta)


# client examples

* javascript: dump some state into a file (two separate examples)
  * `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
  * `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`

* FUSE: mount a copyparty server as a local filesystem
  * cross-platform python client available in [./bin/](bin/)
  * [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)

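the same one-shot upload can also be done from python; a minimal sketch using only the standard library, assuming a copyparty instance on 127.0.0.1:3923 with anonymous write access (the address and the example payload are placeholders, not part of the project docs):

```python
# python equivalent of the fetch()/XHR one-liners above: PUT some JSON to the server root
import json
import urllib.request

foo = {"some": "state"}
req = urllib.request.Request(
    "http://127.0.0.1:3923/",  # any folder you have write access to
    data=json.dumps(foo).encode("utf-8"),
    method="PUT",
)
with urllib.request.urlopen(req) as r:
    # per the PUT handler in this changeset, the reply body contains
    # the byte count and a checksum of the uploaded data
    print(r.status, r.read().decode("utf-8"))
```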
# dependencies

* `jinja2`

@@ -55,28 +80,36 @@ currently there are two self-contained binaries:
* `copyparty-sfx.sh` for unix (linux and osx) -- smaller, more robust
* `copyparty-sfx.py` for windows (unix too) -- crossplatform, beta

launch either of them and it'll unpack and run copyparty, assuming you have python installed of course
launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course

pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `copyparty.py` and keep it in the same folder because `sys.path` is funky

if you don't need all the features you can repack the sfx and save a bunch of space, tho currently the only removable feature is the opus/vorbis javascript decoder which is needed by apple devices to play foss audio files

steps to reduce the sfx size from `720 kB` to `250 kB` roughly:
* run one of the sfx'es once to unpack it
* `./scripts/make-sfx.sh re no-ogv` creates a new pair of sfx

## sfx repack

no internet connection needed, just download an sfx and the repo zip (also if you're on windows use msys2)
if you don't need all the features you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except for either msys2 or WSL if you're on windows)
* `724K` original size as of v0.4.0
* `256K` after `./scripts/make-sfx.sh re no-ogv`
* `164K` after `./scripts/make-sfx.sh re no-ogv no-cm`

the features you can opt to drop are
* `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files
* `cm`/easymde, the "fancy" markdown editor

for the `re`pack to work, first run one of the sfx'es once to unpack it

**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a `no-ogv no-cm` repack; works on linux/macos (and windows with msys2 or WSL)


# install on android

install [Termux](https://termux.com/) (see [ocv.me/termux](https://ocv.me/termux/)) and then copy-paste this into Termux (long-tap) all at once:
```sh
apt update && apt -y full-upgrade && termux-setup-storage && apt -y install curl && cd && curl -L https://github.com/9001/copyparty/raw/master/scripts/copyparty-android.sh > copyparty-android.sh && chmod 755 copyparty-android.sh && ./copyparty-android.sh -h
apt update && apt -y full-upgrade && termux-setup-storage && apt -y install python && python -m ensurepip && python -m pip install -U copyparty
echo $?
```

after the initial setup (and restarting bash), you can launch copyparty at any time by running "copyparty" in Termux
after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux


# dev env setup
@@ -104,13 +137,15 @@ in the `scripts` folder:

roughly sorted by priority

* up2k handle filename too long
* up2k fails on empty files? alert then stuck
* reduce up2k roundtrips
  * start from a chunk index and just go
  * terminate client on bad data
* drop onto folders
* look into android thumbnail cache file format
* `os.copy_file_range` for up2k cloning
* up2k partials ui
* support pillow-simd
* cache sha512 chunks on client
* symlink existing files on upload
* comment field
* ~~look into android thumbnail cache file format~~ bad idea
* figure out the deal with pixel3a not being connectable as hotspot
* pixel3a having unpredictable 3sec latency in general :|
`bin/README.md` (Normal file, 41 lines)
@@ -0,0 +1,41 @@
# copyparty-fuse.py
* mount a copyparty server as a local filesystem (read-only)
* **supports Windows!** -- expect `194 MiB/s` sequential read
* **supports Linux** -- expect `117 MiB/s` sequential read
* **supports macos** -- expect `85 MiB/s` sequential read

filecache is default-on for windows and macos;
* macos readsize is 64kB, so speed ~32 MiB/s without the cache
* windows readsize varies by software; explorer=1M, pv=32k

note that copyparty should run with `-ed` to enable dotfiles (hidden otherwise)

also consider using [../docs/rclone.md](../docs/rclone.md) instead for 5x performance


## to run this on windows:
* install [winfsp](https://github.com/billziss-gh/winfsp/releases/latest) and [python 3](https://www.python.org/downloads/)
  * [x] add python 3.x to PATH (it asks during install)
* `python -m pip install --user fusepy`
* `python ./copyparty-fuse.py n: http://192.168.1.69:3923/`

10% faster in [msys2](https://www.msys2.org/), 700% faster if debug prints are enabled:
* `pacman -S mingw64/mingw-w64-x86_64-python{,-pip}`
* `/mingw64/bin/python3 -m pip install --user fusepy`
* `/mingw64/bin/python3 ./copyparty-fuse.py [...]`

you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releases/latest), let me know if you [figure out how](https://github.com/dokan-dev/dokany/wiki/FUSE)
(winfsp's sshfs leaks, doesn't look like winfsp itself does, should be fine)



# copyparty-fuse🅱️.py
* mount a copyparty server as a local filesystem (read-only)
* does the same thing except more correct, `samba` approves
* **supports Linux** -- expect `18 MiB/s` (wait what)
* **supports Macos** -- probably



# copyparty-fuse-streaming.py
* pretend this doesn't exist
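under the hood these FUSE clients just issue plain HTTP requests against copyparty; a rough sketch of a single read, using only the standard library and the same example address as above (the path and byte range are placeholders):

```python
# roughly what copyparty-fuse does per read(): a ranged GET against the ?raw endpoint
import urllib.request

url = "http://192.168.1.69:3923/some/file.bin?raw"
req = urllib.request.Request(url, headers={"Range": "bytes=0-65535"})
with urllib.request.urlopen(req) as r:
    chunk = r.read()  # expect 206 Partial Content carrying the first 64 KiB
    print(r.status, len(chunk))
```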
`bin/copyparty-fuse-streaming.py` (Executable file, 1100 lines)
File diff suppressed because it is too large
`bin/copyparty-fuseb.py` (Executable file, 592 lines)
@@ -0,0 +1,592 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
"""copyparty-fuseb: remote copyparty as a local filesystem"""
|
||||
__author__ = "ed <copyparty@ocv.me>"
|
||||
__copyright__ = 2020
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import stat
|
||||
import errno
|
||||
import struct
|
||||
import threading
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
|
||||
try:
|
||||
import fuse
|
||||
from fuse import Fuse
|
||||
|
||||
fuse.fuse_python_api = (0, 2)
|
||||
if not hasattr(fuse, "__version__"):
|
||||
raise Exception("your fuse-python is way old")
|
||||
except:
|
||||
print(
|
||||
"\n could not import fuse; these may help:\n python3 -m pip install --user fuse-python\n apt install libfuse\n modprobe fuse\n"
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
"""
|
||||
mount a copyparty server (local or remote) as a filesystem
|
||||
|
||||
usage:
|
||||
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas
|
||||
|
||||
dependencies:
|
||||
sudo apk add fuse-dev python3-dev
|
||||
python3 -m pip install --user fuse-python
|
||||
|
||||
fork of copyparty-fuse.py based on fuse-python which
|
||||
appears to be more compliant than fusepy? since this works with samba
|
||||
(probably just my garbage code tbh)
|
||||
"""
|
||||
|
||||
|
||||
def threadless_log(msg):
|
||||
print(msg + "\n", end="")
|
||||
|
||||
|
||||
def boring_log(msg):
|
||||
msg = "\033[36m{:012x}\033[0m {}\n".format(threading.current_thread().ident, msg)
|
||||
print(msg[4:], end="")
|
||||
|
||||
|
||||
def rice_tid():
|
||||
tid = threading.current_thread().ident
|
||||
c = struct.unpack(b"B" * 5, struct.pack(b">Q", tid)[-5:])
|
||||
return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m"
|
||||
|
||||
|
||||
def fancy_log(msg):
|
||||
print("{} {}\n".format(rice_tid(), msg), end="")
|
||||
|
||||
|
||||
def null_log(msg):
|
||||
pass
|
||||
|
||||
|
||||
info = fancy_log
|
||||
log = fancy_log
|
||||
dbg = fancy_log
|
||||
log = null_log
|
||||
dbg = null_log
|
||||
|
||||
|
||||
def get_tid():
|
||||
return threading.current_thread().ident
|
||||
|
||||
|
||||
def html_dec(txt):
|
||||
return (
|
||||
txt.replace("<", "<")
|
||||
.replace(">", ">")
|
||||
.replace(""", '"')
|
||||
.replace("&", "&")
|
||||
)
|
||||
|
||||
|
||||
class CacheNode(object):
|
||||
def __init__(self, tag, data):
|
||||
self.tag = tag
|
||||
self.data = data
|
||||
self.ts = time.time()
|
||||
|
||||
|
||||
class Stat(fuse.Stat):
|
||||
def __init__(self):
|
||||
self.st_mode = 0
|
||||
self.st_ino = 0
|
||||
self.st_dev = 0
|
||||
self.st_nlink = 1
|
||||
self.st_uid = 1000
|
||||
self.st_gid = 1000
|
||||
self.st_size = 0
|
||||
self.st_atime = 0
|
||||
self.st_mtime = 0
|
||||
self.st_ctime = 0
|
||||
|
||||
|
||||
class Gateway(object):
|
||||
def __init__(self, base_url):
|
||||
self.base_url = base_url
|
||||
|
||||
ui = urllib.parse.urlparse(base_url)
|
||||
self.web_root = ui.path.strip("/")
|
||||
try:
|
||||
self.web_host, self.web_port = ui.netloc.split(":")
|
||||
self.web_port = int(self.web_port)
|
||||
except:
|
||||
self.web_host = ui.netloc
|
||||
if ui.scheme == "http":
|
||||
self.web_port = 80
|
||||
elif ui.scheme == "https":
|
||||
raise Exception("todo")
|
||||
else:
|
||||
raise Exception("bad url?")
|
||||
|
||||
self.conns = {}
|
||||
|
||||
def quotep(self, path):
|
||||
# TODO: mojibake support
|
||||
path = path.encode("utf-8", "ignore")
|
||||
return quote(path, safe="/")
|
||||
|
||||
def getconn(self, tid=None):
|
||||
tid = tid or get_tid()
|
||||
try:
|
||||
return self.conns[tid]
|
||||
except:
|
||||
info("new conn [{}] [{}]".format(self.web_host, self.web_port))
|
||||
|
||||
conn = http.client.HTTPConnection(self.web_host, self.web_port, timeout=260)
|
||||
|
||||
self.conns[tid] = conn
|
||||
return conn
|
||||
|
||||
def closeconn(self, tid=None):
|
||||
tid = tid or get_tid()
|
||||
try:
|
||||
self.conns[tid].close()
|
||||
del self.conns[tid]
|
||||
except:
|
||||
pass
|
||||
|
||||
def sendreq(self, *args, **kwargs):
|
||||
tid = get_tid()
|
||||
try:
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
return c.getresponse()
|
||||
except:
|
||||
self.closeconn(tid)
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
return c.getresponse()
|
||||
|
||||
def listdir(self, path):
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
|
||||
r = self.sendreq("GET", web_path)
|
||||
if r.status != 200:
|
||||
self.closeconn()
|
||||
raise Exception(
|
||||
"http error {} reading dir {} in {}".format(
|
||||
r.status, web_path, rice_tid()
|
||||
)
|
||||
)
|
||||
|
||||
return self.parse_html(r)
|
||||
|
||||
def download_file_range(self, path, ofs1, ofs2):
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
|
||||
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
|
||||
log("downloading {}".format(hdr_range))
|
||||
|
||||
r = self.sendreq("GET", web_path, headers={"Range": hdr_range})
|
||||
if r.status != http.client.PARTIAL_CONTENT:
|
||||
self.closeconn()
|
||||
raise Exception(
|
||||
"http error {} reading file {} range {} in {}".format(
|
||||
r.status, web_path, hdr_range, rice_tid()
|
||||
)
|
||||
)
|
||||
|
||||
return r.read()
|
||||
|
||||
def parse_html(self, datasrc):
|
||||
ret = []
|
||||
remainder = b""
|
||||
ptn = re.compile(
|
||||
r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
|
||||
)
|
||||
|
||||
while True:
|
||||
buf = remainder + datasrc.read(4096)
|
||||
# print('[{}]'.format(buf.decode('utf-8')))
|
||||
if not buf:
|
||||
break
|
||||
|
||||
remainder = b""
|
||||
endpos = buf.rfind(b"\n")
|
||||
if endpos >= 0:
|
||||
remainder = buf[endpos + 1 :]
|
||||
buf = buf[:endpos]
|
||||
|
||||
lines = buf.decode("utf-8").split("\n")
|
||||
for line in lines:
|
||||
m = ptn.match(line)
|
||||
if not m:
|
||||
# print(line)
|
||||
continue
|
||||
|
||||
ftype, fname, fsize, fdate = m.groups()
|
||||
fname = html_dec(fname)
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
sz = int(fsize)
|
||||
if ftype == "-":
|
||||
ret.append([fname, self.stat_file(ts, sz), 0])
|
||||
else:
|
||||
ret.append([fname, self.stat_dir(ts, sz), 0])
|
||||
|
||||
return ret
|
||||
|
||||
def stat_dir(self, ts, sz=4096):
|
||||
ret = Stat()
|
||||
ret.st_mode = stat.S_IFDIR | 0o555
|
||||
ret.st_nlink = 2
|
||||
ret.st_size = sz
|
||||
ret.st_atime = ts
|
||||
ret.st_mtime = ts
|
||||
ret.st_ctime = ts
|
||||
return ret
|
||||
|
||||
def stat_file(self, ts, sz):
|
||||
ret = Stat()
|
||||
ret.st_mode = stat.S_IFREG | 0o444
|
||||
ret.st_size = sz
|
||||
ret.st_atime = ts
|
||||
ret.st_mtime = ts
|
||||
ret.st_ctime = ts
|
||||
return ret
|
||||
|
||||
|
||||
class CPPF(Fuse):
|
||||
def __init__(self, *args, **kwargs):
|
||||
Fuse.__init__(self, *args, **kwargs)
|
||||
|
||||
self.url = None
|
||||
|
||||
self.dircache = []
|
||||
self.dircache_mtx = threading.Lock()
|
||||
|
||||
self.filecache = []
|
||||
self.filecache_mtx = threading.Lock()
|
||||
|
||||
def init2(self):
|
||||
# TODO figure out how python-fuse wanted this to go
|
||||
self.gw = Gateway(self.url) # .decode('utf-8'))
|
||||
info("up")
|
||||
|
||||
def clean_dircache(self):
|
||||
"""not threadsafe"""
|
||||
now = time.time()
|
||||
cutoff = 0
|
||||
for cn in self.dircache:
|
||||
if now - cn.ts > 1:
|
||||
cutoff += 1
|
||||
else:
|
||||
break
|
||||
|
||||
if cutoff > 0:
|
||||
self.dircache = self.dircache[cutoff:]
|
||||
|
||||
def get_cached_dir(self, dirpath):
|
||||
# with self.dircache_mtx:
|
||||
if True:
|
||||
self.clean_dircache()
|
||||
for cn in self.dircache:
|
||||
if cn.tag == dirpath:
|
||||
return cn
|
||||
|
||||
return None
|
||||
|
||||
"""
|
||||
,-------------------------------, g1>=c1, g2<=c2
|
||||
|cache1 cache2| buf[g1-c1:(g1-c1)+(g2-g1)]
|
||||
`-------------------------------'
|
||||
,---------------,
|
||||
|get1 get2|
|
||||
`---------------'
|
||||
__________________________________________________________________________
|
||||
|
||||
,-------------------------------, g2<=c2, (g2>=c1)
|
||||
|cache1 cache2| cdr=buf[:g2-c1]
|
||||
`-------------------------------' dl car; g1-512K:c1
|
||||
,---------------,
|
||||
|get1 get2|
|
||||
`---------------'
|
||||
__________________________________________________________________________
|
||||
|
||||
,-------------------------------, g1>=c1, (g1<=c2)
|
||||
|cache1 cache2| car=buf[c2-g1:]
|
||||
`-------------------------------' dl cdr; c2:c2+1M
|
||||
,---------------,
|
||||
|get1 get2|
|
||||
`---------------'
|
||||
"""
|
||||
|
||||
def get_cached_file(self, path, get1, get2, file_sz):
|
||||
car = None
|
||||
cdr = None
|
||||
ncn = -1
|
||||
# with self.filecache_mtx:
|
||||
if True:
|
||||
dbg("cache request from {} to {}, size {}".format(get1, get2, file_sz))
|
||||
for cn in self.filecache:
|
||||
ncn += 1
|
||||
|
||||
cache_path, cache1 = cn.tag
|
||||
if cache_path != path:
|
||||
continue
|
||||
|
||||
cache2 = cache1 + len(cn.data)
|
||||
if get2 <= cache1 or get1 >= cache2:
|
||||
continue
|
||||
|
||||
if get1 >= cache1 and get2 <= cache2:
|
||||
# keep cache entry alive by moving it to the end
|
||||
self.filecache = (
|
||||
self.filecache[:ncn] + self.filecache[ncn + 1 :] + [cn]
|
||||
)
|
||||
buf_ofs = get1 - cache1
|
||||
buf_end = buf_ofs + (get2 - get1)
|
||||
dbg(
|
||||
"found all ({}, {} to {}, len {}) [{}:{}] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
len(cn.data),
|
||||
buf_ofs,
|
||||
buf_end,
|
||||
buf_end - buf_ofs,
|
||||
)
|
||||
)
|
||||
return cn.data[buf_ofs:buf_end]
|
||||
|
||||
if get2 < cache2:
|
||||
x = cn.data[: get2 - cache1]
|
||||
if not cdr or len(cdr) < len(x):
|
||||
dbg(
|
||||
"found car ({}, {} to {}, len {}) [:{}-{}] = [:{}] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
len(cn.data),
|
||||
get2,
|
||||
cache1,
|
||||
get2 - cache1,
|
||||
len(x),
|
||||
)
|
||||
)
|
||||
cdr = x
|
||||
|
||||
continue
|
||||
|
||||
if get1 > cache1:
|
||||
x = cn.data[-(cache2 - get1) :]
|
||||
if not car or len(car) < len(x):
|
||||
dbg(
|
||||
"found cdr ({}, {} to {}, len {}) [-({}-{}):] = [-{}:] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
len(cn.data),
|
||||
cache2,
|
||||
get1,
|
||||
cache2 - get1,
|
||||
len(x),
|
||||
)
|
||||
)
|
||||
car = x
|
||||
|
||||
continue
|
||||
|
||||
raise Exception("what")
|
||||
|
||||
if car and cdr:
|
||||
dbg("<cache> have both")
|
||||
|
||||
ret = car + cdr
|
||||
if len(ret) == get2 - get1:
|
||||
return ret
|
||||
|
||||
raise Exception("{} + {} != {} - {}".format(len(car), len(cdr), get2, get1))
|
||||
|
||||
elif cdr:
|
||||
h_end = get1 + (get2 - get1) - len(cdr)
|
||||
h_ofs = h_end - 512 * 1024
|
||||
|
||||
if h_ofs < 0:
|
||||
h_ofs = 0
|
||||
|
||||
buf_ofs = (get2 - get1) - len(cdr)
|
||||
|
||||
dbg(
|
||||
"<cache> cdr {}, car {}-{}={} [-{}:]".format(
|
||||
len(cdr), h_ofs, h_end, h_end - h_ofs, buf_ofs
|
||||
)
|
||||
)
|
||||
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end)
|
||||
ret = buf[-buf_ofs:] + cdr
|
||||
|
||||
elif car:
|
||||
h_ofs = get1 + len(car)
|
||||
h_end = h_ofs + 1024 * 1024
|
||||
|
||||
if h_end > file_sz:
|
||||
h_end = file_sz
|
||||
|
||||
buf_ofs = (get2 - get1) - len(car)
|
||||
|
||||
dbg(
|
||||
"<cache> car {}, cdr {}-{}={} [:{}]".format(
|
||||
len(car), h_ofs, h_end, h_end - h_ofs, buf_ofs
|
||||
)
|
||||
)
|
||||
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end)
|
||||
ret = car + buf[:buf_ofs]
|
||||
|
||||
else:
|
||||
h_ofs = get1 - 256 * 1024
|
||||
h_end = get2 + 1024 * 1024
|
||||
|
||||
if h_ofs < 0:
|
||||
h_ofs = 0
|
||||
|
||||
if h_end > file_sz:
|
||||
h_end = file_sz
|
||||
|
||||
buf_ofs = get1 - h_ofs
|
||||
buf_end = buf_ofs + get2 - get1
|
||||
|
||||
dbg(
|
||||
"<cache> {}-{}={} [{}:{}]".format(
|
||||
h_ofs, h_end, h_end - h_ofs, buf_ofs, buf_end
|
||||
)
|
||||
)
|
||||
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end)
|
||||
ret = buf[buf_ofs:buf_end]
|
||||
|
||||
cn = CacheNode([path, h_ofs], buf)
|
||||
# with self.filecache_mtx:
|
||||
if True:
|
||||
if len(self.filecache) > 6:
|
||||
self.filecache = self.filecache[1:] + [cn]
|
||||
else:
|
||||
self.filecache.append(cn)
|
||||
|
||||
return ret
|
||||
|
||||
def _readdir(self, path):
|
||||
path = path.strip("/")
|
||||
log("readdir {}".format(path))
|
||||
|
||||
ret = self.gw.listdir(path)
|
||||
|
||||
# with self.dircache_mtx:
|
||||
if True:
|
||||
cn = CacheNode(path, ret)
|
||||
self.dircache.append(cn)
|
||||
self.clean_dircache()
|
||||
|
||||
return ret
|
||||
|
||||
def readdir(self, path, offset):
|
||||
for e in self._readdir(path)[offset:]:
|
||||
# log("yield [{}]".format(e[0]))
|
||||
yield fuse.Direntry(e[0])
|
||||
|
||||
def open(self, path, flags):
|
||||
if (flags & (os.O_RDONLY | os.O_WRONLY | os.O_RDWR)) != os.O_RDONLY:
|
||||
return -errno.EACCES
|
||||
|
||||
st = self.getattr(path)
|
||||
try:
|
||||
if st.st_nlink > 0:
|
||||
return st
|
||||
except:
|
||||
return st # -int(os.errcode)
|
||||
|
||||
def read(self, path, length, offset, fh=None, *args):
|
||||
if args:
|
||||
log("unexpected args [" + "] [".join(repr(x) for x in args) + "]")
|
||||
raise Exception()
|
||||
|
||||
path = path.strip("/")
|
||||
|
||||
ofs2 = offset + length
|
||||
log("read {} @ {} len {} end {}".format(path, offset, length, ofs2))
|
||||
|
||||
st = self.getattr(path)
|
||||
try:
|
||||
file_sz = st.st_size
|
||||
except:
|
||||
return st # -int(os.errcode)
|
||||
|
||||
if ofs2 > file_sz:
|
||||
ofs2 = file_sz
|
||||
log("truncate to len {} end {}".format(ofs2 - offset, ofs2))
|
||||
|
||||
if file_sz == 0 or offset >= ofs2:
|
||||
return b""
|
||||
|
||||
# toggle cache here i suppose
|
||||
# return self.get_cached_file(path, offset, ofs2, file_sz)
|
||||
return self.gw.download_file_range(path, offset, ofs2)
|
||||
|
||||
def getattr(self, path):
|
||||
log("getattr [{}]".format(path))
|
||||
|
||||
path = path.strip("/")
|
||||
try:
|
||||
dirpath, fname = path.rsplit("/", 1)
|
||||
except:
|
||||
dirpath = ""
|
||||
fname = path
|
||||
|
||||
if not path:
|
||||
ret = self.gw.stat_dir(time.time())
|
||||
dbg("=root")
|
||||
return ret
|
||||
|
||||
cn = self.get_cached_dir(dirpath)
|
||||
if cn:
|
||||
log("cache ok")
|
||||
dents = cn.data
|
||||
else:
|
||||
log("cache miss")
|
||||
dents = self._readdir(dirpath)
|
||||
|
||||
for cache_name, cache_stat, _ in dents:
|
||||
if cache_name == fname:
|
||||
dbg("=file")
|
||||
return cache_stat
|
||||
|
||||
log("=404")
|
||||
return -errno.ENOENT
|
||||
|
||||
|
||||
def main():
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
|
||||
server = CPPF()
|
||||
server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
|
||||
server.parse(values=server, errex=1)
|
||||
if not server.url or not str(server.url).startswith("http"):
|
||||
print("\nerror:")
|
||||
print(" need argument: -o url=<...>")
|
||||
print(" need argument: mount-path")
|
||||
print("example:")
|
||||
print(
|
||||
" ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
server.init2()
|
||||
threading.Thread(target=server.main, daemon=True).start()
|
||||
while True:
|
||||
time.sleep(9001)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@@ -118,7 +118,7 @@ printf ']}' >> /dev/shm/$salt.hs

printf '\033[36m'

#curl "http://$target:1234$posturl/handshake.php" -H "Content-Type: text/plain;charset=UTF-8" -H "Cookie: cppwd=$passwd" --data "$(cat "/dev/shm/$salt.hs")" | tee /dev/shm/$salt.res
#curl "http://$target:3923$posturl/handshake.php" -H "Content-Type: text/plain;charset=UTF-8" -H "Cookie: cppwd=$passwd" --data "$(cat "/dev/shm/$salt.hs")" | tee /dev/shm/$salt.res

{
{
@@ -135,7 +135,7 @@ EOF
cat /dev/shm/$salt.hs
} |
tee /dev/shm/$salt.hsb |
ncat $target 1234 |
ncat $target 3923 |
tee /dev/shm/$salt.hs1r

wark="$(cat /dev/shm/$salt.hs1r | getwark)"
@@ -190,7 +190,7 @@ EOF
nchunk=$((nchunk+1))

done |
ncat $target 1234 |
ncat $target 3923 |
tee /dev/shm/$salt.pr

t=$(date +%s.%N)
@@ -201,7 +201,7 @@ t=$(date +%s.%N)

printf '\033[36m'

ncat $target 1234 < /dev/shm/$salt.hsb |
ncat $target 3923 < /dev/shm/$salt.hsb |
tee /dev/shm/$salt.hs2r |
grep -E '"hash": ?\[ *\]'
`contrib/README.md` (Normal file, 22 lines)
@@ -0,0 +1,22 @@
### [`copyparty.bat`](copyparty.bat)
* launches copyparty with no arguments (anon read+write within same folder)
* intended for windows machines with no python.exe in PATH
* works on windows, linux and macos
* assumes `copyparty-sfx.py` was renamed to `copyparty.py` in the same folder as `copyparty.bat`

### [`index.html`](index.html)
* drop-in redirect from an httpd to copyparty
* assumes the webserver and copyparty is running on the same server/IP
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript

### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
disables thumbnails and folder-type detection in windows explorer, makes it way faster (especially for slow/networked locations (such as copyparty-fuse))

# OS integration
init-scripts to start copyparty as a service
* [`systemd/copyparty.service`](systemd/copyparty.service)
* [`openrc/copyparty`](openrc/copyparty)

# Reverse-proxy
copyparty has basic support for running behind another webserver
* [`nginx/copyparty.conf`](nginx/copyparty.conf)
`contrib/copyparty.bat` (Normal file, 33 lines)
@@ -0,0 +1,33 @@
exec python "$(dirname "$0")"/copyparty.py

@rem on linux, the above will execute and the script will terminate
@rem on windows, the rest of this script will run

@echo off
cls

set py=
for /f %%i in ('where python 2^>nul') do (
    set "py=%%i"
    goto c1
)
:c1

if [%py%] == [] (
    for /f %%i in ('where /r "%localappdata%\programs\python" python 2^>nul') do (
        set "py=%%i"
        goto c2
    )
)
:c2

if [%py%] == [] set "py=c:\python27\python.exe"

if not exist "%py%" (
    echo could not find python
    echo(
    pause
    exit /b
)

start cmd /c %py% "%~dp0\copyparty.py"
`contrib/explorer-nothumbs-nofoldertypes.reg` (Normal file, 31 lines)
@@ -0,0 +1,31 @@
Windows Registry Editor Version 5.00

; this will do 3 things, all optional:
; 1) disable thumbnails
; 2) delete all existing folder type settings/detections
; 3) disable folder type detection (force default columns)
;
; this makes the file explorer way faster,
; especially on slow/networked locations


; =====================================================================
; 1) disable thumbnails

[HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Explorer\Advanced]
"IconsOnly"=dword:00000001


; =====================================================================
; 2) delete all existing folder type settings/detections

[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags]

[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\BagMRU]


; =====================================================================
; 3) disable folder type detection

[HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags\AllFolders\Shell]
"FolderType"="NotSpecified"
`contrib/index.html` (Normal file, 43 lines)
@@ -0,0 +1,43 @@
<!DOCTYPE html>
<html lang="en">

<head>
    <meta charset="utf-8">
    <title>⇆🎉 redirect</title>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <style>

        html, body {
            font-family: sans-serif;
        }
        body {
            padding: 1em 2em;
            font-size: 1.5em;
        }
        a {
            font-size: 1.2em;
            padding: .1em;
        }

    </style>
</head>
<body>
    <span id="desc">you probably want</span> <a id="redir" href="//10.13.1.1:3923/">copyparty</a>
    <script>

        var a = document.getElementById('redir'),
            proto = window.location.protocol.indexOf('https') === 0 ? 'https' : 'http',
            loc = window.location.hostname || '127.0.0.1',
            port = a.getAttribute('href').split(':').pop().split('/')[0],
            url = proto + '://' + loc + ':' + port + '/';

        a.setAttribute('href', url);
        document.getElementById('desc').innerHTML = 'redirecting to';

        setTimeout(function() {
            window.location.href = url;
        }, 500);

    </script>
</body>
</html>
`contrib/nginx/copyparty.conf` (Normal file, 26 lines)
@@ -0,0 +1,26 @@
upstream cpp {
    server 127.0.0.1:3923;
    keepalive 120;
}
server {
    listen 443 ssl;
    listen [::]:443 ssl;

    server_name fs.example.com;

    location / {
        proxy_pass http://cpp;
        proxy_redirect off;
        # disable buffering (next 4 lines)
        proxy_http_version 1.1;
        client_max_body_size 0;
        proxy_buffering off;
        proxy_request_buffering off;

        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header Connection "Keep-Alive";
    }
}
`contrib/openrc/copyparty` (Normal file, 18 lines)
@@ -0,0 +1,18 @@
#!/sbin/openrc-run

# this will start `/usr/local/bin/copyparty-sfx.py`
# and share '/mnt' with anonymous read+write
#
# installation:
#   cp -pv copyparty /etc/init.d && rc-update add copyparty
#
# you may want to:
#   change '/usr/bin/python' to another interpreter
#   change '/mnt::a' to another location or permission-set

name="$SVCNAME"
command_background=true
pidfile="/var/run/$SVCNAME.pid"

command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
command_args="-q -v /mnt::a"
`contrib/systemd/copyparty.service` (Normal file, 19 lines)
@@ -0,0 +1,19 @@
# this will start `/usr/local/bin/copyparty-sfx.py`
# and share '/mnt' with anonymous read+write
#
# installation:
#   cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
#
# you may want to:
#   change '/usr/bin/python' to another interpreter
#   change '/mnt::a' to another location or permission-set

[Unit]
Description=copyparty file server

[Service]
ExecStart=/usr/bin/python /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'

[Install]
WantedBy=multi-user.target
@@ -9,6 +9,7 @@ __license__ = "MIT"
__url__ = "https://github.com/9001/copyparty/"

import os
import time
import shutil
import filecmp
import locale
@@ -85,6 +86,7 @@ def ensure_cert():


def main():
    time.strptime("19970815", "%Y%m%d")  # python#7980
    if WINDOWS:
        os.system("")  # enables colors

@@ -103,17 +105,22 @@ def main():
        epilog=dedent(
            """
            -a takes username:password,
            -v takes src:dst:permset:permset:... where "permset" is
                accesslevel followed by username (no separator)
            -v takes src:dst:permset:permset:cflag:cflag:...
                where "permset" is accesslevel followed by username (no separator)
                and "cflag" is config flags to set on this volume

            list of cflags:
              cnodupe   rejects existing files (instead of symlinking them)

            example:\033[35m
              -a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed \033[36m
              -a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
              mount current directory at "/" with
               * r (read-only) for everyone
               * a (read+write) for ed
              mount ../inc at "/dump" with
               * w (write-only) for everyone
               * a (read+write) for ed \033[0m
               * a (read+write) for ed
               * reject duplicate files \033[0m

            if no accounts or volumes are configured,
            current folder will be read/write for everyone
@@ -123,18 +130,36 @@ def main():
            """
        ),
    )
    ap.add_argument(
        "-c", metavar="PATH", type=str, action="append", help="add config file"
    )
    ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind")
    ap.add_argument("-p", metavar="PORT", type=int, default=1234, help="port to bind")
    ap.add_argument("-nc", metavar="NUM", type=int, default=16, help="max num clients")
    ap.add_argument("-j", metavar="CORES", type=int, help="max num cpu cores")
    # fmt: off
    ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
    ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
    ap.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
    ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
    ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
    ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
    ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
    ap.add_argument("-q", action="store_true", help="quiet")
    ap.add_argument("-nw", action="store_true", help="benchmark: disable writing")
    ap.add_argument("-ed", action="store_true", help="enable ?dots")
    ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
    ap.add_argument("-e2d", action="store_true", help="enable up2k database")
    ap.add_argument("-e2s", action="store_true", help="enable up2k db-scanner")
    ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
    ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
    ap.add_argument("-nih", action="store_true", help="no info hostname")
    ap.add_argument("-nid", action="store_true", help="no info disk-usage")
    ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
    al = ap.parse_args()
    # fmt: on

    al.i = al.i.split(",")
    try:
        if "-" in al.p:
            lo, hi = [int(x) for x in al.p.split("-")]
            al.p = list(range(lo, hi + 1))
        else:
            al.p = [int(x) for x in al.p.split(",")]
    except:
        raise Exception("invalid value for -p")

    SvcHub(al).run()
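the new `-p` flag accepts either a comma-separated list of ports or an inclusive range; a tiny standalone sketch of that syntax, mirroring the parsing shown above (hypothetical helper, not part of the codebase):

```python
# "-p 3923" -> [3923]; "-p 80,8080" -> [80, 8080]; "-p 3920-3923" -> [3920, 3921, 3922, 3923]
def parse_ports(spec: str):
    if "-" in spec:
        lo, hi = [int(x) for x in spec.split("-")]
        return list(range(lo, hi + 1))
    return [int(x) for x in spec.split(",")]


assert parse_ports("3923") == [3923]
assert parse_ports("80,8080") == [80, 8080]
assert parse_ports("3920-3923") == [3920, 3921, 3922, 3923]
```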
@@ -1,8 +1,8 @@
# coding: utf-8

VERSION = (0, 4, 0)
CODENAME = "NIH"
BUILD_DT = (2020, 5, 13)
VERSION = (0, 7, 3)
CODENAME = "keeping track"
BUILD_DT = (2021, 2, 3)

S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
@@ -12,11 +12,12 @@ from .util import undot, Pebkac, fsdec, fsenc
|
||||
class VFS(object):
|
||||
"""single level in the virtual fs"""
|
||||
|
||||
def __init__(self, realpath, vpath, uread=[], uwrite=[]):
|
||||
def __init__(self, realpath, vpath, uread=[], uwrite=[], flags={}):
|
||||
self.realpath = realpath # absolute path on host filesystem
|
||||
self.vpath = vpath # absolute path in the virtual filesystem
|
||||
self.uread = uread # users who can read this
|
||||
self.uwrite = uwrite # users who can write this
|
||||
self.flags = flags # config switches
|
||||
self.nodes = {} # child nodes
|
||||
|
||||
def add(self, src, dst):
|
||||
@@ -36,6 +37,7 @@ class VFS(object):
|
||||
"{}/{}".format(self.vpath, name).lstrip("/"),
|
||||
self.uread,
|
||||
self.uwrite,
|
||||
self.flags,
|
||||
)
|
||||
self.nodes[name] = vn
|
||||
return vn.add(src, dst)
|
||||
@@ -104,7 +106,7 @@ class VFS(object):
|
||||
real.sort()
|
||||
if not rem:
|
||||
for name, vn2 in sorted(self.nodes.items()):
|
||||
if uname in vn2.uread:
|
||||
if uname in vn2.uread or "*" in vn2.uread:
|
||||
virt_vis[name] = vn2
|
||||
|
||||
# no vfs nodes in the list of real inodes
|
||||
@@ -128,16 +130,15 @@ class VFS(object):
|
||||
class AuthSrv(object):
|
||||
"""verifies users against given paths"""
|
||||
|
||||
def __init__(self, args, log_func):
|
||||
self.log_func = log_func
|
||||
def __init__(self, args, log_func, warn_anonwrite=True):
|
||||
self.args = args
|
||||
|
||||
self.warn_anonwrite = True
|
||||
self.log_func = log_func
|
||||
self.warn_anonwrite = warn_anonwrite
|
||||
|
||||
if WINDOWS:
|
||||
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)")
|
||||
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
|
||||
else:
|
||||
self.re_vol = re.compile(r"^([^:]*):([^:]*):(.*)")
|
||||
self.re_vol = re.compile(r"^([^:]*):([^:]*):(.*)$")
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.reload()
|
||||
@@ -161,7 +162,7 @@ class AuthSrv(object):
|
||||
|
||||
yield prev, True
|
||||
|
||||
def _parse_config_file(self, fd, user, mread, mwrite, mount):
|
||||
def _parse_config_file(self, fd, user, mread, mwrite, mflags, mount):
|
||||
vol_src = None
|
||||
vol_dst = None
|
||||
for ln in [x.decode("utf-8").strip() for x in fd]:
|
||||
@@ -191,6 +192,7 @@ class AuthSrv(object):
|
||||
mount[vol_dst] = vol_src
|
||||
mread[vol_dst] = []
|
||||
mwrite[vol_dst] = []
|
||||
mflags[vol_dst] = {}
|
||||
continue
|
||||
|
||||
lvl, uname = ln.split(" ")
|
||||
@@ -198,6 +200,9 @@ class AuthSrv(object):
|
||||
mread[vol_dst].append(uname)
|
||||
if lvl in "wa":
|
||||
mwrite[vol_dst].append(uname)
|
||||
if lvl == "c":
|
||||
# config option, currently switches only
|
||||
mflags[vol_dst][uname] = True
|
||||
|
||||
def reload(self):
|
||||
"""
|
||||
@@ -210,6 +215,7 @@ class AuthSrv(object):
|
||||
user = {} # username:password
|
||||
mread = {} # mountpoint:[username]
|
||||
mwrite = {} # mountpoint:[username]
|
||||
mflags = {} # mountpoint:[flag]
|
||||
mount = {} # dst:src (mountpoint:realpath)
|
||||
|
||||
if self.args.a:
|
||||
@@ -220,20 +226,25 @@ class AuthSrv(object):
|
||||
if self.args.v:
|
||||
# list of src:dst:permset:permset:...
|
||||
# permset is [rwa]username
|
||||
for vol_match in [self.re_vol.match(x) for x in self.args.v]:
|
||||
try:
|
||||
src, dst, perms = vol_match.groups()
|
||||
except:
|
||||
raise Exception("invalid -v argument")
|
||||
for v_str in self.args.v:
|
||||
m = self.re_vol.match(v_str)
|
||||
if not m:
|
||||
raise Exception("invalid -v argument: [{}]".format(v_str))
|
||||
|
||||
src, dst, perms = m.groups()
|
||||
# print("\n".join([src, dst, perms]))
|
||||
src = fsdec(os.path.abspath(fsenc(src)))
|
||||
dst = dst.strip("/")
|
||||
mount[dst] = src
|
||||
mread[dst] = []
|
||||
mwrite[dst] = []
|
||||
mflags[dst] = {}
|
||||
|
||||
perms = perms.split(":")
|
||||
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
|
||||
if lvl == "c":
|
||||
# config option, currently switches only
|
||||
mflags[dst][uname] = True
|
||||
if uname == "":
|
||||
uname = "*"
|
||||
if lvl in "ra":
|
||||
@@ -244,14 +255,15 @@ class AuthSrv(object):
|
||||
if self.args.c:
|
||||
for cfg_fn in self.args.c:
|
||||
with open(cfg_fn, "rb") as f:
|
||||
self._parse_config_file(f, user, mread, mwrite, mount)
|
||||
self._parse_config_file(f, user, mread, mwrite, mflags, mount)
|
||||
|
||||
self.all_writable = []
|
||||
if not mount:
|
||||
# -h says our defaults are CWD at root and read/write for everyone
|
||||
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
|
||||
elif "" not in mount:
|
||||
# there's volumes but no root; make root inaccessible
|
||||
vfs = VFS(os.path.abspath("."), "", [], [])
|
||||
vfs = VFS(os.path.abspath("."), "")
|
||||
|
||||
maxdepth = 0
|
||||
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
|
||||
@@ -261,12 +273,18 @@ class AuthSrv(object):
|
||||
|
||||
if dst == "":
|
||||
# rootfs was mapped; fully replaces the default CWD vfs
|
||||
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst])
|
||||
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst], mflags[dst])
|
||||
continue
|
||||
|
||||
v = vfs.add(mount[dst], dst)
|
||||
v.uread = mread[dst]
|
||||
v.uwrite = mwrite[dst]
|
||||
v.flags = mflags[dst]
|
||||
if v.uwrite:
|
||||
self.all_writable.append(v)
|
||||
|
||||
if vfs.uwrite and vfs not in self.all_writable:
|
||||
self.all_writable.append(vfs)
|
||||
|
||||
missing_users = {}
|
||||
for d in [mread, mwrite]:
|
||||
|
@@ -29,7 +29,7 @@ class BrokerMp(object):
|
||||
self.mutex = threading.Lock()
|
||||
|
||||
cores = self.args.j
|
||||
if cores is None:
|
||||
if not cores:
|
||||
cores = mp.cpu_count()
|
||||
|
||||
self.log("broker", "booting {} subprocesses".format(cores))
|
||||
|
@@ -73,7 +73,7 @@ class MpWorker(object):
|
||||
if PY2:
|
||||
sck = pickle.loads(sck) # nosec
|
||||
|
||||
self.log("%s %s" % addr, "-" * 4 + "C-qpop")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-qpop\033[0m" % ("-" * 4,))
|
||||
self.httpsrv.accept(sck, addr)
|
||||
|
||||
with self.mutex:
|
||||
|
@@ -28,7 +28,7 @@ class BrokerThr(object):
|
||||
def put(self, want_retval, dest, *args):
|
||||
if dest == "httpconn":
|
||||
sck, addr = args
|
||||
self.log("%s %s" % addr, "-" * 4 + "C-qpop")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-qpop\033[0m" % ("-" * 4,))
|
||||
self.httpsrv.accept(sck, addr)
|
||||
|
||||
else:
|
||||
|
@@ -6,6 +6,8 @@ import stat
|
||||
import gzip
|
||||
import time
|
||||
import json
|
||||
import socket
|
||||
import ctypes
|
||||
from datetime import datetime
|
||||
import calendar
|
||||
|
||||
@@ -14,9 +16,6 @@ from .util import * # noqa # pylint: disable=unused-wildcard-import
|
||||
|
||||
if not PY2:
|
||||
unicode = str
|
||||
from html import escape as html_escape
|
||||
else:
|
||||
from cgi import escape as html_escape # pylint: disable=no-name-in-module
|
||||
|
||||
|
||||
class HttpCli(object):
|
||||
@@ -25,9 +24,11 @@ class HttpCli(object):
|
||||
"""
|
||||
|
||||
def __init__(self, conn):
|
||||
self.t0 = time.time()
|
||||
self.conn = conn
|
||||
self.s = conn.s
|
||||
self.sr = conn.sr
|
||||
self.ip = conn.addr[0]
|
||||
self.addr = conn.addr
|
||||
self.args = conn.args
|
||||
self.auth = conn.auth
|
||||
@@ -36,13 +37,13 @@ class HttpCli(object):
|
||||
|
||||
self.bufsz = 1024 * 32
|
||||
self.absolute_urls = False
|
||||
self.out_headers = {}
|
||||
self.out_headers = {"Access-Control-Allow-Origin": "*"}
|
||||
|
||||
def log(self, msg):
|
||||
self.log_func(self.log_src, msg)
|
||||
|
||||
def _check_nonfatal(self, ex):
|
||||
return ex.code in [403, 404]
|
||||
return ex.code < 400 or ex.code == 404
|
||||
|
||||
def _assert_safe_rem(self, rem):
|
||||
# sanity check to prevent any disasters
|
||||
@@ -83,11 +84,16 @@ class HttpCli(object):
|
||||
v = self.headers.get("connection", "").lower()
|
||||
self.keepalive = not v.startswith("close")
|
||||
|
||||
v = self.headers.get("x-forwarded-for", None)
|
||||
if v is not None and self.conn.addr[0] in ["127.0.0.1", "::1"]:
|
||||
self.ip = v.split(",")[0]
|
||||
self.log_src = self.conn.set_rproxy(self.ip)
|
||||
|
||||
self.uname = "*"
|
||||
if "cookie" in self.headers:
|
||||
cookies = self.headers["cookie"].split(";")
|
||||
for k, v in [x.split("=", 1) for x in cookies]:
|
||||
if k != "cppwd":
|
||||
if k.strip() != "cppwd":
|
||||
continue
|
||||
|
||||
v = unescape_cookie(v)
|
||||
@@ -123,11 +129,20 @@ class HttpCli(object):
|
||||
self.uparam = uparam
|
||||
self.vpath = unquotep(vpath)
|
||||
|
||||
ua = self.headers.get("user-agent", "")
|
||||
if ua.startswith("rclone/"):
|
||||
uparam["raw"] = True
|
||||
uparam["dots"] = True
|
||||
|
||||
try:
|
||||
if self.mode in ["GET", "HEAD"]:
|
||||
return self.handle_get() and self.keepalive
|
||||
elif self.mode == "POST":
|
||||
return self.handle_post() and self.keepalive
|
||||
elif self.mode == "PUT":
|
||||
return self.handle_put() and self.keepalive
|
||||
elif self.mode == "OPTIONS":
|
||||
return self.handle_options() and self.keepalive
|
||||
else:
|
||||
raise Pebkac(400, 'invalid HTTP mode "{0}"'.format(self.mode))
|
||||
|
||||
@@ -135,7 +150,7 @@ class HttpCli(object):
|
||||
try:
|
||||
# self.log("pebkac at httpcli.run #2: " + repr(ex))
|
||||
self.keepalive = self._check_nonfatal(ex)
|
||||
self.loud_reply(str(ex), status=ex.code)
|
||||
self.loud_reply("{}: {}".format(str(ex), self.vpath), status=ex.code)
|
||||
return self.keepalive
|
||||
except Pebkac:
|
||||
return False
|
||||
@@ -143,9 +158,7 @@ class HttpCli(object):
|
||||
def send_headers(self, length, status=200, mime=None, headers={}):
|
||||
response = ["HTTP/1.1 {} {}".format(status, HTTPCODE[status])]
|
||||
|
||||
if length is None:
|
||||
self.keepalive = False
|
||||
else:
|
||||
if length is not None:
|
||||
response.append("Content-Length: " + str(length))
|
||||
|
||||
# close if unknown length, otherwise take client's preference
|
||||
@@ -176,7 +189,8 @@ class HttpCli(object):
|
||||
self.send_headers(len(body), status, mime, headers)
|
||||
|
||||
try:
|
||||
self.s.sendall(body)
|
||||
if self.mode != "HEAD":
|
||||
self.s.sendall(body)
|
||||
except:
|
||||
raise Pebkac(400, "client d/c while replying body")
|
||||
|
||||
@@ -184,7 +198,7 @@ class HttpCli(object):
|
||||
|
||||
def loud_reply(self, body, *args, **kwargs):
|
||||
self.log(body.rstrip())
|
||||
self.reply(b"<pre>" + body.encode("utf-8"), *list(args), **kwargs)
|
||||
self.reply(b"<pre>" + body.encode("utf-8") + b"\r\n", *list(args), **kwargs)
|
||||
|
||||
def handle_get(self):
|
||||
logmsg = "{:4} {}".format(self.mode, self.req)
|
||||
@@ -230,6 +244,30 @@ class HttpCli(object):
|
||||
|
||||
return self.tx_browser()
|
||||
|
||||
def handle_options(self):
|
||||
self.log("OPTIONS " + self.req)
|
||||
self.send_headers(
|
||||
None,
|
||||
204,
|
||||
headers={
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Access-Control-Allow-Methods": "*",
|
||||
"Access-Control-Allow-Headers": "*",
|
||||
},
|
||||
)
|
||||
return True
|
||||
|
||||
def handle_put(self):
|
||||
self.log("PUT " + self.req)
|
||||
|
||||
if self.headers.get("expect", "").lower() == "100-continue":
|
||||
try:
|
||||
self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n")
|
||||
except:
|
||||
raise Pebkac(400, "client d/c before 100 continue")
|
||||
|
||||
return self.handle_stash()
|
||||
|
||||
def handle_post(self):
|
||||
self.log("POST " + self.req)
|
||||
|
||||
@@ -243,6 +281,9 @@ class HttpCli(object):
|
||||
if not ctype:
|
||||
raise Pebkac(400, "you can't post without a content-type header")
|
||||
|
||||
if "raw" in self.uparam:
|
||||
return self.handle_stash()
|
||||
|
||||
if "multipart/form-data" in ctype:
|
||||
return self.handle_post_multipart()
|
||||
|
||||
@@ -255,6 +296,37 @@ class HttpCli(object):
|
||||
|
||||
raise Pebkac(405, "don't know how to handle {} POST".format(ctype))
|
||||
|
||||
def handle_stash(self):
|
||||
remains = int(self.headers.get("content-length", None))
|
||||
if remains is None:
|
||||
reader = read_socket_unbounded(self.sr)
|
||||
self.keepalive = False
|
||||
else:
|
||||
reader = read_socket(self.sr, remains)
|
||||
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
fdir = os.path.join(vfs.realpath, rem)
|
||||
|
||||
addr = self.ip.replace(":", ".")
|
||||
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
|
||||
path = os.path.join(fdir, fn)
|
||||
|
||||
with open(path, "wb", 512 * 1024) as f:
|
||||
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
|
||||
|
||||
spd = self._spd(post_sz)
|
||||
self.log("{} wrote {}/{} bytes to {}".format(spd, post_sz, remains, path))
|
||||
self.reply("{}\n{}\n".format(post_sz, sha_b64).encode("utf-8"))
|
||||
return True
|
||||
|
||||
def _spd(self, nbytes, add=True):
|
||||
if add:
|
||||
self.conn.nbyte += nbytes
|
||||
|
||||
spd1 = get_spd(nbytes, self.t0)
|
||||
spd2 = get_spd(self.conn.nbyte, self.conn.t0)
|
||||
return spd1 + " " + spd2
|
||||
|
||||
def handle_post_multipart(self):
|
||||
self.parser = MultipartParser(self.log, self.sr, self.headers)
|
||||
self.parser.parse()
|
||||
@@ -314,9 +386,11 @@ class HttpCli(object):
|
||||
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
|
||||
body["vdir"] = self.vpath
|
||||
body["rdir"] = os.path.join(vfs.realpath, rem)
|
||||
body["addr"] = self.addr[0]
|
||||
body["vtop"] = vfs.vpath
|
||||
body["ptop"] = vfs.realpath
|
||||
body["prel"] = rem
|
||||
body["addr"] = self.ip
|
||||
body["flag"] = vfs.flags
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
|
||||
response = x.get()
|
||||
@@ -338,7 +412,10 @@ class HttpCli(object):
|
||||
except KeyError:
|
||||
raise Pebkac(400, "need hash and wark headers for binary POST")
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", wark, chash)
|
||||
vfs, _ = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
ptop = vfs.realpath
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", ptop, wark, chash)
|
||||
response = x.get()
|
||||
chunksize, cstart, path, lastmod = response
|
||||
|
||||
@@ -383,8 +460,8 @@ class HttpCli(object):
|
||||
|
||||
self.log("clone {} done".format(cstart[0]))
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", wark, chash)
|
||||
num_left = x.get()
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
|
||||
num_left, path = x.get()
|
||||
|
||||
if not WINDOWS and num_left == 0:
|
||||
times = (int(time.time()), int(lastmod))
|
||||
@@ -394,7 +471,9 @@ class HttpCli(object):
|
||||
except:
|
||||
self.log("failed to utime ({}, {})".format(path, times))
|
||||
|
||||
self.loud_reply("thank")
|
||||
spd = self._spd(post_sz)
|
||||
self.log("{} thank".format(spd))
|
||||
self.reply(b"thank")
|
||||
return True
|
||||
|
||||
def handle_login(self):
|
||||
@@ -407,7 +486,7 @@ class HttpCli(object):
|
||||
msg = "naw dude"
|
||||
pwd = "x" # nosec
|
||||
|
||||
h = {"Set-Cookie": "cppwd={}; Path=/".format(pwd)}
|
||||
h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)}
|
||||
html = self.conn.tpl_msg.render(h1=msg, h2='<a href="/">ack</a>', redir="/")
|
||||
self.reply(html.encode("utf-8"), headers=h)
|
||||
return True
|
||||
@@ -440,7 +519,7 @@ class HttpCli(object):
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}">go to /{}</a>'.format(
|
||||
quotep(vpath), html_escape(vpath, quote=False)
|
||||
quotep(vpath), html_escape(vpath)
|
||||
),
|
||||
pre="aight",
|
||||
click=True,
|
||||
@@ -474,7 +553,7 @@ class HttpCli(object):
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}?edit">go to /{}?edit</a>'.format(
|
||||
quotep(vpath), html_escape(vpath, quote=False)
|
||||
quotep(vpath), html_escape(vpath)
|
||||
),
|
||||
pre="aight",
|
||||
click=True,
|
||||
@@ -496,33 +575,40 @@ class HttpCli(object):
|
||||
self.log("discarding incoming file without filename")
|
||||
# fallthrough
|
||||
|
||||
fn = os.devnull
|
||||
if p_file and not nullwrite:
|
||||
fdir = os.path.join(vfs.realpath, rem)
|
||||
fn = os.path.join(fdir, sanitize_fn(p_file))
|
||||
fname = sanitize_fn(p_file)
|
||||
|
||||
if not os.path.isdir(fsenc(fdir)):
|
||||
raise Pebkac(404, "that folder does not exist")
|
||||
|
||||
# TODO broker which avoid this race and
|
||||
# provides a new filename if taken (same as up2k)
|
||||
if os.path.exists(fsenc(fn)):
|
||||
fn += ".{:.6f}-{}".format(time.time(), self.addr[0])
|
||||
# using current-time instead of t0 cause clients
|
||||
# may reuse a name for multiple files in one post
|
||||
suffix = ".{:.6f}-{}".format(time.time(), self.ip)
|
||||
open_args = {"fdir": fdir, "suffix": suffix}
|
||||
else:
|
||||
open_args = {}
|
||||
fname = os.devnull
|
||||
fdir = ""
|
||||
|
||||
try:
|
||||
with open(fsenc(fn), "wb") as f:
|
||||
self.log("writing to {0}".format(fn))
|
||||
with ren_open(fname, "wb", 512 * 1024, **open_args) as f:
|
||||
f, fname = f["orz"]
|
||||
self.log("writing to {}/{}".format(fdir, fname))
|
||||
sz, sha512_hex, _ = hashcopy(self.conn, p_data, f)
|
||||
if sz == 0:
|
||||
raise Pebkac(400, "empty files in post")
|
||||
|
||||
files.append([sz, sha512_hex])
|
||||
self.conn.nbyte += sz
|
||||
|
||||
except Pebkac:
|
||||
if fn != os.devnull:
|
||||
os.rename(fsenc(fn), fsenc(fn + ".PARTIAL"))
|
||||
if fname != os.devnull:
|
||||
fp = os.path.join(fdir, fname)
|
||||
suffix = ".PARTIAL"
|
||||
try:
|
||||
os.rename(fsenc(fp), fsenc(fp + suffix))
|
||||
except:
|
||||
fp = fp[: -len(suffix)]
|
||||
os.rename(fsenc(fp), fsenc(fp + suffix))
|
||||
|
||||
raise
|
||||
|
||||
@@ -546,7 +632,9 @@ class HttpCli(object):
|
||||
# truncated SHA-512 prevents length extension attacks;
|
||||
# using SHA-512/224, optionally SHA-512/256 = :64
|
||||
|
||||
self.log(msg)
|
||||
vspd = self._spd(sz_total, False)
|
||||
self.log("{} {}".format(vspd, msg))
|
||||
|
||||
if not nullwrite:
|
||||
# TODO this is bad
|
||||
log_fn = "up.{:.6f}.txt".format(t0)
|
||||
@@ -556,7 +644,7 @@ class HttpCli(object):
|
||||
"\n".join(
|
||||
unicode(x)
|
||||
for x in [
|
||||
":".join(unicode(x) for x in self.addr),
|
||||
":".join(unicode(x) for x in [self.ip, self.addr[1]]),
|
||||
msg.rstrip(),
|
||||
]
|
||||
)
|
||||
@@ -568,7 +656,7 @@ class HttpCli(object):
|
||||
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}">return to /{}</a>'.format(
|
||||
quotep(self.vpath), html_escape(self.vpath, quote=False)
|
||||
quotep(self.vpath), html_escape(self.vpath)
|
||||
),
|
||||
pre=msg,
|
||||
)
|
||||
@@ -605,7 +693,7 @@ class HttpCli(object):
|
||||
return True
|
||||
|
||||
fp = os.path.join(vfs.realpath, rem)
|
||||
srv_lastmod = -1
|
||||
srv_lastmod = srv_lastmod3 = -1
|
||||
try:
|
||||
st = os.stat(fsenc(fp))
|
||||
srv_lastmod = st.st_mtime
|
||||
@@ -616,7 +704,16 @@ class HttpCli(object):
|
||||
|
||||
# if file exists, chekc that timestamp matches the client's
|
||||
if srv_lastmod >= 0:
|
||||
if cli_lastmod3 not in [-1, srv_lastmod3]:
|
||||
same_lastmod = cli_lastmod3 in [-1, srv_lastmod3]
|
||||
if not same_lastmod:
|
||||
# some filesystems/transports limit precision to 1sec, hopefully floored
|
||||
same_lastmod = (
|
||||
srv_lastmod == int(srv_lastmod)
|
||||
and cli_lastmod3 > srv_lastmod3
|
||||
and cli_lastmod3 - srv_lastmod3 < 1000
|
||||
)
|
||||
|
||||
if not same_lastmod:
|
||||
response = json.dumps(
|
||||
{
|
||||
"ok": False,
|
||||
@@ -647,7 +744,7 @@ class HttpCli(object):
|
||||
if p_field != "body":
|
||||
raise Pebkac(400, "expected body, got {}".format(p_field))
|
||||
|
||||
with open(fp, "wb") as f:
|
||||
with open(fp, "wb", 512 * 1024) as f:
|
||||
sz, sha512, _ = hashcopy(self.conn, p_data, f)
|
||||
|
||||
new_lastmod = os.stat(fsenc(fp)).st_mtime
|
||||
@@ -672,9 +769,12 @@ class HttpCli(object):
|
||||
cli_dt = time.strptime(cli_lastmod, "%a, %d %b %Y %H:%M:%S GMT")
|
||||
cli_ts = calendar.timegm(cli_dt)
|
||||
return file_lastmod, int(file_ts) > int(cli_ts)
|
||||
except:
|
||||
self.log("bad lastmod format: {}".format(cli_lastmod))
|
||||
self.log(" expected format: {}".format(file_lastmod))
|
||||
except Exception as ex:
|
||||
self.log(
|
||||
"lastmod {}\nremote: [{}]\n local: [{}]".format(
|
||||
repr(ex), cli_lastmod, file_lastmod
|
||||
)
|
||||
)
|
||||
return file_lastmod, file_lastmod != cli_lastmod
|
||||
|
||||
return file_lastmod, True
|
||||
@@ -697,6 +797,8 @@ class HttpCli(object):
|
||||
editions[ext or "plain"] = [fs_path, st.st_size]
|
||||
except:
|
||||
pass
|
||||
if not self.vpath.startswith(".cpr/"):
|
||||
break
|
||||
|
||||
if not editions:
|
||||
raise Pebkac(404)
|
||||
@@ -769,11 +871,20 @@ class HttpCli(object):
|
||||
else:
|
||||
upper = file_sz
|
||||
|
||||
if lower < 0 or lower >= file_sz or upper < 0 or upper > file_sz:
|
||||
if upper > file_sz:
|
||||
upper = file_sz
|
||||
|
||||
if lower < 0 or lower >= upper:
|
||||
raise Exception()
|
||||
|
||||
except:
|
||||
raise Pebkac(400, "invalid range requested: " + hrange)
|
||||
err = "invalid range ({}), size={}".format(hrange, file_sz)
|
||||
self.loud_reply(
|
||||
err,
|
||||
status=416,
|
||||
headers={"Content-Range": "bytes */{}".format(file_sz)},
|
||||
)
|
||||
return True
|
||||
|
||||
status = 206
|
||||
self.out_headers["Content-Range"] = "bytes {}-{}/{}".format(
|
||||
@@ -782,6 +893,7 @@ class HttpCli(object):
|
||||
|
||||
logtail += " [\033[36m{}-{}\033[0m]".format(lower, upper)
|
||||
|
||||
use_sendfile = False
|
||||
if decompress:
|
||||
open_func = gzip.open
|
||||
open_args = [fsenc(fs_path), "rb"]
|
||||
@@ -791,10 +903,15 @@ class HttpCli(object):
|
||||
open_func = open
|
||||
# 512 kB is optimal for huge files, use 64k
|
||||
open_args = [fsenc(fs_path), "rb", 64 * 1024]
|
||||
if hasattr(os, "sendfile"):
|
||||
use_sendfile = not self.args.no_sendfile
|
||||
|
||||
#
|
||||
# send reply
|
||||
|
||||
if not is_compressed:
|
||||
self.out_headers["Cache-Control"] = "no-cache"
|
||||
|
||||
self.out_headers["Accept-Ranges"] = "bytes"
|
||||
self.send_headers(
|
||||
length=upper - lower,
|
||||
@@ -808,29 +925,19 @@ class HttpCli(object):
|
||||
self.log(logmsg)
|
||||
return True
|
||||
|
||||
ret = True
|
||||
with open_func(*open_args) as f:
|
||||
remains = upper - lower
|
||||
f.seek(lower)
|
||||
while remains > 0:
|
||||
# time.sleep(0.01)
|
||||
buf = f.read(4096)
|
||||
if not buf:
|
||||
break
|
||||
if use_sendfile:
|
||||
remains = sendfile_kern(lower, upper, f, self.s)
|
||||
else:
|
||||
remains = sendfile_py(lower, upper, f, self.s)
|
||||
|
||||
if remains < len(buf):
|
||||
buf = buf[:remains]
|
||||
if remains > 0:
|
||||
logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
|
||||
|
||||
remains -= len(buf)
|
||||
|
||||
try:
|
||||
self.s.sendall(buf)
|
||||
except:
|
||||
logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
|
||||
self.log(logmsg)
|
||||
return False
|
||||
|
||||
self.log(logmsg)
|
||||
return True
|
||||
spd = self._spd((upper - lower) - remains)
|
||||
self.log("{}, {}".format(logmsg, spd))
|
||||
return ret
|
||||
|
||||
def tx_md(self, fs_path):
|
||||
logmsg = "{:4} {} ".format("", self.req)
|
||||
@@ -864,8 +971,10 @@ class HttpCli(object):
|
||||
|
||||
targs = {
|
||||
"edit": "edit" in self.uparam,
|
||||
"title": html_escape(self.vpath, quote=False),
|
||||
"title": html_escape(self.vpath),
|
||||
"lastmod": int(ts_md * 1000),
|
||||
"md_plug": "true" if self.args.emp else "false",
|
||||
"md_chk_rate": self.args.mcr,
|
||||
"md": "",
|
||||
}
|
||||
sz_html = len(template.render(**targs).encode("utf-8"))
|
||||
@@ -905,7 +1014,7 @@ class HttpCli(object):
|
||||
else:
|
||||
vpath += "/" + node
|
||||
|
||||
vpnodes.append([quotep(vpath) + "/", html_escape(node, quote=False)])
|
||||
vpnodes.append([quotep(vpath) + "/", html_escape(node)])
|
||||
|
||||
vn, rem = self.auth.vfs.get(
|
||||
self.vpath, self.uname, self.readable, self.writable
|
||||
@@ -920,6 +1029,10 @@ class HttpCli(object):
|
||||
if abspath.endswith(".md") and "raw" not in self.uparam:
|
||||
return self.tx_md(abspath)
|
||||
|
||||
bad = "{0}.hist{0}up2k.".format(os.sep)
|
||||
if abspath.endswith(bad + "db") or abspath.endswith(bad + "snap"):
|
||||
raise Pebkac(403)
|
||||
|
||||
return self.tx_file(abspath)
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname)
|
||||
@@ -941,9 +1054,13 @@ class HttpCli(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
# show dotfiles if permitted and requested
|
||||
if not self.args.ed or "dots" not in self.uparam:
|
||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||
|
||||
dirs = []
|
||||
files = []
|
||||
for fn in exclude_dotfiles(vfs_ls):
|
||||
for fn in vfs_ls:
|
||||
base = ""
|
||||
href = fn
|
||||
if self.absolute_urls and vpath:
|
||||
@@ -976,7 +1093,12 @@ class HttpCli(object):
|
||||
dt = datetime.utcfromtimestamp(inf.st_mtime)
|
||||
dt = dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
item = [margin, quotep(href), html_escape(fn, quote=False), sz, dt]
|
||||
try:
|
||||
ext = "---" if is_dir else fn.rsplit(".", 1)[1]
|
||||
except:
|
||||
ext = "%"
|
||||
|
||||
item = [margin, quotep(href), html_escape(fn), sz, ext, dt]
|
||||
if is_dir:
|
||||
dirs.append(item)
|
||||
else:
|
||||
@@ -989,6 +1111,45 @@ class HttpCli(object):
|
||||
with open(fsenc(fn), "rb") as f:
|
||||
logues[n] = f.read().decode("utf-8")
|
||||
|
||||
if False:
|
||||
# this is a mistake
|
||||
md = None
|
||||
for fn in [x[2] for x in files]:
|
||||
if fn.lower() == "readme.md":
|
||||
fn = os.path.join(abspath, fn)
|
||||
with open(fn, "rb") as f:
|
||||
md = f.read().decode("utf-8")
|
||||
|
||||
break
|
||||
|
||||
srv_info = []
|
||||
|
||||
try:
|
||||
if not self.args.nih:
|
||||
srv_info.append(str(socket.gethostname()).split(".")[0])
|
||||
except:
|
||||
self.log("#wow #whoa")
|
||||
pass
|
||||
|
||||
try:
|
||||
# some fuses misbehave
|
||||
if not self.args.nid:
|
||||
if WINDOWS:
|
||||
bfree = ctypes.c_ulonglong(0)
|
||||
ctypes.windll.kernel32.GetDiskFreeSpaceExW(
|
||||
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
|
||||
)
|
||||
srv_info.append(humansize(bfree.value) + " free")
|
||||
else:
|
||||
sv = os.statvfs(abspath)
|
||||
free = humansize(sv.f_frsize * sv.f_bfree, True)
|
||||
total = humansize(sv.f_frsize * sv.f_blocks, True)
|
||||
|
||||
srv_info.append(free + " free")
|
||||
srv_info.append(total)
|
||||
except:
|
||||
pass
|
||||
|
||||
ts = ""
|
||||
# ts = "?{}".format(time.time())
|
||||
|
||||
@@ -1002,7 +1163,8 @@ class HttpCli(object):
|
||||
ts=ts,
|
||||
prologue=logues[0],
|
||||
epilogue=logues[1],
|
||||
title=html_escape(self.vpath, quote=False),
|
||||
title=html_escape(self.vpath),
|
||||
srv_info="</span> /// <span>".join(srv_info),
|
||||
)
|
||||
self.reply(html.encode("utf-8", "replace"))
|
||||
return True
|
||||
|
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import os
|
||||
import sys
|
||||
import ssl
|
||||
import time
|
||||
import socket
|
||||
|
||||
try:
|
||||
@@ -41,9 +42,11 @@ class HttpConn(object):
|
||||
self.auth = hsrv.auth
|
||||
self.cert_path = hsrv.cert_path
|
||||
|
||||
self.t0 = time.time()
|
||||
self.nbyte = 0
|
||||
self.workload = 0
|
||||
self.log_func = hsrv.log
|
||||
self.log_src = "{} \033[36m{}".format(addr[0], addr[1]).ljust(26)
|
||||
self.set_rproxy()
|
||||
|
||||
env = jinja2.Environment()
|
||||
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
|
||||
@@ -53,6 +56,19 @@ class HttpConn(object):
|
||||
self.tpl_md = env.get_template("md.html")
|
||||
self.tpl_mde = env.get_template("mde.html")
|
||||
|
||||
def set_rproxy(self, ip=None):
|
||||
if ip is None:
|
||||
color = 36
|
||||
ip = self.addr[0]
|
||||
self.rproxy = None
|
||||
else:
|
||||
color = 34
|
||||
self.rproxy = ip
|
||||
|
||||
self.ip = ip
|
||||
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
|
||||
return self.log_src
|
||||
|
||||
def respath(self, res_name):
|
||||
return os.path.join(E.mod, "web", res_name)
|
||||
|
||||
@@ -86,7 +102,7 @@ class HttpConn(object):
|
||||
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
||||
return
|
||||
|
||||
if method not in [None, b"GET ", b"HEAD", b"POST"]:
|
||||
if method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]:
|
||||
if self.sr:
|
||||
self.log("\033[1;31mTODO: cannot do https in jython\033[0m")
|
||||
return
|
||||
|
@@ -38,7 +38,7 @@ class HttpSrv(object):
|
||||
|
||||
def accept(self, sck, addr):
|
||||
"""takes an incoming tcp connection and creates a thread to handle it"""
|
||||
self.log("%s %s" % addr, "-" * 5 + "C-cthr")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-cthr\033[0m" % ("-" * 5,))
|
||||
thr = threading.Thread(target=self.thr_client, args=(sck, addr))
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
@@ -66,11 +66,11 @@ class HttpSrv(object):
|
||||
thr.start()
|
||||
|
||||
try:
|
||||
self.log("%s %s" % addr, "-" * 6 + "C-crun")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-crun\033[0m" % ("-" * 6,))
|
||||
cli.run()
|
||||
|
||||
finally:
|
||||
self.log("%s %s" % addr, "-" * 7 + "C-done")
|
||||
self.log("%s %s" % addr, "\033[1;30m|%sC-cdone\033[0m" % ("-" * 7,))
|
||||
try:
|
||||
sck.shutdown(socket.SHUT_RDWR)
|
||||
sck.close()
|
||||
@@ -80,8 +80,9 @@ class HttpSrv(object):
|
||||
"%s %s" % addr,
|
||||
"shut_rdwr err:\n {}\n {}".format(repr(sck), ex),
|
||||
)
|
||||
if ex.errno not in [10038, 107, 57, 9]:
|
||||
if ex.errno not in [10038, 10054, 107, 57, 9]:
|
||||
# 10038 No longer considered a socket
|
||||
# 10054 Foribly closed by remote
|
||||
# 107 Transport endpoint not connected
|
||||
# 57 Socket is not connected
|
||||
# 9 Bad file descriptor
|
||||
|
@@ -9,6 +9,7 @@ from datetime import datetime, timedelta
|
||||
import calendar
|
||||
|
||||
from .__init__ import PY2, WINDOWS, MACOS, VT100
|
||||
from .authsrv import AuthSrv
|
||||
from .tcpsrv import TcpSrv
|
||||
from .up2k import Up2k
|
||||
from .util import mp
|
||||
@@ -38,6 +39,10 @@ class SvcHub(object):
|
||||
self.tcpsrv = TcpSrv(self)
|
||||
self.up2k = Up2k(self)
|
||||
|
||||
if self.args.e2d and self.args.e2s:
|
||||
auth = AuthSrv(self.args, self.log, False)
|
||||
self.up2k.build_indexes(auth.all_writable)
|
||||
|
||||
# decide which worker impl to use
|
||||
if self.check_mp_enable():
|
||||
from .broker_mp import BrokerMp as Broker
|
||||
@@ -129,8 +134,8 @@ class SvcHub(object):
|
||||
return None
|
||||
|
||||
def check_mp_enable(self):
|
||||
if self.args.j == 0:
|
||||
self.log("root", "multiprocessing disabled by argument -j 0;")
|
||||
if self.args.j == 1:
|
||||
self.log("root", "multiprocessing disabled by argument -j 1;")
|
||||
return False
|
||||
|
||||
if mp.cpu_count() <= 1:
|
||||
|
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import re
|
||||
import time
|
||||
import socket
|
||||
import select
|
||||
|
||||
from .util import chkcmd, Counter
|
||||
|
||||
@@ -23,55 +24,75 @@ class TcpSrv(object):
|
||||
|
||||
ip = "127.0.0.1"
|
||||
eps = {ip: "local only"}
|
||||
if self.args.i != ip:
|
||||
eps = self.detect_interfaces(self.args.i) or {self.args.i: "external"}
|
||||
nonlocals = [x for x in self.args.i if x != ip]
|
||||
if nonlocals:
|
||||
eps = self.detect_interfaces(self.args.i)
|
||||
if not eps:
|
||||
for x in nonlocals:
|
||||
eps[x] = "external"
|
||||
|
||||
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
|
||||
self.log(
|
||||
"tcpsrv",
|
||||
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
|
||||
ip, self.args.p, desc
|
||||
),
|
||||
)
|
||||
for port in sorted(self.args.p):
|
||||
self.log(
|
||||
"tcpsrv",
|
||||
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
|
||||
ip, port, desc
|
||||
),
|
||||
)
|
||||
|
||||
self.srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
self.srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
self.srv = []
|
||||
for ip in self.args.i:
|
||||
for port in self.args.p:
|
||||
self.srv.append(self._listen(ip, port))
|
||||
|
||||
def _listen(self, ip, port):
|
||||
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
try:
|
||||
self.srv.bind((self.args.i, self.args.p))
|
||||
srv.bind((ip, port))
|
||||
return srv
|
||||
except (OSError, socket.error) as ex:
|
||||
if ex.errno == 98:
|
||||
raise Exception(
|
||||
"\033[1;31mport {} is busy on interface {}\033[0m".format(
|
||||
self.args.p, self.args.i
|
||||
)
|
||||
"\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||
)
|
||||
|
||||
if ex.errno == 99:
|
||||
raise Exception(
|
||||
"\033[1;31minterface {} does not exist\033[0m".format(self.args.i)
|
||||
"\033[1;31minterface {} does not exist\033[0m".format(ip)
|
||||
)
|
||||
|
||||
def run(self):
|
||||
self.srv.listen(self.args.nc)
|
||||
|
||||
self.log("tcpsrv", "listening @ {0}:{1}".format(self.args.i, self.args.p))
|
||||
for srv in self.srv:
|
||||
srv.listen(self.args.nc)
|
||||
ip, port = srv.getsockname()
|
||||
self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port))
|
||||
|
||||
while True:
|
||||
self.log("tcpsrv", "-" * 1 + "C-ncli")
|
||||
self.log("tcpsrv", "\033[1;30m|%sC-ncli\033[0m" % ("-" * 1,))
|
||||
if self.num_clients.v >= self.args.nc:
|
||||
time.sleep(0.1)
|
||||
continue
|
||||
|
||||
self.log("tcpsrv", "-" * 2 + "C-acc1")
|
||||
sck, addr = self.srv.accept()
|
||||
self.log("%s %s" % addr, "-" * 3 + "C-acc2")
|
||||
self.num_clients.add()
|
||||
self.hub.broker.put(False, "httpconn", sck, addr)
|
||||
self.log("tcpsrv", "\033[1;30m|%sC-acc1\033[0m" % ("-" * 2,))
|
||||
ready, _, _ = select.select(self.srv, [], [])
|
||||
for srv in ready:
|
||||
sck, addr = srv.accept()
|
||||
sip, sport = srv.getsockname()
|
||||
self.log(
|
||||
"%s %s" % addr,
|
||||
"\033[1;30m|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
|
||||
"-" * 3, sip, sport % 8, sport
|
||||
),
|
||||
)
|
||||
self.num_clients.add()
|
||||
self.hub.broker.put(False, "httpconn", sck, addr)
|
||||
|
||||
def shutdown(self):
|
||||
self.log("tcpsrv", "ok bye")
|
||||
|
||||
def detect_interfaces(self, listen_ip):
|
||||
def detect_interfaces(self, listen_ips):
|
||||
eps = {}
|
||||
|
||||
# get all ips and their interfaces
|
||||
@@ -85,8 +106,9 @@ class TcpSrv(object):
|
||||
for ln in ip_addr.split("\n"):
|
||||
try:
|
||||
ip, dev = r.match(ln.rstrip()).groups()
|
||||
if listen_ip in ["0.0.0.0", ip]:
|
||||
eps[ip] = dev
|
||||
for lip in listen_ips:
|
||||
if lip in ["0.0.0.0", ip]:
|
||||
eps[ip] = dev
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -113,11 +135,12 @@ class TcpSrv(object):
|
||||
|
||||
s.close()
|
||||
|
||||
if default_route and listen_ip in ["0.0.0.0", default_route]:
|
||||
desc = "\033[32mexternal"
|
||||
try:
|
||||
eps[default_route] += ", " + desc
|
||||
except:
|
||||
eps[default_route] = desc
|
||||
for lip in listen_ips:
|
||||
if default_route and lip in ["0.0.0.0", default_route]:
|
||||
desc = "\033[32mexternal"
|
||||
try:
|
||||
eps[default_route] += ", " + desc
|
||||
except:
|
||||
eps[default_route] = desc
|
||||
|
||||
return eps
|
||||
|
@@ -6,6 +6,9 @@ import os
|
||||
import re
|
||||
import time
|
||||
import math
|
||||
import json
|
||||
import gzip
|
||||
import stat
|
||||
import shutil
|
||||
import base64
|
||||
import hashlib
|
||||
@@ -13,7 +16,15 @@ import threading
|
||||
from copy import deepcopy
|
||||
|
||||
from .__init__ import WINDOWS
|
||||
from .util import Pebkac, Queue, fsenc, sanitize_fn
|
||||
from .util import Pebkac, Queue, fsdec, fsenc, sanitize_fn, ren_open, atomic_move
|
||||
|
||||
HAVE_SQLITE3 = False
|
||||
try:
|
||||
import sqlite3
|
||||
|
||||
HAVE_SQLITE3 = True
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class Up2k(object):
|
||||
@@ -22,20 +33,21 @@ class Up2k(object):
|
||||
* documentation
|
||||
* registry persistence
|
||||
* ~/.config flatfiles for active jobs
|
||||
* wark->path database for finished uploads
|
||||
"""
|
||||
|
||||
def __init__(self, broker):
|
||||
self.broker = broker
|
||||
self.args = broker.args
|
||||
self.log = broker.log
|
||||
self.persist = self.args.e2d
|
||||
|
||||
# config
|
||||
self.salt = "hunter2" # TODO: config
|
||||
|
||||
# state
|
||||
self.registry = {}
|
||||
self.mutex = threading.Lock()
|
||||
self.registry = {}
|
||||
self.db = {}
|
||||
|
||||
if WINDOWS:
|
||||
# usually fails to set lastmod too quickly
|
||||
@@ -44,54 +56,306 @@ class Up2k(object):
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
if self.persist:
|
||||
thr = threading.Thread(target=self._snapshot)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
# static
|
||||
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
|
||||
|
||||
if self.persist and not HAVE_SQLITE3:
|
||||
m = "could not initialize sqlite3, will use in-memory registry only"
|
||||
self.log("up2k", m)
|
||||
|
||||
def _vis_job_progress(self, job):
|
||||
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
|
||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
return "{:5.1f}% {}".format(perc, path)
|
||||
|
||||
def _vis_reg_progress(self, reg):
|
||||
ret = []
|
||||
for _, job in reg.items():
|
||||
ret.append(self._vis_job_progress(job))
|
||||
|
||||
return ret
|
||||
|
||||
def register_vpath(self, ptop):
|
||||
with self.mutex:
|
||||
if ptop in self.registry:
|
||||
return None
|
||||
|
||||
reg = {}
|
||||
path = os.path.join(ptop, ".hist", "up2k.snap")
|
||||
if self.persist and os.path.exists(path):
|
||||
with gzip.GzipFile(path, "rb") as f:
|
||||
j = f.read().decode("utf-8")
|
||||
|
||||
reg = json.loads(j)
|
||||
for _, job in reg.items():
|
||||
job["poke"] = time.time()
|
||||
|
||||
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
|
||||
m = [m] + self._vis_reg_progress(reg)
|
||||
self.log("up2k", "\n".join(m))
|
||||
|
||||
self.registry[ptop] = reg
|
||||
if not self.persist or not HAVE_SQLITE3:
|
||||
return None
|
||||
|
||||
try:
|
||||
os.mkdir(os.path.join(ptop, ".hist"))
|
||||
except:
|
||||
pass
|
||||
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
if ptop in self.db:
|
||||
# self.db[ptop].close()
|
||||
return None
|
||||
|
||||
try:
|
||||
db = self._open_db(db_path)
|
||||
self.db[ptop] = db
|
||||
return db
|
||||
except Exception as ex:
|
||||
m = "failed to open [{}]: {}".format(ptop, repr(ex))
|
||||
self.log("up2k", m)
|
||||
|
||||
return None
|
||||
|
||||
def build_indexes(self, writeables):
|
||||
tops = [d.realpath for d in writeables]
|
||||
for top in tops:
|
||||
db = self.register_vpath(top)
|
||||
if db:
|
||||
# can be symlink so don't `and d.startswith(top)``
|
||||
excl = set([d for d in tops if d != top])
|
||||
self._build_dir([db, 0], top, excl, top)
|
||||
self._drop_lost(db, top)
|
||||
db.commit()
|
||||
|
||||
def _build_dir(self, dbw, top, excl, cdir):
|
||||
try:
|
||||
inodes = [fsdec(x) for x in os.listdir(fsenc(cdir))]
|
||||
except Exception as ex:
|
||||
self.log("up2k", "listdir: " + repr(ex))
|
||||
return
|
||||
|
||||
histdir = os.path.join(top, ".hist")
|
||||
for inode in inodes:
|
||||
abspath = os.path.join(cdir, inode)
|
||||
try:
|
||||
inf = os.stat(fsenc(abspath))
|
||||
except Exception as ex:
|
||||
self.log("up2k", "stat: " + repr(ex))
|
||||
continue
|
||||
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
if abspath in excl or abspath == histdir:
|
||||
continue
|
||||
# self.log("up2k", " dir: {}".format(abspath))
|
||||
self._build_dir(dbw, top, excl, abspath)
|
||||
else:
|
||||
# self.log("up2k", "file: {}".format(abspath))
|
||||
rp = abspath[len(top) :].replace("\\", "/").strip("/")
|
||||
c = dbw[0].execute("select * from up where rp = ?", (rp,))
|
||||
in_db = list(c.fetchall())
|
||||
if in_db:
|
||||
_, dts, dsz, _ = in_db[0]
|
||||
if len(in_db) > 1:
|
||||
m = "WARN: multiple entries: [{}] => [{}] ({})"
|
||||
self.log("up2k", m.format(top, rp, len(in_db)))
|
||||
dts = -1
|
||||
|
||||
if dts == inf.st_mtime and dsz == inf.st_size:
|
||||
continue
|
||||
|
||||
m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
|
||||
top, rp, dts, inf.st_mtime, dsz, inf.st_size
|
||||
)
|
||||
self.log("up2k", m)
|
||||
self.db_rm(dbw[0], rp)
|
||||
dbw[1] += 1
|
||||
in_db = None
|
||||
|
||||
self.log("up2k", "file: {}".format(abspath))
|
||||
try:
|
||||
hashes = self._hashlist_from_file(abspath)
|
||||
except Exception as ex:
|
||||
self.log("up2k", "hash: " + repr(ex))
|
||||
continue
|
||||
|
||||
wark = self._wark_from_hashlist(inf.st_size, hashes)
|
||||
self.db_add(dbw[0], wark, rp, inf.st_mtime, inf.st_size)
|
||||
dbw[1] += 1
|
||||
if dbw[1] > 1024:
|
||||
dbw[0].commit()
|
||||
dbw[1] = 0
|
||||
|
||||
def _drop_lost(self, db, top):
|
||||
rm = []
|
||||
c = db.execute("select * from up")
|
||||
for dwark, dts, dsz, drp in c:
|
||||
abspath = os.path.join(top, drp)
|
||||
try:
|
||||
if not os.path.exists(fsenc(abspath)):
|
||||
rm.append(drp)
|
||||
except Exception as ex:
|
||||
self.log("up2k", "stat-rm: " + repr(ex))
|
||||
|
||||
if not rm:
|
||||
return
|
||||
|
||||
self.log("up2k", "forgetting {} deleted files".format(len(rm)))
|
||||
for rp in rm:
|
||||
self.db_rm(db, rp)
|
||||
|
||||
def _open_db(self, db_path):
|
||||
conn = sqlite3.connect(db_path, check_same_thread=False)
|
||||
try:
|
||||
c = conn.execute(r"select * from kv where k = 'sver'")
|
||||
rows = c.fetchall()
|
||||
if rows:
|
||||
ver = rows[0][1]
|
||||
else:
|
||||
self.log("up2k", "WARN: no sver in kv, DB corrupt?")
|
||||
ver = "unknown"
|
||||
|
||||
if ver == "1":
|
||||
try:
|
||||
nfiles = next(conn.execute("select count(w) from up"))[0]
|
||||
self.log("up2k", "found DB at {} |{}|".format(db_path, nfiles))
|
||||
return conn
|
||||
except Exception as ex:
|
||||
m = "WARN: could not list files, DB corrupt?\n " + repr(ex)
|
||||
self.log("up2k", m)
|
||||
|
||||
m = "REPLACING unsupported DB (v.{}) at {}".format(ver, db_path)
|
||||
self.log("up2k", m)
|
||||
conn.close()
|
||||
os.unlink(db_path)
|
||||
conn = sqlite3.connect(db_path, check_same_thread=False)
|
||||
except:
|
||||
pass
|
||||
|
||||
# sqlite is variable-width only, no point in using char/nchar/varchar
|
||||
for cmd in [
|
||||
r"create table kv (k text, v text)",
|
||||
r"create table up (w text, mt int, sz int, rp text)",
|
||||
r"insert into kv values ('sver', '1')",
|
||||
r"create index up_w on up(w)",
|
||||
]:
|
||||
conn.execute(cmd)
|
||||
|
||||
conn.commit()
|
||||
self.log("up2k", "created DB at {}".format(db_path))
|
||||
return conn
|
||||
|
||||
def handle_json(self, cj):
|
||||
self.register_vpath(cj["ptop"])
|
||||
cj["name"] = sanitize_fn(cj["name"])
|
||||
cj["poke"] = time.time()
|
||||
wark = self._get_wark(cj)
|
||||
now = time.time()
|
||||
job = None
|
||||
with self.mutex:
|
||||
# TODO use registry persistence here to symlink any matching wark
|
||||
if wark in self.registry:
|
||||
job = self.registry[wark]
|
||||
if job["rdir"] != cj["rdir"] or job["name"] != cj["name"]:
|
||||
src = os.path.join(job["rdir"], job["name"])
|
||||
dst = os.path.join(cj["rdir"], cj["name"])
|
||||
db = self.db.get(cj["ptop"], None)
|
||||
reg = self.registry[cj["ptop"]]
|
||||
if db:
|
||||
cur = db.execute(r"select * from up where w = ?", (wark,))
|
||||
for _, dtime, dsize, dp_rel in cur:
|
||||
dp_abs = os.path.join(cj["ptop"], dp_rel).replace("\\", "/")
|
||||
# relying on path.exists to return false on broken symlinks
|
||||
if os.path.exists(fsenc(dp_abs)):
|
||||
try:
|
||||
prel, name = dp_rel.rsplit("/", 1)
|
||||
except:
|
||||
prel = ""
|
||||
name = dp_rel
|
||||
|
||||
job = {
|
||||
"name": name,
|
||||
"prel": prel,
|
||||
"vtop": cj["vtop"],
|
||||
"ptop": cj["ptop"],
|
||||
"flag": cj["flag"],
|
||||
"size": dsize,
|
||||
"lmod": dtime,
|
||||
"hash": [],
|
||||
"need": [],
|
||||
}
|
||||
break
|
||||
|
||||
if job and wark in reg:
|
||||
del reg[wark]
|
||||
|
||||
if job or wark in reg:
|
||||
job = job or reg[wark]
|
||||
if job["prel"] == cj["prel"] and job["name"] == cj["name"]:
|
||||
# ensure the files haven't been deleted manually
|
||||
names = [job[x] for x in ["name", "tnam"] if x in job]
|
||||
for fn in names:
|
||||
path = os.path.join(job["ptop"], job["prel"], fn)
|
||||
try:
|
||||
if os.path.getsize(path) > 0:
|
||||
# upload completed or both present
|
||||
break
|
||||
except:
|
||||
# missing; restart
|
||||
job = None
|
||||
break
|
||||
else:
|
||||
# file contents match, but not the path
|
||||
src = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
dst = os.path.join(cj["ptop"], cj["prel"], cj["name"])
|
||||
vsrc = os.path.join(job["vtop"], job["prel"], job["name"])
|
||||
vsrc = vsrc.replace("\\", "/") # just for prints anyways
|
||||
if job["need"]:
|
||||
self.log("up2k", "unfinished:\n {0}\n {1}".format(src, dst))
|
||||
err = "partial upload exists at a different location; please resume uploading here instead:\n{0}{1} ".format(
|
||||
job["vdir"], job["name"]
|
||||
)
|
||||
err = "partial upload exists at a different location; please resume uploading here instead:\n"
|
||||
err += vsrc + " "
|
||||
raise Pebkac(400, err)
|
||||
elif "nodupe" in job["flag"]:
|
||||
self.log("up2k", "dupe-reject:\n {0}\n {1}".format(src, dst))
|
||||
err = "upload rejected, file already exists:\n " + vsrc + " "
|
||||
raise Pebkac(400, err)
|
||||
else:
|
||||
# symlink to the client-provided name,
|
||||
# returning the previous upload info
|
||||
job = deepcopy(job)
|
||||
suffix = self._suffix(dst, now, job["addr"])
|
||||
job["name"] = cj["name"] + suffix
|
||||
self._symlink(src, dst + suffix)
|
||||
else:
|
||||
for k in ["ptop", "vtop", "prel"]:
|
||||
job[k] = cj[k]
|
||||
|
||||
pdir = os.path.join(cj["ptop"], cj["prel"])
|
||||
job["name"] = self._untaken(pdir, cj["name"], now, cj["addr"])
|
||||
dst = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
os.unlink(fsenc(dst)) # TODO ed pls
|
||||
self._symlink(src, dst)
|
||||
|
||||
if not job:
|
||||
job = {
|
||||
"wark": wark,
|
||||
"t0": now,
|
||||
"addr": cj["addr"],
|
||||
"vdir": cj["vdir"],
|
||||
"rdir": cj["rdir"],
|
||||
# client-provided, sanitized by _get_wark:
|
||||
"name": cj["name"],
|
||||
"size": cj["size"],
|
||||
"lmod": cj["lmod"],
|
||||
"hash": deepcopy(cj["hash"]),
|
||||
"need": [],
|
||||
}
|
||||
|
||||
path = os.path.join(job["rdir"], job["name"])
|
||||
job["name"] += self._suffix(path, now, cj["addr"])
|
||||
# client-provided, sanitized by _get_wark: name, size, lmod
|
||||
for k in [
|
||||
"addr",
|
||||
"vtop",
|
||||
"ptop",
|
||||
"prel",
|
||||
"flag",
|
||||
"name",
|
||||
"size",
|
||||
"lmod",
|
||||
"poke",
|
||||
]:
|
||||
job[k] = cj[k]
|
||||
|
||||
# one chunk may occur multiple times in a file;
|
||||
# filter to unique values for the list of missing chunks
|
||||
# (preserve order to reduce disk thrashing)
|
||||
job["need"] = []
|
||||
lut = {}
|
||||
for k in cj["hash"]:
|
||||
if k not in lut:
|
||||
@@ -108,13 +372,12 @@ class Up2k(object):
|
||||
"wark": wark,
|
||||
}
|
||||
|
||||
def _suffix(self, fpath, ts, ip):
|
||||
def _untaken(self, fdir, fname, ts, ip):
|
||||
# TODO broker which avoid this race and
|
||||
# provides a new filename if taken (same as bup)
|
||||
if not os.path.exists(fsenc(fpath)):
|
||||
return ""
|
||||
|
||||
return ".{:.6f}-{}".format(ts, ip)
|
||||
suffix = ".{:.6f}-{}".format(ts, ip)
|
||||
with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
|
||||
return f["orz"][1]
|
||||
|
||||
def _symlink(self, src, dst):
|
||||
# TODO store this in linktab so we never delete src if there are links to it
|
||||
@@ -141,40 +404,58 @@ class Up2k(object):
|
||||
lsrc = "../" * (len(lsrc) - 1) + "/".join(lsrc)
|
||||
os.symlink(fsenc(lsrc), fsenc(ldst))
|
||||
except (AttributeError, OSError) as ex:
|
||||
self.log("up2k", "cannot symlink; creating copy")
|
||||
self.log("up2k", "cannot symlink; creating copy: " + repr(ex))
|
||||
shutil.copy2(fsenc(src), fsenc(dst))
|
||||
|
||||
def handle_chunk(self, wark, chash):
|
||||
def handle_chunk(self, ptop, wark, chash):
|
||||
with self.mutex:
|
||||
job = self.registry.get(wark)
|
||||
job = self.registry[ptop].get(wark, None)
|
||||
if not job:
|
||||
raise Pebkac(404, "unknown wark")
|
||||
raise Pebkac(400, "unknown wark")
|
||||
|
||||
if chash not in job["need"]:
|
||||
raise Pebkac(200, "already got that but thanks??")
|
||||
|
||||
nchunk = [n for n, v in enumerate(job["hash"]) if v == chash]
|
||||
if not nchunk:
|
||||
raise Pebkac(404, "unknown chunk")
|
||||
raise Pebkac(400, "unknown chunk")
|
||||
|
||||
job["poke"] = time.time()
|
||||
|
||||
chunksize = self._get_chunksize(job["size"])
|
||||
ofs = [chunksize * x for x in nchunk]
|
||||
|
||||
path = os.path.join(job["rdir"], job["name"])
|
||||
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
|
||||
|
||||
return [chunksize, ofs, path, job["lmod"]]
|
||||
|
||||
def confirm_chunk(self, wark, chash):
|
||||
def confirm_chunk(self, ptop, wark, chash):
|
||||
with self.mutex:
|
||||
job = self.registry[wark]
|
||||
job = self.registry[ptop][wark]
|
||||
pdir = os.path.join(job["ptop"], job["prel"])
|
||||
src = os.path.join(pdir, job["tnam"])
|
||||
dst = os.path.join(pdir, job["name"])
|
||||
|
||||
job["need"].remove(chash)
|
||||
ret = len(job["need"])
|
||||
if ret > 0:
|
||||
return ret, src
|
||||
|
||||
if WINDOWS and ret == 0:
|
||||
path = os.path.join(job["rdir"], job["name"])
|
||||
self.lastmod_q.put([path, (int(time.time()), int(job["lmod"]))])
|
||||
atomic_move(src, dst)
|
||||
|
||||
return ret
|
||||
if WINDOWS:
|
||||
self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))])
|
||||
|
||||
db = self.db.get(job["ptop"], None)
|
||||
if db:
|
||||
rp = os.path.join(job["prel"], job["name"]).replace("\\", "/")
|
||||
self.db_rm(db, rp)
|
||||
self.db_add(db, job["wark"], rp, job["lmod"], job["size"])
|
||||
db.commit()
|
||||
del self.registry[ptop][wark]
|
||||
# in-memory registry is reserved for unfinished uploads
|
||||
|
||||
return ret, dst
|
||||
|
||||
def _get_chunksize(self, filesize):
|
||||
chunksize = 1024 * 1024
|
||||
@@ -188,6 +469,13 @@ class Up2k(object):
|
||||
chunksize += stepsize
|
||||
stepsize *= mul
|
||||
|
||||
def db_rm(self, db, rp):
|
||||
db.execute("delete from up where rp = ?", (rp,))
|
||||
|
||||
def db_add(self, db, wark, rp, ts, sz):
|
||||
v = (wark, ts, sz, rp)
|
||||
db.execute("insert into up values (?,?,?,?)", v)
|
||||
|
||||
def _get_wark(self, cj):
|
||||
if len(cj["name"]) > 1024 or len(cj["hash"]) > 512 * 1024: # 16TiB
|
||||
raise Pebkac(400, "name or numchunks not according to spec")
|
||||
@@ -204,9 +492,13 @@ class Up2k(object):
|
||||
except:
|
||||
cj["lmod"] = int(time.time())
|
||||
|
||||
# server-reproducible file identifier, independent of name or location
|
||||
ident = [self.salt, str(cj["size"])]
|
||||
ident.extend(cj["hash"])
|
||||
wark = self._wark_from_hashlist(cj["size"], cj["hash"])
|
||||
return wark
|
||||
|
||||
def _wark_from_hashlist(self, filesize, hashes):
|
||||
""" server-reproducible file identifier, independent of name or location """
|
||||
ident = [self.salt, str(filesize)]
|
||||
ident.extend(hashes)
|
||||
ident = "\n".join(ident)
|
||||
|
||||
hasher = hashlib.sha512()
|
||||
@@ -216,10 +508,40 @@ class Up2k(object):
|
||||
wark = base64.urlsafe_b64encode(digest)
|
||||
return wark.decode("utf-8").rstrip("=")
|
||||
|
||||
def _hashlist_from_file(self, path):
|
||||
fsz = os.path.getsize(path)
|
||||
csz = self._get_chunksize(fsz)
|
||||
ret = []
|
||||
with open(path, "rb", 512 * 1024) as f:
|
||||
while fsz > 0:
|
||||
hashobj = hashlib.sha512()
|
||||
rem = min(csz, fsz)
|
||||
fsz -= rem
|
||||
while rem > 0:
|
||||
buf = f.read(min(rem, 64 * 1024))
|
||||
if not buf:
|
||||
raise Exception("EOF at " + str(f.tell()))
|
||||
|
||||
hashobj.update(buf)
|
||||
rem -= len(buf)
|
||||
|
||||
digest = hashobj.digest()[:32]
|
||||
digest = base64.urlsafe_b64encode(digest)
|
||||
ret.append(digest.decode("utf-8").rstrip("="))
|
||||
|
||||
return ret
|
||||
|
||||
def _new_upload(self, job):
|
||||
self.registry[job["wark"]] = job
|
||||
path = os.path.join(job["rdir"], job["name"])
|
||||
with open(fsenc(path), "wb") as f:
|
||||
self.registry[job["ptop"]][job["wark"]] = job
|
||||
pdir = os.path.join(job["ptop"], job["prel"])
|
||||
job["name"] = self._untaken(pdir, job["name"], job["t0"], job["addr"])
|
||||
# if len(job["name"].split(".")) > 8:
|
||||
# raise Exception("aaa")
|
||||
|
||||
tnam = job["name"] + ".PARTIAL"
|
||||
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
|
||||
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
|
||||
f, job["tnam"] = f["orz"]
|
||||
f.seek(job["size"] - 1)
|
||||
f.write(b"e")
|
||||
|
||||
@@ -236,3 +558,58 @@ class Up2k(object):
|
||||
os.utime(fsenc(path), times)
|
||||
except:
|
||||
self.log("lmod", "failed to utime ({}, {})".format(path, times))
|
||||
|
||||
def _snapshot(self):
|
||||
persist_interval = 30 # persist unfinished uploads index every 30 sec
|
||||
discard_interval = 21600 # drop unfinished uploads after 6 hours inactivity
|
||||
prev = {}
|
||||
while True:
|
||||
time.sleep(persist_interval)
|
||||
with self.mutex:
|
||||
for k, reg in self.registry.items():
|
||||
self._snap_reg(prev, k, reg, discard_interval)
|
||||
|
||||
def _snap_reg(self, prev, k, reg, discard_interval):
|
||||
now = time.time()
|
||||
rm = [x for x in reg.values() if now - x["poke"] > discard_interval]
|
||||
if rm:
|
||||
m = "dropping {} abandoned uploads in {}".format(len(rm), k)
|
||||
vis = [self._vis_job_progress(x) for x in rm]
|
||||
self.log("up2k", "\n".join([m] + vis))
|
||||
for job in rm:
|
||||
del reg[job["wark"]]
|
||||
try:
|
||||
# remove the filename reservation
|
||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
if os.path.getsize(path) == 0:
|
||||
os.unlink(path)
|
||||
|
||||
if len(job["hash"]) == len(job["need"]):
|
||||
# PARTIAL is empty, delete that too
|
||||
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
|
||||
os.unlink(path)
|
||||
except:
|
||||
pass
|
||||
|
||||
path = os.path.join(k, ".hist", "up2k.snap")
|
||||
if not reg:
|
||||
if k not in prev or prev[k] is not None:
|
||||
prev[k] = None
|
||||
if os.path.exists(path):
|
||||
os.unlink(path)
|
||||
return
|
||||
|
||||
newest = max(x["poke"] for _, x in reg.items()) if reg else 0
|
||||
etag = [len(reg), newest]
|
||||
if etag == prev.get(k, None):
|
||||
return
|
||||
|
||||
path2 = "{}.{}".format(path, os.getpid())
|
||||
j = json.dumps(reg, indent=2, sort_keys=True).encode("utf-8")
|
||||
with gzip.GzipFile(path2, "wb") as f:
|
||||
f.write(j)
|
||||
|
||||
atomic_move(path2, path)
|
||||
|
||||
self.log("up2k", "snap: {} |{}|".format(path, len(reg.keys())))
|
||||
prev[k] = etag
|
||||
|
@@ -2,13 +2,17 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import base64
|
||||
import select
|
||||
import struct
|
||||
import hashlib
|
||||
import platform
|
||||
import threading
|
||||
import mimetypes
|
||||
import contextlib
|
||||
import subprocess as sp # nosec
|
||||
|
||||
from .__init__ import PY2, WINDOWS
|
||||
@@ -42,6 +46,7 @@ if WINDOWS and PY2:
|
||||
|
||||
HTTPCODE = {
|
||||
200: "OK",
|
||||
204: "No Content",
|
||||
206: "Partial Content",
|
||||
304: "Not Modified",
|
||||
400: "Bad Request",
|
||||
@@ -49,6 +54,7 @@ HTTPCODE = {
|
||||
404: "Not Found",
|
||||
405: "Method Not Allowed",
|
||||
413: "Payload Too Large",
|
||||
416: "Requested Range Not Satisfiable",
|
||||
422: "Unprocessable Entity",
|
||||
500: "Internal Server Error",
|
||||
501: "Not Implemented",
|
||||
@@ -93,6 +99,80 @@ class Unrecv(object):
|
||||
self.buf = buf + self.buf
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def ren_open(fname, *args, **kwargs):
|
||||
fdir = kwargs.pop("fdir", None)
|
||||
suffix = kwargs.pop("suffix", None)
|
||||
|
||||
if fname == os.devnull:
|
||||
with open(fname, *args, **kwargs) as f:
|
||||
yield {"orz": [f, fname]}
|
||||
return
|
||||
|
||||
orig_name = fname
|
||||
bname = fname
|
||||
ext = ""
|
||||
while True:
|
||||
ofs = bname.rfind(".")
|
||||
if ofs < 0 or ofs < len(bname) - 7:
|
||||
# doesn't look like an extension anymore
|
||||
break
|
||||
|
||||
ext = bname[ofs:] + ext
|
||||
bname = bname[:ofs]
|
||||
|
||||
b64 = ""
|
||||
while True:
|
||||
try:
|
||||
if fdir:
|
||||
fpath = os.path.join(fdir, fname)
|
||||
else:
|
||||
fpath = fname
|
||||
|
||||
if suffix and os.path.exists(fpath):
|
||||
fpath += suffix
|
||||
fname += suffix
|
||||
ext += suffix
|
||||
|
||||
with open(fsenc(fpath), *args, **kwargs) as f:
|
||||
if b64:
|
||||
fp2 = "fn-trunc.{}.txt".format(b64)
|
||||
fp2 = os.path.join(fdir, fp2)
|
||||
with open(fsenc(fp2), "wb") as f2:
|
||||
f2.write(orig_name.encode("utf-8"))
|
||||
|
||||
yield {"orz": [f, fname]}
|
||||
return
|
||||
|
||||
except OSError as ex_:
|
||||
ex = ex_
|
||||
if ex.errno != 36:
|
||||
raise
|
||||
|
||||
if not b64:
|
||||
b64 = (bname + ext).encode("utf-8", "replace")
|
||||
b64 = hashlib.sha512(b64).digest()[:12]
|
||||
b64 = base64.urlsafe_b64encode(b64).decode("utf-8").rstrip("=")
|
||||
|
||||
badlen = len(fname)
|
||||
while len(fname) >= badlen:
|
||||
if len(bname) < 8:
|
||||
raise ex
|
||||
|
||||
if len(bname) > len(ext):
|
||||
# drop the last letter of the filename
|
||||
bname = bname[:-1]
|
||||
else:
|
||||
try:
|
||||
# drop the leftmost sub-extension
|
||||
_, ext = ext.split(".", 1)
|
||||
except:
|
||||
# okay do the first letter then
|
||||
ext = "." + ext[2:]
|
||||
|
||||
fname = "{}~{}{}".format(bname, b64, ext)
|
||||
|
||||
|
||||
class MultipartParser(object):
|
||||
def __init__(self, log_func, sr, http_headers):
|
||||
self.sr = sr
|
||||
@@ -309,18 +389,7 @@ def get_boundary(headers):
|
||||
def read_header(sr):
|
||||
ret = b""
|
||||
while True:
|
||||
if ret.endswith(b"\r\n\r\n"):
|
||||
break
|
||||
elif ret.endswith(b"\r\n\r"):
|
||||
n = 1
|
||||
elif ret.endswith(b"\r\n"):
|
||||
n = 2
|
||||
elif ret.endswith(b"\r"):
|
||||
n = 3
|
||||
else:
|
||||
n = 4
|
||||
|
||||
buf = sr.recv(n)
|
||||
buf = sr.recv(1024)
|
||||
if not buf:
|
||||
if not ret:
|
||||
return None
|
||||
@@ -332,11 +401,40 @@ def read_header(sr):
|
||||
)
|
||||
|
||||
ret += buf
|
||||
ofs = ret.find(b"\r\n\r\n")
|
||||
if ofs < 0:
|
||||
if len(ret) > 1024 * 64:
|
||||
raise Pebkac(400, "header 2big")
|
||||
else:
|
||||
continue
|
||||
|
||||
if len(ret) > 1024 * 64:
|
||||
raise Pebkac(400, "header 2big")
|
||||
sr.unrecv(ret[ofs + 4 :])
|
||||
return ret[:ofs].decode("utf-8", "surrogateescape").split("\r\n")
|
||||
|
||||
return ret[:-4].decode("utf-8", "surrogateescape").split("\r\n")
|
||||
|
||||
def humansize(sz, terse=False):
|
||||
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
|
||||
if sz < 1024:
|
||||
break
|
||||
|
||||
sz /= 1024.0
|
||||
|
||||
ret = " ".join([str(sz)[:4].rstrip("."), unit])
|
||||
|
||||
if not terse:
|
||||
return ret
|
||||
|
||||
return ret.replace("iB", "").replace(" ", "")
|
||||
|
||||
|
||||
def get_spd(nbyte, t0, t=None):
|
||||
if t is None:
|
||||
t = time.time()
|
||||
|
||||
bps = nbyte / ((t - t0) + 0.001)
|
||||
s1 = humansize(nbyte).replace(" ", "\033[33m").replace("iB", "")
|
||||
s2 = humansize(bps).replace(" ", "\033[35m").replace("iB", "")
|
||||
return "{} \033[0m{}/s\033[0m".format(s1, s2)
|
||||
|
||||
|
||||
def undot(path):
|
||||
@@ -388,6 +486,21 @@ def exclude_dotfiles(filepaths):
|
||||
yield fpath
|
||||
|
||||
|
||||
def html_escape(s, quote=False):
|
||||
"""html.escape but also newlines"""
|
||||
s = (
|
||||
s.replace("&", "&")
|
||||
.replace("<", "<")
|
||||
.replace(">", ">")
|
||||
.replace("\r", " ")
|
||||
.replace("\n", " ")
|
||||
)
|
||||
if quote:
|
||||
s = s.replace('"', """).replace("'", "'")
|
||||
|
||||
return s
|
||||
|
||||
|
||||
def quotep(txt):
|
||||
"""url quoter which deals with bytes correctly"""
|
||||
btxt = w8enc(txt)
|
||||
@@ -402,8 +515,8 @@ def quotep(txt):
|
||||
def unquotep(txt):
|
||||
"""url unquoter which deals with bytes correctly"""
|
||||
btxt = w8enc(txt)
|
||||
unq1 = btxt.replace(b"+", b" ")
|
||||
unq2 = unquote(unq1)
|
||||
# btxt = btxt.replace(b"+", b" ")
|
||||
unq2 = unquote(btxt)
|
||||
return w8dec(unq2)
|
||||
|
||||
|
||||
@@ -436,6 +549,16 @@ else:
|
||||
fsdec = w8dec
|
||||
|
||||
|
||||
def atomic_move(src, dst):
|
||||
if not PY2:
|
||||
os.replace(src, dst)
|
||||
else:
|
||||
if os.path.exists(dst):
|
||||
os.unlink(dst)
|
||||
|
||||
os.rename(src, dst)
|
||||
|
||||
|
||||
def read_socket(sr, total_size):
|
||||
remains = total_size
|
||||
while remains > 0:
|
||||
@@ -451,6 +574,15 @@ def read_socket(sr, total_size):
|
||||
yield buf
|
||||
|
||||
|
||||
def read_socket_unbounded(sr):
|
||||
while True:
|
||||
buf = sr.recv(32 * 1024)
|
||||
if not buf:
|
||||
return
|
||||
|
||||
yield buf
|
||||
|
||||
|
||||
def hashcopy(actor, fin, fout):
|
||||
u32_lim = int((2 ** 31) * 0.9)
|
||||
hashobj = hashlib.sha512()
|
||||
@@ -470,6 +602,46 @@ def hashcopy(actor, fin, fout):
|
||||
return tlen, hashobj.hexdigest(), digest_b64
|
||||
|
||||
|
||||
def sendfile_py(lower, upper, f, s):
|
||||
remains = upper - lower
|
||||
f.seek(lower)
|
||||
while remains > 0:
|
||||
# time.sleep(0.01)
|
||||
buf = f.read(min(4096, remains))
|
||||
if not buf:
|
||||
return remains
|
||||
|
||||
try:
|
||||
s.sendall(buf)
|
||||
remains -= len(buf)
|
||||
except:
|
||||
return remains
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def sendfile_kern(lower, upper, f, s):
|
||||
out_fd = s.fileno()
|
||||
in_fd = f.fileno()
|
||||
ofs = lower
|
||||
while ofs < upper:
|
||||
try:
|
||||
req = min(2 ** 30, upper - ofs)
|
||||
select.select([], [out_fd], [], 10)
|
||||
n = os.sendfile(out_fd, in_fd, ofs, req)
|
||||
except Exception as ex:
|
||||
# print("sendfile: " + repr(ex))
|
||||
n = 0
|
||||
|
||||
if n <= 0:
|
||||
return upper - ofs
|
||||
|
||||
ofs += n
|
||||
# print("sendfile: ok, sent {} now, {} total, {} remains".format(n, ofs - lower, upper - ofs))
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def unescape_cookie(orig):
|
||||
# mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn
|
||||
ret = ""
|
||||
@@ -550,3 +722,6 @@ class Pebkac(Exception):
|
||||
def __init__(self, code, msg=None):
|
||||
super(Pebkac, self).__init__(msg or HTTPCODE[code])
|
||||
self.code = code
|
||||
|
||||
def __repr__(self):
|
||||
return "Pebkac({}, {})".format(self.code, repr(self.args))
|
||||
|
12
copyparty/web/Makefile
Normal file
12
copyparty/web/Makefile
Normal file
@@ -0,0 +1,12 @@
|
||||
# run me to zopfli all the static files
|
||||
# which should help on really slow connections
|
||||
# but then why are you using copyparty in the first place
|
||||
|
||||
pk: $(addsuffix .gz, $(wildcard *.js *.css))
|
||||
un: $(addsuffix .un, $(wildcard *.gz))
|
||||
|
||||
%.gz: %
|
||||
pigz -11 -J 34 -I 5730 $<
|
||||
|
||||
%.un: %
|
||||
pigz -d $<
|
@@ -131,6 +131,17 @@ a {
|
||||
.logue {
|
||||
padding: .2em 1.5em;
|
||||
}
|
||||
#srv_info {
|
||||
opacity: .5;
|
||||
font-size: .8em;
|
||||
color: #fc5;
|
||||
position: absolute;
|
||||
top: .5em;
|
||||
left: 2em;
|
||||
}
|
||||
#srv_info span {
|
||||
color: #fff;
|
||||
}
|
||||
a.play {
|
||||
color: #e70;
|
||||
}
|
||||
|
@@ -33,14 +33,15 @@
|
||||
<tr>
|
||||
<th></th>
|
||||
<th>File Name</th>
|
||||
<th>File Size</th>
|
||||
<th sort="int">File Size</th>
|
||||
<th>T</th>
|
||||
<th>Date</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
|
||||
{%- for f in files %}
|
||||
<tr><td>{{ f[0] }}</td><td><a href="{{ f[1] }}">{{ f[2] }}</a></td><td>{{ f[3] }}</td><td>{{ f[4] }}</td></tr>
|
||||
<tr><td>{{ f[0] }}</td><td><a href="{{ f[1] }}">{{ f[2] }}</a></td><td>{{ f[3] }}</td><td>{{ f[4] }}</td><td>{{ f[5] }}</td></tr>
|
||||
{%- endfor %}
|
||||
|
||||
</tbody>
|
||||
@@ -53,6 +54,10 @@
|
||||
|
||||
<h2><a href="?h">control-panel</a></h2>
|
||||
|
||||
{%- if srv_info %}
|
||||
<div id="srv_info"><span>{{ srv_info }}</span></div>
|
||||
{%- endif %}
|
||||
|
||||
<div id="widget">
|
||||
<div id="wtoggle">♫</div>
|
||||
<div id="widgeti">
|
||||
@@ -63,6 +68,8 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script src="/.cpr/util.js{{ ts }}"></script>
|
||||
|
||||
{%- if can_read %}
|
||||
<script src="/.cpr/browser.js{{ ts }}"></script>
|
||||
{%- endif %}
|
||||
|
@@ -1,115 +1,25 @@
|
||||
"use strict";
|
||||
|
||||
// error handler for mobile devices
|
||||
function hcroak(msg) {
|
||||
document.body.innerHTML = msg;
|
||||
window.onerror = undefined;
|
||||
throw 'fatal_err';
|
||||
}
|
||||
function croak(msg) {
|
||||
document.body.textContent = msg;
|
||||
window.onerror = undefined;
|
||||
throw msg;
|
||||
}
|
||||
function esc(txt) {
|
||||
return txt.replace(/[&"<>]/g, function (c) {
|
||||
return {
|
||||
'&': '&',
|
||||
'"': '"',
|
||||
'<': '<',
|
||||
'>': '>'
|
||||
}[c];
|
||||
});
|
||||
}
|
||||
window.onerror = function (msg, url, lineNo, columnNo, error) {
|
||||
window.onerror = undefined;
|
||||
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
|
||||
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
|
||||
|
||||
if (error) {
|
||||
var find = ['desc', 'stack', 'trace'];
|
||||
for (var a = 0; a < find.length; a++)
|
||||
if (String(error[find[a]]) !== 'undefined')
|
||||
html.push('<h2>' + find[a] + '</h2>' +
|
||||
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
|
||||
}
|
||||
document.body.style.fontSize = '0.8em';
|
||||
document.body.style.padding = '0 1em 1em 1em';
|
||||
hcroak(html.join('\n'));
|
||||
};
|
||||
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
||||
if (!String.prototype.endsWith) {
|
||||
String.prototype.endsWith = function (search, this_len) {
|
||||
if (this_len === undefined || this_len > this.length) {
|
||||
this_len = this.length;
|
||||
}
|
||||
return this.substring(this_len - search.length, this_len) === search;
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var script = document.createElement('script');
|
||||
script.type = 'text/javascript';
|
||||
script.src = url;
|
||||
|
||||
script.onreadystatechange = cb;
|
||||
script.onload = cb;
|
||||
|
||||
head.appendChild(script);
|
||||
}
|
||||
|
||||
|
||||
function o(id) {
|
||||
return document.getElementById(id);
|
||||
}
|
||||
window.onerror = vis_exh;
|
||||
|
||||
function dbg(msg) {
|
||||
o('path').innerHTML = msg;
|
||||
ebi('path').innerHTML = msg;
|
||||
}
|
||||
|
||||
function ev(e) {
|
||||
e = e || window.event;
|
||||
e.preventDefault ? e.preventDefault() : (e.returnValue = false);
|
||||
|
||||
if (e.preventDefault)
|
||||
e.preventDefault()
|
||||
|
||||
if (e.stopPropagation)
|
||||
e.stopPropagation();
|
||||
|
||||
e.returnValue = false;
|
||||
return e;
|
||||
}
|
||||
|
||||
|
||||
function sortTable(table, col) {
|
||||
var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows
|
||||
th = table.tHead.rows[0].cells,
|
||||
tr = Array.prototype.slice.call(tb.rows, 0),
|
||||
i, reverse = th[col].className == 'sort1' ? -1 : 1;
|
||||
for (var a = 0, thl = th.length; a < thl; a++)
|
||||
th[a].className = '';
|
||||
th[col].className = 'sort' + reverse;
|
||||
var stype = th[col].getAttribute('sort');
|
||||
tr = tr.sort(function (a, b) {
|
||||
var v1 = a.cells[col].textContent.trim();
|
||||
var v2 = b.cells[col].textContent.trim();
|
||||
if (stype == 'int') {
|
||||
v1 = parseInt(v1.replace(/,/g, ''));
|
||||
v2 = parseInt(v2.replace(/,/g, ''));
|
||||
return reverse * (v1 - v2);
|
||||
}
|
||||
return reverse * (v1.localeCompare(v2));
|
||||
});
|
||||
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]);
|
||||
}
|
||||
function makeSortable(table) {
|
||||
var th = table.tHead, i;
|
||||
th && (th = th.rows[0]) && (th = th.cells);
|
||||
if (th) i = th.length;
|
||||
else return; // if no `<thead>` then do nothing
|
||||
while (--i >= 0) (function (i) {
|
||||
th[i].addEventListener('click', function () { sortTable(table, i) });
|
||||
}(i));
|
||||
}
|
||||
makeSortable(o('files'));
|
||||
makeSortable(ebi('files'));
|
||||
|
||||
|
||||
// extract songs + add play column
|
||||
@@ -122,10 +32,9 @@ var mp = (function () {
|
||||
'tracks': tracks,
|
||||
'cover_url': ''
|
||||
};
|
||||
var re_audio = new RegExp('\.(opus|ogg|m4a|aac|mp3|wav|flac)$', 'i');
|
||||
var re_cover = new RegExp('^(cover|folder|cd|front|back)\.(jpe?g|png|gif)$', 'i');
|
||||
var re_audio = /\.(opus|ogg|m4a|aac|mp3|wav|flac)$/i;
|
||||
|
||||
var trs = document.getElementById('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
|
||||
var trs = ebi('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
|
||||
for (var a = 0, aa = trs.length; a < aa; a++) {
|
||||
var tds = trs[a].getElementsByTagName('td');
|
||||
var link = tds[1].getElementsByTagName('a')[0];
|
||||
@@ -141,7 +50,7 @@ var mp = (function () {
|
||||
}
|
||||
|
||||
for (var a = 0, aa = tracks.length; a < aa; a++)
|
||||
o('trk' + a).onclick = ev_play;
|
||||
ebi('trk' + a).onclick = ev_play;
|
||||
|
||||
ret.vol = localStorage.getItem('vol');
|
||||
if (ret.vol !== null)
|
||||
@@ -168,8 +77,8 @@ var mp = (function () {
|
||||
// toggle player widget
|
||||
var widget = (function () {
|
||||
var ret = {};
|
||||
var widget = document.getElementById('widget');
|
||||
var wtoggle = document.getElementById('wtoggle');
|
||||
var widget = ebi('widget');
|
||||
var wtoggle = ebi('wtoggle');
|
||||
var touchmode = false;
|
||||
var side_open = false;
|
||||
var was_paused = true;
|
||||
@@ -198,7 +107,7 @@ var widget = (function () {
|
||||
ret.paused = function (paused) {
|
||||
if (was_paused != paused) {
|
||||
was_paused = paused;
|
||||
o('bplay').innerHTML = paused ? '▶' : '⏸';
|
||||
ebi('bplay').innerHTML = paused ? '▶' : '⏸';
|
||||
}
|
||||
};
|
||||
var click_handler = function (e) {
|
||||
@@ -222,8 +131,8 @@ var widget = (function () {
|
||||
// buffer/position bar
|
||||
var pbar = (function () {
|
||||
var r = {};
|
||||
r.bcan = o('barbuf');
|
||||
r.pcan = o('barpos');
|
||||
r.bcan = ebi('barbuf');
|
||||
r.pcan = ebi('barpos');
|
||||
r.bctx = r.bcan.getContext('2d');
|
||||
r.pctx = r.pcan.getContext('2d');
|
||||
|
||||
@@ -288,7 +197,7 @@ var pbar = (function () {
|
||||
// volume bar
|
||||
var vbar = (function () {
|
||||
var r = {};
|
||||
r.can = o('pvol');
|
||||
r.can = ebi('pvol');
|
||||
r.ctx = r.can.getContext('2d');
|
||||
|
||||
var bctx = r.ctx;
|
||||
@@ -385,7 +294,7 @@ var vbar = (function () {
|
||||
else
|
||||
play(0);
|
||||
};
|
||||
o('bplay').onclick = function (e) {
|
||||
ebi('bplay').onclick = function (e) {
|
||||
ev(e);
|
||||
if (mp.au) {
|
||||
if (mp.au.paused)
|
||||
@@ -396,15 +305,15 @@ var vbar = (function () {
|
||||
else
|
||||
play(0);
|
||||
};
|
||||
o('bprev').onclick = function (e) {
|
||||
ebi('bprev').onclick = function (e) {
|
||||
ev(e);
|
||||
bskip(-1);
|
||||
};
|
||||
o('bnext').onclick = function (e) {
|
||||
ebi('bnext').onclick = function (e) {
|
||||
ev(e);
|
||||
bskip(1);
|
||||
};
|
||||
o('barpos').onclick = function (e) {
|
||||
ebi('barpos').onclick = function (e) {
|
||||
if (!mp.au) {
|
||||
//dbg((new Date()).getTime());
|
||||
return play(0);
|
||||
@@ -413,17 +322,12 @@ var vbar = (function () {
|
||||
var rect = pbar.pcan.getBoundingClientRect();
|
||||
var x = e.clientX - rect.left;
|
||||
var mul = x * 1.0 / rect.width;
|
||||
var seek = mp.au.duration * mul;
|
||||
console.log('seek: ' + seek);
|
||||
if (!isFinite(seek))
|
||||
return;
|
||||
|
||||
/*
|
||||
dbg(//Math.round(rect.width) + 'x' + Math.round(rect.height) + '+' +
|
||||
//Math.round(rect.left) + '+' + Math.round(rect.top) + ', ' +
|
||||
//Math.round(e.clientX) + 'x' + Math.round(e.clientY) + ', ' +
|
||||
Math.round(mp.au.currentTime * 10) / 10 + ', ' +
|
||||
Math.round(mp.au.duration * 10) / 10 + '*' +
|
||||
Math.round(mul * 1000) / 1000);
|
||||
*/
|
||||
|
||||
mp.au.currentTime = mp.au.duration * mul;
|
||||
mp.au.currentTime = seek;
|
||||
|
||||
if (mp.au === mp.au_native)
|
||||
// hack: ogv.js breaks on .play() during playback
|
||||
@@ -479,12 +383,18 @@ function ev_play(e) {
|
||||
|
||||
|
||||
function setclass(id, clas) {
|
||||
o(id).setAttribute('class', clas);
|
||||
ebi(id).setAttribute('class', clas);
|
||||
}
|
||||
|
||||
|
||||
var iOS = !!navigator.platform &&
|
||||
/iPad|iPhone|iPod/.test(navigator.platform);
|
||||
var need_ogv = true;
|
||||
try {
|
||||
need_ogv = new Audio().canPlayType('audio/ogg; codecs=opus') !== 'probably';
|
||||
|
||||
if (/ Edge\//.exec(navigator.userAgent + ''))
|
||||
need_ogv = true;
|
||||
}
|
||||
catch (ex) { }
|
||||
|
||||
|
||||
// plays the tid'th audio file on the page
|
||||
@@ -507,7 +417,7 @@ function play(tid, call_depth) {
|
||||
var hack_attempt_play = true;
|
||||
|
||||
var url = mp.tracks[tid];
|
||||
if (iOS && /\.(ogg|opus)$/i.test(url)) {
|
||||
if (need_ogv && /\.(ogg|opus)$/i.test(url)) {
|
||||
if (mp.au_ogvjs) {
|
||||
mp.au = mp.au_ogvjs;
|
||||
}
|
||||
@@ -544,7 +454,8 @@ function play(tid, call_depth) {
|
||||
mp.au.tid = tid;
|
||||
mp.au.src = url;
|
||||
mp.au.volume = mp.expvol();
|
||||
setclass('trk' + tid, 'play act');
|
||||
var oid = 'trk' + tid;
|
||||
setclass(oid, 'play act');
|
||||
|
||||
try {
|
||||
if (hack_attempt_play)
|
||||
@@ -553,7 +464,11 @@ function play(tid, call_depth) {
|
||||
if (mp.au.paused)
|
||||
autoplay_blocked();
|
||||
|
||||
location.hash = 'trk' + tid;
|
||||
var o = ebi(oid);
|
||||
o.setAttribute('id', 'thx_js');
|
||||
location.hash = oid;
|
||||
o.setAttribute('id', oid);
|
||||
|
||||
pbar.drawbuf();
|
||||
return true;
|
||||
}
|
||||
@@ -569,7 +484,6 @@ function play(tid, call_depth) {
|
||||
function evau_error(e) {
|
||||
var err = '';
|
||||
var eplaya = (e && e.target) || (window.event && window.event.srcElement);
|
||||
var url = eplaya.src;
|
||||
|
||||
switch (eplaya.error.code) {
|
||||
case eplaya.error.MEDIA_ERR_ABORTED:
|
||||
@@ -594,7 +508,6 @@ function evau_error(e) {
|
||||
err += '\n\nFile: «' + decodeURIComponent(eplaya.src.split('/').slice(-1)[0]) + '»';
|
||||
|
||||
alert(err);
|
||||
play(eplaya.tid + 1);
|
||||
}
|
||||
|
||||
|
||||
@@ -611,26 +524,27 @@ function show_modal(html) {
|
||||
|
||||
// hide fullscreen message
|
||||
function unblocked() {
|
||||
var dom = o('blocked');
|
||||
var dom = ebi('blocked');
|
||||
if (dom)
|
||||
dom.remove();
|
||||
dom.parentNode.removeChild(dom);
|
||||
}
|
||||
|
||||
|
||||
// show ui to manually start playback of a linked song
|
||||
function autoplay_blocked(tid) {
|
||||
function autoplay_blocked() {
|
||||
show_modal(
|
||||
'<div id="blk_play"><a id="blk_go"></a></div>' +
|
||||
'<div id="blk_abrt"><a id="blk_na">Cancel<br />(show file list)</a></div>');
|
||||
'<div id="blk_play"><a href="#" id="blk_go"></a></div>' +
|
||||
'<div id="blk_abrt"><a href="#" id="blk_na">Cancel<br />(show file list)</a></div>');
|
||||
|
||||
var go = o('blk_go');
|
||||
var na = o('blk_na');
|
||||
var go = ebi('blk_go');
|
||||
var na = ebi('blk_na');
|
||||
|
||||
var fn = mp.tracks[mp.au.tid].split(/\//).pop();
|
||||
fn = decodeURIComponent(fn.replace(/\+/g, ' '));
|
||||
|
||||
go.textContent = 'Play "' + fn + '"';
|
||||
go.onclick = function () {
|
||||
go.onclick = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
unblocked();
|
||||
mp.au.play();
|
||||
};
|
||||
|
@@ -1,3 +1,7 @@
|
||||
@font-face {
|
||||
font-family: 'scp';
|
||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
|
||||
}
|
||||
html, body {
|
||||
color: #333;
|
||||
background: #eee;
|
||||
@@ -9,6 +13,7 @@ html, body {
|
||||
}
|
||||
#mw {
|
||||
margin: 0 auto;
|
||||
padding: 0 1.5em;
|
||||
}
|
||||
pre, code, a {
|
||||
color: #480;
|
||||
@@ -22,7 +27,7 @@ code {
|
||||
font-size: .96em;
|
||||
}
|
||||
pre, code {
|
||||
font-family: monospace, monospace;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
@@ -42,7 +47,7 @@ pre code {
|
||||
pre code:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
pre code:before {
|
||||
pre code::before {
|
||||
content: counter(precode);
|
||||
-webkit-user-select: none;
|
||||
display: inline-block;
|
||||
@@ -83,6 +88,7 @@ h3 {
|
||||
h1 a, h3 a, h5 a,
|
||||
h2 a, h4 a, h6 a {
|
||||
color: inherit;
|
||||
display: block;
|
||||
background: none;
|
||||
border: none;
|
||||
padding: 0;
|
||||
@@ -103,8 +109,12 @@ h2 a, h4 a, h6 a {
|
||||
#mp ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em {
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
@@ -167,14 +177,12 @@ small {
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
margin: 1em 0;
|
||||
}
|
||||
td {
|
||||
th, td {
|
||||
padding: .2em .5em;
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
th {
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
blink {
|
||||
animation: blinker .7s cubic-bezier(.9, 0, .1, 1) infinite;
|
||||
}
|
||||
@@ -197,13 +205,15 @@ blink {
|
||||
height: 100%;
|
||||
}
|
||||
#mw {
|
||||
padding: 0 1em;
|
||||
margin: 0 auto;
|
||||
right: 0;
|
||||
}
|
||||
#mp {
|
||||
max-width: 54em;
|
||||
max-width: 52em;
|
||||
margin-bottom: 6em;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
word-wrap: break-word; /*ie*/
|
||||
}
|
||||
a {
|
||||
color: #fff;
|
||||
@@ -237,12 +247,6 @@ blink {
|
||||
z-index: 10;
|
||||
width: calc(100% - 1em);
|
||||
}
|
||||
#mn.undocked {
|
||||
position: fixed;
|
||||
padding: 1.2em 0 1em 1em;
|
||||
box-shadow: 0 0 .5em rgba(0, 0, 0, 0.3);
|
||||
background: #f7f7f7;
|
||||
}
|
||||
#mn a {
|
||||
color: #444;
|
||||
background: none;
|
||||
@@ -260,7 +264,7 @@ blink {
|
||||
#mn a:last-child {
|
||||
padding-right: .5em;
|
||||
}
|
||||
#mn a:not(:last-child):after {
|
||||
#mn a:not(:last-child)::after {
|
||||
content: '';
|
||||
width: 1.05em;
|
||||
height: 1.05em;
|
||||
@@ -289,6 +293,32 @@ blink {
|
||||
text-decoration: underline;
|
||||
border: none;
|
||||
}
|
||||
#mh a:hover {
|
||||
color: #000;
|
||||
background: #ddd;
|
||||
}
|
||||
#toolsbox {
|
||||
overflow: hidden;
|
||||
display: inline-block;
|
||||
background: #eee;
|
||||
height: 1.5em;
|
||||
padding: 0 .2em;
|
||||
margin: 0 .2em;
|
||||
position: absolute;
|
||||
}
|
||||
#toolsbox.open {
|
||||
height: auto;
|
||||
overflow: visible;
|
||||
background: #eee;
|
||||
box-shadow: 0 .2em .2em #ccc;
|
||||
padding-bottom: .2em;
|
||||
}
|
||||
#toolsbox a {
|
||||
display: block;
|
||||
}
|
||||
#toolsbox a+a {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -332,8 +362,12 @@ blink {
|
||||
html.dark #m>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em {
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
@@ -354,7 +388,7 @@ blink {
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
html.dark #mn a:not(:last-child):after {
|
||||
html.dark #mn a:not(:last-child)::after {
|
||||
border-color: rgba(255,255,255,0.3);
|
||||
}
|
||||
html.dark #mn a {
|
||||
@@ -371,21 +405,32 @@ blink {
|
||||
color: #ccc;
|
||||
background: none;
|
||||
}
|
||||
html.dark #mh a:hover {
|
||||
background: #333;
|
||||
color: #fff;
|
||||
}
|
||||
html.dark #toolsbox {
|
||||
background: #222;
|
||||
}
|
||||
html.dark #toolsbox.open {
|
||||
box-shadow: 0 .2em .2em #069;
|
||||
border-radius: 0 0 .4em .4em;
|
||||
}
|
||||
}
|
||||
|
||||
@media screen and (min-width: 70em) {
|
||||
@media screen and (min-width: 66em) {
|
||||
#mw {
|
||||
position: fixed;
|
||||
overflow-y: auto;
|
||||
left: 14em;
|
||||
left: calc(100% - 57em);
|
||||
left: calc(100% - 55em);
|
||||
max-width: none;
|
||||
bottom: 0;
|
||||
scrollbar-color: #eb0 #f7f7f7;
|
||||
}
|
||||
#toc {
|
||||
width: 13em;
|
||||
width: calc(100% - 57.3em);
|
||||
width: calc(100% - 55.3em);
|
||||
max-width: 30em;
|
||||
background: #eee;
|
||||
position: fixed;
|
||||
@@ -424,32 +469,127 @@ blink {
|
||||
html.dark #mw {
|
||||
scrollbar-color: #b80 #282828;
|
||||
}
|
||||
html.dark #mn.undocked {
|
||||
box-shadow: 0 0 .5em #555;
|
||||
border: none;
|
||||
background: #0a0a0a;
|
||||
html.dark #toc::-webkit-scrollbar-track {
|
||||
background: #282828;
|
||||
}
|
||||
html.dark #toc::-webkit-scrollbar {
|
||||
background: #282828;
|
||||
width: .8em;
|
||||
}
|
||||
html.dark #toc::-webkit-scrollbar-thumb {
|
||||
background: #b80;
|
||||
}
|
||||
}
|
||||
@media screen and (min-width: 87.5em) {
|
||||
@media screen and (min-width: 85.5em) {
|
||||
#toc { width: 30em }
|
||||
#mw { left: 30.5em }
|
||||
}
|
||||
@media print {
|
||||
@page {
|
||||
size: A4;
|
||||
padding: 0;
|
||||
margin: .5in .6in;
|
||||
mso-header-margin: .6in;
|
||||
mso-footer-margin: .6in;
|
||||
mso-paper-source: 0;
|
||||
}
|
||||
a {
|
||||
color: #079;
|
||||
text-decoration: none;
|
||||
border-bottom: .07em solid #4ac;
|
||||
padding: 0 .3em;
|
||||
}
|
||||
#toc {
|
||||
margin: 0 !important;
|
||||
}
|
||||
#toc>ul {
|
||||
border-left: .1em solid #84c4dd;
|
||||
}
|
||||
#mn, #mh {
|
||||
display: none;
|
||||
}
|
||||
html, body, #toc, #mw {
|
||||
margin: 0 !important;
|
||||
word-break: break-word;
|
||||
width: 52em;
|
||||
}
|
||||
#toc {
|
||||
margin-left: 1em !important;
|
||||
}
|
||||
#toc a {
|
||||
color: #000 !important;
|
||||
}
|
||||
#toc a::after {
|
||||
/* hopefully supported by browsers eventually */
|
||||
content: leader('.') target-counter(attr(href), page);
|
||||
}
|
||||
a[ctr]::before {
|
||||
content: attr(ctr) '. ';
|
||||
}
|
||||
h1 {
|
||||
margin: 2em 0;
|
||||
}
|
||||
h2 {
|
||||
margin: 2em 0 0 0;
|
||||
}
|
||||
h1, h2, h3 {
|
||||
page-break-inside: avoid;
|
||||
}
|
||||
h1::after,
|
||||
h2::after,
|
||||
h3::after {
|
||||
content: 'orz';
|
||||
color: transparent;
|
||||
display: block;
|
||||
line-height: 1em;
|
||||
padding: 4em 0 0 0;
|
||||
margin: 0 0 -5em 0;
|
||||
}
|
||||
p {
|
||||
page-break-inside: avoid;
|
||||
}
|
||||
table {
|
||||
page-break-inside: auto;
|
||||
}
|
||||
tr {
|
||||
page-break-inside: avoid;
|
||||
page-break-after: auto;
|
||||
}
|
||||
thead {
|
||||
display: table-header-group;
|
||||
}
|
||||
tfoot {
|
||||
display: table-footer-group;
|
||||
}
|
||||
#mp a.vis::after {
|
||||
content: ' (' attr(href) ')';
|
||||
border-bottom: 1px solid #bbb;
|
||||
color: #444;
|
||||
}
|
||||
blockquote {
|
||||
border-color: #555;
|
||||
}
|
||||
code {
|
||||
border-color: #bbb;
|
||||
}
|
||||
pre, pre code {
|
||||
border-color: #999;
|
||||
}
|
||||
pre code::before {
|
||||
color: #058;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark a {
|
||||
color: #000;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
color: #240;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #940;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
|
@@ -17,15 +17,23 @@
|
||||
<a id="save" href="?edit">save</a>
|
||||
<a id="sbs" href="#">sbs</a>
|
||||
<a id="nsbs" href="#">editor</a>
|
||||
<a id="help" href="#">help</a>
|
||||
<div id="toolsbox">
|
||||
<a id="tools" href="#">tools</a>
|
||||
<a id="fmt_table" href="#">prettify table (ctrl-k)</a>
|
||||
<a id="iter_uni" href="#">non-ascii: iterate (ctrl-u)</a>
|
||||
<a id="mark_uni" href="#">non-ascii: markup</a>
|
||||
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
|
||||
<a id="help" href="#">help</a>
|
||||
</div>
|
||||
{%- else %}
|
||||
<a href="?edit">edit (basic)</a>
|
||||
<a href="?edit2">edit (fancy)</a>
|
||||
<a href="?raw">view raw</a>
|
||||
{%- endif %}
|
||||
</div>
|
||||
<div id="toc"></div>
|
||||
<div id="mtw">
|
||||
<textarea id="mt">{{ md }}</textarea>
|
||||
<textarea id="mt" autocomplete="off">{{ md }}</textarea>
|
||||
</div>
|
||||
<div id="mw">
|
||||
<div id="ml">
|
||||
@@ -39,16 +47,19 @@
|
||||
|
||||
{%- if edit %}
|
||||
<div id="helpbox">
|
||||
<textarea>
|
||||
<textarea autocomplete="off">
|
||||
|
||||
write markdown (html is permitted)
|
||||
write markdown (most html is 🙆 too)
|
||||
|
||||
### hotkey list
|
||||
## hotkey list
|
||||
* `Ctrl-S` to save
|
||||
* `Ctrl-E` to toggle mode
|
||||
* `Ctrl-K` to prettyprint a table
|
||||
* `Ctrl-U` to iterate non-ascii chars
|
||||
* `Ctrl-H` / `Ctrl-Shift-H` to create a header
|
||||
* `TAB` / `Shift-TAB` to indent/dedent a selection
|
||||
|
||||
### toolbar
|
||||
## toolbar
|
||||
1. toggle dark mode
|
||||
2. show/hide navigation bar
|
||||
3. save changes on server
|
||||
@@ -56,16 +67,68 @@ write markdown (html is permitted)
|
||||
5. toggle editor/preview
|
||||
6. this thing :^)
|
||||
|
||||
.
|
||||
## markdown
|
||||
|||
|
||||
|--|--|
|
||||
|`**bold**`|**bold**|
|
||||
|`_italic_`|_italic_|
|
||||
|`~~strike~~`|~~strike~~|
|
||||
|`` `code` ``|`code`|
|
||||
|`[](#hotkey-list)`|[](#hotkey-list)|
|
||||
|`[](/foo/bar.md#header)`|[](/foo/bar.md#header)|
|
||||
|`<blink>💯</blink>`|<blink>💯</blink>|
|
||||
|
||||
## tables
|
||||
|left-aligned|centered|right-aligned
|
||||
| ---------- | :----: | ----------:
|
||||
|one |two |three
|
||||
|
||||
|left-aligned|centered|right-aligned
|
||||
| ---------- | :----: | ----------:
|
||||
|one |two |three
|
||||
|
||||
## lists
|
||||
* one
|
||||
* two
|
||||
1. one
|
||||
1. two
|
||||
* one
|
||||
* two
|
||||
1. one
|
||||
1. two
|
||||
|
||||
## headers
|
||||
# level 1
|
||||
## level 2
|
||||
### level 3
|
||||
|
||||
## quote
|
||||
> hello
|
||||
> hello
|
||||
|
||||
## codeblock
|
||||
four spaces (no tab pls)
|
||||
|
||||
## code in lists
|
||||
* foo
|
||||
bar
|
||||
six spaces total
|
||||
* foo
|
||||
bar
|
||||
six spaces total
|
||||
.
|
||||
</textarea>
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
<script>
|
||||
|
||||
var link_md_as_html = false; // TODO (does nothing)
|
||||
var last_modified = {{ lastmod }};
|
||||
var md_opt = {
|
||||
link_md_as_html: false,
|
||||
allow_plugins: {{ md_plug }},
|
||||
modpoll_freq: {{ md_chk_rate }}
|
||||
};
|
||||
|
||||
(function () {
|
||||
var btn = document.getElementById("lightswitch");
|
||||
@@ -82,17 +145,11 @@ var last_modified = {{ lastmod }};
|
||||
toggle();
|
||||
})();
|
||||
|
||||
if (!String.startsWith) {
|
||||
String.prototype.startsWith = function(s, i) {
|
||||
i = i>0 ? i|0 : 0;
|
||||
return this.substring(i, i + s.length) === s;
|
||||
};
|
||||
}
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js"></script>
|
||||
<script src="/.cpr/deps/marked.full.js"></script>
|
||||
<script src="/.cpr/md.js"></script>
|
||||
{%- if edit %}
|
||||
<script src="/.cpr/md2.js"></script>
|
||||
<script src="/.cpr/md2.js"></script>
|
||||
{%- endif %}
|
||||
</body></html>
|
||||
|
@@ -1,15 +1,59 @@
|
||||
var dom_toc = document.getElementById('toc');
|
||||
var dom_wrap = document.getElementById('mw');
|
||||
var dom_hbar = document.getElementById('mh');
|
||||
var dom_nav = document.getElementById('mn');
|
||||
var dom_pre = document.getElementById('mp');
|
||||
var dom_src = document.getElementById('mt');
|
||||
var dom_navtgl = document.getElementById('navtoggle');
|
||||
"use strict";
|
||||
|
||||
var dom_toc = ebi('toc');
|
||||
var dom_wrap = ebi('mw');
|
||||
var dom_hbar = ebi('mh');
|
||||
var dom_nav = ebi('mn');
|
||||
var dom_pre = ebi('mp');
|
||||
var dom_src = ebi('mt');
|
||||
var dom_navtgl = ebi('navtoggle');
|
||||
|
||||
|
||||
// chrome 49 needs this
|
||||
var chromedbg = function () { console.log(arguments); }
|
||||
|
||||
// null-logger
|
||||
var dbg = function () { };
|
||||
|
||||
// replace dbg with the real deal here or in the console:
|
||||
// dbg = chromedbg
|
||||
// dbg = console.log
|
||||
|
||||
|
||||
// plugins
|
||||
var md_plug = {};
|
||||
|
||||
|
||||
function hesc(txt) {
|
||||
return txt.replace(/&/g, "&").replace(/</g, "<").replace(/>/g, ">");
|
||||
}
|
||||
|
||||
|
||||
function cls(dom, name, add) {
|
||||
var re = new RegExp('(^| )' + name + '( |$)');
|
||||
var lst = (dom.getAttribute('class') + '').replace(re, "$1$2").replace(/ /, "");
|
||||
dom.setAttribute('class', lst + (add ? ' ' + name : ''));
|
||||
}
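
// hedged usage example (the element id and class name below are only
// illustrative; "force-save" does appear in the editor css further down):
//   cls(ebi('save'), 'force-save', true);   // add the class
//   cls(ebi('save'), 'force-save', false);  // remove it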
|
||||
|
||||
|
||||
function statify(obj) {
|
||||
return JSON.parse(JSON.stringify(obj));
|
||||
}
|
||||
|
||||
|
||||
// dodge browser issues
|
||||
(function () {
|
||||
var ua = navigator.userAgent;
|
||||
if (ua.indexOf(') Gecko/') !== -1 && /Linux| Mac /.exec(ua)) {
|
||||
// necessary on ff-68.7 at least
|
||||
var s = document.createElement('style');
|
||||
s.innerHTML = '@page { margin: .5in .6in .8in .6in; }';
|
||||
console.log(s.innerHTML);
|
||||
document.head.appendChild(s);
|
||||
}
|
||||
})();
|
||||
|
||||
|
||||
// add navbar
|
||||
(function () {
|
||||
var n = document.location + '';
|
||||
@@ -28,17 +72,213 @@ function hesc(txt) {
|
||||
dom_nav.innerHTML = nav.join('');
|
||||
})();
|
||||
|
||||
function convert_markdown(md_text) {
|
||||
marked.setOptions({
|
||||
|
||||
// faster than replacing the entire html (chrome 1.8x, firefox 1.6x)
|
||||
function copydom(src, dst, lv) {
|
||||
var sc = src.childNodes,
|
||||
dc = dst.childNodes;
|
||||
|
||||
if (sc.length !== dc.length) {
|
||||
dbg("replace L%d (%d/%d) |%d|",
|
||||
lv, sc.length, dc.length, src.innerHTML.length);
|
||||
|
||||
dst.innerHTML = src.innerHTML;
|
||||
return;
|
||||
}
|
||||
|
||||
var rpl = [];
|
||||
for (var a = sc.length - 1; a >= 0; a--) {
|
||||
var st = sc[a].tagName,
|
||||
dt = dc[a].tagName;
|
||||
|
||||
if (st !== dt) {
|
||||
dbg("replace L%d (%d/%d) type %s/%s", lv, a, sc.length, st, dt);
|
||||
rpl.push(a);
|
||||
continue;
|
||||
}
|
||||
|
||||
var sa = sc[a].attributes || [],
|
||||
da = dc[a].attributes || [];
|
||||
|
||||
if (sa.length !== da.length) {
|
||||
dbg("replace L%d (%d/%d) attr# %d/%d",
|
||||
lv, a, sc.length, sa.length, da.length);
|
||||
|
||||
rpl.push(a);
|
||||
continue;
|
||||
}
|
||||
|
||||
var dirty = false;
|
||||
for (var b = sa.length - 1; b >= 0; b--) {
|
||||
var name = sa[b].name,
|
||||
sv = sa[b].value,
|
||||
dv = dc[a].getAttribute(name);
|
||||
|
||||
if (name == "data-ln" && sv !== dv) {
|
||||
dc[a].setAttribute(name, sv);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (sv !== dv) {
|
||||
dbg("replace L%d (%d/%d) attr %s [%s] [%s]",
|
||||
lv, a, sc.length, name, sv, dv);
|
||||
|
||||
dirty = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (dirty)
|
||||
rpl.push(a);
|
||||
}
|
||||
|
||||
// TODO pure guessing
|
||||
if (rpl.length > sc.length / 3) {
|
||||
dbg("replace L%d fully, %s (%d/%d) |%d|",
|
||||
lv, rpl.length, sc.length, src.innerHTML.length);
|
||||
|
||||
dst.innerHTML = src.innerHTML;
|
||||
return;
|
||||
}
|
||||
|
||||
// repl is reversed; build top-down
|
||||
var nbytes = 0;
|
||||
for (var a = rpl.length - 1; a >= 0; a--) {
|
||||
var html = sc[rpl[a]].outerHTML;
|
||||
dc[rpl[a]].outerHTML = html;
|
||||
nbytes += html.length;
|
||||
}
|
||||
if (nbytes > 0)
|
||||
dbg("replaced %d bytes L%d", nbytes, lv);
|
||||
|
||||
for (var a = 0; a < sc.length; a++)
|
||||
copydom(sc[a], dc[a], lv + 1);
|
||||
|
||||
if (src.innerHTML !== dst.innerHTML) {
|
||||
dbg("setting %d bytes L%d", src.innerHTML.length, lv);
|
||||
dst.innerHTML = src.innerHTML;
|
||||
}
|
||||
}
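
// hedged usage sketch, mirroring the call in convert_markdown further down:
// render the new markdown into a detached document, then diff it onto the
// live preview so unchanged nodes keep their state instead of being rebuilt.
//   var fresh = new DOMParser().parseFromString(html, 'text/html').body;
//   copydom(fresh, dom_pre, 0);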
|
||||
|
||||
|
||||
function md_plug_err(ex, js) {
|
||||
var errbox = ebi('md_errbox');
|
||||
if (errbox)
|
||||
errbox.parentNode.removeChild(errbox);
|
||||
|
||||
if (!ex)
|
||||
return;
|
||||
|
||||
var msg = (ex + '').split('\n')[0];
|
||||
var ln = ex.lineNumber;
|
||||
var o = null;
|
||||
if (ln) {
|
||||
msg = "Line " + ln + ", " + msg;
|
||||
var lns = js.split('\n');
|
||||
if (ln < lns.length) {
|
||||
o = document.createElement('span');
|
||||
o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block';
|
||||
o.textContent = lns[ln - 1];
|
||||
}
|
||||
}
|
||||
errbox = document.createElement('div');
|
||||
errbox.setAttribute('id', 'md_errbox');
|
||||
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
|
||||
errbox.textContent = msg;
|
||||
errbox.onclick = function () {
|
||||
alert('' + ex.stack);
|
||||
};
|
||||
if (o) {
|
||||
errbox.appendChild(o);
|
||||
errbox.style.padding = '.25em .5em';
|
||||
}
|
||||
dom_nav.appendChild(errbox);
|
||||
|
||||
try {
|
||||
console.trace();
|
||||
}
|
||||
catch (ex2) { }
|
||||
}
|
||||
|
||||
|
||||
function load_plug(md_text, plug_type) {
|
||||
if (!md_opt.allow_plugins)
|
||||
return md_text;
|
||||
|
||||
var find = '\n```copyparty_' + plug_type + '\n';
|
||||
var ofs = md_text.indexOf(find);
|
||||
if (ofs === -1)
|
||||
return md_text;
|
||||
|
||||
var ofs2 = md_text.indexOf('\n```', ofs + 1);
|
||||
if (ofs2 == -1)
|
||||
return md_text;
|
||||
|
||||
var js = md_text.slice(ofs + find.length, ofs2 + 1);
|
||||
var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
|
||||
|
||||
var old_plug = md_plug[plug_type];
|
||||
if (!old_plug || old_plug[1] != js) {
|
||||
js = 'const x = { ' + js + ' }; x;';
|
||||
try {
|
||||
var x = eval(js);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug[plug_type] = null;
|
||||
md_plug_err(ex, js);
|
||||
return md;
|
||||
}
|
||||
if (x['ctor']) {
|
||||
x['ctor']();
|
||||
delete x['ctor'];
|
||||
}
|
||||
md_plug[plug_type] = [x, js];
|
||||
}
|
||||
|
||||
return md;
|
||||
}
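
// hedged illustration of a plugin block as it would appear inside the
// markdown document itself (key names follow how the object is used below:
// "pre" keys are merged into the marked options, "post" may define
// render/render2 hooks, and an optional "ctor" runs once):
//
//   ```copyparty_post
//   "render": function (dom) {
//       dom.style.fontFamily = 'scp';  // runs on the freshly parsed DOM
//   },
//   ```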
|
||||
|
||||
|
||||
function convert_markdown(md_text, dest_dom) {
|
||||
md_text = md_text.replace(/\r/g, '');
|
||||
|
||||
md_plug_err(null);
|
||||
md_text = load_plug(md_text, 'pre');
|
||||
md_text = load_plug(md_text, 'post');
|
||||
|
||||
var marked_opts = {
|
||||
//headerPrefix: 'h-',
|
||||
breaks: true,
|
||||
gfm: true
|
||||
});
|
||||
var html = marked(md_text);
|
||||
dom_pre.innerHTML = html;
|
||||
};
|
||||
|
||||
var ext = md_plug['pre'];
|
||||
if (ext)
|
||||
Object.assign(marked_opts, ext[0]);
|
||||
|
||||
try {
|
||||
var md_html = marked(md_text, marked_opts);
|
||||
}
|
||||
catch (ex) {
|
||||
if (ext)
|
||||
md_plug_err(ex, ext[1]);
|
||||
|
||||
throw ex;
|
||||
}
|
||||
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
||||
|
||||
var nodes = md_dom.getElementsByTagName('a');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
var href = nodes[a].getAttribute('href');
|
||||
var txt = nodes[a].textContent;
|
||||
|
||||
if (!txt)
|
||||
nodes[a].textContent = href;
|
||||
else if (href !== txt)
|
||||
nodes[a].setAttribute('class', 'vis');
|
||||
}
|
||||
|
||||
// todo-lists (should probably be a marked extension)
|
||||
var nodes = dom_pre.getElementsByTagName('input');
|
||||
nodes = md_dom.getElementsByTagName('input');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
var dom_box = nodes[a];
|
||||
if (dom_box.getAttribute('type') !== 'checkbox')
|
||||
@@ -58,9 +298,10 @@ function convert_markdown(md_text) {
|
||||
html.substr(html.indexOf('>') + 1);
|
||||
}
|
||||
|
||||
var manip_nodes = dom_pre.getElementsByTagName('*');
|
||||
for (var a = manip_nodes.length - 1; a >= 0; a--) {
|
||||
var el = manip_nodes[a];
|
||||
// separate <code> for each line in <pre>
|
||||
nodes = md_dom.getElementsByTagName('pre');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
var el = nodes[a];
|
||||
|
||||
var is_precode =
|
||||
el.tagName == 'PRE' &&
|
||||
@@ -71,24 +312,69 @@ function convert_markdown(md_text) {
|
||||
continue;
|
||||
|
||||
var nline = parseInt(el.getAttribute('data-ln')) + 1;
|
||||
var lines = el.innerHTML.replace(/\r?\n<\/code>$/i, '</code>').split(/\r?\n/g);
|
||||
var lines = el.innerHTML.replace(/\n<\/code>$/i, '</code>').split(/\n/g);
|
||||
for (var b = 0; b < lines.length - 1; b++)
|
||||
lines[b] += '</code>\n<code data-ln="' + (nline + b) + '">';
|
||||
|
||||
el.innerHTML = lines.join('');
|
||||
}
|
||||
|
||||
// self-link headers
|
||||
var id_seen = {},
|
||||
dyn = md_dom.getElementsByTagName('*');
|
||||
|
||||
nodes = [];
|
||||
for (var a = 0, aa = dyn.length; a < aa; a++)
|
||||
if (/^[Hh]([1-6])/.exec(dyn[a].tagName) !== null)
|
||||
nodes.push(dyn[a]);
|
||||
|
||||
for (var a = 0; a < nodes.length; a++) {
|
||||
el = nodes[a];
|
||||
var id = el.getAttribute('id'),
|
||||
orig_id = id;
|
||||
|
||||
if (id_seen[id]) {
|
||||
for (var n = 1; n < 4096; n++) {
|
||||
id = orig_id + '-' + n;
|
||||
if (!id_seen[id])
|
||||
break;
|
||||
}
|
||||
el.setAttribute('id', id);
|
||||
}
|
||||
id_seen[id] = 1;
|
||||
el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>';
|
||||
}
|
||||
|
||||
ext = md_plug['post'];
|
||||
if (ext && ext[0].render)
|
||||
try {
|
||||
ext[0].render(md_dom);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug_err(ex, ext[1]);
|
||||
}
|
||||
|
||||
copydom(md_dom, dest_dom, 0);
|
||||
|
||||
if (ext && ext[0].render2)
|
||||
try {
|
||||
ext[0].render2(dest_dom);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug_err(ex, ext[1]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function init_toc() {
|
||||
var loader = document.getElementById('ml');
|
||||
var loader = ebi('ml');
|
||||
loader.parentNode.removeChild(loader);
|
||||
|
||||
var anchors = []; // list of toc entries, complex objects
|
||||
var anchor = null; // current toc node
|
||||
var id_seen = {}; // taken IDs
|
||||
var html = []; // generated toc html
|
||||
var lv = 0; // current indentation level in the toc html
|
||||
var re = new RegExp('^[Hh]([1-3])');
|
||||
var ctr = [0, 0, 0, 0, 0, 0];
|
||||
|
||||
var manip_nodes_dyn = dom_pre.getElementsByTagName('*');
|
||||
var manip_nodes = [];
|
||||
@@ -97,7 +383,7 @@ function init_toc() {
|
||||
|
||||
for (var a = 0, aa = manip_nodes.length; a < aa; a++) {
|
||||
var elm = manip_nodes[a];
|
||||
var m = re.exec(elm.tagName);
|
||||
var m = /^[Hh]([1-6])/.exec(elm.tagName);
|
||||
var is_header = m !== null;
|
||||
if (is_header) {
|
||||
var nlv = m[1];
|
||||
@@ -109,24 +395,18 @@ function init_toc() {
|
||||
html.push('</ul>');
|
||||
lv--;
|
||||
}
|
||||
ctr[lv - 1]++;
|
||||
for (var b = lv; b < 6; b++)
|
||||
ctr[b] = 0;
|
||||
|
||||
var orig_id = elm.getAttribute('id');
|
||||
var id = orig_id;
|
||||
if (id_seen[id]) {
|
||||
for (var n = 1; n < 4096; n++) {
|
||||
id = orig_id + '-' + n;
|
||||
if (!id_seen[id])
|
||||
break;
|
||||
}
|
||||
elm.setAttribute('id', id);
|
||||
}
|
||||
id_seen[id] = 1;
|
||||
elm.childNodes[0].setAttribute('ctr', ctr.slice(0, lv).join('.'));
|
||||
|
||||
var ahref = '<a href="#' + id + '">' +
|
||||
elm.innerHTML + '</a>';
|
||||
var elm2 = elm.cloneNode(true);
|
||||
elm2.childNodes[0].textContent = elm.textContent;
|
||||
while (elm2.childNodes.length > 1)
|
||||
elm2.removeChild(elm2.childNodes[1]);
|
||||
|
||||
html.push('<li>' + ahref + '</li>');
|
||||
elm.innerHTML = ahref;
|
||||
html.push('<li>' + elm2.innerHTML + '</li>');
|
||||
|
||||
if (anchor != null)
|
||||
anchors.push(anchor);
|
||||
@@ -208,7 +488,7 @@ function init_toc() {
|
||||
|
||||
|
||||
// "main" :p
|
||||
convert_markdown(dom_src.value);
|
||||
convert_markdown(dom_src.value, dom_pre);
|
||||
var toc = init_toc();
|
||||
|
||||
|
||||
@@ -240,40 +520,10 @@ var redraw = (function () {
|
||||
|
||||
|
||||
dom_navtgl.onclick = function () {
|
||||
var timeout = null;
|
||||
function show_nav(e) {
|
||||
if (e && e.target == dom_hbar && e.pageX && e.pageX < dom_hbar.offsetWidth / 2)
|
||||
return;
|
||||
|
||||
clearTimeout(timeout);
|
||||
dom_nav.style.display = 'block';
|
||||
}
|
||||
function hide_nav() {
|
||||
clearTimeout(timeout);
|
||||
timeout = setTimeout(function () {
|
||||
dom_nav.style.display = 'none';
|
||||
}, 30);
|
||||
}
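// (presumably) the 30ms grace period lets the pointer cross from the
// hover strip into the undocked nav without the menu flickering shut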
|
||||
var hidden = dom_navtgl.innerHTML == 'hide nav';
|
||||
dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav';
|
||||
if (hidden) {
|
||||
dom_nav.setAttribute('class', 'undocked');
|
||||
dom_nav.style.display = 'none';
|
||||
dom_nav.style.top = dom_hbar.offsetHeight + 'px';
|
||||
dom_nav.onmouseenter = show_nav;
|
||||
dom_nav.onmouseleave = hide_nav;
|
||||
dom_hbar.onmouseenter = show_nav;
|
||||
dom_hbar.onmouseleave = hide_nav;
|
||||
}
|
||||
else {
|
||||
dom_nav.setAttribute('class', '');
|
||||
dom_nav.style.display = 'block';
|
||||
dom_nav.style.top = '0';
|
||||
dom_nav.onmouseenter = null;
|
||||
dom_nav.onmouseleave = null;
|
||||
dom_hbar.onmouseenter = null;
|
||||
dom_hbar.onmouseleave = null;
|
||||
}
|
||||
dom_nav.style.display = hidden ? 'none' : 'block';
|
||||
|
||||
if (window.localStorage)
|
||||
localStorage.setItem('hidenav', hidden ? 1 : 0);
|
||||
|
||||
|
@@ -4,12 +4,15 @@
|
||||
#mtw {
|
||||
display: block;
|
||||
position: fixed;
|
||||
left: 0;
|
||||
left: .5em;
|
||||
bottom: 0;
|
||||
width: calc(100% - 58em);
|
||||
width: calc(100% - 56em);
|
||||
}
|
||||
#mw {
|
||||
left: calc(100% - 57em);
|
||||
left: calc(100% - 55em);
|
||||
overflow-y: auto;
|
||||
position: fixed;
|
||||
bottom: 0;
|
||||
}
|
||||
|
||||
|
||||
@@ -21,15 +24,17 @@
|
||||
}
|
||||
#mw.preview,
|
||||
#mtw.editor {
|
||||
z-index: 3;
|
||||
z-index: 5;
|
||||
}
|
||||
#mtw.single,
|
||||
#mw.single {
|
||||
left: calc((100% - 58em) / 2);
|
||||
margin: 0;
|
||||
left: 1em;
|
||||
left: max(1em, calc((100% - 56em) / 2));
|
||||
}
|
||||
#mtw.single {
|
||||
width: 57em;
|
||||
width: 55em;
|
||||
width: min(55em, calc(100% - 2em));
|
||||
}
|
||||
|
||||
|
||||
@@ -38,65 +43,86 @@
|
||||
}
|
||||
#mt, #mtr {
|
||||
width: 100%;
|
||||
height: calc(100% - 5px);
|
||||
height: calc(100% - 1px);
|
||||
color: #444;
|
||||
background: #f7f7f7;
|
||||
border: 1px solid #999;
|
||||
outline: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
font-family: 'consolas', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
word-wrap: break-word; /*ie*/
|
||||
overflow-y: scroll;
|
||||
line-height: 1.3em;
|
||||
font-size: .9em;
|
||||
position: relative;
|
||||
scrollbar-color: #eb0 #f7f7f7;
|
||||
}
|
||||
html.dark #mt {
|
||||
color: #eee;
|
||||
background: #222;
|
||||
border: 1px solid #777;
|
||||
scrollbar-color: #b80 #282828;
|
||||
}
|
||||
#mtr {
|
||||
position: absolute;
|
||||
top: 1px;
|
||||
left: 1px;
|
||||
top: 0;
|
||||
left: 0;
|
||||
}
|
||||
#save.force-save {
|
||||
color: #400;
|
||||
background: #f97;
|
||||
border-radius: .15em;
|
||||
}
|
||||
html.dark #save.force-save {
|
||||
color: #fca;
|
||||
background: #720;
|
||||
}
|
||||
#save.disabled {
|
||||
opacity: .4;
|
||||
}
|
||||
#helpbox,
|
||||
#toast {
|
||||
background: #f7f7f7;
|
||||
border-radius: .4em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#helpbox {
|
||||
display: none;
|
||||
position: fixed;
|
||||
background: #f7f7f7;
|
||||
box-shadow: 0 .5em 2em #777;
|
||||
border-radius: .4em;
|
||||
padding: 2em;
|
||||
top: 4em;
|
||||
overflow-y: auto;
|
||||
box-shadow: 0 .5em 2em #777;
|
||||
height: calc(100% - 12em);
|
||||
left: calc(50% - 15em);
|
||||
right: 0;
|
||||
width: 30em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#helpclose {
|
||||
display: block;
|
||||
}
|
||||
html.dark #helpbox {
|
||||
background: #222;
|
||||
box-shadow: 0 .5em 2em #444;
|
||||
}
|
||||
html.dark #helpbox,
|
||||
html.dark #toast {
|
||||
background: #222;
|
||||
border: 1px solid #079;
|
||||
border-width: 1px 0;
|
||||
}
|
||||
|
||||
/* dbg:
|
||||
#mt {
|
||||
opacity: .5;
|
||||
#toast {
|
||||
font-weight: bold;
|
||||
text-align: center;
|
||||
padding: .6em 0;
|
||||
position: fixed;
|
||||
z-index: 9001;
|
||||
top: 30%;
|
||||
transition: opacity 0.2s ease-in-out;
|
||||
opacity: 1;
|
||||
}
|
||||
*/
|
||||
|
||||
# mt {opacity: .5;top:1px}
|
||||
|
File diff suppressed because it is too large
@@ -160,8 +160,12 @@ h2 {
|
||||
.mdo ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em {
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
@@ -253,8 +257,12 @@ html.dark .mdo>ul,
|
||||
html.dark .mdo>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em {
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
|
@@ -17,13 +17,17 @@
|
||||
</div>
|
||||
</div>
|
||||
<div id="m">
|
||||
<textarea id="mt" style="display:none">{{ md }}</textarea>
|
||||
<textarea id="mt" style="display:none" autocomplete="off">{{ md }}</textarea>
|
||||
</div>
|
||||
</div>
|
||||
<script>
|
||||
|
||||
var link_md_as_html = false; // TODO (does nothing)
|
||||
var last_modified = {{ lastmod }};
|
||||
var md_opt = {
|
||||
link_md_as_html: false,
|
||||
allow_plugins: {{ md_plug }},
|
||||
modpoll_freq: {{ md_chk_rate }}
|
||||
};
|
||||
|
||||
var lightswitch = (function () {
|
||||
var fun = function () {
|
||||
@@ -39,6 +43,7 @@ var lightswitch = (function () {
|
||||
})();
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/deps/easymde.full.js"></script>
|
||||
<script src="/.cpr/util.js"></script>
|
||||
<script src="/.cpr/deps/easymde.js"></script>
|
||||
<script src="/.cpr/mde.js"></script>
|
||||
</body></html>
|
||||
|
@@ -1,7 +1,9 @@
|
||||
var dom_wrap = document.getElementById('mw');
|
||||
var dom_nav = document.getElementById('mn');
|
||||
var dom_doc = document.getElementById('m');
|
||||
var dom_md = document.getElementById('mt');
|
||||
"use strict";
|
||||
|
||||
var dom_wrap = ebi('mw');
|
||||
var dom_nav = ebi('mn');
|
||||
var dom_doc = ebi('m');
|
||||
var dom_md = ebi('mt');
|
||||
|
||||
(function () {
|
||||
var n = document.location + '';
|
||||
@@ -63,7 +65,7 @@ var mde = (function () {
|
||||
mde.codemirror.on("change", function () {
|
||||
md_changed(mde);
|
||||
});
|
||||
var loader = document.getElementById('ml');
|
||||
var loader = ebi('ml');
|
||||
loader.parentNode.removeChild(loader);
|
||||
return mde;
|
||||
})();
|
||||
@@ -121,7 +123,7 @@ function save(mde) {
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
|
||||
var url = (document.location + '').split('?')[0] + '?raw';
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
@@ -213,7 +215,7 @@ function save_chk() {
|
||||
var ok = document.createElement('div');
|
||||
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
|
||||
ok.innerHTML = 'OK✔️';
|
||||
var parent = document.getElementById('m');
|
||||
var parent = ebi('m');
|
||||
document.documentElement.appendChild(ok);
|
||||
setTimeout(function () {
|
||||
ok.style.opacity = 0;
|
||||
|
@@ -1,61 +1,6 @@
|
||||
"use strict";
|
||||
|
||||
// error handler for mobile devices
|
||||
function hcroak(msg) {
|
||||
document.body.innerHTML = msg;
|
||||
window.onerror = undefined;
|
||||
throw 'fatal_err';
|
||||
}
|
||||
function croak(msg) {
|
||||
document.body.textContent = msg;
|
||||
window.onerror = undefined;
|
||||
throw msg;
|
||||
}
|
||||
function esc(txt) {
|
||||
return txt.replace(/[&"<>]/g, function (c) {
|
||||
return {
|
||||
'&': '&',
|
||||
'"': '"',
|
||||
'<': '<',
|
||||
'>': '>'
|
||||
}[c];
|
||||
});
|
||||
}
|
||||
window.onerror = function (msg, url, lineNo, columnNo, error) {
|
||||
window.onerror = undefined;
|
||||
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
|
||||
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
|
||||
|
||||
if (error) {
|
||||
var find = ['desc', 'stack', 'trace'];
|
||||
for (var a = 0; a < find.length; a++)
|
||||
if (String(error[find[a]]) !== 'undefined')
|
||||
html.push('<h2>' + find[a] + '</h2>' +
|
||||
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
|
||||
}
|
||||
document.body.style.fontSize = '0.8em';
|
||||
document.body.style.padding = '0 1em 1em 1em';
|
||||
hcroak(html.join('\n'));
|
||||
};
|
||||
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var script = document.createElement('script');
|
||||
script.type = 'text/javascript';
|
||||
script.src = url;
|
||||
|
||||
script.onreadystatechange = cb;
|
||||
script.onload = cb;
|
||||
|
||||
head.appendChild(script);
|
||||
}
|
||||
|
||||
|
||||
function o(id) {
|
||||
return document.getElementById(id);
|
||||
}
|
||||
window.onerror = vis_exh;
|
||||
|
||||
|
||||
(function () {
|
||||
@@ -88,12 +33,12 @@ function goto(dest) {
|
||||
for (var a = obj.length - 1; a >= 0; a--)
|
||||
obj[a].classList.remove('act');
|
||||
|
||||
var obj = document.querySelectorAll('#ops>a');
|
||||
obj = document.querySelectorAll('#ops>a');
|
||||
for (var a = obj.length - 1; a >= 0; a--)
|
||||
obj[a].classList.remove('act');
|
||||
|
||||
if (dest) {
|
||||
document.getElementById('op_' + dest).classList.add('act');
|
||||
ebi('op_' + dest).classList.add('act');
|
||||
document.querySelector('#ops>a[data-dest=' + dest + ']').classList.add('act');
|
||||
|
||||
var fn = window['goto_' + dest];
|
||||
@@ -121,7 +66,7 @@ function goto_up2k() {
|
||||
if (op !== null && op !== '.')
|
||||
goto(op);
|
||||
}
|
||||
document.getElementById('ops').style.display = 'block';
|
||||
ebi('ops').style.display = 'block';
|
||||
})();
|
||||
|
||||
|
||||
@@ -150,21 +95,21 @@ function up2k_init(have_crypto) {
|
||||
|
||||
// show modal message
|
||||
function showmodal(msg) {
|
||||
o('u2notbtn').innerHTML = msg;
|
||||
o('u2btn').style.display = 'none';
|
||||
o('u2notbtn').style.display = 'block';
|
||||
o('u2conf').style.opacity = '0.5';
|
||||
ebi('u2notbtn').innerHTML = msg;
|
||||
ebi('u2btn').style.display = 'none';
|
||||
ebi('u2notbtn').style.display = 'block';
|
||||
ebi('u2conf').style.opacity = '0.5';
|
||||
}
|
||||
|
||||
// hide modal message
|
||||
function unmodal() {
|
||||
o('u2notbtn').style.display = 'none';
|
||||
o('u2btn').style.display = 'block';
|
||||
o('u2conf').style.opacity = '1';
|
||||
o('u2notbtn').innerHTML = '';
|
||||
ebi('u2notbtn').style.display = 'none';
|
||||
ebi('u2btn').style.display = 'block';
|
||||
ebi('u2conf').style.opacity = '1';
|
||||
ebi('u2notbtn').innerHTML = '';
|
||||
}
|
||||
|
||||
var post_url = o('op_bup').getElementsByTagName('form')[0].getAttribute('action');
|
||||
var post_url = ebi('op_bup').getElementsByTagName('form')[0].getAttribute('action');
|
||||
if (post_url && post_url.charAt(post_url.length - 1) !== '/')
|
||||
post_url += '/';
|
||||
|
||||
@@ -181,25 +126,25 @@ function up2k_init(have_crypto) {
|
||||
import_js('/.cpr/deps/sha512.js', unmodal);
|
||||
|
||||
if (is_https)
|
||||
o('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best';
|
||||
ebi('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best';
|
||||
else
|
||||
o('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance';
|
||||
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance';
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// show uploader if the user only has write-access
|
||||
if (!o('files'))
|
||||
if (!ebi('files'))
|
||||
goto('up2k');
|
||||
|
||||
// shows or clears an error message in the basic uploader ui
|
||||
function setmsg(msg) {
|
||||
if (msg !== undefined) {
|
||||
o('u2err').setAttribute('class', 'err');
|
||||
o('u2err').innerHTML = msg;
|
||||
ebi('u2err').setAttribute('class', 'err');
|
||||
ebi('u2err').innerHTML = msg;
|
||||
}
|
||||
else {
|
||||
o('u2err').setAttribute('class', '');
|
||||
o('u2err').innerHTML = '';
|
||||
ebi('u2err').setAttribute('class', '');
|
||||
ebi('u2err').innerHTML = '';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -210,7 +155,7 @@ function up2k_init(have_crypto) {
|
||||
}
|
||||
|
||||
// handle user intent to use the basic uploader instead
|
||||
o('u2nope').onclick = function (e) {
|
||||
ebi('u2nope').onclick = function (e) {
|
||||
e.preventDefault();
|
||||
setmsg('');
|
||||
goto('bup');
|
||||
@@ -229,9 +174,9 @@ function up2k_init(have_crypto) {
|
||||
function cfg_get(name) {
|
||||
var val = localStorage.getItem(name);
|
||||
if (val === null)
|
||||
return parseInt(o(name).value);
|
||||
return parseInt(ebi(name).value);
|
||||
|
||||
o(name).value = val;
|
||||
ebi(name).value = val;
|
||||
return val;
|
||||
}
|
||||
|
||||
@@ -242,7 +187,7 @@ function up2k_init(have_crypto) {
|
||||
else
|
||||
val = (val == '1');
|
||||
|
||||
o(name).checked = val;
|
||||
ebi(name).checked = val;
|
||||
return val;
|
||||
}
|
||||
|
||||
@@ -250,12 +195,13 @@ function up2k_init(have_crypto) {
|
||||
localStorage.setItem(
|
||||
name, val ? '1' : '0');
|
||||
|
||||
o(name).checked = val;
|
||||
ebi(name).checked = val;
|
||||
return val;
|
||||
}
|
||||
|
||||
var parallel_uploads = cfg_get('nthread');
|
||||
var multitask = bcfg_get('multitask', true);
|
||||
var ask_up = bcfg_get('ask_up', true);
|
||||
|
||||
var col_hashing = '#00bbff';
|
||||
var col_hashed = '#004466';
|
||||
@@ -284,9 +230,9 @@ function up2k_init(have_crypto) {
|
||||
return un2k("this is the basic uploader; up2k needs at least<br />chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1");
|
||||
|
||||
function nav() {
|
||||
o('file' + fdom_ctr).click();
|
||||
ebi('file' + fdom_ctr).click();
|
||||
}
|
||||
o('u2btn').addEventListener('click', nav, false);
|
||||
ebi('u2btn').addEventListener('click', nav, false);
|
||||
|
||||
function ondrag(ev) {
|
||||
ev.stopPropagation();
|
||||
@@ -294,8 +240,8 @@ function up2k_init(have_crypto) {
|
||||
ev.dataTransfer.dropEffect = 'copy';
|
||||
ev.dataTransfer.effectAllowed = 'copy';
|
||||
}
|
||||
o('u2btn').addEventListener('dragover', ondrag, false);
|
||||
o('u2btn').addEventListener('dragenter', ondrag, false);
|
||||
ebi('u2btn').addEventListener('dragover', ondrag, false);
|
||||
ebi('u2btn').addEventListener('dragenter', ondrag, false);
|
||||
|
||||
function gotfile(ev) {
|
||||
ev.stopPropagation();
|
||||
@@ -317,6 +263,7 @@ function up2k_init(have_crypto) {
|
||||
|
||||
more_one_file();
|
||||
var bad_files = [];
|
||||
var good_files = [];
|
||||
for (var a = 0; a < files.length; a++) {
|
||||
var fobj = files[a];
|
||||
if (is_itemlist) {
|
||||
@@ -330,9 +277,32 @@ function up2k_init(have_crypto) {
|
||||
throw 1;
|
||||
}
|
||||
catch (ex) {
|
||||
bad_files.push([a, fobj.name]);
|
||||
bad_files.push(fobj.name);
|
||||
continue;
|
||||
}
|
||||
good_files.push(fobj);
|
||||
}
|
||||
|
||||
if (bad_files.length > 0) {
|
||||
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length);
|
||||
for (var a = 0; a < bad_files.length; a++)
|
||||
msg += '-- ' + bad_files[a] + '\n';
|
||||
|
||||
if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))
|
||||
msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';
|
||||
|
||||
alert(msg);
|
||||
}
|
||||
|
||||
var msg = ['upload these ' + good_files.length + ' files?'];
|
||||
for (var a = 0; a < good_files.length; a++)
|
||||
msg.push(good_files[a].name);
|
||||
|
||||
if (ask_up && !confirm(msg.join('\n')))
|
||||
return;
|
||||
|
||||
for (var a = 0; a < good_files.length; a++) {
|
||||
var fobj = good_files[a];
|
||||
var now = new Date().getTime();
|
||||
var lmod = fobj.lastModified || now;
|
||||
var entry = {
|
||||
@@ -357,31 +327,20 @@ function up2k_init(have_crypto) {
|
||||
var tr = document.createElement('tr');
|
||||
tr.innerHTML = '<td id="f{0}n"></td><td id="f{0}t">hashing</td><td id="f{0}p" class="prog"></td>'.format(st.files.length);
|
||||
tr.getElementsByTagName('td')[0].textContent = entry.name;
|
||||
o('u2tab').appendChild(tr);
|
||||
ebi('u2tab').appendChild(tr);
|
||||
|
||||
st.files.push(entry);
|
||||
st.todo.hash.push(entry);
|
||||
}
|
||||
|
||||
if (bad_files.length > 0) {
|
||||
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length);
|
||||
for (var a = 0; a < bad_files.length; a++)
|
||||
msg += '-- ' + bad_files[a][1] + '\n';
|
||||
|
||||
if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))
|
||||
msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';
|
||||
|
||||
alert(msg);
|
||||
}
|
||||
}
|
||||
o('u2btn').addEventListener('drop', gotfile, false);
|
||||
ebi('u2btn').addEventListener('drop', gotfile, false);
|
||||
|
||||
function more_one_file() {
|
||||
fdom_ctr++;
|
||||
var elm = document.createElement('div')
|
||||
elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" />'.format(fdom_ctr);
|
||||
o('u2form').appendChild(elm);
|
||||
o('file' + fdom_ctr).addEventListener('change', gotfile, false);
|
||||
ebi('u2form').appendChild(elm);
|
||||
ebi('file' + fdom_ctr).addEventListener('change', gotfile, false);
|
||||
}
|
||||
more_one_file();
|
||||
|
||||
@@ -391,16 +350,17 @@ function up2k_init(have_crypto) {
|
||||
//
|
||||
|
||||
function handshakes_permitted() {
|
||||
return multitask || (
|
||||
st.todo.upload.length == 0 &&
|
||||
st.busy.upload.length == 0);
|
||||
var lim = multitask ? 1 : 0;
|
||||
return lim >=
|
||||
st.todo.upload.length +
|
||||
st.busy.upload.length;
|
||||
}
|
||||
|
||||
function hashing_permitted() {
|
||||
return multitask || (
|
||||
handshakes_permitted() &&
|
||||
st.todo.handshake.length == 0 &&
|
||||
st.busy.handshake.length == 0);
|
||||
var lim = multitask ? 1 : 0;
|
||||
return handshakes_permitted() && lim >=
|
||||
st.todo.handshake.length +
|
||||
st.busy.handshake.length;
|
||||
}
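
// rough illustration of the gating above (an interpretation, not from the
// original source): with multitask enabled, lim=1, so hashing/handshaking of
// the next file may overlap with at most one queued-or-running upload (and at
// most one pending handshake); with it disabled, lim=0 and the pipeline runs
// strictly one stage at a time.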
|
||||
|
||||
var tasker = (function () {
|
||||
@@ -451,17 +411,6 @@ function up2k_init(have_crypto) {
|
||||
/// hashing
|
||||
//
|
||||
|
||||
// https://gist.github.com/jonleighton/958841
|
||||
function buf2b64_maybe_fucky(buffer) {
|
||||
var ret = '';
|
||||
var view = new DataView(buffer);
|
||||
for (var i = 0; i < view.byteLength; i++) {
|
||||
ret += String.fromCharCode(view.getUint8(i));
|
||||
}
|
||||
return window.btoa(ret).replace(
|
||||
/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
|
||||
}
|
||||
|
||||
// https://gist.github.com/jonleighton/958841
|
||||
function buf2b64(arrayBuffer) {
|
||||
var base64 = '';
|
||||
@@ -502,20 +451,6 @@ function up2k_init(have_crypto) {
|
||||
return base64;
|
||||
}
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/digest
|
||||
function buf2hex(buffer) {
|
||||
var hexCodes = [];
|
||||
var view = new DataView(buffer);
|
||||
for (var i = 0; i < view.byteLength; i += 4) {
|
||||
var value = view.getUint32(i) // 4 bytes per iter
|
||||
var stringValue = value.toString(16) // doesn't pad
|
||||
var padding = '00000000'
|
||||
var paddedValue = (padding + stringValue).slice(-padding.length)
|
||||
hexCodes.push(paddedValue);
|
||||
}
|
||||
return hexCodes.join("");
|
||||
}
|
||||
|
||||
function get_chunksize(filesize) {
|
||||
var chunksize = 1024 * 1024;
|
||||
var stepsize = 512 * 1024;
|
||||
@@ -602,7 +537,7 @@ function up2k_init(have_crypto) {
|
||||
pb_html += '<div id="f{0}p{1}" style="width:{2}%"><div></div></div>'.format(
|
||||
t.n, a, pb_perc);
|
||||
|
||||
o('f{0}p'.format(t.n)).innerHTML = pb_html;
|
||||
ebi('f{0}p'.format(t.n)).innerHTML = pb_html;
|
||||
|
||||
var reader = new FileReader();
|
||||
|
||||
@@ -677,7 +612,7 @@ function up2k_init(have_crypto) {
|
||||
alert('{0} ms, {1} MB/s\n'.format(t.t2 - t.t1, spd.toFixed(3)) + t.hash.join('\n'));
|
||||
}
|
||||
|
||||
o('f{0}t'.format(t.n)).innerHTML = 'connecting';
|
||||
ebi('f{0}t'.format(t.n)).innerHTML = 'connecting';
|
||||
st.busy.hash.splice(st.busy.hash.indexOf(t), 1);
|
||||
st.todo.handshake.push(t);
|
||||
};
|
||||
@@ -706,7 +641,7 @@ function up2k_init(have_crypto) {
|
||||
if (response.name !== t.name) {
|
||||
// file exists; server renamed us
|
||||
t.name = response.name;
|
||||
o('f{0}n'.format(t.n)).textContent = t.name;
|
||||
ebi('f{0}n'.format(t.n)).textContent = t.name;
|
||||
}
|
||||
|
||||
t.postlist = [];
|
||||
@@ -736,23 +671,41 @@ function up2k_init(have_crypto) {
|
||||
msg = 'uploading';
|
||||
done = false;
|
||||
}
|
||||
o('f{0}t'.format(t.n)).innerHTML = msg;
|
||||
ebi('f{0}t'.format(t.n)).innerHTML = msg;
|
||||
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
|
||||
|
||||
if (done) {
|
||||
var spd1 = (t.size / ((t.t2 - t.t1) / 1000.)) / (1024 * 1024.);
|
||||
var spd2 = (t.size / ((t.t3 - t.t2) / 1000.)) / (1024 * 1024.);
|
||||
o('f{0}p'.format(t.n)).innerHTML = 'hash {0}, up {1} MB/s'.format(
|
||||
ebi('f{0}p'.format(t.n)).innerHTML = 'hash {0}, up {1} MB/s'.format(
|
||||
spd1.toFixed(2), spd2.toFixed(2));
|
||||
}
|
||||
tasker();
|
||||
}
|
||||
else
|
||||
else {
|
||||
var err = "";
|
||||
var rsp = (xhr.responseText + '');
|
||||
if (rsp.indexOf('partial upload exists') !== -1 ||
|
||||
rsp.indexOf('file already exists') !== -1) {
|
||||
err = rsp;
|
||||
var ofs = err.lastIndexOf(' : ');
|
||||
if (ofs > 0)
|
||||
err = err.slice(0, ofs);
|
||||
}
|
||||
if (err != "") {
|
||||
ebi('f{0}t'.format(t.n)).innerHTML = "ERROR";
|
||||
ebi('f{0}p'.format(t.n)).innerHTML = err;
|
||||
|
||||
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
|
||||
tasker();
|
||||
return;
|
||||
}
|
||||
alert("server broke (error {0}):\n\"{1}\"\n".format(
|
||||
xhr.status,
|
||||
(xhr.response && xhr.response.err) ||
|
||||
(xhr.responseText && xhr.responseText) ||
|
||||
"no further information"));
|
||||
}
|
||||
};
|
||||
xhr.open('POST', post_url + 'handshake.php', true);
|
||||
xhr.responseType = 'text';
|
||||
@@ -803,7 +756,7 @@ function up2k_init(have_crypto) {
|
||||
t.postlist.splice(t.postlist.indexOf(npart), 1);
|
||||
if (t.postlist.length == 0) {
|
||||
t.t3 = new Date().getTime();
|
||||
o('f{0}t'.format(t.n)).innerHTML = 'verifying';
|
||||
ebi('f{0}t'.format(t.n)).innerHTML = 'verifying';
|
||||
st.todo.handshake.push(t);
|
||||
}
|
||||
tasker();
|
||||
@@ -834,7 +787,7 @@ function up2k_init(have_crypto) {
|
||||
//
|
||||
|
||||
function prog(nfile, nchunk, color, percent) {
|
||||
var n1 = o('f{0}p{1}'.format(nfile, nchunk));
|
||||
var n1 = ebi('f{0}p{1}'.format(nfile, nchunk));
|
||||
var n2 = n1.getElementsByTagName('div')[0];
|
||||
if (percent === undefined) {
|
||||
n1.style.background = color;
|
||||
@@ -857,7 +810,7 @@ function up2k_init(have_crypto) {
|
||||
dir.preventDefault();
|
||||
} catch (ex) { }
|
||||
|
||||
var obj = o('nthread');
|
||||
var obj = ebi('nthread');
|
||||
if (dir.target) {
|
||||
obj.style.background = '#922';
|
||||
var v = Math.floor(parseInt(obj.value));
|
||||
@@ -887,24 +840,30 @@ function up2k_init(have_crypto) {
|
||||
bcfg_set('multitask', multitask);
|
||||
}
|
||||
|
||||
function tgl_ask_up() {
|
||||
ask_up = !ask_up;
|
||||
bcfg_set('ask_up', ask_up);
|
||||
}
|
||||
|
||||
function nop(ev) {
|
||||
ev.preventDefault();
|
||||
this.click();
|
||||
}
|
||||
|
||||
o('nthread_add').onclick = function (ev) {
|
||||
ebi('nthread_add').onclick = function (ev) {
|
||||
ev.preventDefault();
|
||||
bumpthread(1);
|
||||
};
|
||||
o('nthread_sub').onclick = function (ev) {
|
||||
ebi('nthread_sub').onclick = function (ev) {
|
||||
ev.preventDefault();
|
||||
bumpthread(-1);
|
||||
};
|
||||
|
||||
o('nthread').addEventListener('input', bumpthread, false);
|
||||
o('multitask').addEventListener('click', tgl_multitask, false);
|
||||
ebi('nthread').addEventListener('input', bumpthread, false);
|
||||
ebi('multitask').addEventListener('click', tgl_multitask, false);
|
||||
ebi('ask_up').addEventListener('click', tgl_ask_up, false);
|
||||
|
||||
var nodes = o('u2conf').getElementsByTagName('a');
|
||||
var nodes = ebi('u2conf').getElementsByTagName('a');
|
||||
for (var a = nodes.length - 1; a >= 0; a--)
|
||||
nodes[a].addEventListener('touchend', nop, false);
|
||||
|
||||
|
@@ -194,6 +194,12 @@
|
||||
#u2conf input+a {
|
||||
background: #d80;
|
||||
}
|
||||
#u2conf input[type="checkbox"]+label {
|
||||
color: #f5a;
|
||||
}
|
||||
#u2conf input[type="checkbox"]:checked+label {
|
||||
color: #fc5;
|
||||
}
|
||||
#u2foot {
|
||||
color: #fff;
|
||||
font-style: italic;
|
||||
|
@@ -43,10 +43,14 @@
|
||||
<input class="txtbox" id="nthread" value="2" />
|
||||
<a href="#" id="nthread_add">+</a>
|
||||
</td>
|
||||
<td rowspan="2">
|
||||
<td rowspan="2" style="padding-left:1.5em">
|
||||
<input type="checkbox" id="multitask" />
|
||||
<label for="multitask">hash while<br />uploading</label>
|
||||
</td>
|
||||
<td rowspan="2">
|
||||
<input type="checkbox" id="ask_up" />
|
||||
<label for="ask_up">ask for<br />confirmation</label>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
|
copyparty/web/util.js (new file, 109 lines)
@@ -0,0 +1,109 @@
|
||||
"use strict";
|
||||
|
||||
// error handler for mobile devices
|
||||
function hcroak(msg) {
|
||||
document.body.innerHTML = msg;
|
||||
window.onerror = undefined;
|
||||
throw 'fatal_err';
|
||||
}
|
||||
function croak(msg) {
|
||||
document.body.textContent = msg;
|
||||
window.onerror = undefined;
|
||||
throw msg;
|
||||
}
|
||||
function esc(txt) {
|
||||
return txt.replace(/[&"<>]/g, function (c) {
|
||||
return {
|
||||
'&': '&',
|
||||
'"': '"',
|
||||
'<': '<',
|
||||
'>': '>'
|
||||
}[c];
|
||||
});
|
||||
}
|
||||
function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
window.onerror = undefined;
|
||||
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
|
||||
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
|
||||
|
||||
if (error) {
|
||||
var find = ['desc', 'stack', 'trace'];
|
||||
for (var a = 0; a < find.length; a++)
|
||||
if (String(error[find[a]]) !== 'undefined')
|
||||
html.push('<h2>' + find[a] + '</h2>' +
|
||||
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
|
||||
}
|
||||
document.body.style.fontSize = '0.8em';
|
||||
document.body.style.padding = '0 1em 1em 1em';
|
||||
hcroak(html.join('\n'));
|
||||
}
|
||||
|
||||
|
||||
function ebi(id) {
|
||||
return document.getElementById(id);
|
||||
}
|
||||
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
||||
if (!String.prototype.endsWith) {
|
||||
String.prototype.endsWith = function (search, this_len) {
|
||||
if (this_len === undefined || this_len > this.length) {
|
||||
this_len = this.length;
|
||||
}
|
||||
return this.substring(this_len - search.length, this_len) === search;
|
||||
};
|
||||
}
|
||||
if (!String.startsWith) {
|
||||
String.prototype.startsWith = function (s, i) {
|
||||
i = i > 0 ? i | 0 : 0;
|
||||
return this.substring(i, i + s.length) === s;
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var script = document.createElement('script');
|
||||
script.type = 'text/javascript';
|
||||
script.src = url;
|
||||
|
||||
script.onreadystatechange = cb;
|
||||
script.onload = cb;
|
||||
|
||||
head.appendChild(script);
|
||||
}
|
||||
|
||||
|
||||
function sortTable(table, col) {
|
||||
var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows
|
||||
th = table.tHead.rows[0].cells,
|
||||
tr = Array.prototype.slice.call(tb.rows, 0),
|
||||
i, reverse = th[col].className == 'sort1' ? -1 : 1;
|
||||
for (var a = 0, thl = th.length; a < thl; a++)
|
||||
th[a].className = '';
|
||||
th[col].className = 'sort' + reverse;
|
||||
var stype = th[col].getAttribute('sort');
|
||||
tr = tr.sort(function (a, b) {
|
||||
var v1 = a.cells[col].textContent.trim();
|
||||
var v2 = b.cells[col].textContent.trim();
|
||||
if (stype == 'int') {
|
||||
v1 = parseInt(v1.replace(/,/g, ''));
|
||||
v2 = parseInt(v2.replace(/,/g, ''));
|
||||
return reverse * (v1 - v2);
|
||||
}
|
||||
return reverse * (v1.localeCompare(v2));
|
||||
});
|
||||
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]);
|
||||
}
|
||||
function makeSortable(table) {
|
||||
var th = table.tHead, i;
|
||||
th && (th = th.rows[0]) && (th = th.cells);
|
||||
if (th) i = th.length;
|
||||
else return; // if no `<thead>` then do nothing
|
||||
while (--i >= 0) (function (i) {
|
||||
th[i].onclick = function () {
|
||||
sortTable(table, i);
|
||||
};
|
||||
}(i));
|
||||
}
|
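a quick note on the new util.js above: it ships a visual crash handler (`vis_exh`), the `ebi` getElementById shorthand, and generic table sorting. a minimal usage sketch (the element id here is made up) could look like this:

```js
// route uncaught errors into the visual handler so mobile users
// get a readable error page instead of a silent failure
window.onerror = vis_exh;

// make an existing table sortable by clicking its <th> cells;
// "files" is a hypothetical element id for this sketch
var tbl = ebi('files');
if (tbl)
    makeSortable(tbl);

// a column can opt into numeric sorting with a sort attribute:
//   <th sort="int">size</th>
```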
@@ -3,6 +3,14 @@ echo not a script
|
||||
exit 1
|
||||
|
||||
|
||||
##
|
||||
## delete all partial uploads
|
||||
## (supports linux/macos, probably windows+msys2)
|
||||
|
||||
gzip -d < .hist/up2k.snap | jq -r '.[].tnam' | while IFS= read -r f; do rm -f -- "$f"; done
|
||||
gzip -d < .hist/up2k.snap | jq -r '.[].name' | while IFS= read -r f; do wc -c -- "$f" | grep -qiE '^[^0-9a-z]*0' && rm -f -- "$f"; done
|
||||
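for boxes without jq, a rough node.js equivalent of the two one-liners above could look like this (a sketch only; it assumes the snapshot is gzipped json whose entries carry `tnam` and `name`, same as the jq filters expect):

```js
// delete partial uploads listed in the up2k snapshot (hedged sketch)
const fs = require('fs');
const zlib = require('zlib');

const snap = JSON.parse(zlib.gunzipSync(fs.readFileSync('.hist/up2k.snap')));
for (const job of Object.values(snap)) {
    // temp file holding the incomplete upload
    try { fs.unlinkSync(job.tnam); } catch (e) {}

    // final filename, but only if nothing was ever written to it
    try {
        if (fs.statSync(job.name).size === 0)
            fs.unlinkSync(job.name);
    } catch (e) {}
}
```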
|
||||
|
||||
##
|
||||
## create a test payload
|
||||
|
||||
@@ -13,7 +21,7 @@ head -c $((2*1024*1024*1024)) /dev/zero | openssl enc -aes-256-ctr -pass pass:hu
|
||||
## testing multiple parallel uploads
|
||||
## usage: para | tee log
|
||||
|
||||
para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}; do for ((n=0;n<s;n++)); do curl -sF "act=bput" -F "f=@garbage.file" http://127.0.0.1:1234/ 2>&1 & done; wait; echo; done; done; }
|
||||
para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}; do for ((n=0;n<s;n++)); do curl -sF "act=bput" -F "f=@garbage.file" http://127.0.0.1:3923/ 2>&1 & done; wait; echo; done; done; }
|
||||
|
||||
|
||||
##
|
||||
@@ -36,13 +44,13 @@ for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qwe,rty;asd
|
||||
|
||||
fn=$(printf '\xba\xdc\xab.cab')
|
||||
echo asdf > "$fn"
|
||||
curl --cookie cppwd=wark -sF "act=bput" -F "f=@$fn" http://127.0.0.1:1234/moji/%ED%91/
|
||||
curl --cookie cppwd=wark -sF "act=bput" -F "f=@$fn" http://127.0.0.1:3923/moji/%ED%91/
|
||||
|
||||
|
||||
##
|
||||
## test compression
|
||||
|
||||
wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:1234/.cpr/deps/ogv.js -O- | md5sum; p=~ed/dev/copyparty/copyparty/web/deps/ogv.js.gz; md5sum $p; gzip -d < $p | md5sum
|
||||
wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:3923/.cpr/deps/ogv.js -O- | md5sum; p=~ed/dev/copyparty/copyparty/web/deps/ogv.js.gz; md5sum $p; gzip -d < $p | md5sum
|
||||
|
||||
|
||||
##
|
||||
@@ -80,3 +88,45 @@ for d in /usr /var; do find $d -type f -size +30M 2>/dev/null; done | while IFS=
|
||||
# py2 on osx
|
||||
brew install python@2
|
||||
pip install virtualenv
|
||||
|
||||
|
||||
##
|
||||
## http 206
|
||||
|
||||
# az = abcdefghijklmnopqrstuvwxyz
|
||||
|
||||
printf '%s\r\n' 'GET /az HTTP/1.1' 'Host: ocv.me' 'Range: bytes=5-10' '' | ncat ocv.me 80
|
||||
# Content-Range: bytes 5-10/26
|
||||
# Content-Length: 6
|
||||
# fghijk
|
||||
|
||||
Range: bytes=0-1 "ab" Content-Range: bytes 0-1/26
|
||||
Range: bytes=24-24 "y" Content-Range: bytes 24-24/26
|
||||
Range: bytes=24-25 "yz" Content-Range: bytes 24-25/26
|
||||
Range: bytes=24- "yz" Content-Range: bytes 24-25/26
|
||||
Range: bytes=25-29 "z" Content-Range: bytes 25-25/26
|
||||
Range: bytes=26- Content-Range: bytes */26
|
||||
HTTP/1.1 416 Requested Range Not Satisfiable
|
||||
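the same checks can be reproduced from a browser console with fetch; a hedged sketch (same test file as above, and mind CORS if the page is served from a different origin):

```js
// request a 6-byte slice and verify the 206 response
fetch('http://ocv.me/az', { headers: { 'Range': 'bytes=5-10' } })
    .then(function (r) {
        console.log(r.status);                        // expect 206
        console.log(r.headers.get('Content-Range'));  // expect "bytes 5-10/26"
        return r.text();
    })
    .then(function (body) {
        console.log(body);                            // expect "fghijk"
    });
```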
|
||||
|
||||
##
|
||||
## md perf
|
||||
|
||||
var tsh = [];
|
||||
function convert_markdown(md_text, dest_dom) {
|
||||
tsh.push(new Date().getTime());
|
||||
while (tsh.length > 10)
|
||||
tsh.shift();
|
||||
if (tsh.length > 1) {
|
||||
var end = tsh.slice(-2);
|
||||
console.log("render", end.pop() - end.pop(), (tsh[tsh.length - 1] - tsh[0]) / (tsh.length - 1));
|
||||
}
|
||||
|
||||
|
||||
##
|
||||
## tmpfiles.d meme
|
||||
|
||||
mk() { rm -rf /tmp/foo; sudo -u ed bash -c 'mkdir /tmp/foo; echo hi > /tmp/foo/bar'; }
|
||||
mk && t0="$(date)" && while true; do date -s "$(date '+ 1 hour')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
|
||||
mk && sudo -u ed flock /tmp/foo sleep 40 & sleep 1; ps aux | grep -E 'sleep 40$' && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
|
||||
mk && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; tar -cf/dev/null /tmp/foo; done; echo "$t0"
|
||||
|
docs/pretend-youre-qnap.patch (new file, 35 lines)
@@ -0,0 +1,35 @@
|
||||
diff --git a/copyparty/httpcli.py b/copyparty/httpcli.py
|
||||
index 2d3c1ad..e1e85a0 100644
|
||||
--- a/copyparty/httpcli.py
|
||||
+++ b/copyparty/httpcli.py
|
||||
@@ -864,6 +864,30 @@ class HttpCli(object):
|
||||
#
|
||||
# send reply
|
||||
|
||||
+ try:
|
||||
+ fakefn = self.conn.hsrv.fakefn
|
||||
+ fakectr = self.conn.hsrv.fakectr
|
||||
+ fakedata = self.conn.hsrv.fakedata
|
||||
+ except:
|
||||
+ fakefn = b''
|
||||
+ fakectr = 0
|
||||
+ fakedata = b''
|
||||
+
|
||||
+ self.log('\n{} {}\n{}'.format(fakefn, fakectr, open_args[0]))
|
||||
+ if fakefn == open_args[0] and fakectr > 0:
|
||||
+ self.reply(fakedata, mime=guess_mime(req_path)[0])
|
||||
+ self.conn.hsrv.fakectr = fakectr - 1
|
||||
+ else:
|
||||
+ with open_func(*open_args) as f:
|
||||
+ fakedata = f.read()
|
||||
+
|
||||
+ self.conn.hsrv.fakefn = open_args[0]
|
||||
+ self.conn.hsrv.fakedata = fakedata
|
||||
+ self.conn.hsrv.fakectr = 15
|
||||
+ self.reply(fakedata, mime=guess_mime(req_path)[0])
|
||||
+
|
||||
+ return True
|
||||
+
|
||||
self.out_headers["Accept-Ranges"] = "bytes"
|
||||
self.send_headers(
|
||||
length=upper - lower,
|
docs/rclone.md (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
# using rclone to mount a remote copyparty server as a local filesystem
|
||||
|
||||
speed estimates with server and client on the same win10 machine:
|
||||
* `1070 MiB/s` with rclone as both server and client
|
||||
* `570 MiB/s` with rclone-client and `copyparty -ed -j16` as server
|
||||
* `220 MiB/s` with rclone-client and `copyparty -ed` as server
|
||||
* `100 MiB/s` with [../bin/copyparty-fuse.py](../bin/copyparty-fuse.py) as client
|
||||
|
||||
when server is on another machine (1gbit LAN),
|
||||
* `75 MiB/s` with [../bin/copyparty-fuse.py](../bin/copyparty-fuse.py) as client
|
||||
* `92 MiB/s` with rclone-client and `copyparty -ed` as server
|
||||
* `103 MiB/s` (connection max) with `copyparty -ed -j16` and all the others
|
||||
|
||||
|
||||
# creating the config file
|
||||
|
||||
if you want to use password auth, add `headers = Cookie,cppwd=fgsfds` below
|
||||
|
||||
|
||||
### on windows clients:
|
||||
```
|
||||
(
|
||||
echo [cpp]
|
||||
echo type = http
|
||||
echo url = http://127.0.0.1:3923/
|
||||
) > %userprofile%\.config\rclone\rclone.conf
|
||||
```
|
||||
|
||||
also install the windows dependencies: [winfsp](https://github.com/billziss-gh/winfsp/releases/latest)
|
||||
|
||||
|
||||
### on unix clients:
|
||||
```
|
||||
cat > ~/.config/rclone/rclone.conf <<'EOF'
|
||||
[cpp]
|
||||
type = http
|
||||
url = http://127.0.0.1:3923/
|
||||
EOF
|
||||
```
|
||||
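for the password-auth variant mentioned above, the config just gains the cookie header (sketch; `fgsfds` is the placeholder password):

```
[cpp]
type = http
url = http://127.0.0.1:3923/
headers = Cookie,cppwd=fgsfds
```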
|
||||
|
||||
# mounting the copyparty server locally
|
||||
```
|
||||
rclone.exe mount --vfs-cache-max-age 5s --attr-timeout 5s --dir-cache-time 5s cpp: Z:
|
||||
```
|
||||
|
||||
|
||||
# use rclone as server too, replacing copyparty
|
||||
|
||||
feels out of place but is too good not to mention
|
||||
|
||||
```
|
||||
rclone.exe serve http --read-only .
|
||||
```
|
||||
|
||||
* `webdav` gives write-access but `http` is twice as fast
|
||||
* `ftp` is buggy, avoid
|
||||
|
||||
|
||||
# bugs
|
||||
|
||||
* rclone-client throws an exception if you try to read an empty file (should return zero bytes)
|
docs/unirange.py (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
v = "U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+2000-206F, U+2074, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD"
|
||||
for v in v.split(","):
|
||||
if "+" in v:
|
||||
v = v.split("+")[1]
|
||||
if "-" in v:
|
||||
lo, hi = v.split("-")
|
||||
else:
|
||||
lo = hi = v
|
||||
for v in range(int(lo, 16), int(hi, 16) + 1):
|
||||
print("{:4x} [{}]".format(v, chr(v)))
|
scripts/copyparty-repack.sh (new executable file, 129 lines)
@@ -0,0 +1,129 @@
|
||||
#!/bin/bash
|
||||
repacker=1
|
||||
set -e
|
||||
|
||||
# -- download latest copyparty (source.tgz and sfx),
|
||||
# -- build minimal sfx versions,
|
||||
# -- create a .tar.gz bundle
|
||||
#
|
||||
# convenient for deploying updates to inconvenient locations
|
||||
# (and those are usually linux so bash is good inaff)
|
||||
# (but that said this even has macos support)
|
||||
#
|
||||
# bundle will look like:
|
||||
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty
|
||||
# -rw-r--r-- 0 ed ed 491318 Nov 19 00:40 copyparty-extras/copyparty-0.5.4.tar.gz
|
||||
# -rwxr-xr-x 0 ed ed 30254 Nov 17 23:58 copyparty-extras/copyparty-fuse.py
|
||||
# -rwxr-xr-x 0 ed ed 481403 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.sh
|
||||
# -rwxr-xr-x 0 ed ed 506043 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.py
|
||||
# -rwxr-xr-x 0 ed ed 167699 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.sh
|
||||
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py
|
||||
|
||||
|
||||
command -v gtar && tar() { gtar "$@"; }
|
||||
command -v gsed && sed() { gsed "$@"; }
|
||||
td="$(mktemp -d)"
|
||||
od="$(pwd)"
|
||||
cd "$td"
|
||||
pwd
|
||||
|
||||
|
||||
dl_text() {
|
||||
command -v curl && exec curl "$@"
|
||||
exec wget -O- "$@"
|
||||
}
|
||||
dl_files() {
|
||||
command -v curl && exec curl -L --remote-name-all "$@"
|
||||
exec wget "$@"
|
||||
}
|
||||
export -f dl_files
|
||||
|
||||
|
||||
# if cache exists, use that instead of bothering github
|
||||
cache="$od/.copyparty-repack.cache"
|
||||
[ -e "$cache" ] &&
|
||||
tar -xf "$cache" ||
|
||||
{
|
||||
# get download links from github
|
||||
dl_text https://api.github.com/repos/9001/copyparty/releases/latest |
|
||||
(
|
||||
# prefer jq if available
|
||||
jq -r '.assets[]|select(.name|test("-sfx|tar.gz")).browser_download_url' ||
|
||||
|
||||
# fallback to awk (sorry)
|
||||
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
|
||||
tar -czf "$cache" *
|
||||
}
|
||||
|
||||
|
||||
# move src into copyparty-extras/,
|
||||
# move sfx into copyparty-extras/sfx-full/
|
||||
mkdir -p copyparty-extras/sfx-{full,lite}
|
||||
mv copyparty-sfx.* copyparty-extras/sfx-full/
|
||||
mv copyparty-*.tar.gz copyparty-extras/
|
||||
|
||||
|
||||
# unpack the source code
|
||||
( cd copyparty-extras/
|
||||
tar -xf *.tar.gz
|
||||
)
|
||||
|
||||
|
||||
# use repacker from release if that is newer
|
||||
p_other=copyparty-extras/copyparty-*/scripts/copyparty-repack.sh
|
||||
other=$(awk -F= 'BEGIN{v=-1} NR<10&&/^repacker=/{v=$NF} END{print v}' <$p_other)
|
||||
[ $repacker -lt $other ] &&
|
||||
cat $p_other >"$od/$0" && cd "$od" && rm -rf "$td" && exec "$0" "$@"
|
||||
|
||||
|
||||
# now drop the cache
|
||||
rm -f "$cache"
|
||||
|
||||
|
||||
# fix permissions
|
||||
chmod 755 \
|
||||
copyparty-extras/sfx-full/* \
|
||||
copyparty-extras/copyparty-*/{scripts,bin}/*
|
||||
|
||||
|
||||
# extract and repack the sfx with less features enabled
|
||||
( cd copyparty-extras/sfx-full/
|
||||
./copyparty-sfx.py -h
|
||||
cd ../copyparty-*/
|
||||
./scripts/make-sfx.sh re no-ogv no-cm
|
||||
)
|
||||
|
||||
|
||||
# put new sfx into copyparty-extras/sfx-lite/,
|
||||
# fuse client into copyparty-extras/,
|
||||
# copy lite-sfx.py to ./copyparty,
|
||||
# delete extracted source code
|
||||
( cd copyparty-extras/
|
||||
mv copyparty-*/dist/* sfx-lite/
|
||||
mv copyparty-*/bin/copyparty-fuse.py .
|
||||
cp -pv sfx-lite/copyparty-sfx.py ../copyparty
|
||||
rm -rf copyparty-{0..9}*.*.*{0..9}
|
||||
)
|
||||
|
||||
|
||||
# and include the repacker itself too
|
||||
cp -av "$od/$0" copyparty-extras/ ||
|
||||
cp -av "$0" copyparty-extras/ ||
|
||||
true
|
||||
|
||||
|
||||
# create the bundle
|
||||
fn=copyparty-$(date +%Y-%m%d-%H%M%S).tgz
|
||||
tar -czvf "$od/$fn" *
|
||||
cd "$od"
|
||||
rm -rf "$td"
|
||||
|
||||
|
||||
echo
|
||||
echo "done, here's your bundle:"
|
||||
ls -al "$fn"
|
@@ -3,7 +3,7 @@ WORKDIR /z
|
||||
ENV ver_asmcrypto=2821dd1dedd1196c378f5854037dda5c869313f3 \
|
||||
ver_markdownit=10.0.0 \
|
||||
ver_showdown=1.9.1 \
|
||||
ver_marked=1.0.0 \
|
||||
ver_marked=1.1.0 \
|
||||
ver_ogvjs=1.6.1 \
|
||||
ver_mde=2.10.1 \
|
||||
ver_codemirror=5.53.2 \
|
||||
@@ -11,8 +11,11 @@ ENV ver_asmcrypto=2821dd1dedd1196c378f5854037dda5c869313f3 \
|
||||
ver_zopfli=1.0.3
|
||||
|
||||
|
||||
# download
|
||||
RUN apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev \
|
||||
# download;
|
||||
# the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
|
||||
RUN mkdir -p /z/dist/no-pk \
|
||||
&& wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \
|
||||
&& apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \
|
||||
&& wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \
|
||||
&& wget https://github.com/asmcrypto/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
|
||||
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
|
||||
@@ -36,23 +39,7 @@ RUN apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzi
|
||||
&& npm install \
|
||||
&& npm i gulp-cli -g ) \
|
||||
&& unzip fontawesome.zip \
|
||||
&& tar -xf zopfli.tgz \
|
||||
&& mkdir -p /z/dist/no-pk
|
||||
|
||||
|
||||
# uncomment if you wanna test the abandoned markdown converters
|
||||
#ENV build_abandoned=1
|
||||
|
||||
|
||||
RUN [ $build_abandoned ] || exit 0; \
|
||||
git clone --depth 1 --branch $ver_showdown https://github.com/showdownjs/showdown/ \
|
||||
&& wget https://github.com/markdown-it/markdown-it/archive/$ver_markdownit.tar.gz -O markdownit.tgz \
|
||||
&& (cd showdown \
|
||||
&& npm install \
|
||||
&& npm i grunt -g ) \
|
||||
&& (tar -xf markdownit.tgz \
|
||||
&& cd markdown-it-$ver_markdownit \
|
||||
&& npm install )
|
||||
&& tar -xf zopfli.tgz
|
||||
|
||||
|
||||
# build fonttools (which needs zopfli)
|
||||
@@ -80,31 +67,27 @@ RUN cd ogvjs-$ver_ogvjs \
|
||||
&& cp -pv \
|
||||
ogv.js \
|
||||
ogv-worker-audio.js \
|
||||
ogv-demuxer-ogg.js \
|
||||
ogv-demuxer-ogg-wasm.js \
|
||||
ogv-demuxer-ogg-wasm.wasm \
|
||||
ogv-demuxer-webm.js \
|
||||
ogv-demuxer-webm-wasm.js \
|
||||
ogv-demuxer-webm-wasm.wasm \
|
||||
ogv-decoder-audio-opus.js \
|
||||
ogv-decoder-audio-opus-wasm.js \
|
||||
ogv-decoder-audio-opus-wasm.wasm \
|
||||
ogv-decoder-audio-vorbis.js \
|
||||
ogv-decoder-audio-vorbis-wasm.js \
|
||||
ogv-decoder-audio-vorbis-wasm.wasm \
|
||||
dynamicaudio.swf \
|
||||
/z/dist
|
||||
|
||||
# ogv-demuxer-ogg.js \
|
||||
# ogv-demuxer-webm.js \
|
||||
# ogv-decoder-audio-opus.js \
|
||||
# ogv-decoder-audio-vorbis.js \
|
||||
# dynamicaudio.swf \
|
||||
|
||||
|
||||
# build marked
|
||||
RUN wget https://github.com/markedjs/marked/commit/5c166d4164791f643693478e4ac094d63d6e0c9a.patch -O marked-git-1.patch \
|
||||
&& wget https://patch-diff.githubusercontent.com/raw/markedjs/marked/pull/1652.patch -O marked-git-2.patch
|
||||
|
||||
COPY marked.patch /z/
|
||||
COPY marked-ln.patch /z/
|
||||
RUN cd marked-$ver_marked \
|
||||
&& patch -p1 < /z/marked-git-1.patch \
|
||||
&& patch -p1 < /z/marked-git-2.patch \
|
||||
&& patch -p1 < /z/marked-ln.patch \
|
||||
&& patch -p1 < /z/marked.patch \
|
||||
&& npm run build \
|
||||
@@ -138,57 +121,10 @@ RUN cd easy-markdown-editor-$ver_mde \
|
||||
&& patch -p1 < /z/easymde-ln.patch \
|
||||
&& gulp \
|
||||
&& cp -pv dist/easymde.min.css /z/dist/easymde.css \
|
||||
&& cp -pv dist/easymde.min.js /z/dist/easymde.js \
|
||||
&& sed -ri '/pipe.terser/d; /cleanCSS/d' gulpfile.js \
|
||||
&& gulp \
|
||||
&& cp -pv dist/easymde.min.css /z/dist/easymde.full.css \
|
||||
&& cp -pv dist/easymde.min.js /z/dist/easymde.full.js
|
||||
&& cp -pv dist/easymde.min.js /z/dist/easymde.js
|
||||
|
||||
|
||||
# build showdown (abandoned; disabled by default)
|
||||
COPY showdown.patch /z/
|
||||
RUN [ $build_abandoned ] || exit 0; \
|
||||
cd showdown \
|
||||
&& rm -rf bin dist \
|
||||
# # remove ellipsis plugin \
|
||||
&& rm \
|
||||
src/subParsers/ellipsis.js \
|
||||
test/cases/ellipsis* \
|
||||
# # remove html-to-md converter \
|
||||
&& rm \
|
||||
test/node/testsuite.makemd.js \
|
||||
test/node/showdown.Converter.makeMarkdown.js \
|
||||
# # remove emojis \
|
||||
&& rm src/subParsers/emoji.js \
|
||||
&& awk '/^showdown.helper.emojis/ {o=1} !o; /^\}/ {o=0}' \
|
||||
>f <src/helpers.js \
|
||||
&& mv f src/helpers.js \
|
||||
&& rm -rf test/features/emojis \
|
||||
# # remove ghmentions \
|
||||
&& rm test/features/ghMentions.* \
|
||||
# # remove option descriptions \
|
||||
&& sed -ri '/descri(ption|be): /d' src/options.js \
|
||||
&& patch -p1 < /z/showdown.patch
|
||||
|
||||
RUN [ $build_abandoned ] || exit 0; \
|
||||
cd showdown \
|
||||
&& grunt build \
|
||||
&& sed -ri '/sourceMappingURL=showdown.min.js.map/d' dist/showdown.min.js \
|
||||
&& mv dist/showdown.min.js /z/dist/showdown.js \
|
||||
&& ls -al /z/dist/showdown.js
|
||||
|
||||
|
||||
# build markdownit (abandoned; disabled by default)
|
||||
COPY markdown-it.patch /z/
|
||||
RUN [ $build_abandoned ] || exit 0; \
|
||||
cd markdown-it-$ver_markdownit \
|
||||
&& patch -p1 < /z/markdown-it.patch \
|
||||
&& make browserify \
|
||||
&& cp -pv dist/markdown-it.min.js /z/dist/markdown-it.js \
|
||||
&& cp -pv dist/markdown-it.js /z/dist/markdown-it-full.js
|
||||
|
||||
|
||||
# build fontawesome
|
||||
# build fontawesome and scp
|
||||
COPY mini-fa.sh /z
|
||||
COPY mini-fa.css /z
|
||||
RUN /bin/ash /z/mini-fa.sh
|
||||
@@ -203,38 +139,6 @@ RUN cd /z/dist \
|
||||
&& rmdir no-pk
|
||||
|
||||
|
||||
# showdown: abandoned due to code-blocks in lists failing
|
||||
# 22770 orig
|
||||
# 12154 no-emojis
|
||||
# 12134 no-srcmap
|
||||
# 11189 no-descriptions
|
||||
# 11152 no-ellipsis
|
||||
# 10617 no-this.makeMd
|
||||
# 9569 no-extensions
|
||||
# 9537 no-extensions
|
||||
# 9410 no-mentions
|
||||
|
||||
|
||||
# markdown-it: abandoned because no header anchors (and too big)
|
||||
# 32322 107754 orig (wowee)
|
||||
# 19619 21392 71540 less entities
|
||||
|
||||
|
||||
# marked:
|
||||
# 9253 29773 orig
|
||||
# 9159 29633 no copyright (reverted)
|
||||
# 9040 29057 no sanitize
|
||||
# 8870 28631 no email-mangle
|
||||
# so really not worth it, just drop the patch when that stops working
|
||||
|
||||
|
||||
# easymde:
|
||||
# 91836 orig
|
||||
# 88635 no spellcheck
|
||||
# 88392 no urlRE
|
||||
# 85651 less bidi
|
||||
# 82855 less mode meta
|
||||
|
||||
|
||||
# d=/home/ed/dev/copyparty/scripts/deps-docker/; tar -cf ../x . && ssh root@$bip "cd $d && tar -xv >&2 && make >&2 && tar -cC ../../copyparty/web deps" <../x | (cd ../../copyparty/web/; cat > the.tgz; tar -xvf the.tgz)
|
||||
# git diff -U2 --no-index marked-1.1.0-orig/ marked-1.1.0-edit/ -U2 | sed -r '/^index /d;s`^(diff --git a/)[^/]+/(.* b/)[^/]+/`\1\2`; s`^(---|\+\+\+) ([ab]/)[^/]+/`\1 \2`' > ../dev/copyparty/scripts/deps-docker/marked-ln.patch
|
||||
# d=/home/ed/dev/copyparty/scripts/deps-docker/; tar -cf ../x . && ssh root@$bip "cd $d && tar -xv >&2 && make >&2 && tar -cC ../../copyparty/web deps" <../x | (cd ../../copyparty/web/; cat > the.tgz; tar -xvf the.tgz; rm the.tgz)
|
||||
# gzip -dkf ../dev/copyparty/copyparty/web/deps/deps/marked.full.js.gz && diff -NarU2 ../dev/copyparty/copyparty/web/deps/{,deps/}marked.full.js
|
||||
|
@@ -35,7 +35,7 @@ add data-ln="%d" to most tags, %d is the source markdown line
|
||||
+ // this.ln will be bumped by recursive calls into this func;
|
||||
+ // reset the count and rely on the outermost token's raw only
|
||||
+ ln = this.ln;
|
||||
+
|
||||
+
|
||||
// newline
|
||||
if (token = this.tokenizer.space(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
@@ -180,7 +180,7 @@ diff --git a/src/Parser.js b/src/Parser.js
|
||||
+ // similar to tables, writing contents before the <ul> tag
|
||||
+ // so update the tag attribute as we go
|
||||
+ // (assuming all list entries got tagged with a source-line, probably safe w)
|
||||
+ body += this.renderer.tag_ln(item.tokens[0].ln).listitem(itemBody, task, checked);
|
||||
+ body += this.renderer.tag_ln((item.tokens[0] || token).ln).listitem(itemBody, task, checked);
|
||||
}
|
||||
|
||||
- out += this.renderer.list(body, ordered, start);
|
||||
@@ -234,7 +234,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
- return '<pre><code>'
|
||||
+ return '<pre' + this.ln + '><code>'
|
||||
+ (escaped ? code : escape(code, true))
|
||||
+ '</code></pre>';
|
||||
+ '</code></pre>\n';
|
||||
}
|
||||
|
||||
- return '<pre><code class="'
|
||||
|
@@ -1,7 +1,141 @@
|
||||
diff -NarU1 marked-1.0.0-orig/src/defaults.js marked-1.0.0-edit/src/defaults.js
|
||||
--- marked-1.0.0-orig/src/defaults.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/src/defaults.js 2020-04-25 19:16:56.124621393 +0000
|
||||
@@ -9,10 +9,6 @@
|
||||
diff --git a/src/Lexer.js b/src/Lexer.js
|
||||
--- a/src/Lexer.js
|
||||
+++ b/src/Lexer.js
|
||||
@@ -5,5 +5,5 @@ const { block, inline } = require('./rules.js');
|
||||
/**
|
||||
* smartypants text replacement
|
||||
- */
|
||||
+ *
|
||||
function smartypants(text) {
|
||||
return text
|
||||
@@ -26,5 +26,5 @@ function smartypants(text) {
|
||||
/**
|
||||
* mangle email addresses
|
||||
- */
|
||||
+ *
|
||||
function mangle(text) {
|
||||
let out = '',
|
||||
@@ -439,5 +439,5 @@ module.exports = class Lexer {
|
||||
|
||||
// autolink
|
||||
- if (token = this.tokenizer.autolink(src, mangle)) {
|
||||
+ if (token = this.tokenizer.autolink(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
@@ -446,5 +446,5 @@ module.exports = class Lexer {
|
||||
|
||||
// url (gfm)
|
||||
- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
|
||||
+ if (!inLink && (token = this.tokenizer.url(src))) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
@@ -453,5 +453,5 @@ module.exports = class Lexer {
|
||||
|
||||
// text
|
||||
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
|
||||
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
--- a/src/Renderer.js
|
||||
+++ b/src/Renderer.js
|
||||
@@ -140,5 +140,5 @@ module.exports = class Renderer {
|
||||
|
||||
link(href, title, text) {
|
||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
+ href = cleanUrl(this.options.baseUrl, href);
|
||||
if (href === null) {
|
||||
return text;
|
||||
@@ -153,5 +153,5 @@ module.exports = class Renderer {
|
||||
|
||||
image(href, title, text) {
|
||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
+ href = cleanUrl(this.options.baseUrl, href);
|
||||
if (href === null) {
|
||||
return text;
|
||||
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
--- a/src/Tokenizer.js
|
||||
+++ b/src/Tokenizer.js
|
||||
@@ -287,11 +287,8 @@ module.exports = class Tokenizer {
|
||||
if (cap) {
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
- ? 'paragraph'
|
||||
- : 'html',
|
||||
+ type: 'html',
|
||||
raw: cap[0],
|
||||
- pre: !this.options.sanitizer
|
||||
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
|
||||
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
|
||||
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
|
||||
+ text: cap[0]
|
||||
};
|
||||
}
|
||||
@@ -421,15 +418,9 @@ module.exports = class Tokenizer {
|
||||
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
- ? 'text'
|
||||
- : 'html',
|
||||
+ type: 'html',
|
||||
raw: cap[0],
|
||||
inLink,
|
||||
inRawBlock,
|
||||
- text: this.options.sanitize
|
||||
- ? (this.options.sanitizer
|
||||
- ? this.options.sanitizer(cap[0])
|
||||
- : escape(cap[0]))
|
||||
- : cap[0]
|
||||
+ text: cap[0]
|
||||
};
|
||||
}
|
||||
@@ -550,10 +541,10 @@ module.exports = class Tokenizer {
|
||||
}
|
||||
|
||||
- autolink(src, mangle) {
|
||||
+ autolink(src) {
|
||||
const cap = this.rules.inline.autolink.exec(src);
|
||||
if (cap) {
|
||||
let text, href;
|
||||
if (cap[2] === '@') {
|
||||
- text = escape(this.options.mangle ? mangle(cap[1]) : cap[1]);
|
||||
+ text = escape(cap[1]);
|
||||
href = 'mailto:' + text;
|
||||
} else {
|
||||
@@ -578,10 +569,10 @@ module.exports = class Tokenizer {
|
||||
}
|
||||
|
||||
- url(src, mangle) {
|
||||
+ url(src) {
|
||||
let cap;
|
||||
if (cap = this.rules.inline.url.exec(src)) {
|
||||
let text, href;
|
||||
if (cap[2] === '@') {
|
||||
- text = escape(this.options.mangle ? mangle(cap[0]) : cap[0]);
|
||||
+ text = escape(cap[0]);
|
||||
href = 'mailto:' + text;
|
||||
} else {
|
||||
@@ -615,12 +606,12 @@ module.exports = class Tokenizer {
|
||||
}
|
||||
|
||||
- inlineText(src, inRawBlock, smartypants) {
|
||||
+ inlineText(src, inRawBlock) {
|
||||
const cap = this.rules.inline.text.exec(src);
|
||||
if (cap) {
|
||||
let text;
|
||||
if (inRawBlock) {
|
||||
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
|
||||
+ text = cap[0];
|
||||
} else {
|
||||
- text = escape(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
|
||||
+ text = escape(cap[0]);
|
||||
}
|
||||
return {
|
||||
diff --git a/src/defaults.js b/src/defaults.js
|
||||
--- a/src/defaults.js
|
||||
+++ b/src/defaults.js
|
||||
@@ -8,12 +8,8 @@ function getDefaults() {
|
||||
highlight: null,
|
||||
langPrefix: 'language-',
|
||||
- mangle: true,
|
||||
pedantic: false,
|
||||
@@ -12,10 +146,12 @@ diff -NarU1 marked-1.0.0-orig/src/defaults.js marked-1.0.0-edit/src/defaults.js
|
||||
smartLists: false,
|
||||
- smartypants: false,
|
||||
tokenizer: null,
|
||||
diff -NarU1 marked-1.0.0-orig/src/helpers.js marked-1.0.0-edit/src/helpers.js
|
||||
--- marked-1.0.0-orig/src/helpers.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/src/helpers.js 2020-04-25 18:58:43.001320210 +0000
|
||||
@@ -65,16 +65,3 @@
|
||||
walkTokens: null,
|
||||
diff --git a/src/helpers.js b/src/helpers.js
|
||||
--- a/src/helpers.js
|
||||
+++ b/src/helpers.js
|
||||
@@ -64,18 +64,5 @@ function edit(regex, opt) {
|
||||
const nonWordAndColonTest = /[^\w:]/g;
|
||||
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
|
||||
-function cleanUrl(sanitize, base, href) {
|
||||
- if (sanitize) {
|
||||
@@ -33,7 +169,9 @@ diff -NarU1 marked-1.0.0-orig/src/helpers.js marked-1.0.0-edit/src/helpers.js
|
||||
- }
|
||||
+function cleanUrl(base, href) {
|
||||
if (base && !originIndependentUrl.test(href)) {
|
||||
@@ -224,8 +211,2 @@
|
||||
href = resolveUrl(base, href);
|
||||
@@ -223,10 +210,4 @@ function findClosingBracket(str, b) {
|
||||
}
|
||||
|
||||
-function checkSanitizeDeprecation(opt) {
|
||||
- if (opt && opt.sanitize && !opt.silent) {
|
||||
@@ -42,228 +180,161 @@ diff -NarU1 marked-1.0.0-orig/src/helpers.js marked-1.0.0-edit/src/helpers.js
|
||||
-}
|
||||
-
|
||||
module.exports = {
|
||||
@@ -240,4 +221,3 @@
|
||||
escape,
|
||||
@@ -239,5 +220,4 @@ module.exports = {
|
||||
splitCells,
|
||||
rtrim,
|
||||
- findClosingBracket,
|
||||
- checkSanitizeDeprecation
|
||||
+ findClosingBracket
|
||||
};
|
||||
diff -NarU1 marked-1.0.0-orig/src/Lexer.js marked-1.0.0-edit/src/Lexer.js
|
||||
--- marked-1.0.0-orig/src/Lexer.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/src/Lexer.js 2020-04-25 22:46:54.107584066 +0000
|
||||
@@ -6,3 +6,3 @@
|
||||
* smartypants text replacement
|
||||
- */
|
||||
+ *
|
||||
function smartypants(text) {
|
||||
@@ -27,3 +27,3 @@
|
||||
* mangle email addresses
|
||||
- */
|
||||
+ *
|
||||
function mangle(text) {
|
||||
@@ -388,3 +388,3 @@
|
||||
// autolink
|
||||
- if (token = this.tokenizer.autolink(src, mangle)) {
|
||||
+ if (token = this.tokenizer.autolink(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
@@ -395,3 +395,3 @@
|
||||
// url (gfm)
|
||||
- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
|
||||
+ if (!inLink && (token = this.tokenizer.url(src))) {
|
||||
src = src.substring(token.raw.length);
|
||||
@@ -402,3 +402,3 @@
|
||||
// text
|
||||
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
|
||||
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
|
||||
src = src.substring(token.raw.length);
|
||||
diff -NarU1 marked-1.0.0-orig/src/marked.js marked-1.0.0-edit/src/marked.js
|
||||
--- marked-1.0.0-orig/src/marked.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/src/marked.js 2020-04-25 22:42:55.140924439 +0000
|
||||
@@ -8,3 +8,2 @@
|
||||
diff --git a/src/marked.js b/src/marked.js
|
||||
--- a/src/marked.js
|
||||
+++ b/src/marked.js
|
||||
@@ -7,5 +7,4 @@ const Slugger = require('./Slugger.js');
|
||||
const {
|
||||
merge,
|
||||
- checkSanitizeDeprecation,
|
||||
escape
|
||||
@@ -37,3 +36,2 @@
|
||||
opt = merge({}, marked.defaults, opt || {});
|
||||
- checkSanitizeDeprecation(opt);
|
||||
const highlight = opt.highlight;
|
||||
@@ -101,6 +99,5 @@
|
||||
opt = merge({}, marked.defaults, opt || {});
|
||||
- checkSanitizeDeprecation(opt);
|
||||
return Parser.parse(Lexer.lex(src, opt), opt);
|
||||
} = require('./helpers.js');
|
||||
@@ -35,5 +34,4 @@ function marked(src, opt, callback) {
|
||||
|
||||
opt = merge({}, marked.defaults, opt || {});
|
||||
- checkSanitizeDeprecation(opt);
|
||||
|
||||
if (callback) {
|
||||
@@ -108,5 +106,5 @@ function marked(src, opt, callback) {
|
||||
return Parser.parse(tokens, opt);
|
||||
} catch (e) {
|
||||
- e.message += '\nPlease report this to https://github.com/markedjs/marked.';
|
||||
+ e.message += '\nmake issue @ https://github.com/9001/copyparty';
|
||||
if ((opt || marked.defaults).silent) {
|
||||
diff -NarU1 marked-1.0.0-orig/src/Renderer.js marked-1.0.0-edit/src/Renderer.js
|
||||
--- marked-1.0.0-orig/src/Renderer.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/src/Renderer.js 2020-04-25 18:59:15.091319265 +0000
|
||||
@@ -134,3 +134,3 @@
|
||||
link(href, title, text) {
|
||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
+ href = cleanUrl(this.options.baseUrl, href);
|
||||
if (href === null) {
|
||||
@@ -147,3 +147,3 @@
|
||||
image(href, title, text) {
|
||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
+ href = cleanUrl(this.options.baseUrl, href);
|
||||
if (href === null) {
|
||||
diff -NarU1 marked-1.0.0-orig/src/Tokenizer.js marked-1.0.0-edit/src/Tokenizer.js
|
||||
--- marked-1.0.0-orig/src/Tokenizer.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/src/Tokenizer.js 2020-04-25 22:47:07.610917004 +0000
|
||||
@@ -256,9 +256,6 @@
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
- ? 'paragraph'
|
||||
- : 'html',
|
||||
- raw: cap[0],
|
||||
- pre: !this.options.sanitizer
|
||||
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
|
||||
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
|
||||
+ type: 'html',
|
||||
+ raw: cap[0],
|
||||
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
|
||||
+ text: cap[0]
|
||||
};
|
||||
@@ -382,5 +379,3 @@
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
- ? 'text'
|
||||
- : 'html',
|
||||
+ type: 'html',
|
||||
raw: cap[0],
|
||||
@@ -388,7 +383,3 @@
|
||||
inRawBlock,
|
||||
- text: this.options.sanitize
|
||||
- ? (this.options.sanitizer
|
||||
- ? this.options.sanitizer(cap[0])
|
||||
- : escape(cap[0]))
|
||||
- : cap[0]
|
||||
+ text: cap[0]
|
||||
};
|
||||
@@ -504,3 +495,3 @@
|
||||
|
||||
- autolink(src, mangle) {
|
||||
+ autolink(src) {
|
||||
const cap = this.rules.inline.autolink.exec(src);
|
||||
@@ -509,3 +500,3 @@
|
||||
if (cap[2] === '@') {
|
||||
- text = escape(this.options.mangle ? mangle(cap[1]) : cap[1]);
|
||||
+ text = escape(cap[1]);
|
||||
href = 'mailto:' + text;
|
||||
@@ -532,3 +523,3 @@
|
||||
|
||||
- url(src, mangle) {
|
||||
+ url(src) {
|
||||
let cap;
|
||||
@@ -537,3 +528,3 @@
|
||||
if (cap[2] === '@') {
|
||||
- text = escape(this.options.mangle ? mangle(cap[0]) : cap[0]);
|
||||
+ text = escape(cap[0]);
|
||||
href = 'mailto:' + text;
|
||||
@@ -569,3 +560,3 @@
|
||||
|
||||
- inlineText(src, inRawBlock, smartypants) {
|
||||
+ inlineText(src, inRawBlock) {
|
||||
const cap = this.rules.inline.text.exec(src);
|
||||
@@ -574,5 +565,5 @@
|
||||
if (inRawBlock) {
|
||||
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
|
||||
+ text = cap[0];
|
||||
} else {
|
||||
- text = escape(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
|
||||
+ text = escape(cap[0]);
|
||||
}
|
||||
diff -NarU1 marked-1.0.0-orig/test/bench.js marked-1.0.0-edit/test/bench.js
|
||||
--- marked-1.0.0-orig/test/bench.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/test/bench.js 2020-04-25 19:02:27.227980287 +0000
|
||||
@@ -34,3 +34,2 @@
|
||||
if (opt.silent) {
|
||||
return '<p>An error occurred:</p><pre>'
|
||||
diff --git a/test/bench.js b/test/bench.js
|
||||
--- a/test/bench.js
|
||||
+++ b/test/bench.js
|
||||
@@ -33,5 +33,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
@@ -46,3 +45,2 @@
|
||||
});
|
||||
@@ -45,5 +44,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
@@ -59,3 +57,2 @@
|
||||
});
|
||||
@@ -58,5 +56,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
@@ -71,3 +68,2 @@
|
||||
});
|
||||
@@ -70,5 +67,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
@@ -84,3 +80,2 @@
|
||||
});
|
||||
@@ -83,5 +79,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: true,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
@@ -96,3 +91,2 @@
|
||||
});
|
||||
@@ -95,5 +90,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: true,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
diff -NarU1 marked-1.0.0-orig/test/specs/run-spec.js marked-1.0.0-edit/test/specs/run-spec.js
|
||||
--- marked-1.0.0-orig/test/specs/run-spec.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/test/specs/run-spec.js 2020-04-25 19:05:24.321308408 +0000
|
||||
@@ -21,6 +21,2 @@
|
||||
});
|
||||
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
|
||||
--- a/test/specs/run-spec.js
|
||||
+++ b/test/specs/run-spec.js
|
||||
@@ -22,8 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
|
||||
}
|
||||
|
||||
- if (spec.options.sanitizer) {
|
||||
- // eslint-disable-next-line no-eval
|
||||
- spec.options.sanitizer = eval(spec.options.sanitizer);
|
||||
- }
|
||||
|
||||
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
|
||||
@@ -49,2 +45 @@
|
||||
@@ -53,3 +49,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
|
||||
runSpecs('New', './new');
|
||||
runSpecs('ReDOS', './redos');
|
||||
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
|
||||
diff -NarU1 marked-1.0.0-orig/test/unit/Lexer-spec.js marked-1.0.0-edit/test/unit/Lexer-spec.js
|
||||
--- marked-1.0.0-orig/test/unit/Lexer-spec.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/test/unit/Lexer-spec.js 2020-04-25 22:47:27.170916427 +0000
|
||||
@@ -464,3 +464,3 @@
|
||||
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
|
||||
--- a/test/unit/Lexer-spec.js
|
||||
+++ b/test/unit/Lexer-spec.js
|
||||
@@ -465,5 +465,5 @@ a | b
|
||||
});
|
||||
|
||||
- it('sanitize', () => {
|
||||
+ /*it('sanitize', () => {
|
||||
expectTokens({
|
||||
@@ -482,3 +482,3 @@
|
||||
md: '<div>html</div>',
|
||||
@@ -483,5 +483,5 @@ a | b
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
});
|
||||
@@ -586,3 +586,3 @@
|
||||
|
||||
@@ -587,5 +587,5 @@ a | b
|
||||
});
|
||||
|
||||
- it('html sanitize', () => {
|
||||
+ /*it('html sanitize', () => {
|
||||
expectInlineTokens({
|
||||
@@ -596,3 +596,3 @@
|
||||
md: '<div>html</div>',
|
||||
@@ -597,5 +597,5 @@ a | b
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
|
||||
@@ -825,3 +825,3 @@
|
||||
it('link', () => {
|
||||
@@ -909,5 +909,5 @@ a | b
|
||||
});
|
||||
|
||||
- it('autolink mangle email', () => {
|
||||
+ /*it('autolink mangle email', () => {
|
||||
expectInlineTokens({
|
||||
@@ -845,3 +845,3 @@
|
||||
md: '<test@example.com>',
|
||||
@@ -929,5 +929,5 @@ a | b
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
|
||||
@@ -882,3 +882,3 @@
|
||||
it('url', () => {
|
||||
@@ -966,5 +966,5 @@ a | b
|
||||
});
|
||||
|
||||
- it('url mangle email', () => {
|
||||
+ /*it('url mangle email', () => {
|
||||
expectInlineTokens({
|
||||
@@ -902,3 +902,3 @@
|
||||
md: 'test@example.com',
|
||||
@@ -986,5 +986,5 @@ a | b
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
});
|
||||
@@ -918,3 +918,3 @@
|
||||
|
||||
@@ -1002,5 +1002,5 @@ a | b
|
||||
});
|
||||
|
||||
- describe('smartypants', () => {
|
||||
+ /*describe('smartypants', () => {
|
||||
it('single quotes', () => {
|
||||
@@ -988,3 +988,3 @@
|
||||
expectInlineTokens({
|
||||
@@ -1072,5 +1072,5 @@ a | b
|
||||
});
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
});
|
||||
});
|
||||
|
@@ -26,3 +26,6 @@ awk '/:before .content:"\\/ {sub(/[^"]+"./,""); sub(/".*/,""); print}' </z/dist/
|
||||
|
||||
# and finally create a woff with just our icons
|
||||
pyftsubset "$orig_woff" --unicodes-file=/z/icon.list --no-ignore-missing-unicodes --flavor=woff --with-zopfli --output-file=/z/dist/no-pk/mini-fa.woff --verbose
|
||||
|
||||
# scp is easier, just want basic latin
|
||||
pyftsubset /z/scp.woff2 --unicodes="20-7e,ab,b7,bb,2022" --no-ignore-missing-unicodes --flavor=woff2 --output-file=/z/dist/no-pk/scp.woff2 --verbose
|
||||
|
scripts/fusefuzz.py (new executable file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
"""
|
||||
td=/dev/shm/; [ -e $td ] || td=$HOME; mkdir -p $td/fusefuzz/{r,v}
|
||||
PYTHONPATH=.. python3 -m copyparty -v $td/fusefuzz/r::r -i 127.0.0.1
|
||||
../bin/copyparty-fuse.py http://127.0.0.1:3923/ $td/fusefuzz/v -cf 2 -cd 0.5
|
||||
(d="$PWD"; cd $td/fusefuzz && "$d"/fusefuzz.py)
|
||||
"""
|
||||
|
||||
|
||||
def chk(fsz, rsz, ofs0, shift, ofs, rf, vf):
|
||||
if ofs != rf.tell():
|
||||
rf.seek(ofs)
|
||||
vf.seek(ofs)
|
||||
|
||||
rb = rf.read(rsz)
|
||||
vb = vf.read(rsz)
|
||||
|
||||
print(f"fsz {fsz} rsz {rsz} ofs {ofs0} shift {shift} ofs {ofs} = {len(rb)}")
|
||||
|
||||
if rb != vb:
|
||||
for n, buf in enumerate([rb, vb]):
|
||||
with open("buf." + str(n), "wb") as f:
|
||||
f.write(buf)
|
||||
|
||||
raise Exception(f"{len(rb)} != {len(vb)}")
|
||||
|
||||
return rb, vb
|
||||
|
||||
|
||||
def main():
|
||||
v = "v"
|
||||
for n in range(5):
|
||||
with open(f"r/{n}", "wb") as f:
|
||||
f.write(b"h" * n)
|
||||
|
||||
rand = os.urandom(7919) # prime
|
||||
for fsz in range(1024 * 1024 * 2 - 3, 1024 * 1024 * 2 + 3):
|
||||
with open("r/f", "wb", fsz) as f:
|
||||
f.write((rand * int(fsz / len(rand) + 1))[:fsz])
|
||||
|
||||
for rsz in range(64 * 1024 - 2, 64 * 1024 + 2):
|
||||
ofslist = [0, 1, 2]
|
||||
for n in range(3):
|
||||
ofslist.append(fsz - n)
|
||||
ofslist.append(fsz - (rsz * 1 + n))
|
||||
ofslist.append(fsz - (rsz * 2 + n))
|
||||
|
||||
for ofs0 in ofslist:
|
||||
for shift in range(-3, 3):
|
||||
print(f"fsz {fsz} rsz {rsz} ofs {ofs0} shift {shift}")
|
||||
ofs = ofs0
|
||||
if ofs < 0 or ofs >= fsz:
|
||||
continue
|
||||
|
||||
for n in range(1, 3):
|
||||
with open(f"{v}/{n}", "rb") as f:
|
||||
f.read()
|
||||
|
||||
prev_ofs = -99
|
||||
with open("r/f", "rb", rsz) as rf:
|
||||
with open(f"{v}/f", "rb", rsz) as vf:
|
||||
while True:
|
||||
ofs += shift
|
||||
if ofs < 0 or ofs > fsz or ofs == prev_ofs:
|
||||
break
|
||||
|
||||
prev_ofs = ofs
|
||||
|
||||
rb, vb = chk(fsz, rsz, ofs0, shift, ofs, rf, vf)
|
||||
|
||||
if not rb:
|
||||
break
|
||||
|
||||
ofs += len(rb)
|
||||
|
||||
for n in range(1, 3):
|
||||
with open(f"{v}/{n}", "rb") as f:
|
||||
f.read()
|
||||
|
||||
with open("r/f", "rb", rsz) as rf:
|
||||
with open(f"{v}/f", "rb", rsz) as vf:
|
||||
for n in range(2):
|
||||
ofs += shift
|
||||
if ofs < 0 or ofs > fsz:
|
||||
break
|
||||
|
||||
rb, vb = chk(fsz, rsz, ofs0, shift, ofs, rf, vf)
|
||||
|
||||
ofs -= rsz
|
||||
|
||||
# bumping fsz, sleep away the dentry cache in cppf
|
||||
time.sleep(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@@ -3,12 +3,15 @@ set -e
|
||||
echo
|
||||
|
||||
# osx support
|
||||
command -v gtar >/dev/null &&
|
||||
command -v gfind >/dev/null && {
|
||||
tar() { gtar "$@"; }
|
||||
# port install gnutar findutils gsed coreutils
|
||||
gtar=$(command -v gtar || command -v gnutar) || true
|
||||
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
|
||||
tar() { $gtar "$@"; }
|
||||
sed() { gsed "$@"; }
|
||||
find() { gfind "$@"; }
|
||||
sort() { gsort "$@"; }
|
||||
command -v grealpath >/dev/null &&
|
||||
realpath() { grealpath "$@"; }
|
||||
}
|
||||
|
||||
which md5sum 2>/dev/null >/dev/null &&
|
||||
|
@@ -18,13 +18,16 @@ echo
|
||||
# (the fancy markdown editor)
|
||||
|
||||
|
||||
command -v gtar >/dev/null &&
|
||||
command -v gfind >/dev/null && {
|
||||
tar() { gtar "$@"; }
|
||||
# port install gnutar findutils gsed coreutils
|
||||
gtar=$(command -v gtar || command -v gnutar) || true
|
||||
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
|
||||
tar() { $gtar "$@"; }
|
||||
sed() { gsed "$@"; }
|
||||
find() { gfind "$@"; }
|
||||
sort() { gsort "$@"; }
|
||||
unexpand() { gunexpand "$@"; }
|
||||
command -v grealpath >/dev/null &&
|
||||
realpath() { grealpath "$@"; }
|
||||
}
|
||||
|
||||
[ -e copyparty/__main__.py ] || cd ..
|
||||
@@ -94,8 +97,39 @@ cd sfx
|
||||
rm -f ../tar
|
||||
}
|
||||
|
||||
ver="$(awk '/^VERSION *= \(/ {
|
||||
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < ../copyparty/__version__.py)"
|
||||
ver=
|
||||
git describe --tags >/dev/null 2>/dev/null && {
|
||||
git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
|
||||
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//; s/-g?/./g')";
|
||||
t_ver=
|
||||
|
||||
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+$' && {
|
||||
# short format (exact version number)
|
||||
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g')";
|
||||
}
|
||||
|
||||
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+-[0-9]+-g[0-9a-f]+$' && {
|
||||
# long format (unreleased commit)
|
||||
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g; s/(.*) (.*)/\1 "\2"/')"
|
||||
}
|
||||
|
||||
[ -z "$t_ver" ] && {
|
||||
printf 'unexpected git version format: [%s]\n' "$git_ver"
|
||||
exit 1
|
||||
}
|
||||
|
||||
dt="$(git log -1 --format=%cd --date=format:'%Y,%m,%d' | sed -E 's/,0?/, /g')"
|
||||
printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt"
|
||||
sed -ri '
|
||||
s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/;
|
||||
s/^(S_VERSION =)(.*)/#\1\2\n\1 "'"$ver"'"/;
|
||||
s/^(BUILD_DT =)(.*)/#\1\2\n\1 ('"$dt"')/;
|
||||
' copyparty/__version__.py
|
||||
}
|
||||
|
||||
[ -z "$ver" ] &&
|
||||
ver="$(awk '/^VERSION *= \(/ {
|
||||
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"
|
||||
|
||||
ts=$(date -u +%s)
|
||||
hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)
|
||||
@@ -166,3 +200,6 @@ chmod 755 $sfx_out.*
|
||||
printf "done:\n"
|
||||
printf " %s\n" "$(realpath $sfx_out)."{sh,py}
|
||||
# rm -rf *
|
||||
|
||||
# tar -tvf ../sfx/tar | sed -r 's/(.* ....-..-.. ..:.. )(.*)/\2 `` \1/' | sort | sed -r 's/(.*) `` (.*)/\2 \1/'| less
|
||||
# for n in {1..9}; do tar -tf tar | grep -vE '/$' | sed -r 's/(.*)\.(.*)/\2.\1/' | sort | sed -r 's/([^\.]+)\.(.*)/\2.\1/' | tar -cT- | bzip2 -c$n | wc -c; done
|
||||
|
@@ -2,12 +2,16 @@
|
||||
set -e
|
||||
echo
|
||||
|
||||
command -v gtar >/dev/null &&
|
||||
command -v gfind >/dev/null && {
|
||||
tar() { gtar "$@"; }
|
||||
# osx support
|
||||
# port install gnutar findutils gsed coreutils
|
||||
gtar=$(command -v gtar || command -v gnutar) || true
|
||||
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
|
||||
tar() { $gtar "$@"; }
|
||||
sed() { gsed "$@"; }
|
||||
find() { gfind "$@"; }
|
||||
sort() { gsort "$@"; }
|
||||
command -v grealpath >/dev/null &&
|
||||
realpath() { grealpath "$@"; }
|
||||
}
|
||||
|
||||
which md5sum 2>/dev/null >/dev/null &&
|
||||
@@ -16,15 +20,15 @@ which md5sum 2>/dev/null >/dev/null &&
|
||||
|
||||
ver="$1"
|
||||
|
||||
[[ "x$ver" == x ]] &&
|
||||
[ "x$ver" = x ] &&
|
||||
{
|
||||
echo "need argument 1: version"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
[[ -e copyparty/__main__.py ]] || cd ..
|
||||
[[ -e copyparty/__main__.py ]] ||
|
||||
[ -e copyparty/__main__.py ] || cd ..
|
||||
[ -e copyparty/__main__.py ] ||
|
||||
{
|
||||
echo "run me from within the project root folder"
|
||||
echo
|
||||
@@ -35,8 +39,8 @@ mkdir -p dist
|
||||
zip_path="$(pwd)/dist/copyparty-$ver.zip"
|
||||
tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz"
|
||||
|
||||
[[ -e "$zip_path" ]] ||
|
||||
[[ -e "$tgz_path" ]] &&
|
||||
[ -e "$zip_path" ] ||
|
||||
[ -e "$tgz_path" ] &&
|
||||
{
|
||||
echo "found existing archives for this version"
|
||||
echo " $zip_path"
|
||||
|
@@ -2,7 +2,7 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re, os, sys, stat, time, shutil, tarfile, hashlib, platform, tempfile
|
||||
import re, os, sys, time, shutil, signal, tarfile, hashlib, platform, tempfile
|
||||
import subprocess as sp
|
||||
|
||||
"""
|
||||
@@ -29,6 +29,7 @@ STAMP = None
|
||||
PY2 = sys.version_info[0] == 2
|
||||
sys.dont_write_bytecode = True
|
||||
me = os.path.abspath(os.path.realpath(__file__))
|
||||
cpp = None
|
||||
|
||||
|
||||
def eprint(*args, **kwargs):
|
||||
@@ -191,6 +192,16 @@ def makesfx(tar_src, ver, ts):
|
||||
# skip 0
|
||||
|
||||
|
||||
def u8(gen):
|
||||
try:
|
||||
for s in gen:
|
||||
yield s.decode("utf-8", "ignore")
|
||||
except:
|
||||
yield s
|
||||
for s in gen:
|
||||
yield s
|
||||
|
||||
|
||||
def get_py_win(ret):
|
||||
tops = []
|
||||
p = str(os.getenv("LocalAppdata"))
|
||||
@@ -216,11 +227,11 @@ def get_py_win(ret):
|
||||
# $WIRESHARK_SLOGAN
|
||||
for top in tops:
|
||||
try:
|
||||
for name1 in sorted(os.listdir(top), reverse=True):
|
||||
for name1 in u8(sorted(os.listdir(top), reverse=True)):
|
||||
if name1.lower().startswith("python"):
|
||||
path1 = os.path.join(top, name1)
|
||||
try:
|
||||
for name2 in os.listdir(path1):
|
||||
for name2 in u8(os.listdir(path1)):
|
||||
if name2.lower() == "python.exe":
|
||||
path2 = os.path.join(path1, name2)
|
||||
ret[path2.lower()] = path2
|
||||
@@ -237,7 +248,7 @@ def get_py_nix(ret):
|
||||
next
|
||||
|
||||
try:
|
||||
for fn in os.listdir(bindir):
|
||||
for fn in u8(os.listdir(bindir)):
|
||||
if ptn.match(fn):
|
||||
fn = os.path.join(bindir, fn)
|
||||
ret[fn.lower()] = fn
|
||||
@@ -295,17 +306,19 @@ def hashfile(fn):
|
||||
def unpack():
|
||||
"""unpacks the tar yielded by `data`"""
|
||||
name = "pe-copyparty"
|
||||
tag = "v" + str(STAMP)
|
||||
withpid = "{}.{}".format(name, os.getpid())
|
||||
top = tempfile.gettempdir()
|
||||
final = os.path.join(top, name)
|
||||
mine = os.path.join(top, withpid)
|
||||
tar = os.path.join(mine, "tar")
|
||||
tag_mine = os.path.join(mine, "v" + str(STAMP))
|
||||
tag_final = os.path.join(final, "v" + str(STAMP))
|
||||
|
||||
if os.path.exists(tag_final):
|
||||
msg("found early")
|
||||
return final
|
||||
try:
|
||||
if tag in os.listdir(final):
|
||||
msg("found early")
|
||||
return final
|
||||
except:
|
||||
pass
|
||||
|
||||
nwrite = 0
|
||||
os.mkdir(mine)
|
||||
@@ -328,12 +341,15 @@ def unpack():
|
||||
|
||||
os.remove(tar)
|
||||
|
||||
with open(tag_mine, "wb") as f:
|
||||
with open(os.path.join(mine, tag), "wb") as f:
|
||||
f.write(b"h\n")
|
||||
|
||||
if os.path.exists(tag_final):
|
||||
msg("found late")
|
||||
return final
|
||||
try:
|
||||
if tag in os.listdir(final):
|
||||
msg("found late")
|
||||
return final
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.path.islink(final):
|
||||
@@ -352,7 +368,7 @@ def unpack():
|
||||
msg("reloc fail,", mine)
|
||||
return mine
|
||||
|
||||
for fn in os.listdir(top):
|
||||
for fn in u8(os.listdir(top)):
|
||||
if fn.startswith(name) and fn not in [name, withpid]:
|
||||
try:
|
||||
old = os.path.join(top, fn)
|
||||
@@ -418,17 +434,35 @@ def get_payload():
|
||||
def confirm():
|
||||
msg()
|
||||
msg("*** hit enter to exit ***")
|
||||
raw_input() if PY2 else input()
|
||||
try:
|
||||
raw_input() if PY2 else input()
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def run(tmp, py):
|
||||
global cpp
|
||||
|
||||
msg("OK")
|
||||
msg("will use:", py)
|
||||
msg("bound to:", tmp)
|
||||
|
||||
# "systemd-tmpfiles-clean.timer"?? HOW do you even come up with this shit
|
||||
try:
|
||||
import fcntl
|
||||
|
||||
fd = os.open(tmp, os.O_RDONLY)
|
||||
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
|
||||
tmp = os.readlink(tmp) # can't flock a symlink, even with O_NOFOLLOW
|
||||
except:
|
||||
pass
|
||||
|
||||
fp_py = os.path.join(tmp, "py")
|
||||
with open(fp_py, "wb") as f:
|
||||
f.write(py.encode("utf-8") + b"\n")
|
||||
try:
|
||||
with open(fp_py, "wb") as f:
|
||||
f.write(py.encode("utf-8") + b"\n")
|
||||
except:
|
||||
pass
|
||||
|
||||
# avoid loading ./copyparty.py
|
||||
cmd = [
|
||||
@@ -440,16 +474,21 @@ def run(tmp, py):
|
||||
] + list(sys.argv[1:])
|
||||
|
||||
msg("\n", cmd, "\n")
|
||||
p = sp.Popen(str(x) for x in cmd)
|
||||
cpp = sp.Popen(str(x) for x in cmd)
|
||||
try:
|
||||
p.wait()
|
||||
cpp.wait()
|
||||
except:
|
||||
p.wait()
|
||||
cpp.wait()
|
||||
|
||||
if p.returncode != 0:
|
||||
if cpp.returncode != 0:
|
||||
confirm()
|
||||
|
||||
sys.exit(p.returncode)
|
||||
sys.exit(cpp.returncode)
|
||||
|
||||
|
||||
def bye(sig, frame):
|
||||
if cpp is not None:
|
||||
cpp.terminate()
|
||||
|
||||
|
||||
def main():
|
||||
@@ -484,6 +523,8 @@ def main():
|
||||
|
||||
# skip 0
|
||||
|
||||
signal.signal(signal.SIGTERM, bye)
|
||||
|
||||
tmp = unpack()
|
||||
fp_py = os.path.join(tmp, "py")
|
||||
if os.path.exists(fp_py):
|
||||
|
@@ -32,8 +32,12 @@ dir="$(
|
||||
|
||||
# detect available pythons
|
||||
(IFS=:; for d in $PATH; do
|
||||
printf '%s\n' "$d"/python* "$d"/pypy* | tac;
|
||||
done) | grep -E '(python|pypy)[0-9\.-]*$' > $dir/pys || true
|
||||
printf '%s\n' "$d"/python* "$d"/pypy*;
|
||||
done) |
|
||||
(sed -E 's/(.*\/[^/0-9]+)([0-9]?[^/]*)$/\2 \1/' || cat) |
|
||||
(sort -nr || cat) |
|
||||
(sed -E 's/([^ ]*) (.*)/\2\1/' || cat) |
|
||||
grep -E '/(python|pypy)[0-9\.-]*$' >$dir/pys || true
|
||||
|
||||
# see if we made a choice before
|
||||
[ -z "$pybin" ] && pybin="$(cat $dir/py 2>/dev/null || true)"
|
||||
|
scripts/speedtest-fs.py (new file, 164 lines)
@@ -0,0 +1,164 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
import time
|
||||
import signal
|
||||
import traceback
|
||||
import threading
|
||||
from queue import Queue
|
||||
|
||||
|
||||
"""speedtest-fs: filesystem performance estimate"""
|
||||
__author__ = "ed <copyparty@ocv.me>"
|
||||
__copyright__ = 2020
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
|
||||
def get_spd(nbyte, nsec):
|
||||
if not nsec:
|
||||
return "0.000 MB 0.000 sec 0.000 MB/s"
|
||||
|
||||
mb = nbyte / (1024 * 1024.0)
|
||||
spd = mb / nsec
|
||||
|
||||
return f"{mb:.3f} MB {nsec:.3f} sec {spd:.3f} MB/s"
|
||||
|
||||
|
||||
class Inf(object):
|
||||
def __init__(self, t0):
|
||||
self.msgs = []
|
||||
self.errors = []
|
||||
self.reports = []
|
||||
self.mtx_msgs = threading.Lock()
|
||||
self.mtx_reports = threading.Lock()
|
||||
|
||||
self.n_byte = 0
|
||||
self.n_sec = 0
|
||||
self.n_done = 0
|
||||
self.t0 = t0
|
||||
|
||||
thr = threading.Thread(target=self.print_msgs)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def msg(self, fn, n_read):
|
||||
with self.mtx_msgs:
|
||||
self.msgs.append(f"{fn} {n_read}")
|
||||
|
||||
def err(self, fn):
|
||||
with self.mtx_reports:
|
||||
self.errors.append(f"{fn}\n{traceback.format_exc()}")
|
||||
|
||||
def print_msgs(self):
|
||||
while True:
|
||||
time.sleep(0.02)
|
||||
with self.mtx_msgs:
|
||||
msgs = self.msgs
|
||||
self.msgs = []
|
||||
|
||||
if not msgs:
|
||||
continue
|
||||
|
||||
msgs = msgs[-64:]
|
||||
msgs = [f"{get_spd(self.n_byte, self.n_sec)} {x}" for x in msgs]
|
||||
print("\n".join(msgs))
|
||||
|
||||
def report(self, fn, n_byte, n_sec):
|
||||
with self.mtx_reports:
|
||||
self.reports.append([n_byte, n_sec, fn])
|
||||
self.n_byte += n_byte
|
||||
self.n_sec += n_sec
|
||||
|
||||
def done(self):
|
||||
with self.mtx_reports:
|
||||
self.n_done += 1
|
||||
|
||||
|
||||
def get_files(dir_path):
|
||||
for fn in os.listdir(dir_path):
|
||||
fn = os.path.join(dir_path, fn)
|
||||
st = os.stat(fn).st_mode
|
||||
|
||||
if stat.S_ISDIR(st):
|
||||
yield from get_files(fn)
|
||||
|
||||
if stat.S_ISREG(st):
|
||||
yield fn
|
||||
|
||||
|
||||
def worker(q, inf, read_sz):
|
||||
while True:
|
||||
fn = q.get()
|
||||
if not fn:
|
||||
break
|
||||
|
||||
n_read = 0
|
||||
try:
|
||||
t0 = time.time()
|
||||
with open(fn, "rb") as f:
|
||||
while True:
|
||||
buf = f.read(read_sz)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
n_read += len(buf)
|
||||
inf.msg(fn, n_read)
|
||||
|
||||
inf.report(fn, n_read, time.time() - t0)
|
||||
except:
|
||||
inf.err(fn)
|
||||
|
||||
inf.done()
|
||||
|
||||
|
||||
def sighandler(signo, frame):
|
||||
os._exit(0)
|
||||
|
||||
|
||||
def main():
|
||||
signal.signal(signal.SIGINT, sighandler)
|
||||
|
||||
root = "."
|
||||
if len(sys.argv) > 1:
|
||||
root = sys.argv[1]
|
||||
|
||||
t0 = time.time()
|
||||
q = Queue(256)
|
||||
inf = Inf(t0)
|
||||
|
||||
num_threads = 8
|
||||
read_sz = 32 * 1024
|
||||
for _ in range(num_threads):
|
||||
thr = threading.Thread(target=worker, args=(q, inf, read_sz,))
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
for fn in get_files(root):
|
||||
q.put(fn)
|
||||
|
||||
for _ in range(num_threads):
|
||||
q.put(None)
|
||||
|
||||
while inf.n_done < num_threads:
|
||||
time.sleep(0.1)
|
||||
|
||||
t2 = time.time()
|
||||
print("\n")
|
||||
|
||||
log = inf.reports
|
||||
log.sort()
|
||||
for nbyte, nsec, fn in log[-64:]:
|
||||
print(f"{get_spd(nbyte, nsec)} {fn}")
|
||||
|
||||
print()
|
||||
print("\n".join(inf.errors))
|
||||
|
||||
print(get_spd(inf.n_byte, t2 - t0))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
srv/extend.md (new file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
# hi
|
||||
this showcases my worst idea yet; *extending markdown with inline javascript*
|
||||
|
||||
due to obvious reasons it's disabled by default, and can be enabled with `-emp`
|
||||
|
||||
the examples are by no means correct; they're as much of a joke as this feature itself
|
||||
|
||||
|
||||
### sub-header
|
||||
nothing special about this one
|
||||
|
||||
|
||||
## except/
|
||||
this one becomes a hyperlink to ./except/ thanks to
|
||||
* the `copyparty_pre` plugin at the end of this file
|
||||
* which is invoked as a markdown filter every time the document is modified
|
||||
* which looks for headers ending with a `/` and rewrites all headers below that
|
||||
|
||||
it is a passthrough to the markdown extension api, see https://marked.js.org/using_pro
|
||||
|
||||
in addition to the markdown extension functions, `ctor` will be called on document init
|
||||
|
||||
|
||||
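as a rough sketch of what that shape means in practice (an assumption based only on the description above, not the definitive plugin format), a do-nothing `copyparty_pre` body would look something like this; the real, working plugin is at the bottom of this file

```js
// minimal copyparty_pre sketch -- written in the same object-literal style
// as the example plugin at the end of this file
ctor() {
    // called once when the document is initialized
},
walkTokens(token) {
    // regular marked.js extension hook; inspect or rewrite tokens here
    return true; // the example plugin below also returns a boolean from here
}
```
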
### these/
and this one becomes ./except/these/


#### ones.md
finally ./except/these/ones.md


### also-this.md
which should be ./except/also-this.md




# ok
now for another extension type, `copyparty_post`, which is called to manipulate the generated dom instead

`copyparty_post` can have the following functions, all optional; a bare skeleton is sketched right below this list
* `ctor` is called on document init
* `render` is called when the dom is done but still in-memory
* `render2` is called with the live browser dom as-displayed

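as with the pre-plugin, this is only an assumed minimal shape; the real `copyparty_post` example at the bottom of this file shows the same functions doing actual work

```js
// minimal copyparty_post sketch -- all three functions are optional
ctor() {
    // called once on document init
},
render(dom) {
    // dom: the freshly generated document, still in-memory
},
render2(dom) {
    // dom: the live browser dom, as-displayed
}
```
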
## post example

the values in the `ex:` columns are linkified to `example.com/$value`

| ex:foo | bar | ex:baz |
| ------------ | -------- | ------ |
| asdf | nice | fgsfds |
| one more row | hi hello | aaa |

and the table can be sorted by clicking the headers

the difference is that with `copyparty_pre` you'll probably break various copyparty features, but if you use `copyparty_post` then future copyparty versions will probably break you




# here's the plugins
if there is anything below this line in the preview then the plugin feature is disabled (good)




```copyparty_pre
ctor() {
    md_plug['h'] = {
        on: false,
        lv: -1,
        path: []
    }
},
walkTokens(token) {
    if (token.type == 'heading') {
        var h = md_plug['h'],
            is_dir = token.text.endsWith('/');

        if (h.lv >= token.depth) {
            h.on = false;
        }
        if (!h.on && is_dir) {
            h.on = true;
            h.lv = token.depth;
            h.path = [token.text];
        }
        else if (h.on && h.lv < token.depth) {
            h.path = h.path.slice(0, token.depth - h.lv);
            h.path.push(token.text);
        }
        if (!h.on)
            return false;

        var path = h.path.join('');
        var emoji = is_dir ? '📂' : '📜';
        token.tokens[0].text = '<a href="' + path + '">' + emoji + ' ' + path + '</a>';
    }
    if (token.type == 'paragraph') {
        //console.log(JSON.parse(JSON.stringify(token.tokens)));
        for (var a = 0; a < token.tokens.length; a++) {
            var t = token.tokens[a];
            if (t.type == 'text' || t.type == 'strong' || t.type == 'em') {
                var ret = '', text = t.text;
                for (var b = 0; b < text.length; b++)
                    ret += (Math.random() > 0.5) ? text[b] : text[b].toUpperCase();

                t.text = ret;
            }
        }
    }
    return true;
}
```



```copyparty_post
render(dom) {
    var ths = dom.querySelectorAll('th');
    for (var a = 0; a < ths.length; a++) {
        var th = ths[a];
        if (th.textContent.indexOf('ex:') === 0) {
            th.textContent = th.textContent.slice(3);
            var nrow = 0;
            while ((th = th.previousSibling) != null)
                nrow++;

            var trs = ths[a].parentNode.parentNode.parentNode.querySelectorAll('tr');
            for (var b = 1; b < trs.length; b++) {
                var td = trs[b].childNodes[nrow];
                td.innerHTML = '<a href="//example.com/' + td.innerHTML + '">' + td.innerHTML + '</a>';
            }
        }
    }
},
render2(dom) {
    window.makeSortable(dom.getElementsByTagName('table')[0]);
}
```
34
srv/test.md
@@ -1,5 +1,16 @@
### hello world

* qwe
* asd
* zxc
* 573
* one
* two

* |||
|--|--|
|listed|table|

```
[72....................................................................]
[80............................................................................]
@@ -17,6 +28,16 @@
[80............................................................................]
```

```
l[i]=1I;(){}o0O</> var foo = "$(`bar`)"; a's'd
```

🔍🌽.📕.🍙🔎

[](#s1)
[s1](#s1)
[#s1](#s1)

a123456789b123456789c123456789d123456789e123456789f123456789g123456789h123456789i123456789j123456789k123456789l123456789m123456789n123456789o123456789p123456789q123456789r123456789s123456789t123456789u123456789v123456789w123456789x123456789y123456789z123456789

<foo> bar & <span>baz</span>
@@ -113,6 +134,15 @@ a newline toplevel
| a table | on the right |
| second row | foo bar |

||
--|:-:|-:
a table | big text in this | aaakbfddd
second row | centred | bbb

||
--|--|--
foo

* list entry
* [x] yes
* [ ] no
@@ -201,3 +231,7 @@ unrelated neat stuff:
awk '/./ {printf "%s %d\n", $0, NR; next} 1' <test.md >ln.md
gawk '{print gensub(/([a-zA-Z\.])/,NR" \\1","1")}' <test.md >ln.md
```

a|b|c
--|--|--
foo
@@ -3,8 +3,10 @@
from __future__ import print_function, unicode_literals

import os
import time
import json
import shutil
import tempfile
import unittest
import subprocess as sp  # nosec

@@ -30,9 +32,6 @@ class TestVFS(unittest.TestCase):
        response = self.unfoo(response)
        self.assertEqual(util.undot(query), response)

    def absify(self, root, names):
        return ["{}/{}".format(root, x).replace("//", "/") for x in names]

    def ls(self, vfs, vpath, uname):
        """helper for resolving and listing a folder"""
        vn, rem = vfs.get(vpath, uname, True, False)
@@ -59,16 +58,31 @@ class TestVFS(unittest.TestCase):

        if os.path.exists("/Volumes"):
            devname, _ = self.chkcmd("hdiutil", "attach", "-nomount", "ram://8192")
            _, _ = self.chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
            return "/Volumes/cptd"
            devname = devname.strip()
            print("devname: [{}]".format(devname))
            for _ in range(10):
                try:
                    _, _ = self.chkcmd(
                        "diskutil", "eraseVolume", "HFS+", "cptd", devname
                    )
                    return "/Volumes/cptd"
                except Exception as ex:
                    print(repr(ex))
                    time.sleep(0.25)

        raise Exception("TODO support windows")
            raise Exception("ramdisk creation failed")

        ret = os.path.join(tempfile.gettempdir(), "copyparty-test")
        try:
            os.mkdir(ret)
        finally:
            return ret

    def log(self, src, msg):
        pass

    def test(self):
        td = self.get_ramdisk() + "/vfs"
        td = os.path.join(self.get_ramdisk(), "vfs")
        try:
            shutil.rmtree(td)
        except OSError:
@@ -99,7 +113,7 @@ class TestVFS(unittest.TestCase):
        vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), self.log).vfs
        self.assertEqual(vfs.nodes, {})
        self.assertEqual(vfs.vpath, "")
        self.assertEqual(vfs.realpath, td + "/a/ab")
        self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab"))
        self.assertEqual(vfs.uread, ["*"])
        self.assertEqual(vfs.uwrite, [])

@@ -109,7 +123,7 @@ class TestVFS(unittest.TestCase):
        ).vfs
        self.assertEqual(vfs.nodes, {})
        self.assertEqual(vfs.vpath, "")
        self.assertEqual(vfs.realpath, td + "/a/aa")
        self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa"))
        self.assertEqual(vfs.uread, ["*"])
        self.assertEqual(vfs.uwrite, [])

@@ -138,42 +152,63 @@ class TestVFS(unittest.TestCase):
        n = n.nodes["acb"]
        self.assertEqual(n.nodes, {})
        self.assertEqual(n.vpath, "a/ac/acb")
        self.assertEqual(n.realpath, td + "/a/ac/acb")
        self.assertEqual(n.realpath, os.path.join(td, "a", "ac", "acb"))
        self.assertEqual(n.uread, ["k"])
        self.assertEqual(n.uwrite, ["*", "k"])

        # something funky about the windows path normalization,
        # doesn't really matter but makes the test messy, TODO?

        fsdir, real, virt = self.ls(vfs, "/", "*")
        self.assertEqual(fsdir, td)
        self.assertEqual(real, ["b", "c"])
        self.assertEqual(list(virt), ["a"])

        fsdir, real, virt = self.ls(vfs, "a", "*")
        self.assertEqual(fsdir, td + "/a")
        self.assertEqual(fsdir, os.path.join(td, "a"))
        self.assertEqual(real, ["aa", "ab"])
        self.assertEqual(list(virt), ["ac"])

        fsdir, real, virt = self.ls(vfs, "a/ab", "*")
        self.assertEqual(fsdir, td + "/a/ab")
        self.assertEqual(fsdir, os.path.join(td, "a", "ab"))
        self.assertEqual(real, ["aba", "abb", "abc"])
        self.assertEqual(list(virt), [])

        fsdir, real, virt = self.ls(vfs, "a/ac", "*")
        self.assertEqual(fsdir, td + "/a/ac")
        self.assertEqual(fsdir, os.path.join(td, "a", "ac"))
        self.assertEqual(real, ["aca", "acc"])
        self.assertEqual(list(virt), [])

        fsdir, real, virt = self.ls(vfs, "a/ac", "k")
        self.assertEqual(fsdir, td + "/a/ac")
        self.assertEqual(fsdir, os.path.join(td, "a", "ac"))
        self.assertEqual(real, ["aca", "acc"])
        self.assertEqual(list(virt), ["acb"])

        self.assertRaises(util.Pebkac, vfs.get, "a/ac/acb", "*", True, False)

        fsdir, real, virt = self.ls(vfs, "a/ac/acb", "k")
        self.assertEqual(fsdir, td + "/a/ac/acb")
        self.assertEqual(fsdir, os.path.join(td, "a", "ac", "acb"))
        self.assertEqual(real, ["acba", "acbb", "acbc"])
        self.assertEqual(list(virt), [])

        # admin-only rootfs with all-read-only subfolder
        vfs = AuthSrv(Namespace(c=None, a=["k:k"], v=[".::ak", "a:a:r"]), self.log,).vfs
        self.assertEqual(len(vfs.nodes), 1)
        self.assertEqual(vfs.vpath, "")
        self.assertEqual(vfs.realpath, td)
        self.assertEqual(vfs.uread, ["k"])
        self.assertEqual(vfs.uwrite, ["k"])
        n = vfs.nodes["a"]
        self.assertEqual(len(vfs.nodes), 1)
        self.assertEqual(n.vpath, "a")
        self.assertEqual(n.realpath, os.path.join(td, "a"))
        self.assertEqual(n.uread, ["*"])
        self.assertEqual(n.uwrite, [])
        self.assertEqual(vfs.can_access("/", "*"), [False, False])
        self.assertEqual(vfs.can_access("/", "k"), [True, True])
        self.assertEqual(vfs.can_access("/a", "*"), [True, False])
        self.assertEqual(vfs.can_access("/a", "k"), [True, False])

        # breadth-first construction
        vfs = AuthSrv(
            Namespace(
@@ -207,20 +242,20 @@ class TestVFS(unittest.TestCase):
        self.assertEqual(list(v1), ["a"])

        fsp, r1, v1 = self.ls(vfs, "a", "*")
        self.assertEqual(fsp, td + "/a")
        self.assertEqual(fsp, os.path.join(td, "a"))
        self.assertEqual(r1, ["aa", "ab"])
        self.assertEqual(list(v1), ["ac"])

        fsp1, r1, v1 = self.ls(vfs, "a/ac", "*")
        fsp2, r2, v2 = self.ls(vfs, "b", "*")
        self.assertEqual(fsp1, td + "/b")
        self.assertEqual(fsp2, td + "/b")
        self.assertEqual(fsp1, os.path.join(td, "b"))
        self.assertEqual(fsp2, os.path.join(td, "b"))
        self.assertEqual(r1, ["ba", "bb", "bc"])
        self.assertEqual(r1, r2)
        self.assertEqual(list(v1), list(v2))

        # config file parser
        cfg_path = self.get_ramdisk() + "/test.cfg"
        cfg_path = os.path.join(self.get_ramdisk(), "test.cfg")
        with open(cfg_path, "wb") as f:
            f.write(
                dedent(
@@ -248,10 +283,11 @@ class TestVFS(unittest.TestCase):
        self.assertEqual(len(n.nodes), 1)
        n = n.nodes["dst"]
        self.assertEqual(n.vpath, "dst")
        self.assertEqual(n.realpath, td + "/src")
        self.assertEqual(n.realpath, os.path.join(td, "src"))
        self.assertEqual(n.uread, ["a", "asd"])
        self.assertEqual(n.uwrite, ["asd"])
        self.assertEqual(len(n.nodes), 0)

        os.chdir(tempfile.gettempdir())
        shutil.rmtree(td)
        os.unlink(cfg_path)
|
Reference in New Issue
Block a user