Compare commits

..

138 Commits

Author SHA1 Message Date
ed
58f9e05d93 v0.7.3 2021-02-03 00:50:51 +01:00
ed
1ec981aea7 bind multiple ip/ports 2021-02-03 00:49:51 +01:00
ed
2a90286a7c dim the socket debug msgs 2021-02-03 00:25:13 +01:00
ed
12d25d09b2 limit gz/br unpacker to embedded resources 2021-02-03 00:19:14 +01:00
ed
a039fae1a4 remove extra anon-rw warning 2021-02-03 00:17:12 +01:00
ed
322b9abadc v0.7.2 2021-01-29 00:52:41 +01:00
ed
0aaf954cea up2k: increase purge timeout 2021-01-29 00:52:22 +01:00
ed
c2d22aa3d1 up2k: make confirmation optional 2021-01-29 00:49:35 +01:00
ed
6934c75bba nice 2021-01-29 00:43:57 +01:00
ed
c58cf78f86 yabe 2021-01-24 16:14:01 +01:00
ed
7f0de790ab more macports compat 2021-01-23 21:19:29 +01:00
ed
d4bb4e3a73 v0.7.1 2021-01-23 19:55:35 +01:00
ed
d25612d038 make-sfx: support macports 2021-01-23 19:55:24 +01:00
ed
116b2351b0 mention howto purge partial uploads 2021-01-23 19:25:25 +01:00
ed
69b83dfdc4 up2k: limit runahead in client 2021-01-23 19:05:45 +01:00
ed
3b1839c2ce up2k: ask before starting the upload 2021-01-23 18:51:08 +01:00
ed
13742ebdf8 verify that PARTIALs exist after a restart 2021-01-23 18:49:43 +01:00
ed
634657bea1 up2k: discard empty PARTIALs 2021-01-23 18:10:11 +01:00
ed
46e70d50b7 v0.7.0 2021-01-10 17:49:56 +01:00
ed
d64e9b85a7 prefer sqlite over registry snaps 2021-01-10 17:47:27 +01:00
ed
fb853edbe3 prevent index loss on mid-write crash 2021-01-10 17:16:55 +01:00
ed
cc076c1be1 persist/timeout incomplete uploads too 2021-01-10 16:47:35 +01:00
ed
98cc9a6755 mojibake support + exception handling 2021-01-10 09:48:26 +01:00
ed
7bd2b9c23a sqlite3 as up2k db + build index on boot + rproxy ip fix 2021-01-10 09:27:11 +01:00
ed
de724a1ff3 up2k: add volume flag to reject existing files 2021-01-09 15:20:02 +01:00
ed
2163055dae media-player: play links don't scroll on click 2021-01-09 14:40:56 +01:00
ed
93ed0fc10b v0.6.3 2021-01-07 01:09:32 +01:00
ed
0d98cefd40 fix dumb 2021-01-07 01:06:31 +01:00
ed
d58988a033 use sendfile when possible 2021-01-07 00:50:42 +01:00
ed
2acfab1e3f cleanup 2021-01-06 22:54:54 +01:00
ed
b915dfe9a6 nagle adds ~.2sec delay on last packet 2021-01-06 21:08:52 +00:00
ed
25bd5a823e fuse-client: add timestamps to logger 2021-01-06 17:40:42 +01:00
ed
1c35de4716 fuse-client: cache tweaks 2021-01-06 17:22:07 +01:00
ed
4c00435a0a fuse: add windows-explorer settings 2021-01-06 17:18:37 +01:00
ed
844e3079a8 saved for posterity 2021-01-06 17:13:24 +01:00
ed
4778cb5b2c readme: add quickstart 2021-01-02 22:57:48 +01:00
ed
ec5d60b919 fuse-client: fix directory parser 2021-01-01 21:54:56 +01:00
ed
e1f4b960e8 oh no 2020-12-20 02:33:37 +01:00
ed
669e46da54 update TODOs 2020-12-14 09:19:43 +01:00
ed
ba94cc5df7 v0.6.2 2020-12-14 04:28:21 +01:00
ed
d08245c3df v0.6.1 2020-12-14 03:51:24 +01:00
ed
5c18d12cbf self-upgrading upgrader... getting too meta 2020-12-14 03:45:59 +01:00
ed
580a42dec7 sfx-repack: support wget 2020-12-14 02:59:15 +01:00
ed
29286e159b up2k-client: ignore rejected dupes 2020-12-12 00:55:42 +01:00
ed
19bcf90e9f support uploads with huge filenames 2020-12-12 00:35:54 +01:00
ed
dae9c00742 always display world-readable subvolumes 2020-12-04 23:28:18 +01:00
ed
35324ceb7c tests: support windows 2020-12-04 23:26:46 +01:00
ed
5aadd47199 dodge python-bug #7980 2020-12-01 23:20:44 +01:00
ed
7d9057cc62 v0.6.0 2020-12-01 02:58:11 +01:00
ed
c4b322b883 this commit sponsored by eslint 2020-12-01 02:25:46 +01:00
ed
19b09c898a fix sfx repack whoops 2020-11-30 03:27:27 +01:00
ed
eafe2098b6 v0.5.7 2020-11-30 03:01:14 +01:00
ed
2bc6a20d71 md: poll server for changes 2020-11-30 03:00:44 +01:00
ed
8b502a7235 v0.5.6 2020-11-29 19:49:16 +01:00
ed
37567844af md: add render2 plugin func 2020-11-29 19:34:08 +01:00
ed
2f6c4e0e34 refactoring 2020-11-29 19:32:22 +01:00
ed
1c7cc4cb2b ignore border when sizing table 2020-11-29 18:48:55 +01:00
ed
f83db3648e git tag as sfx version 2020-11-28 20:02:20 +01:00
ed
b164aa00d4 md: fix eof scroll glitch 2020-11-27 21:25:52 +01:00
ed
a2d866d0c2 show plugin errors 2020-11-27 21:10:47 +01:00
ed
2dfe4ac4c6 v0.5.5 2020-11-27 03:25:14 +01:00
ed
db65d05cb5 fix unittest for recent macos versions 2020-11-27 03:24:55 +01:00
ed
300c0194c7 add inline markdown plugins 2020-11-27 03:22:41 +01:00
ed
37a0d2b087 good idea 2020-11-19 02:24:26 +01:00
ed
a4959300ea add sfx downloader/repacker 2020-11-19 01:23:24 +01:00
ed
223657e5f8 v0.5.4 2020-11-17 23:58:08 +01:00
ed
0c53de6767 more lenient md table formatter 2020-11-17 23:55:14 +01:00
ed
9c309b1498 add filetype column 2020-11-17 23:43:55 +01:00
ed
1aa1b34c80 add reverse-proxy support 2020-11-17 23:42:33 +01:00
ed
755a2ee023 v0.5.3 2020-11-13 03:31:07 +01:00
ed
69d3359e47 lots of stuff:
* show per-connection and per-transfer speeds
* support multiple cookies in parser
* set SameSite=Lax
* restore macos support in sfx.sh
* md-editor: add mojibake/unicode hunter
* md-editor: add table formatter
* md-editor: make bold bolder
* md-editor: more hotkeys
* md-editor: fix saving in fancy
* md-editor: fix eof-scrolling in chrome
* md-editor: fix text erasure with newline
* md-editor: fix backspace behavior in gutter
2020-11-13 02:58:38 +01:00
ed
a90c49b8fb fuse.py: support mojibake on windows 2020-10-25 08:07:17 +01:00
ed
b1222edb27 mention rclone in docs 2020-10-25 08:05:11 +01:00
ed
b967a92f69 support rclone as fuse client 2020-10-25 08:04:41 +01:00
ed
90a5cb5e59 fuse: support https + passwords, use argparse,
better handle windows trying to listdir(file)
2020-08-31 03:44:46 +02:00
ed
7aba9cb76b add contrib 2020-08-23 22:40:25 +00:00
ed
f550a8171d sfx: support ubuntu and openrc:
-- ubuntu does not let root follow symlinks created by other users
-- openrc expects copyparty to die if you kill the sfx parent
2020-08-23 22:32:44 +00:00
ed
82e568d4c9 sfx: support py27 on win10 when %TEMP% contains Skatteoppgjør.pdf 2020-08-18 19:23:17 +00:00
ed
7b2a4a3d59 v0.5.2 2020-08-18 18:22:23 +00:00
ed
0265455cd1 v0.5.1 2020-08-17 21:55:16 +00:00
ed
afafc886a4 support windows 2020-08-17 21:53:24 +00:00
ed
8a959f6ac4 add server info banner thing 2020-08-17 21:33:06 +00:00
ed
1c3aa0d2c5 deal with a soho nas (and FF60esr) 2020-08-17 20:39:46 +00:00
ed
79b7d3316a v0.5.0 2020-08-16 23:04:10 +00:00
ed
fa7768583a md-editor: tolerate inaccurate mtimes 2020-08-17 00:44:22 +00:00
ed
faf49f6c15 md-editor: add paragraph jumping 2020-08-17 00:42:05 +00:00
ed
765af31b83 improve fuse-fuzzer 2020-08-13 04:43:13 +00:00
ed
b6a3c52d67 fuse: be nicer to software which fails on truncated reads, such as Wimgapi.dll 2020-08-11 18:16:37 +00:00
ed
b025c2f660 fuse: windows optimizations 2020-08-09 04:09:42 +00:00
ed
e559a7c878 another fuse cache fix 2020-08-09 00:51:48 +00:00
ed
5c8855aafd trailing whitespace best syntax fug 2020-08-08 00:51:37 +00:00
ed
b5fc537b89 support PUT and ACAO 2020-08-08 00:47:54 +00:00
ed
14899d3a7c fix fuse cache bugs 2020-08-07 23:55:48 +00:00
ed
0ea7881652 fuse: cache options 2020-08-07 21:55:40 +00:00
ed
ec29b59d1e black 2020-08-07 20:00:30 +00:00
ed
9405597c15 workaround python-issue2494 on windows 2020-08-06 19:31:52 +00:00
ed
82441978c6 fuse: windows howto 2020-08-06 18:22:25 +00:00
ed
e0e6291bdb cleanup + readme 2020-08-04 23:46:57 +00:00
ed
b2b083fd0a fuse: support windows/msys2 2020-08-04 22:50:45 +00:00
ed
f8a51b68e7 fuse: add fork based on fuse-python 2020-08-04 22:42:40 +00:00
ed
e0a19108e5 ensure firefox shows the latest md 2020-06-25 00:07:50 +00:00
ed
770ea68ca8 workaround systemd being a joke 2020-06-24 23:53:23 +00:00
ed
ce36c52baf 1234 too popular 2020-06-24 23:52:42 +00:00
ed
a7da1dd233 v0.4.3 2020-05-17 16:46:47 +02:00
ed
678ef296b4 fully hide the navbar when asked 2020-05-17 16:44:58 +02:00
ed
9e5627d805 drop opus audio support on old iOS versions 2020-05-17 16:44:17 +02:00
ed
5958ee4439 autoindent oversight 2020-05-17 08:20:54 +02:00
ed
7127e57f0e happens on macs too 2020-05-17 02:58:22 +02:00
ed
ee9c6dc8aa use marked.js v1.1.0 2020-05-17 02:28:03 +02:00
ed
92779b3f48 2x chrome editor perf 2020-05-17 00:49:49 +02:00
ed
2f1baf17d4 numbered headers for paper-prints 2020-05-17 00:33:34 +02:00
ed
583da3d4a9 actually consider paper-printing 2020-05-16 02:24:27 +02:00
ed
bf9ff78bcc autofill blank link descriptions 2020-05-16 02:19:45 +02:00
ed
2cb07792cc add monospace font 2020-05-16 02:13:34 +02:00
ed
47bc8bb466 multiprocessing adds latency; default to off 2020-05-16 02:05:18 +02:00
ed
94ad1f5732 option to list dotfiles 2020-05-16 01:40:29 +02:00
ed
09557fbe83 v0.4.2 2020-05-15 01:02:18 +02:00
ed
1c0f44fa4e more 206 correctness 2020-05-15 00:52:57 +02:00
ed
fc4d59d2d7 improve autoindent 2020-05-15 00:39:36 +02:00
ed
12345fbacc fix editor cursor (especially in firefox) 2020-05-15 00:03:26 +02:00
ed
2e33c8d222 improve http206 and fuse-client 2020-05-15 00:00:49 +02:00
ed
db5f07f164 v0.4.1 2020-05-14 01:08:42 +02:00
ed
e050e69a43 dodge osx-safari bugs 2020-05-14 00:28:10 +02:00
ed
27cb1d4fc7 fix scroll sync on osx ff/chrome 2020-05-14 00:03:01 +02:00
ed
5d6a740947 fix undo/redo cursor pos 2020-05-13 23:27:27 +02:00
ed
da3f68c363 editor performance 2020-05-13 23:26:11 +02:00
ed
d7d1c3685c sfx notes 2020-05-13 01:12:33 +02:00
ed
dab3407beb v0.4.0 2020-05-13 00:44:23 +02:00
ed
592987a54a support smol screens 2020-05-13 00:39:29 +02:00
ed
8dca8326f7 osx fixes + shrinking 2020-05-12 22:36:21 +02:00
ed
633481fae3 fix preview 2020-05-12 21:11:38 +02:00
ed
e7b99e6fb7 (ノ ゚ヮ゚)ノ 彡┻━┻ 2020-05-12 20:56:42 +02:00
ed
2a6a3aedd0 shrink sfx some more 2020-05-12 00:26:40 +02:00
ed
866c74c841 autoindent 2020-05-12 00:00:54 +02:00
ed
dad92bde26 smart-home 2020-05-11 22:04:02 +02:00
ed
a994e034f7 lol wow 2020-05-11 02:07:21 +02:00
ed
2801c04f2e bit too aggressive 2020-05-11 01:56:26 +02:00
ed
316e3abfab NIH! NIH! NIH! 2020-05-11 01:38:30 +02:00
71 changed files with 7369 additions and 1184 deletions

12
.eslintrc.json Normal file
View File

@@ -0,0 +1,12 @@
{
"env": {
"browser": true,
"es2021": true
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaVersion": 12
},
"rules": {
}
}

2
.gitattributes vendored
View File

@@ -1,4 +1,6 @@
* text eol=lf * text eol=lf
*.reg text eol=crlf
*.png binary *.png binary
*.gif binary *.gif binary

10
.gitignore vendored
View File

@@ -11,14 +11,12 @@ dist/
sfx/ sfx/
.venv/ .venv/
# sublime # ide
*.sublime-workspace *.sublime-workspace
# winmerge # winmerge
*.bak *.bak
# other licenses # derived
contrib/ copyparty/web/deps/
srv/
# deps
copyparty/web/deps

8
.vscode/launch.json vendored
View File

@@ -9,13 +9,15 @@
"console": "integratedTerminal", "console": "integratedTerminal",
"cwd": "${workspaceFolder}", "cwd": "${workspaceFolder}",
"args": [ "args": [
"-j",
"0",
//"-nw", //"-nw",
"-ed",
"-emp",
"-e2d",
"-e2s",
"-a", "-a",
"ed:wark", "ed:wark",
"-v", "-v",
"srv::r:aed" "srv::r:aed:cnodupe"
] ]
}, },
{ {

View File

@@ -37,7 +37,7 @@
"python.linting.banditEnabled": true, "python.linting.banditEnabled": true,
"python.linting.flake8Args": [ "python.linting.flake8Args": [
"--max-line-length=120", "--max-line-length=120",
"--ignore=E722,F405,E203,W503,W293", "--ignore=E722,F405,E203,W503,W293,E402",
], ],
"python.linting.banditArgs": [ "python.linting.banditArgs": [
"--ignore=B104" "--ignore=B104"
@@ -55,6 +55,6 @@
// //
// things you may wanna edit: // things you may wanna edit:
// //
"python.pythonPath": ".venv/bin/python", "python.pythonPath": "/usr/bin/python3",
//"python.linting.enabled": true, //"python.linting.enabled": true,
} }

10
.vscode/tasks.json vendored Normal file
View File

@@ -0,0 +1,10 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "pre",
"command": "true;rm -rf inc/* inc/.hist/;mkdir -p inc;",
"type": "shell"
}
]
}

View File

@@ -13,12 +13,25 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* code standard: `black` * code standard: `black`
## quickstart
download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!
running the sfx without arguments (for example doubleclicking it on Windows) will let anyone access the current folder; see `-h` for help if you want accounts and volumes etc
you may also want these, especially on servers:
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for legit https)
## notes ## notes
* iPhone/iPad: use Firefox to download files * iPhone/iPad: use Firefox to download files
* Android-Chrome: set max "parallel uploads" for 200% upload speed (android bug) * Android-Chrome: set max "parallel uploads" for 200% upload speed (android bug)
* Android-Firefox: takes a while to select files (in order to avoid the above android-chrome issue) * Android-Firefox: takes a while to select files (in order to avoid the above android-chrome issue)
* Desktop-Firefox: may use gigabytes of RAM if your connection is great and your files are massive * Desktop-Firefox: may use gigabytes of RAM if your connection is great and your files are massive
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
* because no browsers currently implement the media-query to do this properly orz
## status ## status
@@ -36,10 +49,22 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [x] accounts * [x] accounts
* [x] markdown viewer * [x] markdown viewer
* [x] markdown editor * [x] markdown editor
* [x] FUSE client (read-only)
summary: it works! you can use it! (but technically not even close to beta) summary: it works! you can use it! (but technically not even close to beta)
# client examples
* javascript: dump some state into a file (two separate examples)
* `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
* `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
* FUSE: mount a copyparty server as a local filesystem
* cross-platform python client available in [./bin/](bin/)
* [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
# dependencies # dependencies
* `jinja2` * `jinja2`
@@ -55,28 +80,36 @@ currently there are two self-contained binaries:
* `copyparty-sfx.sh` for unix (linux and osx) -- smaller, more robust * `copyparty-sfx.sh` for unix (linux and osx) -- smaller, more robust
* `copyparty-sfx.py` for windows (unix too) -- crossplatform, beta * `copyparty-sfx.py` for windows (unix too) -- crossplatform, beta
launch either of them and it'll unpack and run copyparty, assuming you have python installed of course launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course
pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `copyparty.py` and keep it in the same folder because `sys.path` is funky pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `copyparty.py` and keep it in the same folder because `sys.path` is funky
if you don't need all the features you can repack the sfx and save a bunch of space, tho currently the only removable feature is the opus/vorbis javascript decoder which is needed by apple devices to play foss audio files
steps to reduce the sfx size from `720 kB` to `250 kB` roughly: ## sfx repack
* run one of the sfx'es once to unpack it
* `./scripts/make-sfx.sh re no-ogv` creates a new pair of sfx
no internet connection needed, just download an sfx and the repo zip (also if you're on windows use msys2) if you don't need all the features you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except for either msys2 or WSL if you're on windows)
* `724K` original size as of v0.4.0
* `256K` after `./scripts/make-sfx.sh re no-ogv`
* `164K` after `./scripts/make-sfx.sh re no-ogv no-cm`
the features you can opt to drop are
* `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files
* `cm`/easymde, the "fancy" markdown editor
for the `re`pack to work, first run one of the sfx'es once to unpack it
**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a `no-ogv no-cm` repack; works on linux/macos (and windows with msys2 or WSL)
# install on android # install on android
install [Termux](https://termux.com/) (see [ocv.me/termux](https://ocv.me/termux/)) and then copy-paste this into Termux (long-tap) all at once: install [Termux](https://termux.com/) (see [ocv.me/termux](https://ocv.me/termux/)) and then copy-paste this into Termux (long-tap) all at once:
```sh ```sh
apt update && apt -y full-upgrade && termux-setup-storage && apt -y install curl && cd && curl -L https://github.com/9001/copyparty/raw/master/scripts/copyparty-android.sh > copyparty-android.sh && chmod 755 copyparty-android.sh && ./copyparty-android.sh -h apt update && apt -y full-upgrade && termux-setup-storage && apt -y install python && python -m ensurepip && python -m pip install -U copyparty
echo $? echo $?
``` ```
after the initial setup (and restarting bash), you can launch copyparty at any time by running "copyparty" in Termux after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux
# dev env setup # dev env setup
@@ -104,13 +137,15 @@ in the `scripts` folder:
roughly sorted by priority roughly sorted by priority
* up2k handle filename too long * reduce up2k roundtrips
* up2k fails on empty files? alert then stuck * start from a chunk index and just go
* terminate client on bad data
* drop onto folders * drop onto folders
* look into android thumbnail cache file format * `os.copy_file_range` for up2k cloning
* up2k partials ui
* support pillow-simd * support pillow-simd
* cache sha512 chunks on client * cache sha512 chunks on client
* symlink existing files on upload
* comment field * comment field
* ~~look into android thumbnail cache file format~~ bad idea
* figure out the deal with pixel3a not being connectable as hotspot * figure out the deal with pixel3a not being connectable as hotspot
* pixel3a having unpredictable 3sec latency in general :|||| * pixel3a having unpredictable 3sec latency in general :||||

41
bin/README.md Normal file
View File

@@ -0,0 +1,41 @@
# copyparty-fuse.py
* mount a copyparty server as a local filesystem (read-only)
* **supports Windows!** -- expect `194 MiB/s` sequential read
* **supports Linux** -- expect `117 MiB/s` sequential read
* **supports macos** -- expect `85 MiB/s` sequential read
filecache is default-on for windows and macos;
* macos readsize is 64kB, so speed ~32 MiB/s without the cache
* windows readsize varies by software; explorer=1M, pv=32k
note that copyparty should run with `-ed` to enable dotfiles (hidden otherwise)
also consider using [../docs/rclone.md](../docs/rclone.md) instead for 5x performance
## to run this on windows:
* install [winfsp](https://github.com/billziss-gh/winfsp/releases/latest) and [python 3](https://www.python.org/downloads/)
* [x] add python 3.x to PATH (it asks during install)
* `python -m pip install --user fusepy`
* `python ./copyparty-fuse.py n: http://192.168.1.69:3923/`
10% faster in [msys2](https://www.msys2.org/), 700% faster if debug prints are enabled:
* `pacman -S mingw64/mingw-w64-x86_64-python{,-pip}`
* `/mingw64/bin/python3 -m pip install --user fusepy`
* `/mingw64/bin/python3 ./copyparty-fuse.py [...]`
you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releases/latest), let me know if you [figure out how](https://github.com/dokan-dev/dokany/wiki/FUSE)
(winfsp's sshfs leaks, doesn't look like winfsp itself does, should be fine)
# copyparty-fuse🅱.py
* mount a copyparty server as a local filesystem (read-only)
* does the same thing except more correct, `samba` approves
* **supports Linux** -- expect `18 MiB/s` (wait what)
* **supports Macos** -- probably
# copyparty-fuse-streaming.py
* pretend this doesn't exist

1100
bin/copyparty-fuse-streaming.py Executable file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

592
bin/copyparty-fuseb.py Executable file
View File

@@ -0,0 +1,592 @@
#!/usr/bin/env python3
from __future__ import print_function, unicode_literals
"""copyparty-fuseb: remote copyparty as a local filesystem"""
__author__ = "ed <copyparty@ocv.me>"
__copyright__ = 2020
__license__ = "MIT"
__url__ = "https://github.com/9001/copyparty/"
import re
import os
import sys
import time
import stat
import errno
import struct
import threading
import http.client # py2: httplib
import urllib.parse
from datetime import datetime
from urllib.parse import quote_from_bytes as quote
try:
import fuse
from fuse import Fuse
fuse.fuse_python_api = (0, 2)
if not hasattr(fuse, "__version__"):
raise Exception("your fuse-python is way old")
except:
print(
"\n could not import fuse; these may help:\n python3 -m pip install --user fuse-python\n apt install libfuse\n modprobe fuse\n"
)
raise
"""
mount a copyparty server (local or remote) as a filesystem
usage:
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas
dependencies:
sudo apk add fuse-dev python3-dev
python3 -m pip install --user fuse-python
fork of copyparty-fuse.py based on fuse-python which
appears to be more compliant than fusepy? since this works with samba
(probably just my garbage code tbh)
"""
def threadless_log(msg):
    """Write *msg* plus a trailing newline to stdout, no thread decoration."""
    line = "{}\n".format(msg)
    print(line, end="")
def boring_log(msg):
    """Log *msg* prefixed with the current thread ident as zero-padded hex."""
    tid = threading.current_thread().ident
    line = "\033[36m{:012x}\033[0m {}\n".format(tid, msg)
    # the first 4 chars of the color escape are dropped, same as the original
    print(line[4:], end="")
def rice_tid():
    """Render the thread ident as 5 ANSI-colored hex bytes, reset at the end."""
    ident = threading.current_thread().ident
    tail = struct.pack(b">Q", ident)[-5:]
    cells = [
        "\033[1;37;48;5;{}m{:02x}".format(b, b)
        for b in struct.unpack(b"B" * 5, tail)
    ]
    return "".join(cells) + "\033[0m"
def fancy_log(msg):
    """Log *msg* behind a rice_tid() colored thread-ident prefix."""
    prefix = rice_tid()
    print("{} {}\n".format(prefix, msg), end="")
def null_log(msg):
    """Discard *msg*; bound to log/dbg below to silence debug output."""
    return None
# module-wide logger bindings; fancy_log/null_log are defined above
info = fancy_log
log = fancy_log
dbg = fancy_log
# the two rebinds below intentionally silence log/dbg; only info stays active
log = null_log
dbg = null_log
def get_tid():
    """Return the ident of the calling thread (used as a connection key)."""
    me = threading.current_thread()
    return me.ident
def html_dec(txt):
    """Decode the four HTML entities that copyparty's listing may contain.

    ``&amp;`` is decoded last so that e.g. ``&amp;lt;`` round-trips to the
    literal text ``&lt;`` instead of collapsing all the way to ``<``.
    """
    for ent, ch in (
        ("&lt;", "<"),
        ("&gt;", ">"),
        ("&quot;", '"'),
        ("&amp;", "&"),
    ):
        txt = txt.replace(ent, ch)
    return txt
class CacheNode(object):
    """One cache entry: a lookup key, a payload, and its creation time."""

    def __init__(self, tag, data):
        self.tag = tag  # cache key (dirpath, or [path, offset] for file chunks)
        self.data = data  # the cached payload
        self.ts = time.time()  # creation timestamp, used for expiry
class Stat(fuse.Stat):
    """Baseline stat record: all fields zeroed except nlink=1 and uid/gid=1000."""

    def __init__(self):
        # everything defaults to zero...
        for field in (
            "st_mode",
            "st_ino",
            "st_dev",
            "st_size",
            "st_atime",
            "st_mtime",
            "st_ctime",
        ):
            setattr(self, field, 0)

        # ...except link count and ownership
        self.st_nlink = 1
        self.st_uid = 1000
        self.st_gid = 1000
class Gateway(object):
    """HTTP gateway to a copyparty server.

    Keeps one persistent HTTPConnection per thread-ident, since
    http.client connections are not safe to share across threads.
    """

    def __init__(self, base_url):
        # NOTE(review): only plain http is implemented; https raises "todo"
        self.base_url = base_url
        ui = urllib.parse.urlparse(base_url)
        self.web_root = ui.path.strip("/")
        try:
            # netloc contains an explicit port
            self.web_host, self.web_port = ui.netloc.split(":")
            self.web_port = int(self.web_port)
        except:
            # no port in the url; derive it from the scheme
            self.web_host = ui.netloc
            if ui.scheme == "http":
                self.web_port = 80
            elif ui.scheme == "https":
                raise Exception("todo")
            else:
                raise Exception("bad url?")

        # thread-ident -> HTTPConnection
        self.conns = {}

    def quotep(self, path):
        """Percent-encode a filesystem path for use in a request URL."""
        # TODO: mojibake support
        path = path.encode("utf-8", "ignore")
        return quote(path, safe="/")

    def getconn(self, tid=None):
        """Return (or lazily create) this thread's HTTPConnection."""
        tid = tid or get_tid()
        try:
            return self.conns[tid]
        except:
            info("new conn [{}] [{}]".format(self.web_host, self.web_port))
            conn = http.client.HTTPConnection(self.web_host, self.web_port, timeout=260)
            self.conns[tid] = conn
            return conn

    def closeconn(self, tid=None):
        """Close and forget this thread's connection; ignore all errors."""
        tid = tid or get_tid()
        try:
            self.conns[tid].close()
            del self.conns[tid]
        except:
            pass

    def sendreq(self, *args, **kwargs):
        """Send a request; on failure, retry once on a fresh connection."""
        tid = get_tid()
        try:
            c = self.getconn(tid)
            c.request(*list(args), **kwargs)
            return c.getresponse()
        except:
            # stale/broken keepalive; reconnect and retry once
            self.closeconn(tid)
            c = self.getconn(tid)
            c.request(*list(args), **kwargs)
            return c.getresponse()

    def listdir(self, path):
        """GET a directory listing (?dots includes dotfiles) and parse it."""
        web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
        r = self.sendreq("GET", web_path)
        if r.status != 200:
            self.closeconn()
            raise Exception(
                "http error {} reading dir {} in {}".format(
                    r.status, web_path, rice_tid()
                )
            )
        return self.parse_html(r)

    def download_file_range(self, path, ofs1, ofs2):
        """Download bytes [ofs1, ofs2) of a file via an HTTP Range request."""
        web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
        hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
        log("downloading {}".format(hdr_range))
        r = self.sendreq("GET", web_path, headers={"Range": hdr_range})
        if r.status != http.client.PARTIAL_CONTENT:
            self.closeconn()
            raise Exception(
                "http error {} reading file {} range {} in {}".format(
                    r.status, web_path, hdr_range, rice_tid()
                )
            )
        return r.read()

    def parse_html(self, datasrc):
        """Scrape copyparty's HTML directory listing into [name, Stat, 0]
        triples, reading *datasrc* in 4 KiB chunks."""
        ret = []
        remainder = b""
        ptn = re.compile(
            r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
        )
        while True:
            buf = remainder + datasrc.read(4096)
            # print('[{}]'.format(buf.decode('utf-8')))
            if not buf:
                break
            remainder = b""
            endpos = buf.rfind(b"\n")
            if endpos >= 0:
                # carry the trailing partial line over to the next read
                remainder = buf[endpos + 1 :]
                buf = buf[:endpos]
            lines = buf.decode("utf-8").split("\n")
            for line in lines:
                m = ptn.match(line)
                if not m:
                    # print(line)
                    continue
                ftype, fname, fsize, fdate = m.groups()
                fname = html_dec(fname)
                ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
                sz = int(fsize)
                if ftype == "-":
                    ret.append([fname, self.stat_file(ts, sz), 0])
                else:
                    ret.append([fname, self.stat_dir(ts, sz), 0])
        return ret

    def stat_dir(self, ts, sz=4096):
        """Synthesize a read-only directory Stat (mode 0o555, nlink 2)."""
        ret = Stat()
        ret.st_mode = stat.S_IFDIR | 0o555
        ret.st_nlink = 2
        ret.st_size = sz
        ret.st_atime = ts
        ret.st_mtime = ts
        ret.st_ctime = ts
        return ret

    def stat_file(self, ts, sz):
        """Synthesize a read-only file Stat (mode 0o444)."""
        ret = Stat()
        ret.st_mode = stat.S_IFREG | 0o444
        ret.st_size = sz
        ret.st_atime = ts
        ret.st_mtime = ts
        ret.st_ctime = ts
        return ret
class CPPF(Fuse):
    """Read-only FUSE filesystem backed by a remote copyparty server,
    with small in-memory directory and file-range caches."""

    def __init__(self, *args, **kwargs):
        Fuse.__init__(self, *args, **kwargs)
        # set by the -o url=... mount option before init2() is called
        self.url = None
        # CacheNode(tag=dirpath, data=listing), oldest first
        self.dircache = []
        self.dircache_mtx = threading.Lock()
        # CacheNode(tag=[path, offset], data=bytes), kept LRU-ish
        self.filecache = []
        self.filecache_mtx = threading.Lock()

    def init2(self):
        """Late init once the url option is known; builds the Gateway."""
        # TODO figure out how python-fuse wanted this to go
        self.gw = Gateway(self.url)  # .decode('utf-8'))
        info("up")

    def clean_dircache(self):
        """not threadsafe"""
        # drop leading entries older than 1 second (list is oldest-first)
        now = time.time()
        cutoff = 0
        for cn in self.dircache:
            if now - cn.ts > 1:
                cutoff += 1
            else:
                break
        if cutoff > 0:
            self.dircache = self.dircache[cutoff:]

    def get_cached_dir(self, dirpath):
        """Return the CacheNode for *dirpath* if still fresh, else None."""
        # with self.dircache_mtx:
        if True:
            self.clean_dircache()
            for cn in self.dircache:
                if cn.tag == dirpath:
                    return cn
        return None

    """
    ,-------------------------------, g1>=c1, g2<=c2
    |cache1 cache2| buf[g1-c1:(g1-c1)+(g2-g1)]
    `-------------------------------'
    ,---------------,
    |get1 get2|
    `---------------'
    __________________________________________________________________________
    ,-------------------------------, g2<=c2, (g2>=c1)
    |cache1 cache2| cdr=buf[:g2-c1]
    `-------------------------------' dl car; g1-512K:c1
    ,---------------,
    |get1 get2|
    `---------------'
    __________________________________________________________________________
    ,-------------------------------, g1>=c1, (g1<=c2)
    |cache1 cache2| car=buf[c2-g1:]
    `-------------------------------' dl cdr; c2:c2+1M
    ,---------------,
    |get1 get2|
    `---------------'
    """

    def get_cached_file(self, path, get1, get2, file_sz):
        """Serve bytes [get1, get2) of *path*, stitching partial hits from
        the file cache and downloading (with readahead) whatever is missing."""
        car = None  # cached prefix of the requested range
        cdr = None  # cached suffix of the requested range
        ncn = -1
        # with self.filecache_mtx:
        if True:
            dbg("cache request from {} to {}, size {}".format(get1, get2, file_sz))
            for cn in self.filecache:
                ncn += 1
                cache_path, cache1 = cn.tag
                if cache_path != path:
                    continue
                cache2 = cache1 + len(cn.data)
                if get2 <= cache1 or get1 >= cache2:
                    # no overlap with this entry
                    continue
                if get1 >= cache1 and get2 <= cache2:
                    # keep cache entry alive by moving it to the end
                    self.filecache = (
                        self.filecache[:ncn] + self.filecache[ncn + 1 :] + [cn]
                    )
                    buf_ofs = get1 - cache1
                    buf_end = buf_ofs + (get2 - get1)
                    dbg(
                        "found all ({}, {} to {}, len {}) [{}:{}] = {}".format(
                            ncn,
                            cache1,
                            cache2,
                            len(cn.data),
                            buf_ofs,
                            buf_end,
                            buf_end - buf_ofs,
                        )
                    )
                    return cn.data[buf_ofs:buf_end]
                if get2 < cache2:
                    # entry covers the tail of the request
                    x = cn.data[: get2 - cache1]
                    if not cdr or len(cdr) < len(x):
                        dbg(
                            "found car ({}, {} to {}, len {}) [:{}-{}] = [:{}] = {}".format(
                                ncn,
                                cache1,
                                cache2,
                                len(cn.data),
                                get2,
                                cache1,
                                get2 - cache1,
                                len(x),
                            )
                        )
                        cdr = x
                    continue
                if get1 > cache1:
                    # entry covers the head of the request
                    x = cn.data[-(cache2 - get1) :]
                    if not car or len(car) < len(x):
                        dbg(
                            "found cdr ({}, {} to {}, len {}) [-({}-{}):] = [-{}:] = {}".format(
                                ncn,
                                cache1,
                                cache2,
                                len(cn.data),
                                cache2,
                                get1,
                                cache2 - get1,
                                len(x),
                            )
                        )
                        car = x
                    continue
                raise Exception("what")
        if car and cdr:
            # the two cached fragments cover the whole request
            dbg("<cache> have both")
            ret = car + cdr
            if len(ret) == get2 - get1:
                return ret
            raise Exception("{} + {} != {} - {}".format(len(car), len(cdr), get2, get1))
        elif cdr:
            # cached suffix only; download the missing head plus 512K readbehind
            h_end = get1 + (get2 - get1) - len(cdr)
            h_ofs = h_end - 512 * 1024
            if h_ofs < 0:
                h_ofs = 0
            buf_ofs = (get2 - get1) - len(cdr)
            dbg(
                "<cache> cdr {}, car {}-{}={} [-{}:]".format(
                    len(cdr), h_ofs, h_end, h_end - h_ofs, buf_ofs
                )
            )
            buf = self.gw.download_file_range(path, h_ofs, h_end)
            ret = buf[-buf_ofs:] + cdr
        elif car:
            # cached prefix only; download the missing tail plus 1M readahead
            h_ofs = get1 + len(car)
            h_end = h_ofs + 1024 * 1024
            if h_end > file_sz:
                h_end = file_sz
            buf_ofs = (get2 - get1) - len(car)
            dbg(
                "<cache> car {}, cdr {}-{}={} [:{}]".format(
                    len(car), h_ofs, h_end, h_end - h_ofs, buf_ofs
                )
            )
            buf = self.gw.download_file_range(path, h_ofs, h_end)
            ret = car + buf[:buf_ofs]
        else:
            # full miss; download the range padded 256K before / 1M after
            h_ofs = get1 - 256 * 1024
            h_end = get2 + 1024 * 1024
            if h_ofs < 0:
                h_ofs = 0
            if h_end > file_sz:
                h_end = file_sz
            buf_ofs = get1 - h_ofs
            buf_end = buf_ofs + get2 - get1
            dbg(
                "<cache> {}-{}={} [{}:{}]".format(
                    h_ofs, h_end, h_end - h_ofs, buf_ofs, buf_end
                )
            )
            buf = self.gw.download_file_range(path, h_ofs, h_end)
            ret = buf[buf_ofs:buf_end]
        # remember the downloaded slab; cap the cache at 7 entries (FIFO evict)
        cn = CacheNode([path, h_ofs], buf)
        # with self.filecache_mtx:
        if True:
            if len(self.filecache) > 6:
                self.filecache = self.filecache[1:] + [cn]
            else:
                self.filecache.append(cn)
        return ret

    def _readdir(self, path):
        """Fetch a directory listing from the server and add it to the cache."""
        path = path.strip("/")
        log("readdir {}".format(path))
        ret = self.gw.listdir(path)
        # with self.dircache_mtx:
        if True:
            cn = CacheNode(path, ret)
            self.dircache.append(cn)
            self.clean_dircache()
        return ret

    def readdir(self, path, offset):
        """FUSE readdir: yield Direntry objects starting at *offset*."""
        for e in self._readdir(path)[offset:]:
            # log("yield [{}]".format(e[0]))
            yield fuse.Direntry(e[0])

    def open(self, path, flags):
        """FUSE open: permit read-only access only."""
        if (flags & (os.O_RDONLY | os.O_WRONLY | os.O_RDWR)) != os.O_RDONLY:
            return -errno.EACCES
        st = self.getattr(path)
        try:
            if st.st_nlink > 0:
                return st
        except:
            # getattr returned a negative errno instead of a Stat
            return st  # -int(os.errcode)

    def read(self, path, length, offset, fh=None, *args):
        """FUSE read: fetch [offset, offset+length) via an HTTP range request."""
        if args:
            log("unexpected args [" + "] [".join(repr(x) for x in args) + "]")
            raise Exception()
        path = path.strip("/")
        ofs2 = offset + length
        log("read {} @ {} len {} end {}".format(path, offset, length, ofs2))
        st = self.getattr(path)
        try:
            file_sz = st.st_size
        except:
            # getattr returned a negative errno instead of a Stat
            return st  # -int(os.errcode)
        if ofs2 > file_sz:
            # clamp reads past EOF
            ofs2 = file_sz
            log("truncate to len {} end {}".format(ofs2 - offset, ofs2))
        if file_sz == 0 or offset >= ofs2:
            return b""
        # toggle cache here i suppose
        # return self.get_cached_file(path, offset, ofs2, file_sz)
        return self.gw.download_file_range(path, offset, ofs2)

    def getattr(self, path):
        """FUSE getattr: resolve *path* through its (cached) parent listing."""
        log("getattr [{}]".format(path))
        path = path.strip("/")
        try:
            dirpath, fname = path.rsplit("/", 1)
        except:
            # no slash; the entry lives in the volume root
            dirpath = ""
            fname = path
        if not path:
            # the mountpoint root itself
            ret = self.gw.stat_dir(time.time())
            dbg("=root")
            return ret
        cn = self.get_cached_dir(dirpath)
        if cn:
            log("cache ok")
            dents = cn.data
        else:
            log("cache miss")
            dents = self._readdir(dirpath)
        for cache_name, cache_stat, _ in dents:
            if cache_name == fname:
                dbg("=file")
                return cache_stat
        log("=404")
        return -errno.ENOENT
def main():
    """Parse mount options, validate -o url=..., then run the fuse loop."""
    # dodge python-bug #7980 (first threaded strptime call can crash)
    time.strptime("19970815", "%Y%m%d")

    server = CPPF()
    server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
    server.parse(values=server, errex=1)

    if not server.url or not str(server.url).startswith("http"):
        for ln in (
            "\nerror:",
            " need argument: -o url=<...>",
            " need argument: mount-path",
            "example:",
            " ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas",
        ):
            print(ln)

        sys.exit(1)

    server.init2()

    # run fuse in a daemon worker so the main thread stays interruptible
    threading.Thread(target=server.main, daemon=True).start()
    while True:
        time.sleep(9001)


if __name__ == "__main__":
    main()

View File

@@ -118,7 +118,7 @@ printf ']}' >> /dev/shm/$salt.hs
printf '\033[36m' printf '\033[36m'
#curl "http://$target:1234$posturl/handshake.php" -H "Content-Type: text/plain;charset=UTF-8" -H "Cookie: cppwd=$passwd" --data "$(cat "/dev/shm/$salt.hs")" | tee /dev/shm/$salt.res #curl "http://$target:3923$posturl/handshake.php" -H "Content-Type: text/plain;charset=UTF-8" -H "Cookie: cppwd=$passwd" --data "$(cat "/dev/shm/$salt.hs")" | tee /dev/shm/$salt.res
{ {
{ {
@@ -135,7 +135,7 @@ EOF
cat /dev/shm/$salt.hs cat /dev/shm/$salt.hs
} | } |
tee /dev/shm/$salt.hsb | tee /dev/shm/$salt.hsb |
ncat $target 1234 | ncat $target 3923 |
tee /dev/shm/$salt.hs1r tee /dev/shm/$salt.hs1r
wark="$(cat /dev/shm/$salt.hs1r | getwark)" wark="$(cat /dev/shm/$salt.hs1r | getwark)"
@@ -190,7 +190,7 @@ EOF
nchunk=$((nchunk+1)) nchunk=$((nchunk+1))
done | done |
ncat $target 1234 | ncat $target 3923 |
tee /dev/shm/$salt.pr tee /dev/shm/$salt.pr
t=$(date +%s.%N) t=$(date +%s.%N)
@@ -201,7 +201,7 @@ t=$(date +%s.%N)
printf '\033[36m' printf '\033[36m'
ncat $target 1234 < /dev/shm/$salt.hsb | ncat $target 3923 < /dev/shm/$salt.hsb |
tee /dev/shm/$salt.hs2r | tee /dev/shm/$salt.hs2r |
grep -E '"hash": ?\[ *\]' grep -E '"hash": ?\[ *\]'

22
contrib/README.md Normal file
View File

@@ -0,0 +1,22 @@
### [`copyparty.bat`](copyparty.bat)
* launches copyparty with no arguments (anon read+write within same folder)
* intended for windows machines with no python.exe in PATH
* works on windows, linux and macos
* assumes `copyparty-sfx.py` was renamed to `copyparty.py` in the same folder as `copyparty.bat`
### [`index.html`](index.html)
* drop-in redirect from an httpd to copyparty
* assumes the webserver and copyparty are running on the same server/IP
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript
### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
disables thumbnails and folder-type detection in windows explorer, makes it way faster (especially for slow/networked locations (such as copyparty-fuse))
# OS integration
init-scripts to start copyparty as a service
* [`systemd/copyparty.service`](systemd/copyparty.service)
* [`openrc/copyparty`](openrc/copyparty)
# Reverse-proxy
copyparty has basic support for running behind another webserver
* [`nginx/copyparty.conf`](nginx/copyparty.conf)

33
contrib/copyparty.bat Normal file
View File

@@ -0,0 +1,33 @@
exec python "$(dirname "$0")"/copyparty.py
@rem on linux, the above will execute and the script will terminate
@rem on windows, the rest of this script will run
@echo off
cls
set py=
for /f %%i in ('where python 2^>nul') do (
set "py=%%i"
goto c1
)
:c1
if [%py%] == [] (
for /f %%i in ('where /r "%localappdata%\programs\python" python 2^>nul') do (
set "py=%%i"
goto c2
)
)
:c2
if [%py%] == [] set "py=c:\python27\python.exe"
if not exist "%py%" (
echo could not find python
echo(
pause
exit /b
)
start cmd /c %py% "%~dp0\copyparty.py"

View File

@@ -0,0 +1,31 @@
Windows Registry Editor Version 5.00
; this will do 3 things, all optional:
; 1) disable thumbnails
; 2) delete all existing folder type settings/detections
; 3) disable folder type detection (force default columns)
;
; this makes the file explorer way faster,
; especially on slow/networked locations
; =====================================================================
; 1) disable thumbnails
[HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Explorer\Advanced]
"IconsOnly"=dword:00000001
; =====================================================================
; 2) delete all existing folder type settings/detections
[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags]
[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\BagMRU]
; =====================================================================
; 3) disable folder type detection
[HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags\AllFolders\Shell]
"FolderType"="NotSpecified"

43
contrib/index.html Normal file
View File

@@ -0,0 +1,43 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>⇆🎉 redirect</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<style>
html, body {
font-family: sans-serif;
}
body {
padding: 1em 2em;
font-size: 1.5em;
}
a {
font-size: 1.2em;
padding: .1em;
}
</style>
</head>
<body>
<span id="desc">you probably want</span> <a id="redir" href="//10.13.1.1:3923/">copyparty</a>
<script>
var a = document.getElementById('redir'),
proto = window.location.protocol.indexOf('https') === 0 ? 'https' : 'http',
loc = window.location.hostname || '127.0.0.1',
port = a.getAttribute('href').split(':').pop().split('/')[0],
url = proto + '://' + loc + ':' + port + '/';
a.setAttribute('href', url);
document.getElementById('desc').innerHTML = 'redirecting to';
setTimeout(function() {
window.location.href = url;
}, 500);
</script>
</body>
</html>

View File

@@ -0,0 +1,26 @@
upstream cpp {
server 127.0.0.1:3923;
keepalive 120;
}
server {
listen 443 ssl;
listen [::]:443 ssl;
server_name fs.example.com;
location / {
proxy_pass http://cpp;
proxy_redirect off;
# disable buffering (next 4 lines)
proxy_http_version 1.1;
client_max_body_size 0;
proxy_buffering off;
proxy_request_buffering off;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Connection "Keep-Alive";
}
}

18
contrib/openrc/copyparty Normal file
View File

@@ -0,0 +1,18 @@
#!/sbin/openrc-run
# this will start `/usr/local/bin/copyparty-sfx.py`
# and share '/mnt' with anonymous read+write
#
# installation:
# cp -pv copyparty /etc/init.d && rc-update add copyparty
#
# you may want to:
# change '/usr/bin/python' to another interpreter
# change '/mnt::a' to another location or permission-set
name="$SVCNAME"
command_background=true
pidfile="/var/run/$SVCNAME.pid"
command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
command_args="-q -v /mnt::a"

View File

@@ -0,0 +1,19 @@
# this will start `/usr/local/bin/copyparty-sfx.py`
# and share '/mnt' with anonymous read+write
#
# installation:
# cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
#
# you may want to:
# change '/usr/bin/python' to another interpreter
# change '/mnt::a' to another location or permission-set
[Unit]
Description=copyparty file server
[Service]
ExecStart=/usr/bin/python /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
[Install]
WantedBy=multi-user.target

View File

@@ -16,6 +16,8 @@ if platform.system() == "Windows":
VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393] VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393]
# introduced in anniversary update # introduced in anniversary update
MACOS = platform.system() == "Darwin"
class EnvParams(object): class EnvParams(object):
def __init__(self): def __init__(self):

View File

@@ -9,6 +9,7 @@ __license__ = "MIT"
__url__ = "https://github.com/9001/copyparty/" __url__ = "https://github.com/9001/copyparty/"
import os import os
import time
import shutil import shutil
import filecmp import filecmp
import locale import locale
@@ -85,6 +86,7 @@ def ensure_cert():
def main(): def main():
time.strptime("19970815", "%Y%m%d") # python#7980
if WINDOWS: if WINDOWS:
os.system("") # enables colors os.system("") # enables colors
@@ -103,17 +105,22 @@ def main():
epilog=dedent( epilog=dedent(
""" """
-a takes username:password, -a takes username:password,
-v takes src:dst:permset:permset:... where "permset" is -v takes src:dst:permset:permset:cflag:cflag:...
accesslevel followed by username (no separator) where "permset" is accesslevel followed by username (no separator)
and "cflag" is config flags to set on this volume
list of cflags:
cnodupe rejects existing files (instead of symlinking them)
example:\033[35m example:\033[35m
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed \033[36m -a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
mount current directory at "/" with mount current directory at "/" with
* r (read-only) for everyone * r (read-only) for everyone
* a (read+write) for ed * a (read+write) for ed
mount ../inc at "/dump" with mount ../inc at "/dump" with
* w (write-only) for everyone * w (write-only) for everyone
* a (read+write) for ed \033[0m * a (read+write) for ed
* reject duplicate files \033[0m
if no accounts or volumes are configured, if no accounts or volumes are configured,
current folder will be read/write for everyone current folder will be read/write for everyone
@@ -123,18 +130,36 @@ def main():
""" """
), ),
) )
ap.add_argument( # fmt: off
"-c", metavar="PATH", type=str, action="append", help="add config file" ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
) ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind") ap.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
ap.add_argument("-p", metavar="PORT", type=int, default=1234, help="port to bind") ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
ap.add_argument("-nc", metavar="NUM", type=int, default=16, help="max num clients") ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
ap.add_argument("-j", metavar="CORES", type=int, help="max num cpu cores")
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account") ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume") ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
ap.add_argument("-q", action="store_true", help="quiet") ap.add_argument("-q", action="store_true", help="quiet")
ap.add_argument("-nw", action="store_true", help="benchmark: disable writing") ap.add_argument("-ed", action="store_true", help="enable ?dots")
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap.add_argument("-e2d", action="store_true", help="enable up2k database")
ap.add_argument("-e2s", action="store_true", help="enable up2k db-scanner")
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap.add_argument("-nih", action="store_true", help="no info hostname")
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
al = ap.parse_args() al = ap.parse_args()
# fmt: on
al.i = al.i.split(",")
try:
if "-" in al.p:
lo, hi = [int(x) for x in al.p.split("-")]
al.p = list(range(lo, hi + 1))
else:
al.p = [int(x) for x in al.p.split(",")]
except:
raise Exception("invalid value for -p")
SvcHub(al).run() SvcHub(al).run()

View File

@@ -1,8 +1,8 @@
# coding: utf-8 # coding: utf-8
VERSION = (0, 3, 1) VERSION = (0, 7, 3)
CODENAME = "docuparty" CODENAME = "keeping track"
BUILD_DT = (2020, 5, 7) BUILD_DT = (2021, 2, 3)
S_VERSION = ".".join(map(str, VERSION)) S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT) S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -12,11 +12,12 @@ from .util import undot, Pebkac, fsdec, fsenc
class VFS(object): class VFS(object):
"""single level in the virtual fs""" """single level in the virtual fs"""
def __init__(self, realpath, vpath, uread=[], uwrite=[]): def __init__(self, realpath, vpath, uread=[], uwrite=[], flags={}):
self.realpath = realpath # absolute path on host filesystem self.realpath = realpath # absolute path on host filesystem
self.vpath = vpath # absolute path in the virtual filesystem self.vpath = vpath # absolute path in the virtual filesystem
self.uread = uread # users who can read this self.uread = uread # users who can read this
self.uwrite = uwrite # users who can write this self.uwrite = uwrite # users who can write this
self.flags = flags # config switches
self.nodes = {} # child nodes self.nodes = {} # child nodes
def add(self, src, dst): def add(self, src, dst):
@@ -36,6 +37,7 @@ class VFS(object):
"{}/{}".format(self.vpath, name).lstrip("/"), "{}/{}".format(self.vpath, name).lstrip("/"),
self.uread, self.uread,
self.uwrite, self.uwrite,
self.flags,
) )
self.nodes[name] = vn self.nodes[name] = vn
return vn.add(src, dst) return vn.add(src, dst)
@@ -104,7 +106,7 @@ class VFS(object):
real.sort() real.sort()
if not rem: if not rem:
for name, vn2 in sorted(self.nodes.items()): for name, vn2 in sorted(self.nodes.items()):
if uname in vn2.uread: if uname in vn2.uread or "*" in vn2.uread:
virt_vis[name] = vn2 virt_vis[name] = vn2
# no vfs nodes in the list of real inodes # no vfs nodes in the list of real inodes
@@ -128,16 +130,15 @@ class VFS(object):
class AuthSrv(object): class AuthSrv(object):
"""verifies users against given paths""" """verifies users against given paths"""
def __init__(self, args, log_func): def __init__(self, args, log_func, warn_anonwrite=True):
self.log_func = log_func
self.args = args self.args = args
self.log_func = log_func
self.warn_anonwrite = True self.warn_anonwrite = warn_anonwrite
if WINDOWS: if WINDOWS:
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)") self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
else: else:
self.re_vol = re.compile(r"^([^:]*):([^:]*):(.*)") self.re_vol = re.compile(r"^([^:]*):([^:]*):(.*)$")
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.reload() self.reload()
@@ -161,7 +162,7 @@ class AuthSrv(object):
yield prev, True yield prev, True
def _parse_config_file(self, fd, user, mread, mwrite, mount): def _parse_config_file(self, fd, user, mread, mwrite, mflags, mount):
vol_src = None vol_src = None
vol_dst = None vol_dst = None
for ln in [x.decode("utf-8").strip() for x in fd]: for ln in [x.decode("utf-8").strip() for x in fd]:
@@ -191,6 +192,7 @@ class AuthSrv(object):
mount[vol_dst] = vol_src mount[vol_dst] = vol_src
mread[vol_dst] = [] mread[vol_dst] = []
mwrite[vol_dst] = [] mwrite[vol_dst] = []
mflags[vol_dst] = {}
continue continue
lvl, uname = ln.split(" ") lvl, uname = ln.split(" ")
@@ -198,6 +200,9 @@ class AuthSrv(object):
mread[vol_dst].append(uname) mread[vol_dst].append(uname)
if lvl in "wa": if lvl in "wa":
mwrite[vol_dst].append(uname) mwrite[vol_dst].append(uname)
if lvl == "c":
# config option, currently switches only
mflags[vol_dst][uname] = True
def reload(self): def reload(self):
""" """
@@ -210,6 +215,7 @@ class AuthSrv(object):
user = {} # username:password user = {} # username:password
mread = {} # mountpoint:[username] mread = {} # mountpoint:[username]
mwrite = {} # mountpoint:[username] mwrite = {} # mountpoint:[username]
mflags = {} # mountpoint:[flag]
mount = {} # dst:src (mountpoint:realpath) mount = {} # dst:src (mountpoint:realpath)
if self.args.a: if self.args.a:
@@ -220,20 +226,25 @@ class AuthSrv(object):
if self.args.v: if self.args.v:
# list of src:dst:permset:permset:... # list of src:dst:permset:permset:...
# permset is [rwa]username # permset is [rwa]username
for vol_match in [self.re_vol.match(x) for x in self.args.v]: for v_str in self.args.v:
try: m = self.re_vol.match(v_str)
src, dst, perms = vol_match.groups() if not m:
except: raise Exception("invalid -v argument: [{}]".format(v_str))
raise Exception("invalid -v argument")
src, dst, perms = m.groups()
# print("\n".join([src, dst, perms]))
src = fsdec(os.path.abspath(fsenc(src))) src = fsdec(os.path.abspath(fsenc(src)))
dst = dst.strip("/") dst = dst.strip("/")
mount[dst] = src mount[dst] = src
mread[dst] = [] mread[dst] = []
mwrite[dst] = [] mwrite[dst] = []
mflags[dst] = {}
perms = perms.split(":") perms = perms.split(":")
for (lvl, uname) in [[x[0], x[1:]] for x in perms]: for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
if lvl == "c":
# config option, currently switches only
mflags[dst][uname] = True
if uname == "": if uname == "":
uname = "*" uname = "*"
if lvl in "ra": if lvl in "ra":
@@ -244,14 +255,15 @@ class AuthSrv(object):
if self.args.c: if self.args.c:
for cfg_fn in self.args.c: for cfg_fn in self.args.c:
with open(cfg_fn, "rb") as f: with open(cfg_fn, "rb") as f:
self._parse_config_file(f, user, mread, mwrite, mount) self._parse_config_file(f, user, mread, mwrite, mflags, mount)
self.all_writable = []
if not mount: if not mount:
# -h says our defaults are CWD at root and read/write for everyone # -h says our defaults are CWD at root and read/write for everyone
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"]) vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
elif "" not in mount: elif "" not in mount:
# there's volumes but no root; make root inaccessible # there's volumes but no root; make root inaccessible
vfs = VFS(os.path.abspath("."), "", [], []) vfs = VFS(os.path.abspath("."), "")
maxdepth = 0 maxdepth = 0
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))): for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
@@ -261,12 +273,18 @@ class AuthSrv(object):
if dst == "": if dst == "":
# rootfs was mapped; fully replaces the default CWD vfs # rootfs was mapped; fully replaces the default CWD vfs
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst]) vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst], mflags[dst])
continue continue
v = vfs.add(mount[dst], dst) v = vfs.add(mount[dst], dst)
v.uread = mread[dst] v.uread = mread[dst]
v.uwrite = mwrite[dst] v.uwrite = mwrite[dst]
v.flags = mflags[dst]
if v.uwrite:
self.all_writable.append(v)
if vfs.uwrite and vfs not in self.all_writable:
self.all_writable.append(vfs)
missing_users = {} missing_users = {}
for d in [mread, mwrite]: for d in [mread, mwrite]:

View File

@@ -29,7 +29,7 @@ class BrokerMp(object):
self.mutex = threading.Lock() self.mutex = threading.Lock()
cores = self.args.j cores = self.args.j
if cores is None: if not cores:
cores = mp.cpu_count() cores = mp.cpu_count()
self.log("broker", "booting {} subprocesses".format(cores)) self.log("broker", "booting {} subprocesses".format(cores))

View File

@@ -73,7 +73,7 @@ class MpWorker(object):
if PY2: if PY2:
sck = pickle.loads(sck) # nosec sck = pickle.loads(sck) # nosec
self.log("%s %s" % addr, "-" * 4 + "C-qpop") self.log("%s %s" % addr, "\033[1;30m|%sC-qpop\033[0m" % ("-" * 4,))
self.httpsrv.accept(sck, addr) self.httpsrv.accept(sck, addr)
with self.mutex: with self.mutex:

View File

@@ -28,7 +28,7 @@ class BrokerThr(object):
def put(self, want_retval, dest, *args): def put(self, want_retval, dest, *args):
if dest == "httpconn": if dest == "httpconn":
sck, addr = args sck, addr = args
self.log("%s %s" % addr, "-" * 4 + "C-qpop") self.log("%s %s" % addr, "\033[1;30m|%sC-qpop\033[0m" % ("-" * 4,))
self.httpsrv.accept(sck, addr) self.httpsrv.accept(sck, addr)
else: else:

View File

@@ -6,6 +6,8 @@ import stat
import gzip import gzip
import time import time
import json import json
import socket
import ctypes
from datetime import datetime from datetime import datetime
import calendar import calendar
@@ -14,9 +16,6 @@ from .util import * # noqa # pylint: disable=unused-wildcard-import
if not PY2: if not PY2:
unicode = str unicode = str
from html import escape as html_escape
else:
from cgi import escape as html_escape # pylint: disable=no-name-in-module
class HttpCli(object): class HttpCli(object):
@@ -25,9 +24,11 @@ class HttpCli(object):
""" """
def __init__(self, conn): def __init__(self, conn):
self.t0 = time.time()
self.conn = conn self.conn = conn
self.s = conn.s self.s = conn.s
self.sr = conn.sr self.sr = conn.sr
self.ip = conn.addr[0]
self.addr = conn.addr self.addr = conn.addr
self.args = conn.args self.args = conn.args
self.auth = conn.auth self.auth = conn.auth
@@ -36,13 +37,13 @@ class HttpCli(object):
self.bufsz = 1024 * 32 self.bufsz = 1024 * 32
self.absolute_urls = False self.absolute_urls = False
self.out_headers = {} self.out_headers = {"Access-Control-Allow-Origin": "*"}
def log(self, msg): def log(self, msg):
self.log_func(self.log_src, msg) self.log_func(self.log_src, msg)
def _check_nonfatal(self, ex): def _check_nonfatal(self, ex):
return ex.code in [403, 404] return ex.code < 400 or ex.code == 404
def _assert_safe_rem(self, rem): def _assert_safe_rem(self, rem):
# sanity check to prevent any disasters # sanity check to prevent any disasters
@@ -83,11 +84,16 @@ class HttpCli(object):
v = self.headers.get("connection", "").lower() v = self.headers.get("connection", "").lower()
self.keepalive = not v.startswith("close") self.keepalive = not v.startswith("close")
v = self.headers.get("x-forwarded-for", None)
if v is not None and self.conn.addr[0] in ["127.0.0.1", "::1"]:
self.ip = v.split(",")[0]
self.log_src = self.conn.set_rproxy(self.ip)
self.uname = "*" self.uname = "*"
if "cookie" in self.headers: if "cookie" in self.headers:
cookies = self.headers["cookie"].split(";") cookies = self.headers["cookie"].split(";")
for k, v in [x.split("=", 1) for x in cookies]: for k, v in [x.split("=", 1) for x in cookies]:
if k != "cppwd": if k.strip() != "cppwd":
continue continue
v = unescape_cookie(v) v = unescape_cookie(v)
@@ -123,11 +129,20 @@ class HttpCli(object):
self.uparam = uparam self.uparam = uparam
self.vpath = unquotep(vpath) self.vpath = unquotep(vpath)
ua = self.headers.get("user-agent", "")
if ua.startswith("rclone/"):
uparam["raw"] = True
uparam["dots"] = True
try: try:
if self.mode in ["GET", "HEAD"]: if self.mode in ["GET", "HEAD"]:
return self.handle_get() and self.keepalive return self.handle_get() and self.keepalive
elif self.mode == "POST": elif self.mode == "POST":
return self.handle_post() and self.keepalive return self.handle_post() and self.keepalive
elif self.mode == "PUT":
return self.handle_put() and self.keepalive
elif self.mode == "OPTIONS":
return self.handle_options() and self.keepalive
else: else:
raise Pebkac(400, 'invalid HTTP mode "{0}"'.format(self.mode)) raise Pebkac(400, 'invalid HTTP mode "{0}"'.format(self.mode))
@@ -135,7 +150,7 @@ class HttpCli(object):
try: try:
# self.log("pebkac at httpcli.run #2: " + repr(ex)) # self.log("pebkac at httpcli.run #2: " + repr(ex))
self.keepalive = self._check_nonfatal(ex) self.keepalive = self._check_nonfatal(ex)
self.loud_reply(str(ex), status=ex.code) self.loud_reply("{}: {}".format(str(ex), self.vpath), status=ex.code)
return self.keepalive return self.keepalive
except Pebkac: except Pebkac:
return False return False
@@ -143,9 +158,7 @@ class HttpCli(object):
def send_headers(self, length, status=200, mime=None, headers={}): def send_headers(self, length, status=200, mime=None, headers={}):
response = ["HTTP/1.1 {} {}".format(status, HTTPCODE[status])] response = ["HTTP/1.1 {} {}".format(status, HTTPCODE[status])]
if length is None: if length is not None:
self.keepalive = False
else:
response.append("Content-Length: " + str(length)) response.append("Content-Length: " + str(length))
# close if unknown length, otherwise take client's preference # close if unknown length, otherwise take client's preference
@@ -176,7 +189,8 @@ class HttpCli(object):
self.send_headers(len(body), status, mime, headers) self.send_headers(len(body), status, mime, headers)
try: try:
self.s.sendall(body) if self.mode != "HEAD":
self.s.sendall(body)
except: except:
raise Pebkac(400, "client d/c while replying body") raise Pebkac(400, "client d/c while replying body")
@@ -184,7 +198,7 @@ class HttpCli(object):
def loud_reply(self, body, *args, **kwargs): def loud_reply(self, body, *args, **kwargs):
self.log(body.rstrip()) self.log(body.rstrip())
self.reply(b"<pre>" + body.encode("utf-8"), *list(args), **kwargs) self.reply(b"<pre>" + body.encode("utf-8") + b"\r\n", *list(args), **kwargs)
def handle_get(self): def handle_get(self):
logmsg = "{:4} {}".format(self.mode, self.req) logmsg = "{:4} {}".format(self.mode, self.req)
@@ -230,6 +244,30 @@ class HttpCli(object):
return self.tx_browser() return self.tx_browser()
def handle_options(self):
self.log("OPTIONS " + self.req)
self.send_headers(
None,
204,
headers={
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Methods": "*",
"Access-Control-Allow-Headers": "*",
},
)
return True
def handle_put(self):
self.log("PUT " + self.req)
if self.headers.get("expect", "").lower() == "100-continue":
try:
self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n")
except:
raise Pebkac(400, "client d/c before 100 continue")
return self.handle_stash()
def handle_post(self): def handle_post(self):
self.log("POST " + self.req) self.log("POST " + self.req)
@@ -243,6 +281,9 @@ class HttpCli(object):
if not ctype: if not ctype:
raise Pebkac(400, "you can't post without a content-type header") raise Pebkac(400, "you can't post without a content-type header")
if "raw" in self.uparam:
return self.handle_stash()
if "multipart/form-data" in ctype: if "multipart/form-data" in ctype:
return self.handle_post_multipart() return self.handle_post_multipart()
@@ -255,6 +296,37 @@ class HttpCli(object):
raise Pebkac(405, "don't know how to handle {} POST".format(ctype)) raise Pebkac(405, "don't know how to handle {} POST".format(ctype))
def handle_stash(self):
remains = int(self.headers.get("content-length", None))
if remains is None:
reader = read_socket_unbounded(self.sr)
self.keepalive = False
else:
reader = read_socket(self.sr, remains)
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
fdir = os.path.join(vfs.realpath, rem)
addr = self.ip.replace(":", ".")
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
path = os.path.join(fdir, fn)
with open(path, "wb", 512 * 1024) as f:
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
spd = self._spd(post_sz)
self.log("{} wrote {}/{} bytes to {}".format(spd, post_sz, remains, path))
self.reply("{}\n{}\n".format(post_sz, sha_b64).encode("utf-8"))
return True
def _spd(self, nbytes, add=True):
if add:
self.conn.nbyte += nbytes
spd1 = get_spd(nbytes, self.t0)
spd2 = get_spd(self.conn.nbyte, self.conn.t0)
return spd1 + " " + spd2
def handle_post_multipart(self): def handle_post_multipart(self):
self.parser = MultipartParser(self.log, self.sr, self.headers) self.parser = MultipartParser(self.log, self.sr, self.headers)
self.parser.parse() self.parser.parse()
@@ -314,9 +386,11 @@ class HttpCli(object):
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True) vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
body["vdir"] = self.vpath body["vtop"] = vfs.vpath
body["rdir"] = os.path.join(vfs.realpath, rem) body["ptop"] = vfs.realpath
body["addr"] = self.addr[0] body["prel"] = rem
body["addr"] = self.ip
body["flag"] = vfs.flags
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body) x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
response = x.get() response = x.get()
@@ -338,7 +412,10 @@ class HttpCli(object):
except KeyError: except KeyError:
raise Pebkac(400, "need hash and wark headers for binary POST") raise Pebkac(400, "need hash and wark headers for binary POST")
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", wark, chash) vfs, _ = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
ptop = vfs.realpath
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", ptop, wark, chash)
response = x.get() response = x.get()
chunksize, cstart, path, lastmod = response chunksize, cstart, path, lastmod = response
@@ -383,8 +460,8 @@ class HttpCli(object):
self.log("clone {} done".format(cstart[0])) self.log("clone {} done".format(cstart[0]))
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", wark, chash) x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
num_left = x.get() num_left, path = x.get()
if not WINDOWS and num_left == 0: if not WINDOWS and num_left == 0:
times = (int(time.time()), int(lastmod)) times = (int(time.time()), int(lastmod))
@@ -394,7 +471,9 @@ class HttpCli(object):
except: except:
self.log("failed to utime ({}, {})".format(path, times)) self.log("failed to utime ({}, {})".format(path, times))
self.loud_reply("thank") spd = self._spd(post_sz)
self.log("{} thank".format(spd))
self.reply(b"thank")
return True return True
def handle_login(self): def handle_login(self):
@@ -407,7 +486,7 @@ class HttpCli(object):
msg = "naw dude" msg = "naw dude"
pwd = "x" # nosec pwd = "x" # nosec
h = {"Set-Cookie": "cppwd={}; Path=/".format(pwd)} h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)}
html = self.conn.tpl_msg.render(h1=msg, h2='<a href="/">ack</a>', redir="/") html = self.conn.tpl_msg.render(h1=msg, h2='<a href="/">ack</a>', redir="/")
self.reply(html.encode("utf-8"), headers=h) self.reply(html.encode("utf-8"), headers=h)
return True return True
@@ -440,7 +519,7 @@ class HttpCli(object):
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/") vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
html = self.conn.tpl_msg.render( html = self.conn.tpl_msg.render(
h2='<a href="/{}">go to /{}</a>'.format( h2='<a href="/{}">go to /{}</a>'.format(
quotep(vpath), html_escape(vpath, quote=False) quotep(vpath), html_escape(vpath)
), ),
pre="aight", pre="aight",
click=True, click=True,
@@ -474,7 +553,7 @@ class HttpCli(object):
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/") vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
html = self.conn.tpl_msg.render( html = self.conn.tpl_msg.render(
h2='<a href="/{}?edit">go to /{}?edit</a>'.format( h2='<a href="/{}?edit">go to /{}?edit</a>'.format(
quotep(vpath), html_escape(vpath, quote=False) quotep(vpath), html_escape(vpath)
), ),
pre="aight", pre="aight",
click=True, click=True,
@@ -496,33 +575,40 @@ class HttpCli(object):
self.log("discarding incoming file without filename") self.log("discarding incoming file without filename")
# fallthrough # fallthrough
fn = os.devnull
if p_file and not nullwrite: if p_file and not nullwrite:
fdir = os.path.join(vfs.realpath, rem) fdir = os.path.join(vfs.realpath, rem)
fn = os.path.join(fdir, sanitize_fn(p_file)) fname = sanitize_fn(p_file)
if not os.path.isdir(fsenc(fdir)): if not os.path.isdir(fsenc(fdir)):
raise Pebkac(404, "that folder does not exist") raise Pebkac(404, "that folder does not exist")
# TODO broker which avoid this race and suffix = ".{:.6f}-{}".format(time.time(), self.ip)
# provides a new filename if taken (same as up2k) open_args = {"fdir": fdir, "suffix": suffix}
if os.path.exists(fsenc(fn)): else:
fn += ".{:.6f}-{}".format(time.time(), self.addr[0]) open_args = {}
# using current-time instead of t0 cause clients fname = os.devnull
# may reuse a name for multiple files in one post fdir = ""
try: try:
with open(fsenc(fn), "wb") as f: with ren_open(fname, "wb", 512 * 1024, **open_args) as f:
self.log("writing to {0}".format(fn)) f, fname = f["orz"]
self.log("writing to {}/{}".format(fdir, fname))
sz, sha512_hex, _ = hashcopy(self.conn, p_data, f) sz, sha512_hex, _ = hashcopy(self.conn, p_data, f)
if sz == 0: if sz == 0:
raise Pebkac(400, "empty files in post") raise Pebkac(400, "empty files in post")
files.append([sz, sha512_hex]) files.append([sz, sha512_hex])
self.conn.nbyte += sz
except Pebkac: except Pebkac:
if fn != os.devnull: if fname != os.devnull:
os.rename(fsenc(fn), fsenc(fn + ".PARTIAL")) fp = os.path.join(fdir, fname)
suffix = ".PARTIAL"
try:
os.rename(fsenc(fp), fsenc(fp + suffix))
except:
fp = fp[: -len(suffix)]
os.rename(fsenc(fp), fsenc(fp + suffix))
raise raise
@@ -546,7 +632,9 @@ class HttpCli(object):
# truncated SHA-512 prevents length extension attacks; # truncated SHA-512 prevents length extension attacks;
# using SHA-512/224, optionally SHA-512/256 = :64 # using SHA-512/224, optionally SHA-512/256 = :64
self.log(msg) vspd = self._spd(sz_total, False)
self.log("{} {}".format(vspd, msg))
if not nullwrite: if not nullwrite:
# TODO this is bad # TODO this is bad
log_fn = "up.{:.6f}.txt".format(t0) log_fn = "up.{:.6f}.txt".format(t0)
@@ -556,7 +644,7 @@ class HttpCli(object):
"\n".join( "\n".join(
unicode(x) unicode(x)
for x in [ for x in [
":".join(unicode(x) for x in self.addr), ":".join(unicode(x) for x in [self.ip, self.addr[1]]),
msg.rstrip(), msg.rstrip(),
] ]
) )
@@ -568,7 +656,7 @@ class HttpCli(object):
html = self.conn.tpl_msg.render( html = self.conn.tpl_msg.render(
h2='<a href="/{}">return to /{}</a>'.format( h2='<a href="/{}">return to /{}</a>'.format(
quotep(self.vpath), html_escape(self.vpath, quote=False) quotep(self.vpath), html_escape(self.vpath)
), ),
pre=msg, pre=msg,
) )
@@ -605,7 +693,7 @@ class HttpCli(object):
return True return True
fp = os.path.join(vfs.realpath, rem) fp = os.path.join(vfs.realpath, rem)
srv_lastmod = -1 srv_lastmod = srv_lastmod3 = -1
try: try:
st = os.stat(fsenc(fp)) st = os.stat(fsenc(fp))
srv_lastmod = st.st_mtime srv_lastmod = st.st_mtime
@@ -616,7 +704,16 @@ class HttpCli(object):
# if file exists, chekc that timestamp matches the client's # if file exists, chekc that timestamp matches the client's
if srv_lastmod >= 0: if srv_lastmod >= 0:
if cli_lastmod3 not in [-1, srv_lastmod3]: same_lastmod = cli_lastmod3 in [-1, srv_lastmod3]
if not same_lastmod:
# some filesystems/transports limit precision to 1sec, hopefully floored
same_lastmod = (
srv_lastmod == int(srv_lastmod)
and cli_lastmod3 > srv_lastmod3
and cli_lastmod3 - srv_lastmod3 < 1000
)
if not same_lastmod:
response = json.dumps( response = json.dumps(
{ {
"ok": False, "ok": False,
@@ -647,7 +744,7 @@ class HttpCli(object):
if p_field != "body": if p_field != "body":
raise Pebkac(400, "expected body, got {}".format(p_field)) raise Pebkac(400, "expected body, got {}".format(p_field))
with open(fp, "wb") as f: with open(fp, "wb", 512 * 1024) as f:
sz, sha512, _ = hashcopy(self.conn, p_data, f) sz, sha512, _ = hashcopy(self.conn, p_data, f)
new_lastmod = os.stat(fsenc(fp)).st_mtime new_lastmod = os.stat(fsenc(fp)).st_mtime
@@ -672,9 +769,12 @@ class HttpCli(object):
cli_dt = time.strptime(cli_lastmod, "%a, %d %b %Y %H:%M:%S GMT") cli_dt = time.strptime(cli_lastmod, "%a, %d %b %Y %H:%M:%S GMT")
cli_ts = calendar.timegm(cli_dt) cli_ts = calendar.timegm(cli_dt)
return file_lastmod, int(file_ts) > int(cli_ts) return file_lastmod, int(file_ts) > int(cli_ts)
except: except Exception as ex:
self.log("bad lastmod format: {}".format(cli_lastmod)) self.log(
self.log(" expected format: {}".format(file_lastmod)) "lastmod {}\nremote: [{}]\n local: [{}]".format(
repr(ex), cli_lastmod, file_lastmod
)
)
return file_lastmod, file_lastmod != cli_lastmod return file_lastmod, file_lastmod != cli_lastmod
return file_lastmod, True return file_lastmod, True
@@ -697,6 +797,8 @@ class HttpCli(object):
editions[ext or "plain"] = [fs_path, st.st_size] editions[ext or "plain"] = [fs_path, st.st_size]
except: except:
pass pass
if not self.vpath.startswith(".cpr/"):
break
if not editions: if not editions:
raise Pebkac(404) raise Pebkac(404)
@@ -769,11 +871,20 @@ class HttpCli(object):
else: else:
upper = file_sz upper = file_sz
if lower < 0 or lower >= file_sz or upper < 0 or upper > file_sz: if upper > file_sz:
upper = file_sz
if lower < 0 or lower >= upper:
raise Exception() raise Exception()
except: except:
raise Pebkac(400, "invalid range requested: " + hrange) err = "invalid range ({}), size={}".format(hrange, file_sz)
self.loud_reply(
err,
status=416,
headers={"Content-Range": "bytes */{}".format(file_sz)},
)
return True
status = 206 status = 206
self.out_headers["Content-Range"] = "bytes {}-{}/{}".format( self.out_headers["Content-Range"] = "bytes {}-{}/{}".format(
@@ -782,6 +893,7 @@ class HttpCli(object):
logtail += " [\033[36m{}-{}\033[0m]".format(lower, upper) logtail += " [\033[36m{}-{}\033[0m]".format(lower, upper)
use_sendfile = False
if decompress: if decompress:
open_func = gzip.open open_func = gzip.open
open_args = [fsenc(fs_path), "rb"] open_args = [fsenc(fs_path), "rb"]
@@ -791,10 +903,15 @@ class HttpCli(object):
open_func = open open_func = open
# 512 kB is optimal for huge files, use 64k # 512 kB is optimal for huge files, use 64k
open_args = [fsenc(fs_path), "rb", 64 * 1024] open_args = [fsenc(fs_path), "rb", 64 * 1024]
if hasattr(os, "sendfile"):
use_sendfile = not self.args.no_sendfile
# #
# send reply # send reply
if not is_compressed:
self.out_headers["Cache-Control"] = "no-cache"
self.out_headers["Accept-Ranges"] = "bytes" self.out_headers["Accept-Ranges"] = "bytes"
self.send_headers( self.send_headers(
length=upper - lower, length=upper - lower,
@@ -808,33 +925,23 @@ class HttpCli(object):
self.log(logmsg) self.log(logmsg)
return True return True
ret = True
with open_func(*open_args) as f: with open_func(*open_args) as f:
remains = upper - lower if use_sendfile:
f.seek(lower) remains = sendfile_kern(lower, upper, f, self.s)
while remains > 0: else:
# time.sleep(0.01) remains = sendfile_py(lower, upper, f, self.s)
buf = f.read(4096)
if not buf:
break
if remains < len(buf): if remains > 0:
buf = buf[:remains] logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
remains -= len(buf) spd = self._spd((upper - lower) - remains)
self.log("{}, {}".format(logmsg, spd))
try: return ret
self.s.sendall(buf)
except:
logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
self.log(logmsg)
return False
self.log(logmsg)
return True
def tx_md(self, fs_path): def tx_md(self, fs_path):
logmsg = "{:4} {} ".format("", self.req) logmsg = "{:4} {} ".format("", self.req)
if "edit" in self.uparam: if "edit2" in self.uparam:
html_path = "web/mde.html" html_path = "web/mde.html"
template = self.conn.tpl_mde template = self.conn.tpl_mde
else: else:
@@ -844,20 +951,30 @@ class HttpCli(object):
html_path = os.path.join(E.mod, html_path) html_path = os.path.join(E.mod, html_path)
st = os.stat(fsenc(fs_path)) st = os.stat(fsenc(fs_path))
sz_md = st.st_size # sz_md = st.st_size
ts_md = st.st_mtime ts_md = st.st_mtime
st = os.stat(fsenc(html_path)) st = os.stat(fsenc(html_path))
ts_html = st.st_mtime ts_html = st.st_mtime
# TODO dont load into memory ;_;
# (trivial fix, count the &'s)
with open(fsenc(fs_path), "rb") as f:
md = f.read().replace(b"&", b"&amp;")
sz_md = len(md)
file_ts = max(ts_md, ts_html) file_ts = max(ts_md, ts_html)
file_lastmod, do_send = self._chk_lastmod(file_ts) file_lastmod, do_send = self._chk_lastmod(file_ts)
self.out_headers["Last-Modified"] = file_lastmod self.out_headers["Last-Modified"] = file_lastmod
self.out_headers["Cache-Control"] = "no-cache"
status = 200 if do_send else 304 status = 200 if do_send else 304
targs = { targs = {
"title": html_escape(self.vpath, quote=False), "edit": "edit" in self.uparam,
"title": html_escape(self.vpath),
"lastmod": int(ts_md * 1000), "lastmod": int(ts_md * 1000),
"md_plug": "true" if self.args.emp else "false",
"md_chk_rate": self.args.mcr,
"md": "", "md": "",
} }
sz_html = len(template.render(**targs).encode("utf-8")) sz_html = len(template.render(**targs).encode("utf-8"))
@@ -868,9 +985,7 @@ class HttpCli(object):
self.log(logmsg) self.log(logmsg)
return True return True
with open(fsenc(fs_path), "rb") as f: # TODO jinja2 can stream this right?
md = f.read()
targs["md"] = md.decode("utf-8", "replace") targs["md"] = md.decode("utf-8", "replace")
html = template.render(**targs).encode("utf-8") html = template.render(**targs).encode("utf-8")
try: try:
@@ -899,7 +1014,7 @@ class HttpCli(object):
else: else:
vpath += "/" + node vpath += "/" + node
vpnodes.append([quotep(vpath) + "/", html_escape(node, quote=False)]) vpnodes.append([quotep(vpath) + "/", html_escape(node)])
vn, rem = self.auth.vfs.get( vn, rem = self.auth.vfs.get(
self.vpath, self.uname, self.readable, self.writable self.vpath, self.uname, self.readable, self.writable
@@ -914,6 +1029,10 @@ class HttpCli(object):
if abspath.endswith(".md") and "raw" not in self.uparam: if abspath.endswith(".md") and "raw" not in self.uparam:
return self.tx_md(abspath) return self.tx_md(abspath)
bad = "{0}.hist{0}up2k.".format(os.sep)
if abspath.endswith(bad + "db") or abspath.endswith(bad + "snap"):
raise Pebkac(403)
return self.tx_file(abspath) return self.tx_file(abspath)
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname) fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname)
@@ -935,9 +1054,13 @@ class HttpCli(object):
except: except:
pass pass
# show dotfiles if permitted and requested
if not self.args.ed or "dots" not in self.uparam:
vfs_ls = exclude_dotfiles(vfs_ls)
dirs = [] dirs = []
files = [] files = []
for fn in exclude_dotfiles(vfs_ls): for fn in vfs_ls:
base = "" base = ""
href = fn href = fn
if self.absolute_urls and vpath: if self.absolute_urls and vpath:
@@ -970,7 +1093,12 @@ class HttpCli(object):
dt = datetime.utcfromtimestamp(inf.st_mtime) dt = datetime.utcfromtimestamp(inf.st_mtime)
dt = dt.strftime("%Y-%m-%d %H:%M:%S") dt = dt.strftime("%Y-%m-%d %H:%M:%S")
item = [margin, quotep(href), html_escape(fn, quote=False), sz, dt] try:
ext = "---" if is_dir else fn.rsplit(".", 1)[1]
except:
ext = "%"
item = [margin, quotep(href), html_escape(fn), sz, ext, dt]
if is_dir: if is_dir:
dirs.append(item) dirs.append(item)
else: else:
@@ -983,6 +1111,45 @@ class HttpCli(object):
with open(fsenc(fn), "rb") as f: with open(fsenc(fn), "rb") as f:
logues[n] = f.read().decode("utf-8") logues[n] = f.read().decode("utf-8")
if False:
# this is a mistake
md = None
for fn in [x[2] for x in files]:
if fn.lower() == "readme.md":
fn = os.path.join(abspath, fn)
with open(fn, "rb") as f:
md = f.read().decode("utf-8")
break
srv_info = []
try:
if not self.args.nih:
srv_info.append(str(socket.gethostname()).split(".")[0])
except:
self.log("#wow #whoa")
pass
try:
# some fuses misbehave
if not self.args.nid:
if WINDOWS:
bfree = ctypes.c_ulonglong(0)
ctypes.windll.kernel32.GetDiskFreeSpaceExW(
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
)
srv_info.append(humansize(bfree.value) + " free")
else:
sv = os.statvfs(abspath)
free = humansize(sv.f_frsize * sv.f_bfree, True)
total = humansize(sv.f_frsize * sv.f_blocks, True)
srv_info.append(free + " free")
srv_info.append(total)
except:
pass
ts = "" ts = ""
# ts = "?{}".format(time.time()) # ts = "?{}".format(time.time())
@@ -996,7 +1163,8 @@ class HttpCli(object):
ts=ts, ts=ts,
prologue=logues[0], prologue=logues[0],
epilogue=logues[1], epilogue=logues[1],
title=html_escape(self.vpath, quote=False), title=html_escape(self.vpath),
srv_info="</span> /// <span>".join(srv_info),
) )
self.reply(html.encode("utf-8", "replace")) self.reply(html.encode("utf-8", "replace"))
return True return True

View File

@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import os import os
import sys import sys
import ssl import ssl
import time
import socket import socket
try: try:
@@ -41,9 +42,11 @@ class HttpConn(object):
self.auth = hsrv.auth self.auth = hsrv.auth
self.cert_path = hsrv.cert_path self.cert_path = hsrv.cert_path
self.t0 = time.time()
self.nbyte = 0
self.workload = 0 self.workload = 0
self.log_func = hsrv.log self.log_func = hsrv.log
self.log_src = "{} \033[36m{}".format(addr[0], addr[1]).ljust(26) self.set_rproxy()
env = jinja2.Environment() env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web")) env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
@@ -53,6 +56,19 @@ class HttpConn(object):
self.tpl_md = env.get_template("md.html") self.tpl_md = env.get_template("md.html")
self.tpl_mde = env.get_template("mde.html") self.tpl_mde = env.get_template("mde.html")
def set_rproxy(self, ip=None):
if ip is None:
color = 36
ip = self.addr[0]
self.rproxy = None
else:
color = 34
self.rproxy = ip
self.ip = ip
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
return self.log_src
def respath(self, res_name): def respath(self, res_name):
return os.path.join(E.mod, "web", res_name) return os.path.join(E.mod, "web", res_name)
@@ -86,7 +102,7 @@ class HttpConn(object):
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8")) self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
return return
if method not in [None, b"GET ", b"HEAD", b"POST"]: if method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]:
if self.sr: if self.sr:
self.log("\033[1;31mTODO: cannot do https in jython\033[0m") self.log("\033[1;31mTODO: cannot do https in jython\033[0m")
return return

View File

@@ -6,7 +6,7 @@ import time
import socket import socket
import threading import threading
from .__init__ import E from .__init__ import E, MACOS
from .httpconn import HttpConn from .httpconn import HttpConn
from .authsrv import AuthSrv from .authsrv import AuthSrv
@@ -38,7 +38,7 @@ class HttpSrv(object):
def accept(self, sck, addr): def accept(self, sck, addr):
"""takes an incoming tcp connection and creates a thread to handle it""" """takes an incoming tcp connection and creates a thread to handle it"""
self.log("%s %s" % addr, "-" * 5 + "C-cthr") self.log("%s %s" % addr, "\033[1;30m|%sC-cthr\033[0m" % ("-" * 5,))
thr = threading.Thread(target=self.thr_client, args=(sck, addr)) thr = threading.Thread(target=self.thr_client, args=(sck, addr))
thr.daemon = True thr.daemon = True
thr.start() thr.start()
@@ -66,20 +66,23 @@ class HttpSrv(object):
thr.start() thr.start()
try: try:
self.log("%s %s" % addr, "-" * 6 + "C-crun") self.log("%s %s" % addr, "\033[1;30m|%sC-crun\033[0m" % ("-" * 6,))
cli.run() cli.run()
finally: finally:
self.log("%s %s" % addr, "-" * 7 + "C-done") self.log("%s %s" % addr, "\033[1;30m|%sC-cdone\033[0m" % ("-" * 7,))
try: try:
sck.shutdown(socket.SHUT_RDWR) sck.shutdown(socket.SHUT_RDWR)
sck.close() sck.close()
except (OSError, socket.error) as ex: except (OSError, socket.error) as ex:
self.log( if not MACOS:
"%s %s" % addr, "shut_rdwr err:\n {}\n {}".format(repr(sck), ex), self.log(
) "%s %s" % addr,
if ex.errno not in [10038, 107, 57, 9]: "shut_rdwr err:\n {}\n {}".format(repr(sck), ex),
)
if ex.errno not in [10038, 10054, 107, 57, 9]:
# 10038 No longer considered a socket # 10038 No longer considered a socket
# 10054 Foribly closed by remote
# 107 Transport endpoint not connected # 107 Transport endpoint not connected
# 57 Socket is not connected # 57 Socket is not connected
# 9 Bad file descriptor # 9 Bad file descriptor

View File

@@ -8,7 +8,8 @@ import threading
from datetime import datetime, timedelta from datetime import datetime, timedelta
import calendar import calendar
from .__init__ import PY2, WINDOWS, VT100 from .__init__ import PY2, WINDOWS, MACOS, VT100
from .authsrv import AuthSrv
from .tcpsrv import TcpSrv from .tcpsrv import TcpSrv
from .up2k import Up2k from .up2k import Up2k
from .util import mp from .util import mp
@@ -38,6 +39,10 @@ class SvcHub(object):
self.tcpsrv = TcpSrv(self) self.tcpsrv = TcpSrv(self)
self.up2k = Up2k(self) self.up2k = Up2k(self)
if self.args.e2d and self.args.e2s:
auth = AuthSrv(self.args, self.log, False)
self.up2k.build_indexes(auth.all_writable)
# decide which worker impl to use # decide which worker impl to use
if self.check_mp_enable(): if self.check_mp_enable():
from .broker_mp import BrokerMp as Broker from .broker_mp import BrokerMp as Broker
@@ -111,6 +116,8 @@ class SvcHub(object):
return msg return msg
elif vmin < 3: elif vmin < 3:
return msg return msg
elif MACOS:
return "multiprocessing is wonky on mac osx;"
else: else:
msg = "need python 2.7 or 3.3+ for multiprocessing;" msg = "need python 2.7 or 3.3+ for multiprocessing;"
if not PY2 and vmin < 3: if not PY2 and vmin < 3:
@@ -127,13 +134,13 @@ class SvcHub(object):
return None return None
def check_mp_enable(self): def check_mp_enable(self):
if self.args.j == 0: if self.args.j == 1:
self.log("root", "multiprocessing disabled by argument -j 0;") self.log("root", "multiprocessing disabled by argument -j 1;")
return False return False
if mp.cpu_count() <= 1: if mp.cpu_count() <= 1:
return False return False
try: try:
# support vscode debugger (bonus: same behavior as on windows) # support vscode debugger (bonus: same behavior as on windows)
mp.set_start_method("spawn", True) mp.set_start_method("spawn", True)

View File

@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import re import re
import time import time
import socket import socket
import select
from .util import chkcmd, Counter from .util import chkcmd, Counter
@@ -23,55 +24,75 @@ class TcpSrv(object):
ip = "127.0.0.1" ip = "127.0.0.1"
eps = {ip: "local only"} eps = {ip: "local only"}
if self.args.i != ip: nonlocals = [x for x in self.args.i if x != ip]
eps = self.detect_interfaces(self.args.i) or {self.args.i: "external"} if nonlocals:
eps = self.detect_interfaces(self.args.i)
if not eps:
for x in nonlocals:
eps[x] = "external"
for ip, desc in sorted(eps.items(), key=lambda x: x[1]): for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
self.log( for port in sorted(self.args.p):
"tcpsrv", self.log(
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format( "tcpsrv",
ip, self.args.p, desc "available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
), ip, port, desc
) ),
)
self.srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.srv = []
self.srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) for ip in self.args.i:
for port in self.args.p:
self.srv.append(self._listen(ip, port))
def _listen(self, ip, port):
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
try: try:
self.srv.bind((self.args.i, self.args.p)) srv.bind((ip, port))
return srv
except (OSError, socket.error) as ex: except (OSError, socket.error) as ex:
if ex.errno == 98: if ex.errno == 98:
raise Exception( raise Exception(
"\033[1;31mport {} is busy on interface {}\033[0m".format( "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
self.args.p, self.args.i
)
) )
if ex.errno == 99: if ex.errno == 99:
raise Exception( raise Exception(
"\033[1;31minterface {} does not exist\033[0m".format(self.args.i) "\033[1;31minterface {} does not exist\033[0m".format(ip)
) )
def run(self): def run(self):
self.srv.listen(self.args.nc) for srv in self.srv:
srv.listen(self.args.nc)
self.log("tcpsrv", "listening @ {0}:{1}".format(self.args.i, self.args.p)) ip, port = srv.getsockname()
self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port))
while True: while True:
self.log("tcpsrv", "-" * 1 + "C-ncli") self.log("tcpsrv", "\033[1;30m|%sC-ncli\033[0m" % ("-" * 1,))
if self.num_clients.v >= self.args.nc: if self.num_clients.v >= self.args.nc:
time.sleep(0.1) time.sleep(0.1)
continue continue
self.log("tcpsrv", "-" * 2 + "C-acc1") self.log("tcpsrv", "\033[1;30m|%sC-acc1\033[0m" % ("-" * 2,))
sck, addr = self.srv.accept() ready, _, _ = select.select(self.srv, [], [])
self.log("%s %s" % addr, "-" * 3 + "C-acc2") for srv in ready:
self.num_clients.add() sck, addr = srv.accept()
self.hub.broker.put(False, "httpconn", sck, addr) sip, sport = srv.getsockname()
self.log(
"%s %s" % addr,
"\033[1;30m|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
"-" * 3, sip, sport % 8, sport
),
)
self.num_clients.add()
self.hub.broker.put(False, "httpconn", sck, addr)
def shutdown(self): def shutdown(self):
self.log("tcpsrv", "ok bye") self.log("tcpsrv", "ok bye")
def detect_interfaces(self, listen_ip): def detect_interfaces(self, listen_ips):
eps = {} eps = {}
# get all ips and their interfaces # get all ips and their interfaces
@@ -85,8 +106,9 @@ class TcpSrv(object):
for ln in ip_addr.split("\n"): for ln in ip_addr.split("\n"):
try: try:
ip, dev = r.match(ln.rstrip()).groups() ip, dev = r.match(ln.rstrip()).groups()
if listen_ip in ["0.0.0.0", ip]: for lip in listen_ips:
eps[ip] = dev if lip in ["0.0.0.0", ip]:
eps[ip] = dev
except: except:
pass pass
@@ -113,11 +135,12 @@ class TcpSrv(object):
s.close() s.close()
if default_route and listen_ip in ["0.0.0.0", default_route]: for lip in listen_ips:
desc = "\033[32mexternal" if default_route and lip in ["0.0.0.0", default_route]:
try: desc = "\033[32mexternal"
eps[default_route] += ", " + desc try:
except: eps[default_route] += ", " + desc
eps[default_route] = desc except:
eps[default_route] = desc
return eps return eps

View File

@@ -6,6 +6,9 @@ import os
import re import re
import time import time
import math import math
import json
import gzip
import stat
import shutil import shutil
import base64 import base64
import hashlib import hashlib
@@ -13,7 +16,15 @@ import threading
from copy import deepcopy from copy import deepcopy
from .__init__ import WINDOWS from .__init__ import WINDOWS
from .util import Pebkac, Queue, fsenc, sanitize_fn from .util import Pebkac, Queue, fsdec, fsenc, sanitize_fn, ren_open, atomic_move
HAVE_SQLITE3 = False
try:
import sqlite3
HAVE_SQLITE3 = True
except:
pass
class Up2k(object): class Up2k(object):
@@ -22,20 +33,21 @@ class Up2k(object):
* documentation * documentation
* registry persistence * registry persistence
* ~/.config flatfiles for active jobs * ~/.config flatfiles for active jobs
* wark->path database for finished uploads
""" """
def __init__(self, broker): def __init__(self, broker):
self.broker = broker self.broker = broker
self.args = broker.args self.args = broker.args
self.log = broker.log self.log = broker.log
self.persist = self.args.e2d
# config # config
self.salt = "hunter2" # TODO: config self.salt = "hunter2" # TODO: config
# state # state
self.registry = {}
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.registry = {}
self.db = {}
if WINDOWS: if WINDOWS:
# usually fails to set lastmod too quickly # usually fails to set lastmod too quickly
@@ -44,54 +56,306 @@ class Up2k(object):
thr.daemon = True thr.daemon = True
thr.start() thr.start()
if self.persist:
thr = threading.Thread(target=self._snapshot)
thr.daemon = True
thr.start()
# static # static
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$") self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
if self.persist and not HAVE_SQLITE3:
m = "could not initialize sqlite3, will use in-memory registry only"
self.log("up2k", m)
def _vis_job_progress(self, job):
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
path = os.path.join(job["ptop"], job["prel"], job["name"])
return "{:5.1f}% {}".format(perc, path)
def _vis_reg_progress(self, reg):
ret = []
for _, job in reg.items():
ret.append(self._vis_job_progress(job))
return ret
def register_vpath(self, ptop):
with self.mutex:
if ptop in self.registry:
return None
reg = {}
path = os.path.join(ptop, ".hist", "up2k.snap")
if self.persist and os.path.exists(path):
with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8")
reg = json.loads(j)
for _, job in reg.items():
job["poke"] = time.time()
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
m = [m] + self._vis_reg_progress(reg)
self.log("up2k", "\n".join(m))
self.registry[ptop] = reg
if not self.persist or not HAVE_SQLITE3:
return None
try:
os.mkdir(os.path.join(ptop, ".hist"))
except:
pass
db_path = os.path.join(ptop, ".hist", "up2k.db")
if ptop in self.db:
# self.db[ptop].close()
return None
try:
db = self._open_db(db_path)
self.db[ptop] = db
return db
except Exception as ex:
m = "failed to open [{}]: {}".format(ptop, repr(ex))
self.log("up2k", m)
return None
def build_indexes(self, writeables):
tops = [d.realpath for d in writeables]
for top in tops:
db = self.register_vpath(top)
if db:
# can be symlink so don't `and d.startswith(top)``
excl = set([d for d in tops if d != top])
self._build_dir([db, 0], top, excl, top)
self._drop_lost(db, top)
db.commit()
def _build_dir(self, dbw, top, excl, cdir):
try:
inodes = [fsdec(x) for x in os.listdir(fsenc(cdir))]
except Exception as ex:
self.log("up2k", "listdir: " + repr(ex))
return
histdir = os.path.join(top, ".hist")
for inode in inodes:
abspath = os.path.join(cdir, inode)
try:
inf = os.stat(fsenc(abspath))
except Exception as ex:
self.log("up2k", "stat: " + repr(ex))
continue
if stat.S_ISDIR(inf.st_mode):
if abspath in excl or abspath == histdir:
continue
# self.log("up2k", " dir: {}".format(abspath))
self._build_dir(dbw, top, excl, abspath)
else:
# self.log("up2k", "file: {}".format(abspath))
rp = abspath[len(top) :].replace("\\", "/").strip("/")
c = dbw[0].execute("select * from up where rp = ?", (rp,))
in_db = list(c.fetchall())
if in_db:
_, dts, dsz, _ = in_db[0]
if len(in_db) > 1:
m = "WARN: multiple entries: [{}] => [{}] ({})"
self.log("up2k", m.format(top, rp, len(in_db)))
dts = -1
if dts == inf.st_mtime and dsz == inf.st_size:
continue
m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
top, rp, dts, inf.st_mtime, dsz, inf.st_size
)
self.log("up2k", m)
self.db_rm(dbw[0], rp)
dbw[1] += 1
in_db = None
self.log("up2k", "file: {}".format(abspath))
try:
hashes = self._hashlist_from_file(abspath)
except Exception as ex:
self.log("up2k", "hash: " + repr(ex))
continue
wark = self._wark_from_hashlist(inf.st_size, hashes)
self.db_add(dbw[0], wark, rp, inf.st_mtime, inf.st_size)
dbw[1] += 1
if dbw[1] > 1024:
dbw[0].commit()
dbw[1] = 0
def _drop_lost(self, db, top):
rm = []
c = db.execute("select * from up")
for dwark, dts, dsz, drp in c:
abspath = os.path.join(top, drp)
try:
if not os.path.exists(fsenc(abspath)):
rm.append(drp)
except Exception as ex:
self.log("up2k", "stat-rm: " + repr(ex))
if not rm:
return
self.log("up2k", "forgetting {} deleted files".format(len(rm)))
for rp in rm:
self.db_rm(db, rp)
def _open_db(self, db_path):
conn = sqlite3.connect(db_path, check_same_thread=False)
try:
c = conn.execute(r"select * from kv where k = 'sver'")
rows = c.fetchall()
if rows:
ver = rows[0][1]
else:
self.log("up2k", "WARN: no sver in kv, DB corrupt?")
ver = "unknown"
if ver == "1":
try:
nfiles = next(conn.execute("select count(w) from up"))[0]
self.log("up2k", "found DB at {} |{}|".format(db_path, nfiles))
return conn
except Exception as ex:
m = "WARN: could not list files, DB corrupt?\n " + repr(ex)
self.log("up2k", m)
m = "REPLACING unsupported DB (v.{}) at {}".format(ver, db_path)
self.log("up2k", m)
conn.close()
os.unlink(db_path)
conn = sqlite3.connect(db_path, check_same_thread=False)
except:
pass
# sqlite is variable-width only, no point in using char/nchar/varchar
for cmd in [
r"create table kv (k text, v text)",
r"create table up (w text, mt int, sz int, rp text)",
r"insert into kv values ('sver', '1')",
r"create index up_w on up(w)",
]:
conn.execute(cmd)
conn.commit()
self.log("up2k", "created DB at {}".format(db_path))
return conn
def handle_json(self, cj): def handle_json(self, cj):
self.register_vpath(cj["ptop"])
cj["name"] = sanitize_fn(cj["name"]) cj["name"] = sanitize_fn(cj["name"])
cj["poke"] = time.time()
wark = self._get_wark(cj) wark = self._get_wark(cj)
now = time.time() now = time.time()
job = None
with self.mutex: with self.mutex:
# TODO use registry persistence here to symlink any matching wark db = self.db.get(cj["ptop"], None)
if wark in self.registry: reg = self.registry[cj["ptop"]]
job = self.registry[wark] if db:
if job["rdir"] != cj["rdir"] or job["name"] != cj["name"]: cur = db.execute(r"select * from up where w = ?", (wark,))
src = os.path.join(job["rdir"], job["name"]) for _, dtime, dsize, dp_rel in cur:
dst = os.path.join(cj["rdir"], cj["name"]) dp_abs = os.path.join(cj["ptop"], dp_rel).replace("\\", "/")
# relying on path.exists to return false on broken symlinks
if os.path.exists(fsenc(dp_abs)):
try:
prel, name = dp_rel.rsplit("/", 1)
except:
prel = ""
name = dp_rel
job = {
"name": name,
"prel": prel,
"vtop": cj["vtop"],
"ptop": cj["ptop"],
"flag": cj["flag"],
"size": dsize,
"lmod": dtime,
"hash": [],
"need": [],
}
break
if job and wark in reg:
del reg[wark]
if job or wark in reg:
job = job or reg[wark]
if job["prel"] == cj["prel"] and job["name"] == cj["name"]:
# ensure the files haven't been deleted manually
names = [job[x] for x in ["name", "tnam"] if x in job]
for fn in names:
path = os.path.join(job["ptop"], job["prel"], fn)
try:
if os.path.getsize(path) > 0:
# upload completed or both present
break
except:
# missing; restart
job = None
break
else:
# file contents match, but not the path
src = os.path.join(job["ptop"], job["prel"], job["name"])
dst = os.path.join(cj["ptop"], cj["prel"], cj["name"])
vsrc = os.path.join(job["vtop"], job["prel"], job["name"])
vsrc = vsrc.replace("\\", "/") # just for prints anyways
if job["need"]: if job["need"]:
self.log("up2k", "unfinished:\n {0}\n {1}".format(src, dst)) self.log("up2k", "unfinished:\n {0}\n {1}".format(src, dst))
err = "partial upload exists at a different location; please resume uploading here instead:\n{0}{1} ".format( err = "partial upload exists at a different location; please resume uploading here instead:\n"
job["vdir"], job["name"] err += vsrc + " "
) raise Pebkac(400, err)
elif "nodupe" in job["flag"]:
self.log("up2k", "dupe-reject:\n {0}\n {1}".format(src, dst))
err = "upload rejected, file already exists:\n " + vsrc + " "
raise Pebkac(400, err) raise Pebkac(400, err)
else: else:
# symlink to the client-provided name, # symlink to the client-provided name,
# returning the previous upload info # returning the previous upload info
job = deepcopy(job) job = deepcopy(job)
suffix = self._suffix(dst, now, job["addr"]) for k in ["ptop", "vtop", "prel"]:
job["name"] = cj["name"] + suffix job[k] = cj[k]
self._symlink(src, dst + suffix)
else: pdir = os.path.join(cj["ptop"], cj["prel"])
job["name"] = self._untaken(pdir, cj["name"], now, cj["addr"])
dst = os.path.join(job["ptop"], job["prel"], job["name"])
os.unlink(fsenc(dst)) # TODO ed pls
self._symlink(src, dst)
if not job:
job = { job = {
"wark": wark, "wark": wark,
"t0": now, "t0": now,
"addr": cj["addr"],
"vdir": cj["vdir"],
"rdir": cj["rdir"],
# client-provided, sanitized by _get_wark:
"name": cj["name"],
"size": cj["size"],
"lmod": cj["lmod"],
"hash": deepcopy(cj["hash"]), "hash": deepcopy(cj["hash"]),
"need": [],
} }
# client-provided, sanitized by _get_wark: name, size, lmod
path = os.path.join(job["rdir"], job["name"]) for k in [
job["name"] += self._suffix(path, now, cj["addr"]) "addr",
"vtop",
"ptop",
"prel",
"flag",
"name",
"size",
"lmod",
"poke",
]:
job[k] = cj[k]
# one chunk may occur multiple times in a file; # one chunk may occur multiple times in a file;
# filter to unique values for the list of missing chunks # filter to unique values for the list of missing chunks
# (preserve order to reduce disk thrashing) # (preserve order to reduce disk thrashing)
job["need"] = []
lut = {} lut = {}
for k in cj["hash"]: for k in cj["hash"]:
if k not in lut: if k not in lut:
@@ -108,13 +372,12 @@ class Up2k(object):
"wark": wark, "wark": wark,
} }
def _suffix(self, fpath, ts, ip): def _untaken(self, fdir, fname, ts, ip):
# TODO broker which avoid this race and # TODO broker which avoid this race and
# provides a new filename if taken (same as bup) # provides a new filename if taken (same as bup)
if not os.path.exists(fsenc(fpath)): suffix = ".{:.6f}-{}".format(ts, ip)
return "" with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
return f["orz"][1]
return ".{:.6f}-{}".format(ts, ip)
def _symlink(self, src, dst): def _symlink(self, src, dst):
# TODO store this in linktab so we never delete src if there are links to it # TODO store this in linktab so we never delete src if there are links to it
@@ -141,40 +404,58 @@ class Up2k(object):
lsrc = "../" * (len(lsrc) - 1) + "/".join(lsrc) lsrc = "../" * (len(lsrc) - 1) + "/".join(lsrc)
os.symlink(fsenc(lsrc), fsenc(ldst)) os.symlink(fsenc(lsrc), fsenc(ldst))
except (AttributeError, OSError) as ex: except (AttributeError, OSError) as ex:
self.log("up2k", "cannot symlink; creating copy") self.log("up2k", "cannot symlink; creating copy: " + repr(ex))
shutil.copy2(fsenc(src), fsenc(dst)) shutil.copy2(fsenc(src), fsenc(dst))
def handle_chunk(self, wark, chash): def handle_chunk(self, ptop, wark, chash):
with self.mutex: with self.mutex:
job = self.registry.get(wark) job = self.registry[ptop].get(wark, None)
if not job: if not job:
raise Pebkac(404, "unknown wark") raise Pebkac(400, "unknown wark")
if chash not in job["need"]: if chash not in job["need"]:
raise Pebkac(200, "already got that but thanks??") raise Pebkac(200, "already got that but thanks??")
nchunk = [n for n, v in enumerate(job["hash"]) if v == chash] nchunk = [n for n, v in enumerate(job["hash"]) if v == chash]
if not nchunk: if not nchunk:
raise Pebkac(404, "unknown chunk") raise Pebkac(400, "unknown chunk")
job["poke"] = time.time()
chunksize = self._get_chunksize(job["size"]) chunksize = self._get_chunksize(job["size"])
ofs = [chunksize * x for x in nchunk] ofs = [chunksize * x for x in nchunk]
path = os.path.join(job["rdir"], job["name"]) path = os.path.join(job["ptop"], job["prel"], job["tnam"])
return [chunksize, ofs, path, job["lmod"]] return [chunksize, ofs, path, job["lmod"]]
def confirm_chunk(self, wark, chash): def confirm_chunk(self, ptop, wark, chash):
with self.mutex: with self.mutex:
job = self.registry[wark] job = self.registry[ptop][wark]
pdir = os.path.join(job["ptop"], job["prel"])
src = os.path.join(pdir, job["tnam"])
dst = os.path.join(pdir, job["name"])
job["need"].remove(chash) job["need"].remove(chash)
ret = len(job["need"]) ret = len(job["need"])
if ret > 0:
return ret, src
if WINDOWS and ret == 0: atomic_move(src, dst)
path = os.path.join(job["rdir"], job["name"])
self.lastmod_q.put([path, (int(time.time()), int(job["lmod"]))])
return ret if WINDOWS:
self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))])
db = self.db.get(job["ptop"], None)
if db:
rp = os.path.join(job["prel"], job["name"]).replace("\\", "/")
self.db_rm(db, rp)
self.db_add(db, job["wark"], rp, job["lmod"], job["size"])
db.commit()
del self.registry[ptop][wark]
# in-memory registry is reserved for unfinished uploads
return ret, dst
def _get_chunksize(self, filesize): def _get_chunksize(self, filesize):
chunksize = 1024 * 1024 chunksize = 1024 * 1024
@@ -188,6 +469,13 @@ class Up2k(object):
chunksize += stepsize chunksize += stepsize
stepsize *= mul stepsize *= mul
def db_rm(self, db, rp):
db.execute("delete from up where rp = ?", (rp,))
def db_add(self, db, wark, rp, ts, sz):
v = (wark, ts, sz, rp)
db.execute("insert into up values (?,?,?,?)", v)
def _get_wark(self, cj): def _get_wark(self, cj):
if len(cj["name"]) > 1024 or len(cj["hash"]) > 512 * 1024: # 16TiB if len(cj["name"]) > 1024 or len(cj["hash"]) > 512 * 1024: # 16TiB
raise Pebkac(400, "name or numchunks not according to spec") raise Pebkac(400, "name or numchunks not according to spec")
@@ -204,9 +492,13 @@ class Up2k(object):
except: except:
cj["lmod"] = int(time.time()) cj["lmod"] = int(time.time())
# server-reproducible file identifier, independent of name or location wark = self._wark_from_hashlist(cj["size"], cj["hash"])
ident = [self.salt, str(cj["size"])] return wark
ident.extend(cj["hash"])
def _wark_from_hashlist(self, filesize, hashes):
""" server-reproducible file identifier, independent of name or location """
ident = [self.salt, str(filesize)]
ident.extend(hashes)
ident = "\n".join(ident) ident = "\n".join(ident)
hasher = hashlib.sha512() hasher = hashlib.sha512()
@@ -216,10 +508,40 @@ class Up2k(object):
wark = base64.urlsafe_b64encode(digest) wark = base64.urlsafe_b64encode(digest)
return wark.decode("utf-8").rstrip("=") return wark.decode("utf-8").rstrip("=")
def _hashlist_from_file(self, path):
fsz = os.path.getsize(path)
csz = self._get_chunksize(fsz)
ret = []
with open(path, "rb", 512 * 1024) as f:
while fsz > 0:
hashobj = hashlib.sha512()
rem = min(csz, fsz)
fsz -= rem
while rem > 0:
buf = f.read(min(rem, 64 * 1024))
if not buf:
raise Exception("EOF at " + str(f.tell()))
hashobj.update(buf)
rem -= len(buf)
digest = hashobj.digest()[:32]
digest = base64.urlsafe_b64encode(digest)
ret.append(digest.decode("utf-8").rstrip("="))
return ret
def _new_upload(self, job): def _new_upload(self, job):
self.registry[job["wark"]] = job self.registry[job["ptop"]][job["wark"]] = job
path = os.path.join(job["rdir"], job["name"]) pdir = os.path.join(job["ptop"], job["prel"])
with open(fsenc(path), "wb") as f: job["name"] = self._untaken(pdir, job["name"], job["t0"], job["addr"])
# if len(job["name"].split(".")) > 8:
# raise Exception("aaa")
tnam = job["name"] + ".PARTIAL"
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
f, job["tnam"] = f["orz"]
f.seek(job["size"] - 1) f.seek(job["size"] - 1)
f.write(b"e") f.write(b"e")
@@ -236,3 +558,58 @@ class Up2k(object):
os.utime(fsenc(path), times) os.utime(fsenc(path), times)
except: except:
self.log("lmod", "failed to utime ({}, {})".format(path, times)) self.log("lmod", "failed to utime ({}, {})".format(path, times))
def _snapshot(self):
persist_interval = 30 # persist unfinished uploads index every 30 sec
discard_interval = 21600 # drop unfinished uploads after 6 hours inactivity
prev = {}
while True:
time.sleep(persist_interval)
with self.mutex:
for k, reg in self.registry.items():
self._snap_reg(prev, k, reg, discard_interval)
def _snap_reg(self, prev, k, reg, discard_interval):
now = time.time()
rm = [x for x in reg.values() if now - x["poke"] > discard_interval]
if rm:
m = "dropping {} abandoned uploads in {}".format(len(rm), k)
vis = [self._vis_job_progress(x) for x in rm]
self.log("up2k", "\n".join([m] + vis))
for job in rm:
del reg[job["wark"]]
try:
# remove the filename reservation
path = os.path.join(job["ptop"], job["prel"], job["name"])
if os.path.getsize(path) == 0:
os.unlink(path)
if len(job["hash"]) == len(job["need"]):
# PARTIAL is empty, delete that too
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
os.unlink(path)
except:
pass
path = os.path.join(k, ".hist", "up2k.snap")
if not reg:
if k not in prev or prev[k] is not None:
prev[k] = None
if os.path.exists(path):
os.unlink(path)
return
newest = max(x["poke"] for _, x in reg.items()) if reg else 0
etag = [len(reg), newest]
if etag == prev.get(k, None):
return
path2 = "{}.{}".format(path, os.getpid())
j = json.dumps(reg, indent=2, sort_keys=True).encode("utf-8")
with gzip.GzipFile(path2, "wb") as f:
f.write(j)
atomic_move(path2, path)
self.log("up2k", "snap: {} |{}|".format(path, len(reg.keys())))
prev[k] = etag

View File

@@ -2,13 +2,17 @@
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import re import re
import os
import sys import sys
import time
import base64 import base64
import select
import struct import struct
import hashlib import hashlib
import platform import platform
import threading import threading
import mimetypes import mimetypes
import contextlib
import subprocess as sp # nosec import subprocess as sp # nosec
from .__init__ import PY2, WINDOWS from .__init__ import PY2, WINDOWS
@@ -42,6 +46,7 @@ if WINDOWS and PY2:
HTTPCODE = { HTTPCODE = {
200: "OK", 200: "OK",
204: "No Content",
206: "Partial Content", 206: "Partial Content",
304: "Not Modified", 304: "Not Modified",
400: "Bad Request", 400: "Bad Request",
@@ -49,6 +54,7 @@ HTTPCODE = {
404: "Not Found", 404: "Not Found",
405: "Method Not Allowed", 405: "Method Not Allowed",
413: "Payload Too Large", 413: "Payload Too Large",
416: "Requested Range Not Satisfiable",
422: "Unprocessable Entity", 422: "Unprocessable Entity",
500: "Internal Server Error", 500: "Internal Server Error",
501: "Not Implemented", 501: "Not Implemented",
@@ -93,6 +99,80 @@ class Unrecv(object):
self.buf = buf + self.buf self.buf = buf + self.buf
@contextlib.contextmanager
def ren_open(fname, *args, **kwargs):
fdir = kwargs.pop("fdir", None)
suffix = kwargs.pop("suffix", None)
if fname == os.devnull:
with open(fname, *args, **kwargs) as f:
yield {"orz": [f, fname]}
return
orig_name = fname
bname = fname
ext = ""
while True:
ofs = bname.rfind(".")
if ofs < 0 or ofs < len(bname) - 7:
# doesn't look like an extension anymore
break
ext = bname[ofs:] + ext
bname = bname[:ofs]
b64 = ""
while True:
try:
if fdir:
fpath = os.path.join(fdir, fname)
else:
fpath = fname
if suffix and os.path.exists(fpath):
fpath += suffix
fname += suffix
ext += suffix
with open(fsenc(fpath), *args, **kwargs) as f:
if b64:
fp2 = "fn-trunc.{}.txt".format(b64)
fp2 = os.path.join(fdir, fp2)
with open(fsenc(fp2), "wb") as f2:
f2.write(orig_name.encode("utf-8"))
yield {"orz": [f, fname]}
return
except OSError as ex_:
ex = ex_
if ex.errno != 36:
raise
if not b64:
b64 = (bname + ext).encode("utf-8", "replace")
b64 = hashlib.sha512(b64).digest()[:12]
b64 = base64.urlsafe_b64encode(b64).decode("utf-8").rstrip("=")
badlen = len(fname)
while len(fname) >= badlen:
if len(bname) < 8:
raise ex
if len(bname) > len(ext):
# drop the last letter of the filename
bname = bname[:-1]
else:
try:
# drop the leftmost sub-extension
_, ext = ext.split(".", 1)
except:
# okay do the first letter then
ext = "." + ext[2:]
fname = "{}~{}{}".format(bname, b64, ext)
class MultipartParser(object): class MultipartParser(object):
def __init__(self, log_func, sr, http_headers): def __init__(self, log_func, sr, http_headers):
self.sr = sr self.sr = sr
@@ -309,18 +389,7 @@ def get_boundary(headers):
def read_header(sr): def read_header(sr):
ret = b"" ret = b""
while True: while True:
if ret.endswith(b"\r\n\r\n"): buf = sr.recv(1024)
break
elif ret.endswith(b"\r\n\r"):
n = 1
elif ret.endswith(b"\r\n"):
n = 2
elif ret.endswith(b"\r"):
n = 3
else:
n = 4
buf = sr.recv(n)
if not buf: if not buf:
if not ret: if not ret:
return None return None
@@ -332,11 +401,40 @@ def read_header(sr):
) )
ret += buf ret += buf
ofs = ret.find(b"\r\n\r\n")
if ofs < 0:
if len(ret) > 1024 * 64:
raise Pebkac(400, "header 2big")
else:
continue
if len(ret) > 1024 * 64: sr.unrecv(ret[ofs + 4 :])
raise Pebkac(400, "header 2big") return ret[:ofs].decode("utf-8", "surrogateescape").split("\r\n")
return ret[:-4].decode("utf-8", "surrogateescape").split("\r\n")
def humansize(sz, terse=False):
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
if sz < 1024:
break
sz /= 1024.0
ret = " ".join([str(sz)[:4].rstrip("."), unit])
if not terse:
return ret
return ret.replace("iB", "").replace(" ", "")
def get_spd(nbyte, t0, t=None):
if t is None:
t = time.time()
bps = nbyte / ((t - t0) + 0.001)
s1 = humansize(nbyte).replace(" ", "\033[33m").replace("iB", "")
s2 = humansize(bps).replace(" ", "\033[35m").replace("iB", "")
return "{} \033[0m{}/s\033[0m".format(s1, s2)
def undot(path): def undot(path):
@@ -388,6 +486,21 @@ def exclude_dotfiles(filepaths):
yield fpath yield fpath
def html_escape(s, quote=False):
"""html.escape but also newlines"""
s = (
s.replace("&", "&amp;")
.replace("<", "&lt;")
.replace(">", "&gt;")
.replace("\r", "&#13;")
.replace("\n", "&#10;")
)
if quote:
s = s.replace('"', "&quot;").replace("'", "&#x27;")
return s
def quotep(txt): def quotep(txt):
"""url quoter which deals with bytes correctly""" """url quoter which deals with bytes correctly"""
btxt = w8enc(txt) btxt = w8enc(txt)
@@ -402,8 +515,8 @@ def quotep(txt):
def unquotep(txt): def unquotep(txt):
"""url unquoter which deals with bytes correctly""" """url unquoter which deals with bytes correctly"""
btxt = w8enc(txt) btxt = w8enc(txt)
unq1 = btxt.replace(b"+", b" ") # btxt = btxt.replace(b"+", b" ")
unq2 = unquote(unq1) unq2 = unquote(btxt)
return w8dec(unq2) return w8dec(unq2)
@@ -436,6 +549,16 @@ else:
fsdec = w8dec fsdec = w8dec
def atomic_move(src, dst):
if not PY2:
os.replace(src, dst)
else:
if os.path.exists(dst):
os.unlink(dst)
os.rename(src, dst)
def read_socket(sr, total_size): def read_socket(sr, total_size):
remains = total_size remains = total_size
while remains > 0: while remains > 0:
@@ -451,6 +574,15 @@ def read_socket(sr, total_size):
yield buf yield buf
def read_socket_unbounded(sr):
while True:
buf = sr.recv(32 * 1024)
if not buf:
return
yield buf
def hashcopy(actor, fin, fout): def hashcopy(actor, fin, fout):
u32_lim = int((2 ** 31) * 0.9) u32_lim = int((2 ** 31) * 0.9)
hashobj = hashlib.sha512() hashobj = hashlib.sha512()
@@ -470,6 +602,46 @@ def hashcopy(actor, fin, fout):
return tlen, hashobj.hexdigest(), digest_b64 return tlen, hashobj.hexdigest(), digest_b64
def sendfile_py(lower, upper, f, s):
remains = upper - lower
f.seek(lower)
while remains > 0:
# time.sleep(0.01)
buf = f.read(min(4096, remains))
if not buf:
return remains
try:
s.sendall(buf)
remains -= len(buf)
except:
return remains
return 0
def sendfile_kern(lower, upper, f, s):
out_fd = s.fileno()
in_fd = f.fileno()
ofs = lower
while ofs < upper:
try:
req = min(2 ** 30, upper - ofs)
select.select([], [out_fd], [], 10)
n = os.sendfile(out_fd, in_fd, ofs, req)
except Exception as ex:
# print("sendfile: " + repr(ex))
n = 0
if n <= 0:
return upper - ofs
ofs += n
# print("sendfile: ok, sent {} now, {} total, {} remains".format(n, ofs - lower, upper - ofs))
return 0
def unescape_cookie(orig): def unescape_cookie(orig):
# mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn # mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn
ret = "" ret = ""
@@ -550,3 +722,6 @@ class Pebkac(Exception):
def __init__(self, code, msg=None): def __init__(self, code, msg=None):
super(Pebkac, self).__init__(msg or HTTPCODE[code]) super(Pebkac, self).__init__(msg or HTTPCODE[code])
self.code = code self.code = code
def __repr__(self):
return "Pebkac({}, {})".format(self.code, repr(self.args))

12
copyparty/web/Makefile Normal file
View File

@@ -0,0 +1,12 @@
# run me to zopfli all the static files
# which should help on really slow connections
# but then why are you using copyparty in the first place
pk: $(addsuffix .gz, $(wildcard *.js *.css))
un: $(addsuffix .un, $(wildcard *.gz))
%.gz: %
pigz -11 -J 34 -I 5730 $<
%.un: %
pigz -d $<

View File

@@ -131,6 +131,17 @@ a {
.logue { .logue {
padding: .2em 1.5em; padding: .2em 1.5em;
} }
#srv_info {
opacity: .5;
font-size: .8em;
color: #fc5;
position: absolute;
top: .5em;
left: 2em;
}
#srv_info span {
color: #fff;
}
a.play { a.play {
color: #e70; color: #e70;
} }

View File

@@ -33,14 +33,15 @@
<tr> <tr>
<th></th> <th></th>
<th>File Name</th> <th>File Name</th>
<th>File Size</th> <th sort="int">File Size</th>
<th>T</th>
<th>Date</th> <th>Date</th>
</tr> </tr>
</thead> </thead>
<tbody> <tbody>
{%- for f in files %} {%- for f in files %}
<tr><td>{{ f[0] }}</td><td><a href="{{ f[1] }}">{{ f[2] }}</a></td><td>{{ f[3] }}</td><td>{{ f[4] }}</td></tr> <tr><td>{{ f[0] }}</td><td><a href="{{ f[1] }}">{{ f[2] }}</a></td><td>{{ f[3] }}</td><td>{{ f[4] }}</td><td>{{ f[5] }}</td></tr>
{%- endfor %} {%- endfor %}
</tbody> </tbody>
@@ -53,6 +54,10 @@
<h2><a href="?h">control-panel</a></h2> <h2><a href="?h">control-panel</a></h2>
{%- if srv_info %}
<div id="srv_info"><span>{{ srv_info }}</span></div>
{%- endif %}
<div id="widget"> <div id="widget">
<div id="wtoggle"></div> <div id="wtoggle"></div>
<div id="widgeti"> <div id="widgeti">
@@ -63,6 +68,8 @@
</div> </div>
</div> </div>
<script src="/.cpr/util.js{{ ts }}"></script>
{%- if can_read %} {%- if can_read %}
<script src="/.cpr/browser.js{{ ts }}"></script> <script src="/.cpr/browser.js{{ ts }}"></script>
{%- endif %} {%- endif %}

View File

@@ -1,115 +1,25 @@
"use strict"; "use strict";
// error handler for mobile devices window.onerror = vis_exh;
function hcroak(msg) {
document.body.innerHTML = msg;
window.onerror = undefined;
throw 'fatal_err';
}
function croak(msg) {
document.body.textContent = msg;
window.onerror = undefined;
throw msg;
}
function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) {
return {
'&': '&amp;',
'"': '&quot;',
'<': '&lt;',
'>': '&gt;'
}[c];
});
}
window.onerror = function (msg, url, lineNo, columnNo, error) {
window.onerror = undefined;
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
if (error) {
var find = ['desc', 'stack', 'trace'];
for (var a = 0; a < find.length; a++)
if (String(error[find[a]]) !== 'undefined')
html.push('<h2>' + find[a] + '</h2>' +
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
}
document.body.style.fontSize = '0.8em';
document.body.style.padding = '0 1em 1em 1em';
hcroak(html.join('\n'));
};
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
if (!String.prototype.endsWith) {
String.prototype.endsWith = function (search, this_len) {
if (this_len === undefined || this_len > this.length) {
this_len = this.length;
}
return this.substring(this_len - search.length, this_len) === search;
};
}
// https://stackoverflow.com/a/950146
function import_js(url, cb) {
var head = document.head || document.getElementsByTagName('head')[0];
var script = document.createElement('script');
script.type = 'text/javascript';
script.src = url;
script.onreadystatechange = cb;
script.onload = cb;
head.appendChild(script);
}
function o(id) {
return document.getElementById(id);
}
function dbg(msg) { function dbg(msg) {
o('path').innerHTML = msg; ebi('path').innerHTML = msg;
} }
function ev(e) { function ev(e) {
e = e || window.event; e = e || window.event;
e.preventDefault ? e.preventDefault() : (e.returnValue = false);
if (e.preventDefault)
e.preventDefault()
if (e.stopPropagation)
e.stopPropagation();
e.returnValue = false;
return e; return e;
} }
makeSortable(ebi('files'));
function sortTable(table, col) {
var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows
th = table.tHead.rows[0].cells,
tr = Array.prototype.slice.call(tb.rows, 0),
i, reverse = th[col].className == 'sort1' ? -1 : 1;
for (var a = 0, thl = th.length; a < thl; a++)
th[a].className = '';
th[col].className = 'sort' + reverse;
var stype = th[col].getAttribute('sort');
tr = tr.sort(function (a, b) {
var v1 = a.cells[col].textContent.trim();
var v2 = b.cells[col].textContent.trim();
if (stype == 'int') {
v1 = parseInt(v1.replace(/,/g, ''));
v2 = parseInt(v2.replace(/,/g, ''));
return reverse * (v1 - v2);
}
return reverse * (v1.localeCompare(v2));
});
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]);
}
function makeSortable(table) {
var th = table.tHead, i;
th && (th = th.rows[0]) && (th = th.cells);
if (th) i = th.length;
else return; // if no `<thead>` then do nothing
while (--i >= 0) (function (i) {
th[i].addEventListener('click', function () { sortTable(table, i) });
}(i));
}
makeSortable(o('files'));
// extract songs + add play column // extract songs + add play column
@@ -122,10 +32,9 @@ var mp = (function () {
'tracks': tracks, 'tracks': tracks,
'cover_url': '' 'cover_url': ''
}; };
var re_audio = new RegExp('\.(opus|ogg|m4a|aac|mp3|wav|flac)$', 'i'); var re_audio = /\.(opus|ogg|m4a|aac|mp3|wav|flac)$/i;
var re_cover = new RegExp('^(cover|folder|cd|front|back)\.(jpe?g|png|gif)$', 'i');
var trs = document.getElementById('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr'); var trs = ebi('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
for (var a = 0, aa = trs.length; a < aa; a++) { for (var a = 0, aa = trs.length; a < aa; a++) {
var tds = trs[a].getElementsByTagName('td'); var tds = trs[a].getElementsByTagName('td');
var link = tds[1].getElementsByTagName('a')[0]; var link = tds[1].getElementsByTagName('a')[0];
@@ -141,7 +50,7 @@ var mp = (function () {
} }
for (var a = 0, aa = tracks.length; a < aa; a++) for (var a = 0, aa = tracks.length; a < aa; a++)
o('trk' + a).onclick = ev_play; ebi('trk' + a).onclick = ev_play;
ret.vol = localStorage.getItem('vol'); ret.vol = localStorage.getItem('vol');
if (ret.vol !== null) if (ret.vol !== null)
@@ -168,8 +77,8 @@ var mp = (function () {
// toggle player widget // toggle player widget
var widget = (function () { var widget = (function () {
var ret = {}; var ret = {};
var widget = document.getElementById('widget'); var widget = ebi('widget');
var wtoggle = document.getElementById('wtoggle'); var wtoggle = ebi('wtoggle');
var touchmode = false; var touchmode = false;
var side_open = false; var side_open = false;
var was_paused = true; var was_paused = true;
@@ -198,7 +107,7 @@ var widget = (function () {
ret.paused = function (paused) { ret.paused = function (paused) {
if (was_paused != paused) { if (was_paused != paused) {
was_paused = paused; was_paused = paused;
o('bplay').innerHTML = paused ? '▶' : '⏸'; ebi('bplay').innerHTML = paused ? '▶' : '⏸';
} }
}; };
var click_handler = function (e) { var click_handler = function (e) {
@@ -222,8 +131,8 @@ var widget = (function () {
// buffer/position bar // buffer/position bar
var pbar = (function () { var pbar = (function () {
var r = {}; var r = {};
r.bcan = o('barbuf'); r.bcan = ebi('barbuf');
r.pcan = o('barpos'); r.pcan = ebi('barpos');
r.bctx = r.bcan.getContext('2d'); r.bctx = r.bcan.getContext('2d');
r.pctx = r.pcan.getContext('2d'); r.pctx = r.pcan.getContext('2d');
@@ -288,7 +197,7 @@ var pbar = (function () {
// volume bar // volume bar
var vbar = (function () { var vbar = (function () {
var r = {}; var r = {};
r.can = o('pvol'); r.can = ebi('pvol');
r.ctx = r.can.getContext('2d'); r.ctx = r.can.getContext('2d');
var bctx = r.ctx; var bctx = r.ctx;
@@ -385,7 +294,7 @@ var vbar = (function () {
else else
play(0); play(0);
}; };
o('bplay').onclick = function (e) { ebi('bplay').onclick = function (e) {
ev(e); ev(e);
if (mp.au) { if (mp.au) {
if (mp.au.paused) if (mp.au.paused)
@@ -396,15 +305,15 @@ var vbar = (function () {
else else
play(0); play(0);
}; };
o('bprev').onclick = function (e) { ebi('bprev').onclick = function (e) {
ev(e); ev(e);
bskip(-1); bskip(-1);
}; };
o('bnext').onclick = function (e) { ebi('bnext').onclick = function (e) {
ev(e); ev(e);
bskip(1); bskip(1);
}; };
o('barpos').onclick = function (e) { ebi('barpos').onclick = function (e) {
if (!mp.au) { if (!mp.au) {
//dbg((new Date()).getTime()); //dbg((new Date()).getTime());
return play(0); return play(0);
@@ -413,17 +322,12 @@ var vbar = (function () {
var rect = pbar.pcan.getBoundingClientRect(); var rect = pbar.pcan.getBoundingClientRect();
var x = e.clientX - rect.left; var x = e.clientX - rect.left;
var mul = x * 1.0 / rect.width; var mul = x * 1.0 / rect.width;
var seek = mp.au.duration * mul;
console.log('seek: ' + seek);
if (!isFinite(seek))
return;
/* mp.au.currentTime = seek;
dbg(//Math.round(rect.width) + 'x' + Math.round(rect.height) + '+' +
//Math.round(rect.left) + '+' + Math.round(rect.top) + ', ' +
//Math.round(e.clientX) + 'x' + Math.round(e.clientY) + ', ' +
Math.round(mp.au.currentTime * 10) / 10 + ', ' +
Math.round(mp.au.duration * 10) / 10 + '*' +
Math.round(mul * 1000) / 1000);
*/
mp.au.currentTime = mp.au.duration * mul;
if (mp.au === mp.au_native) if (mp.au === mp.au_native)
// hack: ogv.js breaks on .play() during playback // hack: ogv.js breaks on .play() during playback
@@ -479,12 +383,18 @@ function ev_play(e) {
function setclass(id, clas) { function setclass(id, clas) {
o(id).setAttribute('class', clas); ebi(id).setAttribute('class', clas);
} }
var iOS = !!navigator.platform && var need_ogv = true;
/iPad|iPhone|iPod/.test(navigator.platform); try {
need_ogv = new Audio().canPlayType('audio/ogg; codecs=opus') !== 'probably';
if (/ Edge\//.exec(navigator.userAgent + ''))
need_ogv = true;
}
catch (ex) { }
// plays the tid'th audio file on the page // plays the tid'th audio file on the page
@@ -507,7 +417,7 @@ function play(tid, call_depth) {
var hack_attempt_play = true; var hack_attempt_play = true;
var url = mp.tracks[tid]; var url = mp.tracks[tid];
if (iOS && /\.(ogg|opus)$/i.test(url)) { if (need_ogv && /\.(ogg|opus)$/i.test(url)) {
if (mp.au_ogvjs) { if (mp.au_ogvjs) {
mp.au = mp.au_ogvjs; mp.au = mp.au_ogvjs;
} }
@@ -544,7 +454,8 @@ function play(tid, call_depth) {
mp.au.tid = tid; mp.au.tid = tid;
mp.au.src = url; mp.au.src = url;
mp.au.volume = mp.expvol(); mp.au.volume = mp.expvol();
setclass('trk' + tid, 'play act'); var oid = 'trk' + tid;
setclass(oid, 'play act');
try { try {
if (hack_attempt_play) if (hack_attempt_play)
@@ -553,7 +464,11 @@ function play(tid, call_depth) {
if (mp.au.paused) if (mp.au.paused)
autoplay_blocked(); autoplay_blocked();
location.hash = 'trk' + tid; var o = ebi(oid);
o.setAttribute('id', 'thx_js');
location.hash = oid;
o.setAttribute('id', oid);
pbar.drawbuf(); pbar.drawbuf();
return true; return true;
} }
@@ -569,7 +484,6 @@ function play(tid, call_depth) {
function evau_error(e) { function evau_error(e) {
var err = ''; var err = '';
var eplaya = (e && e.target) || (window.event && window.event.srcElement); var eplaya = (e && e.target) || (window.event && window.event.srcElement);
var url = eplaya.src;
switch (eplaya.error.code) { switch (eplaya.error.code) {
case eplaya.error.MEDIA_ERR_ABORTED: case eplaya.error.MEDIA_ERR_ABORTED:
@@ -594,7 +508,6 @@ function evau_error(e) {
err += '\n\nFile: «' + decodeURIComponent(eplaya.src.split('/').slice(-1)[0]) + '»'; err += '\n\nFile: «' + decodeURIComponent(eplaya.src.split('/').slice(-1)[0]) + '»';
alert(err); alert(err);
play(eplaya.tid + 1);
} }
@@ -611,26 +524,27 @@ function show_modal(html) {
// hide fullscreen message // hide fullscreen message
function unblocked() { function unblocked() {
var dom = o('blocked'); var dom = ebi('blocked');
if (dom) if (dom)
dom.remove(); dom.parentNode.removeChild(dom);
} }
// show ui to manually start playback of a linked song // show ui to manually start playback of a linked song
function autoplay_blocked(tid) { function autoplay_blocked() {
show_modal( show_modal(
'<div id="blk_play"><a id="blk_go"></a></div>' + '<div id="blk_play"><a href="#" id="blk_go"></a></div>' +
'<div id="blk_abrt"><a id="blk_na">Cancel<br />(show file list)</a></div>'); '<div id="blk_abrt"><a href="#" id="blk_na">Cancel<br />(show file list)</a></div>');
var go = o('blk_go'); var go = ebi('blk_go');
var na = o('blk_na'); var na = ebi('blk_na');
var fn = mp.tracks[mp.au.tid].split(/\//).pop(); var fn = mp.tracks[mp.au.tid].split(/\//).pop();
fn = decodeURIComponent(fn.replace(/\+/g, ' ')); fn = decodeURIComponent(fn.replace(/\+/g, ' '));
go.textContent = 'Play "' + fn + '"'; go.textContent = 'Play "' + fn + '"';
go.onclick = function () { go.onclick = function (e) {
if (e) e.preventDefault();
unblocked(); unblocked();
mp.au.play(); mp.au.play();
}; };

View File

@@ -1,13 +1,19 @@
@font-face {
font-family: 'scp';
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
}
html, body { html, body {
color: #333; color: #333;
background: #eee; background: #eee;
font-family: sans-serif; font-family: sans-serif;
line-height: 1.5em; line-height: 1.5em;
} }
#mtw {
display: none;
}
#mw { #mw {
width: 48.5em;
margin: 0 auto; margin: 0 auto;
margin-bottom: 6em; padding: 0 1.5em;
} }
pre, code, a { pre, code, a {
color: #480; color: #480;
@@ -21,7 +27,7 @@ code {
font-size: .96em; font-size: .96em;
} }
pre, code { pre, code {
font-family: monospace, monospace; font-family: 'scp', monospace, monospace;
white-space: pre-wrap; white-space: pre-wrap;
word-break: break-all; word-break: break-all;
} }
@@ -41,7 +47,7 @@ pre code {
pre code:last-child { pre code:last-child {
border-bottom: none; border-bottom: none;
} }
pre code:before { pre code::before {
content: counter(precode); content: counter(precode);
-webkit-user-select: none; -webkit-user-select: none;
display: inline-block; display: inline-block;
@@ -76,31 +82,39 @@ h2 {
padding-left: .4em; padding-left: .4em;
margin-top: 3em; margin-top: 3em;
} }
h3 {
border-bottom: .1em solid #999;
}
h1 a, h3 a, h5 a, h1 a, h3 a, h5 a,
h2 a, h4 a, h6 a { h2 a, h4 a, h6 a {
color: inherit; color: inherit;
display: block;
background: none; background: none;
border: none; border: none;
padding: 0; padding: 0;
margin: 0; margin: 0;
} }
#m ul, #mp ul,
#m ol { #mp ol {
border-left: .3em solid #ddd; border-left: .3em solid #ddd;
} }
#m>ul, #m>ul,
#m>ol { #m>ol {
border-color: #bbb; border-color: #bbb;
} }
#m ul>li { #mp ul>li {
list-style-type: disc; list-style-type: disc;
} }
#m ul>li, #mp ul>li,
#m ol>li { #mp ol>li {
margin: .7em 0; margin: .7em 0;
} }
strong {
color: #000;
}
p>em, p>em,
li>em { li>em,
td>em {
color: #c50; color: #c50;
padding: .1em; padding: .1em;
border-bottom: .1em solid #bbb; border-bottom: .1em solid #bbb;
@@ -116,8 +130,9 @@ small {
opacity: .8; opacity: .8;
} }
#toc { #toc {
width: 48.5em; margin: 0 1em;
margin: 0 auto; -ms-scroll-chaining: none;
overscroll-behavior-y: none;
} }
#toc ul { #toc ul {
padding-left: 1em; padding-left: 1em;
@@ -162,14 +177,12 @@ small {
} }
table { table {
border-collapse: collapse; border-collapse: collapse;
margin: 1em 0;
} }
td { th, td {
padding: .2em .5em; padding: .2em .5em;
border: .12em solid #aaa; border: .12em solid #aaa;
} }
th {
border: .12em solid #aaa;
}
blink { blink {
animation: blinker .7s cubic-bezier(.9, 0, .1, 1) infinite; animation: blinker .7s cubic-bezier(.9, 0, .1, 1) infinite;
} }
@@ -181,10 +194,26 @@ blink {
opacity: 1; opacity: 1;
} }
} }
@media screen { @media screen {
html, body { html, body {
margin: 0; margin: 0;
padding: 0; padding: 0;
outline: 0;
border: none;
width: 100%;
height: 100%;
}
#mw {
margin: 0 auto;
right: 0;
}
#mp {
max-width: 52em;
margin-bottom: 6em;
word-break: break-word;
overflow-wrap: break-word;
word-wrap: break-word; /*ie*/
} }
a { a {
color: #fff; color: #fff;
@@ -212,15 +241,17 @@ blink {
padding: .5em 0; padding: .5em 0;
} }
#mn { #mn {
font-weight: normal;
padding: 1.3em 0 .7em 1em; padding: 1.3em 0 .7em 1em;
font-size: 1.4em; border-bottom: 1px solid #ccc;
background: #eee;
z-index: 10;
width: calc(100% - 1em);
} }
#mn a { #mn a {
color: #444; color: #444;
background: none; background: none;
margin: 0 0 0 -.2em; margin: 0 0 0 -.2em;
padding: 0 0 0 .4em; padding: .3em 0 .3em .4em;
text-decoration: none; text-decoration: none;
border: none; border: none;
/* ie: */ /* ie: */
@@ -233,7 +264,7 @@ blink {
#mn a:last-child { #mn a:last-child {
padding-right: .5em; padding-right: .5em;
} }
#mn a:not(:last-child):after { #mn a:not(:last-child)::after {
content: ''; content: '';
width: 1.05em; width: 1.05em;
height: 1.05em; height: 1.05em;
@@ -248,7 +279,45 @@ blink {
text-decoration: underline; text-decoration: underline;
} }
#mh { #mh {
margin: 0 0 1.5em 0; padding: .4em 1em;
position: relative;
width: 100%;
width: calc(100% - 3em);
background: #eee;
z-index: 9;
top: 0;
}
#mh a {
color: #444;
background: none;
text-decoration: underline;
border: none;
}
#mh a:hover {
color: #000;
background: #ddd;
}
#toolsbox {
overflow: hidden;
display: inline-block;
background: #eee;
height: 1.5em;
padding: 0 .2em;
margin: 0 .2em;
position: absolute;
}
#toolsbox.open {
height: auto;
overflow: visible;
background: #eee;
box-shadow: 0 .2em .2em #ccc;
padding-bottom: .2em;
}
#toolsbox a {
display: block;
}
#toolsbox a+a {
text-decoration: none;
} }
@@ -270,13 +339,12 @@ blink {
html.dark #toc li { html.dark #toc li {
border-width: 0; border-width: 0;
} }
html.dark #m a, html.dark #mp a {
html.dark #mh a {
background: #057; background: #057;
} }
html.dark #m h1 a, html.dark #m h4 a, html.dark #mp h1 a, html.dark #mp h4 a,
html.dark #m h2 a, html.dark #m h5 a, html.dark #mp h2 a, html.dark #mp h5 a,
html.dark #m h3 a, html.dark #m h6 a { html.dark #mp h3 a, html.dark #mp h6 a {
color: inherit; color: inherit;
background: none; background: none;
} }
@@ -286,16 +354,20 @@ blink {
background: #1a1a1a; background: #1a1a1a;
border: .07em solid #333; border: .07em solid #333;
} }
html.dark #m ul, html.dark #mp ul,
html.dark #m ol { html.dark #mp ol {
border-color: #444; border-color: #444;
} }
html.dark #m>ul, html.dark #m>ul,
html.dark #m>ol { html.dark #m>ol {
border-color: #555; border-color: #555;
} }
html.dark strong {
color: #fff;
}
html.dark p>em, html.dark p>em,
html.dark li>em { html.dark li>em,
html.dark td>em {
color: #f94; color: #f94;
border-color: #666; border-color: #666;
} }
@@ -316,32 +388,61 @@ blink {
background: #282828; background: #282828;
border: .07em dashed #444; border: .07em dashed #444;
} }
html.dark #mn a:not(:last-child):after { html.dark #mn a:not(:last-child)::after {
border-color: rgba(255,255,255,0.3); border-color: rgba(255,255,255,0.3);
} }
html.dark #mn a { html.dark #mn a {
color: #ccc; color: #ccc;
} }
html.dark #mn {
border-bottom: 1px solid #333;
}
html.dark #mn,
html.dark #mh {
background: #222;
}
html.dark #mh a {
color: #ccc;
background: none;
}
html.dark #mh a:hover {
background: #333;
color: #fff;
}
html.dark #toolsbox {
background: #222;
}
html.dark #toolsbox.open {
box-shadow: 0 .2em .2em #069;
border-radius: 0 0 .4em .4em;
}
} }
@media screen and (min-width: 64em) {
@media screen and (min-width: 66em) {
#mw { #mw {
margin-left: 14em; position: fixed;
margin-left: calc(100% - 50em); overflow-y: auto;
left: 14em;
left: calc(100% - 55em);
max-width: none;
bottom: 0;
scrollbar-color: #eb0 #f7f7f7;
} }
#toc { #toc {
width: 13em; width: 13em;
width: calc(100% - 52.3em); width: calc(100% - 55.3em);
max-width: 30em;
background: #eee; background: #eee;
position: fixed; position: fixed;
overflow-y: auto;
top: 0; top: 0;
left: 0; left: 0;
height: 100%; bottom: 0;
overflow-y: auto;
padding: 0; padding: 0;
margin: 0; margin: 0;
box-shadow: 0 0 1em #ccc;
scrollbar-color: #eb0 #f7f7f7; scrollbar-color: #eb0 #f7f7f7;
xscrollbar-width: thin; box-shadow: 0 0 1em rgba(0,0,0,0.1);
border-top: 1px solid #d7d7d7;
} }
#toc li { #toc li {
border-left: .3em solid #ccc; border-left: .3em solid #ccc;
@@ -361,30 +462,134 @@ blink {
html.dark #toc { html.dark #toc {
background: #282828; background: #282828;
border-top: 1px solid #2c2c2c;
box-shadow: 0 0 1em #181818; box-shadow: 0 0 1em #181818;
}
html.dark #toc,
html.dark #mw {
scrollbar-color: #b80 #282828; scrollbar-color: #b80 #282828;
} }
html.dark #toc::-webkit-scrollbar-track {
background: #282828;
}
html.dark #toc::-webkit-scrollbar {
background: #282828;
width: .8em;
}
html.dark #toc::-webkit-scrollbar-thumb {
background: #b80;
}
} }
@media screen and (min-width: 84em) { @media screen and (min-width: 85.5em) {
#toc { width: 30em } #toc { width: 30em }
#mw { margin-left: 32em } #mw { left: 30.5em }
} }
@media print { @media print {
@page {
size: A4;
padding: 0;
margin: .5in .6in;
mso-header-margin: .6in;
mso-footer-margin: .6in;
mso-paper-source: 0;
}
a { a {
color: #079; color: #079;
text-decoration: none; text-decoration: none;
border-bottom: .07em solid #4ac; border-bottom: .07em solid #4ac;
padding: 0 .3em; padding: 0 .3em;
} }
#toc {
margin: 0 !important;
}
#toc>ul { #toc>ul {
border-left: .1em solid #84c4dd; border-left: .1em solid #84c4dd;
} }
#mn, #mh { #mn, #mh {
display: none; display: none;
} }
html, body, #toc, #mw {
margin: 0 !important;
word-break: break-word;
width: 52em;
}
#toc {
margin-left: 1em !important;
}
#toc a {
color: #000 !important;
}
#toc a::after {
/* hopefully supported by browsers eventually */
content: leader('.') target-counter(attr(href), page);
}
a[ctr]::before {
content: attr(ctr) '. ';
}
h1 {
margin: 2em 0;
}
h2 {
margin: 2em 0 0 0;
}
h1, h2, h3 {
page-break-inside: avoid;
}
h1::after,
h2::after,
h3::after {
content: 'orz';
color: transparent;
display: block;
line-height: 1em;
padding: 4em 0 0 0;
margin: 0 0 -5em 0;
}
p {
page-break-inside: avoid;
}
table {
page-break-inside: auto;
}
tr {
page-break-inside: avoid;
page-break-after: auto;
}
thead {
display: table-header-group;
}
tfoot {
display: table-footer-group;
}
#mp a.vis::after {
content: ' (' attr(href) ')';
border-bottom: 1px solid #bbb;
color: #444;
}
blockquote {
border-color: #555;
}
code {
border-color: #bbb;
}
pre, pre code {
border-color: #999;
}
pre code::before {
color: #058;
}
html.dark a {
color: #000;
}
html.dark pre,
html.dark code {
color: #240;
}
html.dark p>em,
html.dark li>em,
html.dark td>em {
color: #940;
}
} }
/* /*

View File

@@ -4,32 +4,136 @@
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7"> <meta name="viewport" content="width=device-width, initial-scale=0.7">
<link href="/.cpr/md.css" rel="stylesheet"> <link href="/.cpr/md.css" rel="stylesheet">
{%- if edit %}
<link href="/.cpr/md2.css" rel="stylesheet">
{%- endif %}
</head> </head>
<body> <body>
<div id="mn"></div> <div id="mn">navbar</div>
<div id="mh">
<a id="lightswitch" href="#">go dark</a>
<a id="navtoggle" href="#">hide nav</a>
{%- if edit %}
<a id="save" href="?edit">save</a>
<a id="sbs" href="#">sbs</a>
<a id="nsbs" href="#">editor</a>
<div id="toolsbox">
<a id="tools" href="#">tools</a>
<a id="fmt_table" href="#">prettify table (ctrl-k)</a>
<a id="iter_uni" href="#">non-ascii: iterate (ctrl-u)</a>
<a id="mark_uni" href="#">non-ascii: markup</a>
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
<a id="help" href="#">help</a>
</div>
{%- else %}
<a href="?edit">edit (basic)</a>
<a href="?edit2">edit (fancy)</a>
<a href="?raw">view raw</a>
{%- endif %}
</div>
<div id="toc"></div> <div id="toc"></div>
<div id="mtw">
<textarea id="mt" autocomplete="off">{{ md }}</textarea>
</div>
<div id="mw"> <div id="mw">
<div id="mh">
<a id="lightswitch" href="#">go dark</a> //
<a id="edit" href="?edit">edit this</a>
</div>
<div id="ml"> <div id="ml">
<div style="text-align:center;margin:5em 0"> <div style="text-align:center;margin:5em 0">
<div style="font-size:2em;margin:1em 0">Loading</div> <div style="font-size:2em;margin:1em 0">Loading</div>
if you're still reading this, check that javascript is allowed if you're still reading this, check that javascript is allowed
</div> </div>
</div> </div>
<div id="m"> <div id="mp"></div>
<textarea id="mt" style="display:none">{{ md }}</textarea>
</div>
</div> </div>
{%- if edit %}
<div id="helpbox">
<textarea autocomplete="off">
write markdown (most html is 🙆 too)
## hotkey list
* `Ctrl-S` to save
* `Ctrl-E` to toggle mode
* `Ctrl-K` to prettyprint a table
* `Ctrl-U` to iterate non-ascii chars
* `Ctrl-H` / `Ctrl-Shift-H` to create a header
* `TAB` / `Shift-TAB` to indent/dedent a selection
## toolbar
1. toggle dark mode
2. show/hide navigation bar
3. save changes on server
4. side-by-side editing
5. toggle editor/preview
6. this thing :^)
## markdown
|||
|--|--|
|`**bold**`|**bold**|
|`_italic_`|_italic_|
|`~~strike~~`|~~strike~~|
|`` `code` ``|`code`|
|`[](#hotkey-list)`|[](#hotkey-list)|
|`[](/foo/bar.md#header)`|[](/foo/bar.md#header)|
|`<blink>💯</blink>`|<blink>💯</blink>|
## tables
|left-aligned|centered|right-aligned
| ---------- | :----: | ----------:
|one |two |three
|left-aligned|centered|right-aligned
| ---------- | :----: | ----------:
|one |two |three
## lists
* one
* two
1. one
1. two
* one
* two
1. one
1. two
## headers
# level 1
## level 2
### level 3
## quote
> hello
> hello
## codeblock
four spaces (no tab pls)
## code in lists
* foo
bar
six spaces total
* foo
bar
six spaces total
.
</textarea>
</div>
{%- endif %}
<script> <script>
var link_md_as_html = false; // TODO (does nothing) var last_modified = {{ lastmod }};
var md_opt = {
link_md_as_html: false,
allow_plugins: {{ md_plug }},
modpoll_freq: {{ md_chk_rate }}
};
(function () { (function () {
var btn = document.getElementById("lightswitch"); var btn = document.getElementById("lightswitch");
var toggle = function () { var toggle = function (e) {
if (e) e.preventDefault();
var dark = !document.documentElement.getAttribute("class"); var dark = !document.documentElement.getAttribute("class");
document.documentElement.setAttribute("class", dark ? "dark" : ""); document.documentElement.setAttribute("class", dark ? "dark" : "");
btn.innerHTML = "go " + (dark ? "light" : "dark"); btn.innerHTML = "go " + (dark ? "light" : "dark");
@@ -42,6 +146,10 @@ var link_md_as_html = false; // TODO (does nothing)
})(); })();
</script> </script>
<script src="/.cpr/util.js"></script>
<script src="/.cpr/deps/marked.full.js"></script> <script src="/.cpr/deps/marked.full.js"></script>
<script src="/.cpr/md.js"></script> <script src="/.cpr/md.js"></script>
{%- if edit %}
<script src="/.cpr/md2.js"></script>
{%- endif %}
</body></html> </body></html>

View File

@@ -1,17 +1,60 @@
/*var conv = new showdown.Converter(); "use strict";
conv.setFlavor('github');
conv.setOption('tasklists', 0);
var mhtml = conv.makeHtml(dom_md.value);
*/
var dom_toc = document.getElementById('toc'); var dom_toc = ebi('toc');
var dom_wrap = document.getElementById('mw'); var dom_wrap = ebi('mw');
var dom_head = document.getElementById('mh'); var dom_hbar = ebi('mh');
var dom_nav = document.getElementById('mn'); var dom_nav = ebi('mn');
var dom_doc = document.getElementById('m'); var dom_pre = ebi('mp');
var dom_md = document.getElementById('mt'); var dom_src = ebi('mt');
var dom_navtgl = ebi('navtoggle');
// add toolbar buttons
// chrome 49 needs this
var chromedbg = function () { console.log(arguments); }
// null-logger
var dbg = function () { };
// replace dbg with the real deal here or in the console:
// dbg = chromedbg
// dbg = console.log
// plugins
var md_plug = {};
function hesc(txt) {
return txt.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
}
function cls(dom, name, add) {
var re = new RegExp('(^| )' + name + '( |$)');
var lst = (dom.getAttribute('class') + '').replace(re, "$1$2").replace(/ /, "");
dom.setAttribute('class', lst + (add ? ' ' + name : ''));
}
function statify(obj) {
return JSON.parse(JSON.stringify(obj));
}
// dodge browser issues
(function () {
var ua = navigator.userAgent;
if (ua.indexOf(') Gecko/') !== -1 && /Linux| Mac /.exec(ua)) {
// necessary on ff-68.7 at least
var s = document.createElement('style');
s.innerHTML = '@page { margin: .5in .6in .8in .6in; }';
console.log(s.innerHTML);
document.head.appendChild(s);
}
})();
// add navbar
(function () { (function () {
var n = document.location + ''; var n = document.location + '';
n = n.substr(n.indexOf('//') + 2).split('?')[0].split('/'); n = n.substr(n.indexOf('//') + 2).split('?')[0].split('/');
@@ -22,27 +65,220 @@ var dom_md = document.getElementById('mt');
if (a > 0) if (a > 0)
loc.push(n[a]); loc.push(n[a]);
var dec = decodeURIComponent(n[a]).replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;"); var dec = hesc(decodeURIComponent(n[a]));
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>'); nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
} }
dom_nav.innerHTML = nav.join(''); dom_nav.innerHTML = nav.join('');
})(); })();
function convert_markdown(md_text) {
marked.setOptions({ // faster than replacing the entire html (chrome 1.8x, firefox 1.6x)
function copydom(src, dst, lv) {
var sc = src.childNodes,
dc = dst.childNodes;
if (sc.length !== dc.length) {
dbg("replace L%d (%d/%d) |%d|",
lv, sc.length, dc.length, src.innerHTML.length);
dst.innerHTML = src.innerHTML;
return;
}
var rpl = [];
for (var a = sc.length - 1; a >= 0; a--) {
var st = sc[a].tagName,
dt = dc[a].tagName;
if (st !== dt) {
dbg("replace L%d (%d/%d) type %s/%s", lv, a, sc.length, st, dt);
rpl.push(a);
continue;
}
var sa = sc[a].attributes || [],
da = dc[a].attributes || [];
if (sa.length !== da.length) {
dbg("replace L%d (%d/%d) attr# %d/%d",
lv, a, sc.length, sa.length, da.length);
rpl.push(a);
continue;
}
var dirty = false;
for (var b = sa.length - 1; b >= 0; b--) {
var name = sa[b].name,
sv = sa[b].value,
dv = dc[a].getAttribute(name);
if (name == "data-ln" && sv !== dv) {
dc[a].setAttribute(name, sv);
continue;
}
if (sv !== dv) {
dbg("replace L%d (%d/%d) attr %s [%s] [%s]",
lv, a, sc.length, name, sv, dv);
dirty = true;
break;
}
}
if (dirty)
rpl.push(a);
}
// TODO pure guessing
if (rpl.length > sc.length / 3) {
dbg("replace L%d fully, %s (%d/%d) |%d|",
lv, rpl.length, sc.length, src.innerHTML.length);
dst.innerHTML = src.innerHTML;
return;
}
// repl is reversed; build top-down
var nbytes = 0;
for (var a = rpl.length - 1; a >= 0; a--) {
var html = sc[rpl[a]].outerHTML;
dc[rpl[a]].outerHTML = html;
nbytes += html.length;
}
if (nbytes > 0)
dbg("replaced %d bytes L%d", nbytes, lv);
for (var a = 0; a < sc.length; a++)
copydom(sc[a], dc[a], lv + 1);
if (src.innerHTML !== dst.innerHTML) {
dbg("setting %d bytes L%d", src.innerHTML.length, lv);
dst.innerHTML = src.innerHTML;
}
}
function md_plug_err(ex, js) {
var errbox = ebi('md_errbox');
if (errbox)
errbox.parentNode.removeChild(errbox);
if (!ex)
return;
var msg = (ex + '').split('\n')[0];
var ln = ex.lineNumber;
var o = null;
if (ln) {
msg = "Line " + ln + ", " + msg;
var lns = js.split('\n');
if (ln < lns.length) {
o = document.createElement('span');
o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block';
o.textContent = lns[ln - 1];
}
}
errbox = document.createElement('div');
errbox.setAttribute('id', 'md_errbox');
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
errbox.textContent = msg;
errbox.onclick = function () {
alert('' + ex.stack);
};
if (o) {
errbox.appendChild(o);
errbox.style.padding = '.25em .5em';
}
dom_nav.appendChild(errbox);
try {
console.trace();
}
catch (ex2) { }
}
function load_plug(md_text, plug_type) {
if (!md_opt.allow_plugins)
return md_text;
var find = '\n```copyparty_' + plug_type + '\n';
var ofs = md_text.indexOf(find);
if (ofs === -1)
return md_text;
var ofs2 = md_text.indexOf('\n```', ofs + 1);
if (ofs2 == -1)
return md_text;
var js = md_text.slice(ofs + find.length, ofs2 + 1);
var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
var old_plug = md_plug[plug_type];
if (!old_plug || old_plug[1] != js) {
js = 'const x = { ' + js + ' }; x;';
try {
var x = eval(js);
}
catch (ex) {
md_plug[plug_type] = null;
md_plug_err(ex, js);
return md;
}
if (x['ctor']) {
x['ctor']();
delete x['ctor'];
}
md_plug[plug_type] = [x, js];
}
return md;
}
function convert_markdown(md_text, dest_dom) {
md_text = md_text.replace(/\r/g, '');
md_plug_err(null);
md_text = load_plug(md_text, 'pre');
md_text = load_plug(md_text, 'post');
var marked_opts = {
//headerPrefix: 'h-', //headerPrefix: 'h-',
breaks: true, breaks: true,
gfm: true gfm: true
}); };
var html = marked(md_text);
dom_doc.innerHTML = html;
var loader = document.getElementById('ml'); var ext = md_plug['pre'];
loader.parentNode.removeChild(loader); if (ext)
Object.assign(marked_opts, ext[0]);
try {
var md_html = marked(md_text, marked_opts);
}
catch (ex) {
if (ext)
md_plug_err(ex, ext[1]);
throw ex;
}
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
var nodes = md_dom.getElementsByTagName('a');
for (var a = nodes.length - 1; a >= 0; a--) {
var href = nodes[a].getAttribute('href');
var txt = nodes[a].textContent;
if (!txt)
nodes[a].textContent = href;
else if (href !== txt)
nodes[a].setAttribute('class', 'vis');
}
// todo-lists (should probably be a marked extension) // todo-lists (should probably be a marked extension)
var nodes = dom_doc.getElementsByTagName('input'); nodes = md_dom.getElementsByTagName('input');
for (var a = nodes.length - 1; a >= 0; a--) { for (var a = nodes.length - 1; a >= 0; a--) {
var dom_box = nodes[a]; var dom_box = nodes[a];
if (dom_box.getAttribute('type') !== 'checkbox') if (dom_box.getAttribute('type') !== 'checkbox')
@@ -61,34 +297,94 @@ function convert_markdown(md_text) {
'<span class="todo_' + clas + '">' + char + '</span>' + '<span class="todo_' + clas + '">' + char + '</span>' +
html.substr(html.indexOf('>') + 1); html.substr(html.indexOf('>') + 1);
} }
// separate <code> for each line in <pre>
nodes = md_dom.getElementsByTagName('pre');
for (var a = nodes.length - 1; a >= 0; a--) {
var el = nodes[a];
var is_precode =
el.tagName == 'PRE' &&
el.childNodes.length === 1 &&
el.childNodes[0].tagName == 'CODE';
if (!is_precode)
continue;
var nline = parseInt(el.getAttribute('data-ln')) + 1;
var lines = el.innerHTML.replace(/\n<\/code>$/i, '</code>').split(/\n/g);
for (var b = 0; b < lines.length - 1; b++)
lines[b] += '</code>\n<code data-ln="' + (nline + b) + '">';
el.innerHTML = lines.join('');
}
// self-link headers
var id_seen = {},
dyn = md_dom.getElementsByTagName('*');
nodes = [];
for (var a = 0, aa = dyn.length; a < aa; a++)
if (/^[Hh]([1-6])/.exec(dyn[a].tagName) !== null)
nodes.push(dyn[a]);
for (var a = 0; a < nodes.length; a++) {
el = nodes[a];
var id = el.getAttribute('id'),
orig_id = id;
if (id_seen[id]) {
for (var n = 1; n < 4096; n++) {
id = orig_id + '-' + n;
if (!id_seen[id])
break;
}
el.setAttribute('id', id);
}
id_seen[id] = 1;
el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>';
}
ext = md_plug['post'];
if (ext && ext[0].render)
try {
ext[0].render(md_dom);
}
catch (ex) {
md_plug_err(ex, ext[1]);
}
copydom(md_dom, dest_dom, 0);
if (ext && ext[0].render2)
try {
ext[0].render2(dest_dom);
}
catch (ex) {
md_plug_err(ex, ext[1]);
}
} }
function init_toc() { function init_toc() {
var loader = ebi('ml');
loader.parentNode.removeChild(loader);
var anchors = []; // list of toc entries, complex objects var anchors = []; // list of toc entries, complex objects
var anchor = null; // current toc node var anchor = null; // current toc node
var id_seen = {}; // taken IDs
var html = []; // generated toc html var html = []; // generated toc html
var lv = 0; // current indentation level in the toc html var lv = 0; // current indentation level in the toc html
var re = new RegExp('^[Hh]([1-3])'); var ctr = [0, 0, 0, 0, 0, 0];
var manip_nodes_dyn = dom_doc.getElementsByTagName('*'); var manip_nodes_dyn = dom_pre.getElementsByTagName('*');
var manip_nodes = []; var manip_nodes = [];
for (var a = 0, aa = manip_nodes_dyn.length; a < aa; a++) for (var a = 0, aa = manip_nodes_dyn.length; a < aa; a++)
manip_nodes.push(manip_nodes_dyn[a]); manip_nodes.push(manip_nodes_dyn[a]);
for (var a = 0, aa = manip_nodes.length; a < aa; a++) { for (var a = 0, aa = manip_nodes.length; a < aa; a++) {
var elm = manip_nodes[a]; var elm = manip_nodes[a];
var m = re.exec(elm.tagName); var m = /^[Hh]([1-6])/.exec(elm.tagName);
var is_header = m !== null;
var is_header =
m !== null;
var is_precode =
!is_header &&
elm.tagName == 'PRE' &&
elm.childNodes.length === 1 &&
elm.childNodes[0].tagName == 'CODE';
if (is_header) { if (is_header) {
var nlv = m[1]; var nlv = m[1];
while (lv < nlv) { while (lv < nlv) {
@@ -99,24 +395,18 @@ function init_toc() {
html.push('</ul>'); html.push('</ul>');
lv--; lv--;
} }
ctr[lv - 1]++;
for (var b = lv; b < 6; b++)
ctr[b] = 0;
var orig_id = elm.getAttribute('id'); elm.childNodes[0].setAttribute('ctr', ctr.slice(0, lv).join('.'));
var id = orig_id;
if (id_seen[id]) {
for (var n = 1; n < 4096; n++) {
id = orig_id + '-' + n;
if (!id_seen[id])
break;
}
elm.setAttribute('id', id);
}
id_seen[id] = 1;
var ahref = '<a href="#' + id + '">' + var elm2 = elm.cloneNode(true);
elm.innerHTML + '</a>'; elm2.childNodes[0].textContent = elm.textContent;
while (elm2.childNodes.length > 1)
elm2.removeChild(elm2.childNodes[1]);
html.push('<li>' + ahref + '</li>'); html.push('<li>' + elm2.innerHTML + '</li>');
elm.innerHTML = ahref;
if (anchor != null) if (anchor != null)
anchors.push(anchor); anchors.push(anchor);
@@ -127,17 +417,6 @@ function init_toc() {
y: null y: null
}; };
} }
else if (is_precode) {
// not actually toc-related (sorry),
// split <pre><code /></pre> into one <code> per line
var nline = parseInt(elm.getAttribute('data-ln')) + 1;
var lines = elm.innerHTML.replace(/\r?\n<\/code>$/i, '</code>').split(/\r?\n/g);
for (var b = 0; b < lines.length - 1; b++)
lines[b] += '</code>\n<code data-ln="' + (nline + b) + '">';
elm.innerHTML = lines.join('');
}
if (!is_header && anchor) if (!is_header && anchor)
anchor.kids.push(elm); anchor.kids.push(elm);
} }
@@ -209,41 +488,47 @@ function init_toc() {
// "main" :p // "main" :p
convert_markdown(dom_md.value); convert_markdown(dom_src.value, dom_pre);
var toc = init_toc(); var toc = init_toc();
// scroll handler // scroll handler
(function () { var redraw = (function () {
var timer_active = false; var sbs = false;
var final = null; function onresize() {
sbs = window.matchMedia('(min-width: 64em)').matches;
var y = (dom_hbar.offsetTop + dom_hbar.offsetHeight) + 'px';
if (sbs) {
dom_toc.style.top = y;
dom_wrap.style.top = y;
dom_toc.style.marginTop = '0';
}
onscroll();
}
function onscroll() { function onscroll() {
clearTimeout(final);
timer_active = false;
toc.refresh(); toc.refresh();
var y = 0;
if (window.matchMedia('(min-width: 64em)').matches)
y = parseInt(dom_nav.offsetHeight) - window.scrollY;
dom_toc.style.marginTop = y < 0 ? 0 : y + "px";
} }
onscroll();
function ev_onscroll() { window.onresize = onresize;
// long timeout: scroll ended window.onscroll = onscroll;
clearTimeout(final); dom_wrap.onscroll = onscroll;
final = setTimeout(onscroll, 100);
// short timeout: continuous updates onresize();
if (timer_active) return onresize;
return;
timer_active = true;
setTimeout(onscroll, 10);
};
window.onscroll = ev_onscroll;
window.onresize = ev_onscroll;
})(); })();
dom_navtgl.onclick = function () {
var hidden = dom_navtgl.innerHTML == 'hide nav';
dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav';
dom_nav.style.display = hidden ? 'none' : 'block';
if (window.localStorage)
localStorage.setItem('hidenav', hidden ? 1 : 0);
redraw();
};
if (window.localStorage && localStorage.getItem('hidenav') == 1)
dom_navtgl.onclick();

128
copyparty/web/md2.css Normal file
View File

@@ -0,0 +1,128 @@
#toc {
display: none;
}
#mtw {
display: block;
position: fixed;
left: .5em;
bottom: 0;
width: calc(100% - 56em);
}
#mw {
left: calc(100% - 55em);
overflow-y: auto;
position: fixed;
bottom: 0;
}
/* single-screen */
#mtw.preview,
#mw.editor {
opacity: 0;
z-index: 1;
}
#mw.preview,
#mtw.editor {
z-index: 5;
}
#mtw.single,
#mw.single {
margin: 0;
left: 1em;
left: max(1em, calc((100% - 56em) / 2));
}
#mtw.single {
width: 55em;
width: min(55em, calc(100% - 2em));
}
#mp {
position: relative;
}
#mt, #mtr {
width: 100%;
height: calc(100% - 1px);
color: #444;
background: #f7f7f7;
border: 1px solid #999;
outline: none;
padding: 0;
margin: 0;
font-family: 'consolas', monospace, monospace;
white-space: pre-wrap;
word-break: break-word;
overflow-wrap: break-word;
word-wrap: break-word; /*ie*/
overflow-y: scroll;
line-height: 1.3em;
font-size: .9em;
position: relative;
scrollbar-color: #eb0 #f7f7f7;
}
html.dark #mt {
color: #eee;
background: #222;
border: 1px solid #777;
scrollbar-color: #b80 #282828;
}
#mtr {
position: absolute;
top: 0;
left: 0;
}
#save.force-save {
color: #400;
background: #f97;
border-radius: .15em;
}
html.dark #save.force-save {
color: #fca;
background: #720;
}
#save.disabled {
opacity: .4;
}
#helpbox,
#toast {
background: #f7f7f7;
border-radius: .4em;
z-index: 9001;
}
#helpbox {
display: none;
position: fixed;
padding: 2em;
top: 4em;
overflow-y: auto;
box-shadow: 0 .5em 2em #777;
height: calc(100% - 12em);
left: calc(50% - 15em);
right: 0;
width: 30em;
}
#helpclose {
display: block;
}
html.dark #helpbox {
box-shadow: 0 .5em 2em #444;
}
html.dark #helpbox,
html.dark #toast {
background: #222;
border: 1px solid #079;
border-width: 1px 0;
}
#toast {
font-weight: bold;
text-align: center;
padding: .6em 0;
position: fixed;
z-index: 9001;
top: 30%;
transition: opacity 0.2s ease-in-out;
opacity: 1;
}
# mt {opacity: .5;top:1px}

1155
copyparty/web/md2.js Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -21,7 +21,6 @@ html, body {
#mn { #mn {
font-weight: normal; font-weight: normal;
margin: 1.3em 0 .7em 1em; margin: 1.3em 0 .7em 1em;
font-size: 1.4em;
} }
#mn a { #mn a {
color: #444; color: #444;
@@ -161,8 +160,12 @@ h2 {
.mdo ol>li { .mdo ol>li {
margin: .7em 0; margin: .7em 0;
} }
strong {
color: #000;
}
p>em, p>em,
li>em { li>em,
td>em {
color: #c50; color: #c50;
padding: .1em; padding: .1em;
border-bottom: .1em solid #bbb; border-bottom: .1em solid #bbb;
@@ -254,8 +257,12 @@ html.dark .mdo>ul,
html.dark .mdo>ol { html.dark .mdo>ol {
border-color: #555; border-color: #555;
} }
html.dark strong {
color: #fff;
}
html.dark p>em, html.dark p>em,
html.dark li>em { html.dark li>em,
html.dark td>em {
color: #f94; color: #f94;
border-color: #666; border-color: #666;
} }

View File

@@ -17,13 +17,17 @@
</div> </div>
</div> </div>
<div id="m"> <div id="m">
<textarea id="mt" style="display:none">{{ md }}</textarea> <textarea id="mt" style="display:none" autocomplete="off">{{ md }}</textarea>
</div> </div>
</div> </div>
<script> <script>
var link_md_as_html = false; // TODO (does nothing)
var last_modified = {{ lastmod }}; var last_modified = {{ lastmod }};
var md_opt = {
link_md_as_html: false,
allow_plugins: {{ md_plug }},
modpoll_freq: {{ md_chk_rate }}
};
var lightswitch = (function () { var lightswitch = (function () {
var fun = function () { var fun = function () {
@@ -39,6 +43,7 @@ var lightswitch = (function () {
})(); })();
</script> </script>
<script src="/.cpr/deps/easymde.full.js"></script> <script src="/.cpr/util.js"></script>
<script src="/.cpr/deps/easymde.js"></script>
<script src="/.cpr/mde.js"></script> <script src="/.cpr/mde.js"></script>
</body></html> </body></html>

View File

@@ -1,7 +1,9 @@
var dom_wrap = document.getElementById('mw'); "use strict";
var dom_nav = document.getElementById('mn');
var dom_doc = document.getElementById('m'); var dom_wrap = ebi('mw');
var dom_md = document.getElementById('mt'); var dom_nav = ebi('mn');
var dom_doc = ebi('m');
var dom_md = ebi('mt');
(function () { (function () {
var n = document.location + ''; var n = document.location + '';
@@ -53,7 +55,8 @@ var mde = (function () {
"save": "Ctrl-S" "save": "Ctrl-S"
}, },
insertTexts: ["[](", ")"], insertTexts: ["[](", ")"],
tabSize: 4, indentWithTabs: false,
tabSize: 2,
toolbar: tbar, toolbar: tbar,
previewClass: 'mdo', previewClass: 'mdo',
onToggleFullScreen: set_jumpto, onToggleFullScreen: set_jumpto,
@@ -62,7 +65,7 @@ var mde = (function () {
mde.codemirror.on("change", function () { mde.codemirror.on("change", function () {
md_changed(mde); md_changed(mde);
}); });
var loader = document.getElementById('ml'); var loader = ebi('ml');
loader.parentNode.removeChild(loader); loader.parentNode.removeChild(loader);
return mde; return mde;
})(); })();
@@ -120,7 +123,7 @@ function save(mde) {
fd.append("lastmod", (force ? -1 : last_modified)); fd.append("lastmod", (force ? -1 : last_modified));
fd.append("body", txt); fd.append("body", txt);
var url = (document.location + '').split('?')[0] + '?raw'; var url = (document.location + '').split('?')[0];
var xhr = new XMLHttpRequest(); var xhr = new XMLHttpRequest();
xhr.open('POST', url, true); xhr.open('POST', url, true);
xhr.responseType = 'text'; xhr.responseType = 'text';
@@ -212,7 +215,7 @@ function save_chk() {
var ok = document.createElement('div'); var ok = document.createElement('div');
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1'); ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
ok.innerHTML = 'OK✔'; ok.innerHTML = 'OK✔';
var parent = document.getElementById('m'); var parent = ebi('m');
document.documentElement.appendChild(ok); document.documentElement.appendChild(ok);
setTimeout(function () { setTimeout(function () {
ok.style.opacity = 0; ok.style.opacity = 0;

View File

@@ -13,6 +13,7 @@ h1 {
border-bottom: 1px solid #ccc; border-bottom: 1px solid #ccc;
margin: 2em 0 .4em 0; margin: 2em 0 .4em 0;
padding: 0 0 .2em 0; padding: 0 0 .2em 0;
font-weight: normal;
} }
li { li {
margin: 1em 0; margin: 1em 0;
@@ -24,4 +25,29 @@ a {
border-bottom: 1px solid #aaa; border-bottom: 1px solid #aaa;
border-radius: .2em; border-radius: .2em;
padding: .2em .8em; padding: .2em .8em;
}
html.dark,
html.dark body,
html.dark #wrap {
background: #222;
color: #ccc;
}
html.dark h1 {
border-color: #777;
}
html.dark a {
color: #fff;
background: #057;
border-color: #37a;
}
html.dark input {
color: #fff;
background: #624;
border: 1px solid #c27;
border-width: 1px 0 0 0;
border-radius: .5em;
padding: .5em .7em;
margin: 0 .5em 0 0;
} }

View File

@@ -36,7 +36,11 @@
</form> </form>
</ul> </ul>
</div> </div>
<!-- script src="/.cpr/splash.js"></script --> <script>
</body>
if (window.localStorage && localStorage.getItem('darkmode') == 1)
document.documentElement.setAttribute("class", "dark");
</script>
</body>
</html> </html>

View File

@@ -1,61 +1,6 @@
"use strict"; "use strict";
// error handler for mobile devices window.onerror = vis_exh;
function hcroak(msg) {
document.body.innerHTML = msg;
window.onerror = undefined;
throw 'fatal_err';
}
function croak(msg) {
document.body.textContent = msg;
window.onerror = undefined;
throw msg;
}
function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) {
return {
'&': '&amp;',
'"': '&quot;',
'<': '&lt;',
'>': '&gt;'
}[c];
});
}
window.onerror = function (msg, url, lineNo, columnNo, error) {
window.onerror = undefined;
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
if (error) {
var find = ['desc', 'stack', 'trace'];
for (var a = 0; a < find.length; a++)
if (String(error[find[a]]) !== 'undefined')
html.push('<h2>' + find[a] + '</h2>' +
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
}
document.body.style.fontSize = '0.8em';
document.body.style.padding = '0 1em 1em 1em';
hcroak(html.join('\n'));
};
// https://stackoverflow.com/a/950146
function import_js(url, cb) {
var head = document.head || document.getElementsByTagName('head')[0];
var script = document.createElement('script');
script.type = 'text/javascript';
script.src = url;
script.onreadystatechange = cb;
script.onload = cb;
head.appendChild(script);
}
function o(id) {
return document.getElementById(id);
}
(function () { (function () {
@@ -88,12 +33,12 @@ function goto(dest) {
for (var a = obj.length - 1; a >= 0; a--) for (var a = obj.length - 1; a >= 0; a--)
obj[a].classList.remove('act'); obj[a].classList.remove('act');
var obj = document.querySelectorAll('#ops>a'); obj = document.querySelectorAll('#ops>a');
for (var a = obj.length - 1; a >= 0; a--) for (var a = obj.length - 1; a >= 0; a--)
obj[a].classList.remove('act'); obj[a].classList.remove('act');
if (dest) { if (dest) {
document.getElementById('op_' + dest).classList.add('act'); ebi('op_' + dest).classList.add('act');
document.querySelector('#ops>a[data-dest=' + dest + ']').classList.add('act'); document.querySelector('#ops>a[data-dest=' + dest + ']').classList.add('act');
var fn = window['goto_' + dest]; var fn = window['goto_' + dest];
@@ -121,7 +66,7 @@ function goto_up2k() {
if (op !== null && op !== '.') if (op !== null && op !== '.')
goto(op); goto(op);
} }
document.getElementById('ops').style.display = 'block'; ebi('ops').style.display = 'block';
})(); })();
@@ -150,21 +95,21 @@ function up2k_init(have_crypto) {
// show modal message // show modal message
function showmodal(msg) { function showmodal(msg) {
o('u2notbtn').innerHTML = msg; ebi('u2notbtn').innerHTML = msg;
o('u2btn').style.display = 'none'; ebi('u2btn').style.display = 'none';
o('u2notbtn').style.display = 'block'; ebi('u2notbtn').style.display = 'block';
o('u2conf').style.opacity = '0.5'; ebi('u2conf').style.opacity = '0.5';
} }
// hide modal message // hide modal message
function unmodal() { function unmodal() {
o('u2notbtn').style.display = 'none'; ebi('u2notbtn').style.display = 'none';
o('u2btn').style.display = 'block'; ebi('u2btn').style.display = 'block';
o('u2conf').style.opacity = '1'; ebi('u2conf').style.opacity = '1';
o('u2notbtn').innerHTML = ''; ebi('u2notbtn').innerHTML = '';
} }
var post_url = o('op_bup').getElementsByTagName('form')[0].getAttribute('action'); var post_url = ebi('op_bup').getElementsByTagName('form')[0].getAttribute('action');
if (post_url && post_url.charAt(post_url.length - 1) !== '/') if (post_url && post_url.charAt(post_url.length - 1) !== '/')
post_url += '/'; post_url += '/';
@@ -181,25 +126,25 @@ function up2k_init(have_crypto) {
import_js('/.cpr/deps/sha512.js', unmodal); import_js('/.cpr/deps/sha512.js', unmodal);
if (is_https) if (is_https)
o('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best'; ebi('u2foot').innerHTML = shame + ' so <em>this</em> uploader will do like 500kB/s at best';
else else
o('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance'; ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance';
} }
}; }
// show uploader if the user only has write-access // show uploader if the user only has write-access
if (!o('files')) if (!ebi('files'))
goto('up2k'); goto('up2k');
// shows or clears an error message in the basic uploader ui // shows or clears an error message in the basic uploader ui
function setmsg(msg) { function setmsg(msg) {
if (msg !== undefined) { if (msg !== undefined) {
o('u2err').setAttribute('class', 'err'); ebi('u2err').setAttribute('class', 'err');
o('u2err').innerHTML = msg; ebi('u2err').innerHTML = msg;
} }
else { else {
o('u2err').setAttribute('class', ''); ebi('u2err').setAttribute('class', '');
o('u2err').innerHTML = ''; ebi('u2err').innerHTML = '';
} }
} }
@@ -210,7 +155,7 @@ function up2k_init(have_crypto) {
} }
// handle user intent to use the basic uploader instead // handle user intent to use the basic uploader instead
o('u2nope').onclick = function (e) { ebi('u2nope').onclick = function (e) {
e.preventDefault(); e.preventDefault();
setmsg(''); setmsg('');
goto('bup'); goto('bup');
@@ -229,9 +174,9 @@ function up2k_init(have_crypto) {
function cfg_get(name) { function cfg_get(name) {
var val = localStorage.getItem(name); var val = localStorage.getItem(name);
if (val === null) if (val === null)
return parseInt(o(name).value); return parseInt(ebi(name).value);
o(name).value = val; ebi(name).value = val;
return val; return val;
} }
@@ -242,7 +187,7 @@ function up2k_init(have_crypto) {
else else
val = (val == '1'); val = (val == '1');
o(name).checked = val; ebi(name).checked = val;
return val; return val;
} }
@@ -250,12 +195,13 @@ function up2k_init(have_crypto) {
localStorage.setItem( localStorage.setItem(
name, val ? '1' : '0'); name, val ? '1' : '0');
o(name).checked = val; ebi(name).checked = val;
return val; return val;
} }
var parallel_uploads = cfg_get('nthread'); var parallel_uploads = cfg_get('nthread');
var multitask = bcfg_get('multitask', true); var multitask = bcfg_get('multitask', true);
var ask_up = bcfg_get('ask_up', true);
var col_hashing = '#00bbff'; var col_hashing = '#00bbff';
var col_hashed = '#004466'; var col_hashed = '#004466';
@@ -284,9 +230,9 @@ function up2k_init(have_crypto) {
return un2k("this is the basic uploader; up2k needs at least<br />chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1"); return un2k("this is the basic uploader; up2k needs at least<br />chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1");
function nav() { function nav() {
o('file' + fdom_ctr).click(); ebi('file' + fdom_ctr).click();
} }
o('u2btn').addEventListener('click', nav, false); ebi('u2btn').addEventListener('click', nav, false);
function ondrag(ev) { function ondrag(ev) {
ev.stopPropagation(); ev.stopPropagation();
@@ -294,8 +240,8 @@ function up2k_init(have_crypto) {
ev.dataTransfer.dropEffect = 'copy'; ev.dataTransfer.dropEffect = 'copy';
ev.dataTransfer.effectAllowed = 'copy'; ev.dataTransfer.effectAllowed = 'copy';
} }
o('u2btn').addEventListener('dragover', ondrag, false); ebi('u2btn').addEventListener('dragover', ondrag, false);
o('u2btn').addEventListener('dragenter', ondrag, false); ebi('u2btn').addEventListener('dragenter', ondrag, false);
function gotfile(ev) { function gotfile(ev) {
ev.stopPropagation(); ev.stopPropagation();
@@ -317,6 +263,7 @@ function up2k_init(have_crypto) {
more_one_file(); more_one_file();
var bad_files = []; var bad_files = [];
var good_files = [];
for (var a = 0; a < files.length; a++) { for (var a = 0; a < files.length; a++) {
var fobj = files[a]; var fobj = files[a];
if (is_itemlist) { if (is_itemlist) {
@@ -330,9 +277,32 @@ function up2k_init(have_crypto) {
throw 1; throw 1;
} }
catch (ex) { catch (ex) {
bad_files.push([a, fobj.name]); bad_files.push(fobj.name);
continue; continue;
} }
good_files.push(fobj);
}
if (bad_files.length > 0) {
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length);
for (var a = 0; a < bad_files.length; a++)
msg += '-- ' + bad_files[a] + '\n';
if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))
msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';
alert(msg);
}
var msg = ['upload these ' + good_files.length + ' files?'];
for (var a = 0; a < good_files.length; a++)
msg.push(good_files[a].name);
if (ask_up && !confirm(msg.join('\n')))
return;
for (var a = 0; a < good_files.length; a++) {
var fobj = good_files[a];
var now = new Date().getTime(); var now = new Date().getTime();
var lmod = fobj.lastModified || now; var lmod = fobj.lastModified || now;
var entry = { var entry = {
@@ -357,31 +327,20 @@ function up2k_init(have_crypto) {
var tr = document.createElement('tr'); var tr = document.createElement('tr');
tr.innerHTML = '<td id="f{0}n"></td><td id="f{0}t">hashing</td><td id="f{0}p" class="prog"></td>'.format(st.files.length); tr.innerHTML = '<td id="f{0}n"></td><td id="f{0}t">hashing</td><td id="f{0}p" class="prog"></td>'.format(st.files.length);
tr.getElementsByTagName('td')[0].textContent = entry.name; tr.getElementsByTagName('td')[0].textContent = entry.name;
o('u2tab').appendChild(tr); ebi('u2tab').appendChild(tr);
st.files.push(entry); st.files.push(entry);
st.todo.hash.push(entry); st.todo.hash.push(entry);
} }
if (bad_files.length > 0) {
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length);
for (var a = 0; a < bad_files.length; a++)
msg += '-- ' + bad_files[a][1] + '\n';
if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))
msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';
alert(msg);
}
} }
o('u2btn').addEventListener('drop', gotfile, false); ebi('u2btn').addEventListener('drop', gotfile, false);
function more_one_file() { function more_one_file() {
fdom_ctr++; fdom_ctr++;
var elm = document.createElement('div') var elm = document.createElement('div')
elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" />'.format(fdom_ctr); elm.innerHTML = '<input id="file{0}" type="file" name="file{0}[]" multiple="multiple" />'.format(fdom_ctr);
o('u2form').appendChild(elm); ebi('u2form').appendChild(elm);
o('file' + fdom_ctr).addEventListener('change', gotfile, false); ebi('file' + fdom_ctr).addEventListener('change', gotfile, false);
} }
more_one_file(); more_one_file();
@@ -391,16 +350,17 @@ function up2k_init(have_crypto) {
// //
function handshakes_permitted() { function handshakes_permitted() {
return multitask || ( var lim = multitask ? 1 : 0;
st.todo.upload.length == 0 && return lim >=
st.busy.upload.length == 0); st.todo.upload.length +
st.busy.upload.length;
} }
function hashing_permitted() { function hashing_permitted() {
return multitask || ( var lim = multitask ? 1 : 0;
handshakes_permitted() && return handshakes_permitted() && lim >=
st.todo.handshake.length == 0 && st.todo.handshake.length +
st.busy.handshake.length == 0); st.busy.handshake.length;
} }
var tasker = (function () { var tasker = (function () {
@@ -451,17 +411,6 @@ function up2k_init(have_crypto) {
/// hashing /// hashing
// //
// https://gist.github.com/jonleighton/958841
function buf2b64_maybe_fucky(buffer) {
var ret = '';
var view = new DataView(buffer);
for (var i = 0; i < view.byteLength; i++) {
ret += String.fromCharCode(view.getUint8(i));
}
return window.btoa(ret).replace(
/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
}
// https://gist.github.com/jonleighton/958841 // https://gist.github.com/jonleighton/958841
function buf2b64(arrayBuffer) { function buf2b64(arrayBuffer) {
var base64 = ''; var base64 = '';
@@ -502,20 +451,6 @@ function up2k_init(have_crypto) {
return base64; return base64;
} }
// https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/digest
function buf2hex(buffer) {
var hexCodes = [];
var view = new DataView(buffer);
for (var i = 0; i < view.byteLength; i += 4) {
var value = view.getUint32(i) // 4 bytes per iter
var stringValue = value.toString(16) // doesn't pad
var padding = '00000000'
var paddedValue = (padding + stringValue).slice(-padding.length)
hexCodes.push(paddedValue);
}
return hexCodes.join("");
}
function get_chunksize(filesize) { function get_chunksize(filesize) {
var chunksize = 1024 * 1024; var chunksize = 1024 * 1024;
var stepsize = 512 * 1024; var stepsize = 512 * 1024;
@@ -602,7 +537,7 @@ function up2k_init(have_crypto) {
pb_html += '<div id="f{0}p{1}" style="width:{2}%"><div></div></div>'.format( pb_html += '<div id="f{0}p{1}" style="width:{2}%"><div></div></div>'.format(
t.n, a, pb_perc); t.n, a, pb_perc);
o('f{0}p'.format(t.n)).innerHTML = pb_html; ebi('f{0}p'.format(t.n)).innerHTML = pb_html;
var reader = new FileReader(); var reader = new FileReader();
@@ -677,7 +612,7 @@ function up2k_init(have_crypto) {
alert('{0} ms, {1} MB/s\n'.format(t.t2 - t.t1, spd.toFixed(3)) + t.hash.join('\n')); alert('{0} ms, {1} MB/s\n'.format(t.t2 - t.t1, spd.toFixed(3)) + t.hash.join('\n'));
} }
o('f{0}t'.format(t.n)).innerHTML = 'connecting'; ebi('f{0}t'.format(t.n)).innerHTML = 'connecting';
st.busy.hash.splice(st.busy.hash.indexOf(t), 1); st.busy.hash.splice(st.busy.hash.indexOf(t), 1);
st.todo.handshake.push(t); st.todo.handshake.push(t);
}; };
@@ -706,7 +641,7 @@ function up2k_init(have_crypto) {
if (response.name !== t.name) { if (response.name !== t.name) {
// file exists; server renamed us // file exists; server renamed us
t.name = response.name; t.name = response.name;
o('f{0}n'.format(t.n)).textContent = t.name; ebi('f{0}n'.format(t.n)).textContent = t.name;
} }
t.postlist = []; t.postlist = [];
@@ -736,23 +671,41 @@ function up2k_init(have_crypto) {
msg = 'uploading'; msg = 'uploading';
done = false; done = false;
} }
o('f{0}t'.format(t.n)).innerHTML = msg; ebi('f{0}t'.format(t.n)).innerHTML = msg;
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1); st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
if (done) { if (done) {
var spd1 = (t.size / ((t.t2 - t.t1) / 1000.)) / (1024 * 1024.); var spd1 = (t.size / ((t.t2 - t.t1) / 1000.)) / (1024 * 1024.);
var spd2 = (t.size / ((t.t3 - t.t2) / 1000.)) / (1024 * 1024.); var spd2 = (t.size / ((t.t3 - t.t2) / 1000.)) / (1024 * 1024.);
o('f{0}p'.format(t.n)).innerHTML = 'hash {0}, up {1} MB/s'.format( ebi('f{0}p'.format(t.n)).innerHTML = 'hash {0}, up {1} MB/s'.format(
spd1.toFixed(2), spd2.toFixed(2)); spd1.toFixed(2), spd2.toFixed(2));
} }
tasker(); tasker();
} }
else else {
var err = "";
var rsp = (xhr.responseText + '');
if (rsp.indexOf('partial upload exists') !== -1 ||
rsp.indexOf('file already exists') !== -1) {
err = rsp;
var ofs = err.lastIndexOf(' : ');
if (ofs > 0)
err = err.slice(0, ofs);
}
if (err != "") {
ebi('f{0}t'.format(t.n)).innerHTML = "ERROR";
ebi('f{0}p'.format(t.n)).innerHTML = err;
st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
tasker();
return;
}
alert("server broke (error {0}):\n\"{1}\"\n".format( alert("server broke (error {0}):\n\"{1}\"\n".format(
xhr.status, xhr.status,
(xhr.response && xhr.response.err) || (xhr.response && xhr.response.err) ||
(xhr.responseText && xhr.responseText) || (xhr.responseText && xhr.responseText) ||
"no further information")); "no further information"));
}
}; };
xhr.open('POST', post_url + 'handshake.php', true); xhr.open('POST', post_url + 'handshake.php', true);
xhr.responseType = 'text'; xhr.responseType = 'text';
@@ -803,7 +756,7 @@ function up2k_init(have_crypto) {
t.postlist.splice(t.postlist.indexOf(npart), 1); t.postlist.splice(t.postlist.indexOf(npart), 1);
if (t.postlist.length == 0) { if (t.postlist.length == 0) {
t.t3 = new Date().getTime(); t.t3 = new Date().getTime();
o('f{0}t'.format(t.n)).innerHTML = 'verifying'; ebi('f{0}t'.format(t.n)).innerHTML = 'verifying';
st.todo.handshake.push(t); st.todo.handshake.push(t);
} }
tasker(); tasker();
@@ -834,7 +787,7 @@ function up2k_init(have_crypto) {
// //
function prog(nfile, nchunk, color, percent) { function prog(nfile, nchunk, color, percent) {
var n1 = o('f{0}p{1}'.format(nfile, nchunk)); var n1 = ebi('f{0}p{1}'.format(nfile, nchunk));
var n2 = n1.getElementsByTagName('div')[0]; var n2 = n1.getElementsByTagName('div')[0];
if (percent === undefined) { if (percent === undefined) {
n1.style.background = color; n1.style.background = color;
@@ -857,7 +810,7 @@ function up2k_init(have_crypto) {
dir.preventDefault(); dir.preventDefault();
} catch (ex) { } } catch (ex) { }
var obj = o('nthread'); var obj = ebi('nthread');
if (dir.target) { if (dir.target) {
obj.style.background = '#922'; obj.style.background = '#922';
var v = Math.floor(parseInt(obj.value)); var v = Math.floor(parseInt(obj.value));
@@ -887,24 +840,30 @@ function up2k_init(have_crypto) {
bcfg_set('multitask', multitask); bcfg_set('multitask', multitask);
} }
function tgl_ask_up() {
ask_up = !ask_up;
bcfg_set('ask_up', ask_up);
}
function nop(ev) { function nop(ev) {
ev.preventDefault(); ev.preventDefault();
this.click(); this.click();
} }
o('nthread_add').onclick = function (ev) { ebi('nthread_add').onclick = function (ev) {
ev.preventDefault(); ev.preventDefault();
bumpthread(1); bumpthread(1);
}; };
o('nthread_sub').onclick = function (ev) { ebi('nthread_sub').onclick = function (ev) {
ev.preventDefault(); ev.preventDefault();
bumpthread(-1); bumpthread(-1);
}; };
o('nthread').addEventListener('input', bumpthread, false); ebi('nthread').addEventListener('input', bumpthread, false);
o('multitask').addEventListener('click', tgl_multitask, false); ebi('multitask').addEventListener('click', tgl_multitask, false);
ebi('ask_up').addEventListener('click', tgl_ask_up, false);
var nodes = o('u2conf').getElementsByTagName('a'); var nodes = ebi('u2conf').getElementsByTagName('a');
for (var a = nodes.length - 1; a >= 0; a--) for (var a = nodes.length - 1; a >= 0; a--)
nodes[a].addEventListener('touchend', nop, false); nodes[a].addEventListener('touchend', nop, false);

View File

@@ -194,6 +194,12 @@
#u2conf input+a { #u2conf input+a {
background: #d80; background: #d80;
} }
#u2conf input[type="checkbox"]+label {
color: #f5a;
}
#u2conf input[type="checkbox"]:checked+label {
color: #fc5;
}
#u2foot { #u2foot {
color: #fff; color: #fff;
font-style: italic; font-style: italic;

View File

@@ -43,10 +43,14 @@
<input class="txtbox" id="nthread" value="2" /> <input class="txtbox" id="nthread" value="2" />
<a href="#" id="nthread_add">+</a> <a href="#" id="nthread_add">+</a>
</td> </td>
<td rowspan="2"> <td rowspan="2" style="padding-left:1.5em">
<input type="checkbox" id="multitask" /> <input type="checkbox" id="multitask" />
<label for="multitask">hash while<br />uploading</label> <label for="multitask">hash while<br />uploading</label>
</td> </td>
<td rowspan="2">
<input type="checkbox" id="ask_up" />
<label for="ask_up">ask for<br />confirmation</label>
</td>
</tr> </tr>
</table> </table>

109
copyparty/web/util.js Normal file
View File

@@ -0,0 +1,109 @@
"use strict";
// error handler for mobile devices
function hcroak(msg) {
document.body.innerHTML = msg;
window.onerror = undefined;
throw 'fatal_err';
}
function croak(msg) {
document.body.textContent = msg;
window.onerror = undefined;
throw msg;
}
function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) {
return {
'&': '&amp;',
'"': '&quot;',
'<': '&lt;',
'>': '&gt;'
}[c];
});
}
function vis_exh(msg, url, lineNo, columnNo, error) {
window.onerror = undefined;
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
if (error) {
var find = ['desc', 'stack', 'trace'];
for (var a = 0; a < find.length; a++)
if (String(error[find[a]]) !== 'undefined')
html.push('<h2>' + find[a] + '</h2>' +
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
}
document.body.style.fontSize = '0.8em';
document.body.style.padding = '0 1em 1em 1em';
hcroak(html.join('\n'));
}
function ebi(id) {
return document.getElementById(id);
}
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
if (!String.prototype.endsWith) {
String.prototype.endsWith = function (search, this_len) {
if (this_len === undefined || this_len > this.length) {
this_len = this.length;
}
return this.substring(this_len - search.length, this_len) === search;
};
}
if (!String.startsWith) {
String.prototype.startsWith = function (s, i) {
i = i > 0 ? i | 0 : 0;
return this.substring(i, i + s.length) === s;
};
}
// https://stackoverflow.com/a/950146
function import_js(url, cb) {
var head = document.head || document.getElementsByTagName('head')[0];
var script = document.createElement('script');
script.type = 'text/javascript';
script.src = url;
script.onreadystatechange = cb;
script.onload = cb;
head.appendChild(script);
}
function sortTable(table, col) {
var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows
th = table.tHead.rows[0].cells,
tr = Array.prototype.slice.call(tb.rows, 0),
i, reverse = th[col].className == 'sort1' ? -1 : 1;
for (var a = 0, thl = th.length; a < thl; a++)
th[a].className = '';
th[col].className = 'sort' + reverse;
var stype = th[col].getAttribute('sort');
tr = tr.sort(function (a, b) {
var v1 = a.cells[col].textContent.trim();
var v2 = b.cells[col].textContent.trim();
if (stype == 'int') {
v1 = parseInt(v1.replace(/,/g, ''));
v2 = parseInt(v2.replace(/,/g, ''));
return reverse * (v1 - v2);
}
return reverse * (v1.localeCompare(v2));
});
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]);
}
function makeSortable(table) {
var th = table.tHead, i;
th && (th = th.rows[0]) && (th = th.cells);
if (th) i = th.length;
else return; // if no `<thead>` then do nothing
while (--i >= 0) (function (i) {
th[i].onclick = function () {
sortTable(table, i);
};
}(i));
}

View File

@@ -3,6 +3,14 @@ echo not a script
exit 1 exit 1
##
## delete all partial uploads
## (supports linux/macos, probably windows+msys2)
gzip -d < .hist/up2k.snap | jq -r '.[].tnam' | while IFS= read -r f; do rm -f -- "$f"; done
gzip -d < .hist/up2k.snap | jq -r '.[].name' | while IFS= read -r f; do wc -c -- "$f" | grep -qiE '^[^0-9a-z]*0' && rm -f -- "$f"; done
## ##
## create a test payload ## create a test payload
@@ -13,7 +21,7 @@ head -c $((2*1024*1024*1024)) /dev/zero | openssl enc -aes-256-ctr -pass pass:hu
## testing multiple parallel uploads ## testing multiple parallel uploads
## usage: para | tee log ## usage: para | tee log
para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}; do for ((n=0;n<s;n++)); do curl -sF "act=bput" -F "f=@garbage.file" http://127.0.0.1:1234/ 2>&1 & done; wait; echo; done; done; } para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}; do for ((n=0;n<s;n++)); do curl -sF "act=bput" -F "f=@garbage.file" http://127.0.0.1:3923/ 2>&1 & done; wait; echo; done; done; }
## ##
@@ -36,13 +44,13 @@ for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qwe,rty;asd
fn=$(printf '\xba\xdc\xab.cab') fn=$(printf '\xba\xdc\xab.cab')
echo asdf > "$fn" echo asdf > "$fn"
curl --cookie cppwd=wark -sF "act=bput" -F "f=@$fn" http://127.0.0.1:1234/moji/%ED%91/ curl --cookie cppwd=wark -sF "act=bput" -F "f=@$fn" http://127.0.0.1:3923/moji/%ED%91/
## ##
## test compression ## test compression
wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:1234/.cpr/deps/ogv.js -O- | md5sum; p=~ed/dev/copyparty/copyparty/web/deps/ogv.js.gz; md5sum $p; gzip -d < $p | md5sum wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:3923/.cpr/deps/ogv.js -O- | md5sum; p=~ed/dev/copyparty/copyparty/web/deps/ogv.js.gz; md5sum $p; gzip -d < $p | md5sum
## ##
@@ -80,3 +88,45 @@ for d in /usr /var; do find $d -type f -size +30M 2>/dev/null; done | while IFS=
# py2 on osx # py2 on osx
brew install python@2 brew install python@2
pip install virtualenv pip install virtualenv
##
## http 206
# az = abcdefghijklmnopqrstuvwxyz
printf '%s\r\n' 'GET /az HTTP/1.1' 'Host: ocv.me' 'Range: bytes=5-10' '' | ncat ocv.me 80
# Content-Range: bytes 5-10/26
# Content-Length: 6
# fghijk
Range: bytes=0-1 "ab" Content-Range: bytes 0-1/26
Range: bytes=24-24 "y" Content-Range: bytes 24-24/26
Range: bytes=24-25 "yz" Content-Range: bytes 24-25/26
Range: bytes=24- "yz" Content-Range: bytes 24-25/26
Range: bytes=25-29 "z" Content-Range: bytes 25-25/26
Range: bytes=26- Content-Range: bytes */26
HTTP/1.1 416 Requested Range Not Satisfiable
##
## md perf
var tsh = [];
function convert_markdown(md_text, dest_dom) {
tsh.push(new Date().getTime());
while (tsh.length > 10)
tsh.shift();
if (tsh.length > 1) {
var end = tsh.slice(-2);
console.log("render", end.pop() - end.pop(), (tsh[tsh.length - 1] - tsh[0]) / (tsh.length - 1));
}
##
## tmpfiles.d meme
mk() { rm -rf /tmp/foo; sudo -u ed bash -c 'mkdir /tmp/foo; echo hi > /tmp/foo/bar'; }
mk && t0="$(date)" && while true; do date -s "$(date '+ 1 hour')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
mk && sudo -u ed flock /tmp/foo sleep 40 & sleep 1; ps aux | grep -E 'sleep 40$' && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
mk && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; tar -cf/dev/null /tmp/foo; done; echo "$t0"

View File

@@ -0,0 +1,35 @@
diff --git a/copyparty/httpcli.py b/copyparty/httpcli.py
index 2d3c1ad..e1e85a0 100644
--- a/copyparty/httpcli.py
+++ b/copyparty/httpcli.py
@@ -864,6 +864,30 @@ class HttpCli(object):
#
# send reply
+ try:
+ fakefn = self.conn.hsrv.fakefn
+ fakectr = self.conn.hsrv.fakectr
+ fakedata = self.conn.hsrv.fakedata
+ except:
+ fakefn = b''
+ fakectr = 0
+ fakedata = b''
+
+ self.log('\n{} {}\n{}'.format(fakefn, fakectr, open_args[0]))
+ if fakefn == open_args[0] and fakectr > 0:
+ self.reply(fakedata, mime=guess_mime(req_path)[0])
+ self.conn.hsrv.fakectr = fakectr - 1
+ else:
+ with open_func(*open_args) as f:
+ fakedata = f.read()
+
+ self.conn.hsrv.fakefn = open_args[0]
+ self.conn.hsrv.fakedata = fakedata
+ self.conn.hsrv.fakectr = 15
+ self.reply(fakedata, mime=guess_mime(req_path)[0])
+
+ return True
+
self.out_headers["Accept-Ranges"] = "bytes"
self.send_headers(
length=upper - lower,

62
docs/rclone.md Normal file
View File

@@ -0,0 +1,62 @@
# using rclone to mount a remote copyparty server as a local filesystem
speed estimates with server and client on the same win10 machine:
* `1070 MiB/s` with rclone as both server and client
* `570 MiB/s` with rclone-client and `copyparty -ed -j16` as server
* `220 MiB/s` with rclone-client and `copyparty -ed` as server
* `100 MiB/s` with [../bin/copyparty-fuse.py](../bin/copyparty-fuse.py) as client
when server is on another machine (1gbit LAN),
* `75 MiB/s` with [../bin/copyparty-fuse.py](../bin/copyparty-fuse.py) as client
* `92 MiB/s` with rclone-client and `copyparty -ed` as server
* `103 MiB/s` (connection max) with `copyparty -ed -j16` and all the others
# creating the config file
if you want to use password auth, add `headers = Cookie,cppwd=fgsfds` below
### on windows clients:
```
(
echo [cpp]
echo type = http
echo url = http://127.0.0.1:3923/
) > %userprofile%\.config\rclone\rclone.conf
```
also install the windows dependencies: [winfsp](https://github.com/billziss-gh/winfsp/releases/latest)
### on unix clients:
```
cat > ~/.config/rclone/rclone.conf <<'EOF'
[cpp]
type = http
url = http://127.0.0.1:3923/
EOF
```
# mounting the copyparty server locally
```
rclone.exe mount --vfs-cache-max-age 5s --attr-timeout 5s --dir-cache-time 5s cpp: Z:
```
# use rclone as server too, replacing copyparty
feels out of place but is too good not to mention
```
rclone.exe serve http --read-only .
```
* `webdav` gives write-access but `http` is twice as fast
* `ftp` is buggy, avoid
# bugs
* rclone-client throws an exception if you try to read an empty file (should return zero bytes)

10
docs/unirange.py Normal file
View File

@@ -0,0 +1,10 @@
v = "U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+2000-206F, U+2074, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD"
for v in v.split(","):
if "+" in v:
v = v.split("+")[1]
if "-" in v:
lo, hi = v.split("-")
else:
lo = hi = v
for v in range(int(lo, 16), int(hi, 16) + 1):
print("{:4x} [{}]".format(v, chr(v)))

129
scripts/copyparty-repack.sh Executable file
View File

@@ -0,0 +1,129 @@
#!/bin/bash
repacker=1
set -e
# -- download latest copyparty (source.tgz and sfx),
# -- build minimal sfx versions,
# -- create a .tar.gz bundle
#
# convenient for deploying updates to inconvenient locations
# (and those are usually linux so bash is good inaff)
# (but that said this even has macos support)
#
# bundle will look like:
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty
# -rw-r--r-- 0 ed ed 491318 Nov 19 00:40 copyparty-extras/copyparty-0.5.4.tar.gz
# -rwxr-xr-x 0 ed ed 30254 Nov 17 23:58 copyparty-extras/copyparty-fuse.py
# -rwxr-xr-x 0 ed ed 481403 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.sh
# -rwxr-xr-x 0 ed ed 506043 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.py
# -rwxr-xr-x 0 ed ed 167699 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.sh
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py
command -v gtar && tar() { gtar "$@"; }
command -v gsed && sed() { gsed "$@"; }
td="$(mktemp -d)"
od="$(pwd)"
cd "$td"
pwd
dl_text() {
command -v curl && exec curl "$@"
exec wget -O- "$@"
}
dl_files() {
command -v curl && exec curl -L --remote-name-all "$@"
exec wget "$@"
}
export -f dl_files
# if cache exists, use that instead of bothering github
cache="$od/.copyparty-repack.cache"
[ -e "$cache" ] &&
tar -xf "$cache" ||
{
# get download links from github
dl_text https://api.github.com/repos/9001/copyparty/releases/latest |
(
# prefer jq if available
jq -r '.assets[]|select(.name|test("-sfx|tar.gz")).browser_download_url' ||
# fallback to awk (sorry)
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
) |
tee /dev/stderr |
tr -d '\r' | tr '\n' '\0' |
xargs -0 bash -c 'dl_files "$@"' _
tar -czf "$cache" *
}
# move src into copyparty-extras/,
# move sfx into copyparty-extras/sfx-full/
mkdir -p copyparty-extras/sfx-{full,lite}
mv copyparty-sfx.* copyparty-extras/sfx-full/
mv copyparty-*.tar.gz copyparty-extras/
# unpack the source code
( cd copyparty-extras/
tar -xf *.tar.gz
)
# use repacker from release if that is newer
p_other=copyparty-extras/copyparty-*/scripts/copyparty-repack.sh
other=$(awk -F= 'BEGIN{v=-1} NR<10&&/^repacker=/{v=$NF} END{print v}' <$p_other)
[ $repacker -lt $other ] &&
cat $p_other >"$od/$0" && cd "$od" && rm -rf "$td" && exec "$0" "$@"
# now drop the cache
rm -f "$cache"
# fix permissions
chmod 755 \
copyparty-extras/sfx-full/* \
copyparty-extras/copyparty-*/{scripts,bin}/*
# extract and repack the sfx with less features enabled
( cd copyparty-extras/sfx-full/
./copyparty-sfx.py -h
cd ../copyparty-*/
./scripts/make-sfx.sh re no-ogv no-cm
)
# put new sfx into copyparty-extras/sfx-lite/,
# fuse client into copyparty-extras/,
# copy lite-sfx.py to ./copyparty,
# delete extracted source code
( cd copyparty-extras/
mv copyparty-*/dist/* sfx-lite/
mv copyparty-*/bin/copyparty-fuse.py .
cp -pv sfx-lite/copyparty-sfx.py ../copyparty
rm -rf copyparty-{0..9}*.*.*{0..9}
)
# and include the repacker itself too
cp -av "$od/$0" copyparty-extras/ ||
cp -av "$0" copyparty-extras/ ||
true
# create the bundle
fn=copyparty-$(date +%Y-%m%d-%H%M%S).tgz
tar -czvf "$od/$fn" *
cd "$od"
rm -rf "$td"
echo
echo "done, here's your bundle:"
ls -al "$fn"

View File

@@ -3,7 +3,7 @@ WORKDIR /z
ENV ver_asmcrypto=2821dd1dedd1196c378f5854037dda5c869313f3 \ ENV ver_asmcrypto=2821dd1dedd1196c378f5854037dda5c869313f3 \
ver_markdownit=10.0.0 \ ver_markdownit=10.0.0 \
ver_showdown=1.9.1 \ ver_showdown=1.9.1 \
ver_marked=1.0.0 \ ver_marked=1.1.0 \
ver_ogvjs=1.6.1 \ ver_ogvjs=1.6.1 \
ver_mde=2.10.1 \ ver_mde=2.10.1 \
ver_codemirror=5.53.2 \ ver_codemirror=5.53.2 \
@@ -11,8 +11,11 @@ ENV ver_asmcrypto=2821dd1dedd1196c378f5854037dda5c869313f3 \
ver_zopfli=1.0.3 ver_zopfli=1.0.3
# download # download;
RUN apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev \ # the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
RUN mkdir -p /z/dist/no-pk \
&& wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \
&& apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \
&& wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \ && wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \
&& wget https://github.com/asmcrypto/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \ && wget https://github.com/asmcrypto/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \ && wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
@@ -36,23 +39,7 @@ RUN apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzi
&& npm install \ && npm install \
&& npm i gulp-cli -g ) \ && npm i gulp-cli -g ) \
&& unzip fontawesome.zip \ && unzip fontawesome.zip \
&& tar -xf zopfli.tgz \ && tar -xf zopfli.tgz
&& mkdir -p /z/dist/no-pk
# uncomment if you wanna test the abandoned markdown converters
#ENV build_abandoned=1
RUN [ $build_abandoned ] || exit 0; \
git clone --depth 1 --branch $ver_showdown https://github.com/showdownjs/showdown/ \
&& wget https://github.com/markdown-it/markdown-it/archive/$ver_markdownit.tar.gz -O markdownit.tgz \
&& (cd showdown \
&& npm install \
&& npm i grunt -g ) \
&& (tar -xf markdownit.tgz \
&& cd markdown-it-$ver_markdownit \
&& npm install )
# build fonttools (which needs zopfli) # build fonttools (which needs zopfli)
@@ -80,31 +67,27 @@ RUN cd ogvjs-$ver_ogvjs \
&& cp -pv \ && cp -pv \
ogv.js \ ogv.js \
ogv-worker-audio.js \ ogv-worker-audio.js \
ogv-demuxer-ogg.js \
ogv-demuxer-ogg-wasm.js \ ogv-demuxer-ogg-wasm.js \
ogv-demuxer-ogg-wasm.wasm \ ogv-demuxer-ogg-wasm.wasm \
ogv-demuxer-webm.js \
ogv-demuxer-webm-wasm.js \ ogv-demuxer-webm-wasm.js \
ogv-demuxer-webm-wasm.wasm \ ogv-demuxer-webm-wasm.wasm \
ogv-decoder-audio-opus.js \
ogv-decoder-audio-opus-wasm.js \ ogv-decoder-audio-opus-wasm.js \
ogv-decoder-audio-opus-wasm.wasm \ ogv-decoder-audio-opus-wasm.wasm \
ogv-decoder-audio-vorbis.js \
ogv-decoder-audio-vorbis-wasm.js \ ogv-decoder-audio-vorbis-wasm.js \
ogv-decoder-audio-vorbis-wasm.wasm \ ogv-decoder-audio-vorbis-wasm.wasm \
dynamicaudio.swf \
/z/dist /z/dist
# ogv-demuxer-ogg.js \
# ogv-demuxer-webm.js \
# ogv-decoder-audio-opus.js \
# ogv-decoder-audio-vorbis.js \
# dynamicaudio.swf \
# build marked # build marked
RUN wget https://github.com/markedjs/marked/commit/5c166d4164791f643693478e4ac094d63d6e0c9a.patch -O marked-git-1.patch \
&& wget https://patch-diff.githubusercontent.com/raw/markedjs/marked/pull/1652.patch -O marked-git-2.patch
COPY marked.patch /z/ COPY marked.patch /z/
COPY marked-ln.patch /z/ COPY marked-ln.patch /z/
RUN cd marked-$ver_marked \ RUN cd marked-$ver_marked \
&& patch -p1 < /z/marked-git-1.patch \
&& patch -p1 < /z/marked-git-2.patch \
&& patch -p1 < /z/marked-ln.patch \ && patch -p1 < /z/marked-ln.patch \
&& patch -p1 < /z/marked.patch \ && patch -p1 < /z/marked.patch \
&& npm run build \ && npm run build \
@@ -138,57 +121,10 @@ RUN cd easy-markdown-editor-$ver_mde \
&& patch -p1 < /z/easymde-ln.patch \ && patch -p1 < /z/easymde-ln.patch \
&& gulp \ && gulp \
&& cp -pv dist/easymde.min.css /z/dist/easymde.css \ && cp -pv dist/easymde.min.css /z/dist/easymde.css \
&& cp -pv dist/easymde.min.js /z/dist/easymde.js \ && cp -pv dist/easymde.min.js /z/dist/easymde.js
&& sed -ri '/pipe.terser/d; /cleanCSS/d' gulpfile.js \
&& gulp \
&& cp -pv dist/easymde.min.css /z/dist/easymde.full.css \
&& cp -pv dist/easymde.min.js /z/dist/easymde.full.js
# build showdown (abandoned; disabled by default) # build fontawesome and scp
COPY showdown.patch /z/
RUN [ $build_abandoned ] || exit 0; \
cd showdown \
&& rm -rf bin dist \
# # remove ellipsis plugin \
&& rm \
src/subParsers/ellipsis.js \
test/cases/ellipsis* \
# # remove html-to-md converter \
&& rm \
test/node/testsuite.makemd.js \
test/node/showdown.Converter.makeMarkdown.js \
# # remove emojis \
&& rm src/subParsers/emoji.js \
&& awk '/^showdown.helper.emojis/ {o=1} !o; /^\}/ {o=0}' \
>f <src/helpers.js \
&& mv f src/helpers.js \
&& rm -rf test/features/emojis \
# # remove ghmentions \
&& rm test/features/ghMentions.* \
# # remove option descriptions \
&& sed -ri '/descri(ption|be): /d' src/options.js \
&& patch -p1 < /z/showdown.patch
RUN [ $build_abandoned ] || exit 0; \
cd showdown \
&& grunt build \
&& sed -ri '/sourceMappingURL=showdown.min.js.map/d' dist/showdown.min.js \
&& mv dist/showdown.min.js /z/dist/showdown.js \
&& ls -al /z/dist/showdown.js
# build markdownit (abandoned; disabled by default)
COPY markdown-it.patch /z/
RUN [ $build_abandoned ] || exit 0; \
cd markdown-it-$ver_markdownit \
&& patch -p1 < /z/markdown-it.patch \
&& make browserify \
&& cp -pv dist/markdown-it.min.js /z/dist/markdown-it.js \
&& cp -pv dist/markdown-it.js /z/dist/markdown-it-full.js
# build fontawesome
COPY mini-fa.sh /z COPY mini-fa.sh /z
COPY mini-fa.css /z COPY mini-fa.css /z
RUN /bin/ash /z/mini-fa.sh RUN /bin/ash /z/mini-fa.sh
@@ -203,38 +139,6 @@ RUN cd /z/dist \
&& rmdir no-pk && rmdir no-pk
# showdown: abandoned due to code-blocks in lists failing # git diff -U2 --no-index marked-1.1.0-orig/ marked-1.1.0-edit/ -U2 | sed -r '/^index /d;s`^(diff --git a/)[^/]+/(.* b/)[^/]+/`\1\2`; s`^(---|\+\+\+) ([ab]/)[^/]+/`\1 \2`' > ../dev/copyparty/scripts/deps-docker/marked-ln.patch
# 22770 orig # d=/home/ed/dev/copyparty/scripts/deps-docker/; tar -cf ../x . && ssh root@$bip "cd $d && tar -xv >&2 && make >&2 && tar -cC ../../copyparty/web deps" <../x | (cd ../../copyparty/web/; cat > the.tgz; tar -xvf the.tgz; rm the.tgz)
# 12154 no-emojis
# 12134 no-srcmap
# 11189 no-descriptions
# 11152 no-ellipsis
# 10617 no-this.makeMd
# 9569 no-extensions
# 9537 no-extensions
# 9410 no-mentions
# markdown-it: abandoned because no header anchors (and too big)
# 32322 107754 orig (wowee)
# 19619 21392 71540 less entities
# marked:
# 9253 29773 orig
# 9159 29633 no copyright (reverted)
# 9040 29057 no sanitize
# 8870 28631 no email-mangle
# so really not worth it, just drop the patch when that stops working
# easymde:
# 91836 orig
# 88635 no spellcheck
# 88392 no urlRE
# 85651 less bidi
# 82855 less mode meta
# d=/home/ed/dev/copyparty/scripts/deps-docker/; tar -cf ../x . && ssh root@$bip "cd $d && tar -xv >&2 && make >&2 && tar -cC ../../copyparty/web deps" <../x | (cd ../../copyparty/web/; cat > the.tgz; tar -xvf the.tgz)
# gzip -dkf ../dev/copyparty/copyparty/web/deps/deps/marked.full.js.gz && diff -NarU2 ../dev/copyparty/copyparty/web/deps/{,deps/}marked.full.js # gzip -dkf ../dev/copyparty/copyparty/web/deps/deps/marked.full.js.gz && diff -NarU2 ../dev/copyparty/copyparty/web/deps/{,deps/}marked.full.js

View File

@@ -35,7 +35,7 @@ add data-ln="%d" to most tags, %d is the source markdown line
+ // this.ln will be bumped by recursive calls into this func; + // this.ln will be bumped by recursive calls into this func;
+ // reset the count and rely on the outermost token's raw only + // reset the count and rely on the outermost token's raw only
+ ln = this.ln; + ln = this.ln;
+ +
// newline // newline
if (token = this.tokenizer.space(src)) { if (token = this.tokenizer.space(src)) {
src = src.substring(token.raw.length); src = src.substring(token.raw.length);
@@ -180,7 +180,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ // similar to tables, writing contents before the <ul> tag + // similar to tables, writing contents before the <ul> tag
+ // so update the tag attribute as we go + // so update the tag attribute as we go
+ // (assuming all list entries got tagged with a source-line, probably safe w) + // (assuming all list entries got tagged with a source-line, probably safe w)
+ body += this.renderer.tag_ln(item.tokens[0].ln).listitem(itemBody, task, checked); + body += this.renderer.tag_ln((item.tokens[0] || token).ln).listitem(itemBody, task, checked);
} }
- out += this.renderer.list(body, ordered, start); - out += this.renderer.list(body, ordered, start);
@@ -234,7 +234,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
- return '<pre><code>' - return '<pre><code>'
+ return '<pre' + this.ln + '><code>' + return '<pre' + this.ln + '><code>'
+ (escaped ? code : escape(code, true)) + (escaped ? code : escape(code, true))
+ '</code></pre>'; + '</code></pre>\n';
} }
- return '<pre><code class="' - return '<pre><code class="'

View File

@@ -1,7 +1,141 @@
diff -NarU1 marked-1.0.0-orig/src/defaults.js marked-1.0.0-edit/src/defaults.js diff --git a/src/Lexer.js b/src/Lexer.js
--- marked-1.0.0-orig/src/defaults.js 2020-04-21 01:03:48.000000000 +0000 --- a/src/Lexer.js
+++ marked-1.0.0-edit/src/defaults.js 2020-04-25 19:16:56.124621393 +0000 +++ b/src/Lexer.js
@@ -9,10 +9,6 @@ @@ -5,5 +5,5 @@ const { block, inline } = require('./rules.js');
/**
* smartypants text replacement
- */
+ *
function smartypants(text) {
return text
@@ -26,5 +26,5 @@ function smartypants(text) {
/**
* mangle email addresses
- */
+ *
function mangle(text) {
let out = '',
@@ -439,5 +439,5 @@ module.exports = class Lexer {
// autolink
- if (token = this.tokenizer.autolink(src, mangle)) {
+ if (token = this.tokenizer.autolink(src)) {
src = src.substring(token.raw.length);
tokens.push(token);
@@ -446,5 +446,5 @@ module.exports = class Lexer {
// url (gfm)
- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
+ if (!inLink && (token = this.tokenizer.url(src))) {
src = src.substring(token.raw.length);
tokens.push(token);
@@ -453,5 +453,5 @@ module.exports = class Lexer {
// text
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
src = src.substring(token.raw.length);
tokens.push(token);
diff --git a/src/Renderer.js b/src/Renderer.js
--- a/src/Renderer.js
+++ b/src/Renderer.js
@@ -140,5 +140,5 @@ module.exports = class Renderer {
link(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+ href = cleanUrl(this.options.baseUrl, href);
if (href === null) {
return text;
@@ -153,5 +153,5 @@ module.exports = class Renderer {
image(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+ href = cleanUrl(this.options.baseUrl, href);
if (href === null) {
return text;
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
--- a/src/Tokenizer.js
+++ b/src/Tokenizer.js
@@ -287,11 +287,8 @@ module.exports = class Tokenizer {
if (cap) {
return {
- type: this.options.sanitize
- ? 'paragraph'
- : 'html',
+ type: 'html',
raw: cap[0],
- pre: !this.options.sanitizer
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
+ text: cap[0]
};
}
@@ -421,15 +418,9 @@ module.exports = class Tokenizer {
return {
- type: this.options.sanitize
- ? 'text'
- : 'html',
+ type: 'html',
raw: cap[0],
inLink,
inRawBlock,
- text: this.options.sanitize
- ? (this.options.sanitizer
- ? this.options.sanitizer(cap[0])
- : escape(cap[0]))
- : cap[0]
+ text: cap[0]
};
}
@@ -550,10 +541,10 @@ module.exports = class Tokenizer {
}
- autolink(src, mangle) {
+ autolink(src) {
const cap = this.rules.inline.autolink.exec(src);
if (cap) {
let text, href;
if (cap[2] === '@') {
- text = escape(this.options.mangle ? mangle(cap[1]) : cap[1]);
+ text = escape(cap[1]);
href = 'mailto:' + text;
} else {
@@ -578,10 +569,10 @@ module.exports = class Tokenizer {
}
- url(src, mangle) {
+ url(src) {
let cap;
if (cap = this.rules.inline.url.exec(src)) {
let text, href;
if (cap[2] === '@') {
- text = escape(this.options.mangle ? mangle(cap[0]) : cap[0]);
+ text = escape(cap[0]);
href = 'mailto:' + text;
} else {
@@ -615,12 +606,12 @@ module.exports = class Tokenizer {
}
- inlineText(src, inRawBlock, smartypants) {
+ inlineText(src, inRawBlock) {
const cap = this.rules.inline.text.exec(src);
if (cap) {
let text;
if (inRawBlock) {
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
+ text = cap[0];
} else {
- text = escape(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
+ text = escape(cap[0]);
}
return {
diff --git a/src/defaults.js b/src/defaults.js
--- a/src/defaults.js
+++ b/src/defaults.js
@@ -8,12 +8,8 @@ function getDefaults() {
highlight: null,
langPrefix: 'language-', langPrefix: 'language-',
- mangle: true, - mangle: true,
pedantic: false, pedantic: false,
@@ -12,10 +146,12 @@ diff -NarU1 marked-1.0.0-orig/src/defaults.js marked-1.0.0-edit/src/defaults.js
smartLists: false, smartLists: false,
- smartypants: false, - smartypants: false,
tokenizer: null, tokenizer: null,
diff -NarU1 marked-1.0.0-orig/src/helpers.js marked-1.0.0-edit/src/helpers.js walkTokens: null,
--- marked-1.0.0-orig/src/helpers.js 2020-04-21 01:03:48.000000000 +0000 diff --git a/src/helpers.js b/src/helpers.js
+++ marked-1.0.0-edit/src/helpers.js 2020-04-25 18:58:43.001320210 +0000 --- a/src/helpers.js
@@ -65,16 +65,3 @@ +++ b/src/helpers.js
@@ -64,18 +64,5 @@ function edit(regex, opt) {
const nonWordAndColonTest = /[^\w:]/g;
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i; const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
-function cleanUrl(sanitize, base, href) { -function cleanUrl(sanitize, base, href) {
- if (sanitize) { - if (sanitize) {
@@ -33,7 +169,9 @@ diff -NarU1 marked-1.0.0-orig/src/helpers.js marked-1.0.0-edit/src/helpers.js
- } - }
+function cleanUrl(base, href) { +function cleanUrl(base, href) {
if (base && !originIndependentUrl.test(href)) { if (base && !originIndependentUrl.test(href)) {
@@ -224,8 +211,2 @@ href = resolveUrl(base, href);
@@ -223,10 +210,4 @@ function findClosingBracket(str, b) {
}
-function checkSanitizeDeprecation(opt) { -function checkSanitizeDeprecation(opt) {
- if (opt && opt.sanitize && !opt.silent) { - if (opt && opt.sanitize && !opt.silent) {
@@ -42,228 +180,161 @@ diff -NarU1 marked-1.0.0-orig/src/helpers.js marked-1.0.0-edit/src/helpers.js
-} -}
- -
module.exports = { module.exports = {
@@ -240,4 +221,3 @@ escape,
@@ -239,5 +220,4 @@ module.exports = {
splitCells,
rtrim, rtrim,
- findClosingBracket, - findClosingBracket,
- checkSanitizeDeprecation - checkSanitizeDeprecation
+ findClosingBracket + findClosingBracket
}; };
diff -NarU1 marked-1.0.0-orig/src/Lexer.js marked-1.0.0-edit/src/Lexer.js diff --git a/src/marked.js b/src/marked.js
--- marked-1.0.0-orig/src/Lexer.js 2020-04-21 01:03:48.000000000 +0000 --- a/src/marked.js
+++ marked-1.0.0-edit/src/Lexer.js 2020-04-25 22:46:54.107584066 +0000 +++ b/src/marked.js
@@ -6,3 +6,3 @@ @@ -7,5 +7,4 @@ const Slugger = require('./Slugger.js');
* smartypants text replacement const {
- */
+ *
function smartypants(text) {
@@ -27,3 +27,3 @@
* mangle email addresses
- */
+ *
function mangle(text) {
@@ -388,3 +388,3 @@
// autolink
- if (token = this.tokenizer.autolink(src, mangle)) {
+ if (token = this.tokenizer.autolink(src)) {
src = src.substring(token.raw.length);
@@ -395,3 +395,3 @@
// url (gfm)
- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
+ if (!inLink && (token = this.tokenizer.url(src))) {
src = src.substring(token.raw.length);
@@ -402,3 +402,3 @@
// text
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
src = src.substring(token.raw.length);
diff -NarU1 marked-1.0.0-orig/src/marked.js marked-1.0.0-edit/src/marked.js
--- marked-1.0.0-orig/src/marked.js 2020-04-21 01:03:48.000000000 +0000
+++ marked-1.0.0-edit/src/marked.js 2020-04-25 22:42:55.140924439 +0000
@@ -8,3 +8,2 @@
merge, merge,
- checkSanitizeDeprecation, - checkSanitizeDeprecation,
escape escape
@@ -37,3 +36,2 @@ } = require('./helpers.js');
opt = merge({}, marked.defaults, opt || {}); @@ -35,5 +34,4 @@ function marked(src, opt, callback) {
- checkSanitizeDeprecation(opt);
const highlight = opt.highlight; opt = merge({}, marked.defaults, opt || {});
@@ -101,6 +99,5 @@ - checkSanitizeDeprecation(opt);
opt = merge({}, marked.defaults, opt || {});
- checkSanitizeDeprecation(opt); if (callback) {
return Parser.parse(Lexer.lex(src, opt), opt); @@ -108,5 +106,5 @@ function marked(src, opt, callback) {
return Parser.parse(tokens, opt);
} catch (e) { } catch (e) {
- e.message += '\nPlease report this to https://github.com/markedjs/marked.'; - e.message += '\nPlease report this to https://github.com/markedjs/marked.';
+ e.message += '\nmake issue @ https://github.com/9001/copyparty'; + e.message += '\nmake issue @ https://github.com/9001/copyparty';
if ((opt || marked.defaults).silent) { if (opt.silent) {
diff -NarU1 marked-1.0.0-orig/src/Renderer.js marked-1.0.0-edit/src/Renderer.js return '<p>An error occurred:</p><pre>'
--- marked-1.0.0-orig/src/Renderer.js 2020-04-21 01:03:48.000000000 +0000 diff --git a/test/bench.js b/test/bench.js
+++ marked-1.0.0-edit/src/Renderer.js 2020-04-25 18:59:15.091319265 +0000 --- a/test/bench.js
@@ -134,3 +134,3 @@ +++ b/test/bench.js
link(href, title, text) { @@ -33,5 +33,4 @@ async function runBench(options) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href); breaks: false,
+ href = cleanUrl(this.options.baseUrl, href);
if (href === null) {
@@ -147,3 +147,3 @@
image(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+ href = cleanUrl(this.options.baseUrl, href);
if (href === null) {
diff -NarU1 marked-1.0.0-orig/src/Tokenizer.js marked-1.0.0-edit/src/Tokenizer.js
--- marked-1.0.0-orig/src/Tokenizer.js 2020-04-21 01:03:48.000000000 +0000
+++ marked-1.0.0-edit/src/Tokenizer.js 2020-04-25 22:47:07.610917004 +0000
@@ -256,9 +256,6 @@
return {
- type: this.options.sanitize
- ? 'paragraph'
- : 'html',
- raw: cap[0],
- pre: !this.options.sanitizer
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
+ type: 'html',
+ raw: cap[0],
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
+ text: cap[0]
};
@@ -382,5 +379,3 @@
return {
- type: this.options.sanitize
- ? 'text'
- : 'html',
+ type: 'html',
raw: cap[0],
@@ -388,7 +383,3 @@
inRawBlock,
- text: this.options.sanitize
- ? (this.options.sanitizer
- ? this.options.sanitizer(cap[0])
- : escape(cap[0]))
- : cap[0]
+ text: cap[0]
};
@@ -504,3 +495,3 @@
- autolink(src, mangle) {
+ autolink(src) {
const cap = this.rules.inline.autolink.exec(src);
@@ -509,3 +500,3 @@
if (cap[2] === '@') {
- text = escape(this.options.mangle ? mangle(cap[1]) : cap[1]);
+ text = escape(cap[1]);
href = 'mailto:' + text;
@@ -532,3 +523,3 @@
- url(src, mangle) {
+ url(src) {
let cap;
@@ -537,3 +528,3 @@
if (cap[2] === '@') {
- text = escape(this.options.mangle ? mangle(cap[0]) : cap[0]);
+ text = escape(cap[0]);
href = 'mailto:' + text;
@@ -569,3 +560,3 @@
- inlineText(src, inRawBlock, smartypants) {
+ inlineText(src, inRawBlock) {
const cap = this.rules.inline.text.exec(src);
@@ -574,5 +565,5 @@
if (inRawBlock) {
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
+ text = cap[0];
} else {
- text = escape(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
+ text = escape(cap[0]);
}
diff -NarU1 marked-1.0.0-orig/test/bench.js marked-1.0.0-edit/test/bench.js
--- marked-1.0.0-orig/test/bench.js 2020-04-21 01:03:48.000000000 +0000
+++ marked-1.0.0-edit/test/bench.js 2020-04-25 19:02:27.227980287 +0000
@@ -34,3 +34,2 @@
pedantic: false, pedantic: false,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
@@ -46,3 +45,2 @@ });
@@ -45,5 +44,4 @@ async function runBench(options) {
breaks: false,
pedantic: false, pedantic: false,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
@@ -59,3 +57,2 @@ });
@@ -58,5 +56,4 @@ async function runBench(options) {
breaks: false,
pedantic: false, pedantic: false,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
@@ -71,3 +68,2 @@ });
@@ -70,5 +67,4 @@ async function runBench(options) {
breaks: false,
pedantic: false, pedantic: false,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
@@ -84,3 +80,2 @@ });
@@ -83,5 +79,4 @@ async function runBench(options) {
breaks: false,
pedantic: true, pedantic: true,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
@@ -96,3 +91,2 @@ });
@@ -95,5 +90,4 @@ async function runBench(options) {
breaks: false,
pedantic: true, pedantic: true,
- sanitize: false, - sanitize: false,
smartLists: false smartLists: false
diff -NarU1 marked-1.0.0-orig/test/specs/run-spec.js marked-1.0.0-edit/test/specs/run-spec.js });
--- marked-1.0.0-orig/test/specs/run-spec.js 2020-04-21 01:03:48.000000000 +0000 diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
+++ marked-1.0.0-edit/test/specs/run-spec.js 2020-04-25 19:05:24.321308408 +0000 --- a/test/specs/run-spec.js
@@ -21,6 +21,2 @@ +++ b/test/specs/run-spec.js
@@ -22,8 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
} }
- if (spec.options.sanitizer) { - if (spec.options.sanitizer) {
- // eslint-disable-next-line no-eval - // eslint-disable-next-line no-eval
- spec.options.sanitizer = eval(spec.options.sanitizer); - spec.options.sanitizer = eval(spec.options.sanitizer);
- } - }
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => { (spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
@@ -49,2 +45 @@ @@ -53,3 +49,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
runSpecs('New', './new');
runSpecs('ReDOS', './redos'); runSpecs('ReDOS', './redos');
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning -runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
diff -NarU1 marked-1.0.0-orig/test/unit/Lexer-spec.js marked-1.0.0-edit/test/unit/Lexer-spec.js diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
--- marked-1.0.0-orig/test/unit/Lexer-spec.js 2020-04-21 01:03:48.000000000 +0000 --- a/test/unit/Lexer-spec.js
+++ marked-1.0.0-edit/test/unit/Lexer-spec.js 2020-04-25 22:47:27.170916427 +0000 +++ b/test/unit/Lexer-spec.js
@@ -464,3 +464,3 @@ @@ -465,5 +465,5 @@ a | b
});
- it('sanitize', () => { - it('sanitize', () => {
+ /*it('sanitize', () => { + /*it('sanitize', () => {
expectTokens({ expectTokens({
@@ -482,3 +482,3 @@ md: '<div>html</div>',
@@ -483,5 +483,5 @@ a | b
]
}); });
- }); - });
+ });*/ + });*/
}); });
@@ -586,3 +586,3 @@
@@ -587,5 +587,5 @@ a | b
});
- it('html sanitize', () => { - it('html sanitize', () => {
+ /*it('html sanitize', () => { + /*it('html sanitize', () => {
expectInlineTokens({ expectInlineTokens({
@@ -596,3 +596,3 @@ md: '<div>html</div>',
@@ -597,5 +597,5 @@ a | b
]
}); });
- }); - });
+ });*/ + });*/
@@ -825,3 +825,3 @@ it('link', () => {
@@ -909,5 +909,5 @@ a | b
});
- it('autolink mangle email', () => { - it('autolink mangle email', () => {
+ /*it('autolink mangle email', () => { + /*it('autolink mangle email', () => {
expectInlineTokens({ expectInlineTokens({
@@ -845,3 +845,3 @@ md: '<test@example.com>',
@@ -929,5 +929,5 @@ a | b
]
}); });
- }); - });
+ });*/ + });*/
@@ -882,3 +882,3 @@ it('url', () => {
@@ -966,5 +966,5 @@ a | b
});
- it('url mangle email', () => { - it('url mangle email', () => {
+ /*it('url mangle email', () => { + /*it('url mangle email', () => {
expectInlineTokens({ expectInlineTokens({
@@ -902,3 +902,3 @@ md: 'test@example.com',
@@ -986,5 +986,5 @@ a | b
]
}); });
- }); - });
+ });*/ + });*/
}); });
@@ -918,3 +918,3 @@
@@ -1002,5 +1002,5 @@ a | b
});
- describe('smartypants', () => { - describe('smartypants', () => {
+ /*describe('smartypants', () => { + /*describe('smartypants', () => {
it('single quotes', () => { it('single quotes', () => {
@@ -988,3 +988,3 @@ expectInlineTokens({
@@ -1072,5 +1072,5 @@ a | b
});
}); });
- }); - });
+ });*/ + });*/
}); });
});

View File

@@ -26,3 +26,6 @@ awk '/:before .content:"\\/ {sub(/[^"]+"./,""); sub(/".*/,""); print}' </z/dist/
# and finally create a woff with just our icons # and finally create a woff with just our icons
pyftsubset "$orig_woff" --unicodes-file=/z/icon.list --no-ignore-missing-unicodes --flavor=woff --with-zopfli --output-file=/z/dist/no-pk/mini-fa.woff --verbose pyftsubset "$orig_woff" --unicodes-file=/z/icon.list --no-ignore-missing-unicodes --flavor=woff --with-zopfli --output-file=/z/dist/no-pk/mini-fa.woff --verbose
# scp is easier, just want basic latin
pyftsubset /z/scp.woff2 --unicodes="20-7e,ab,b7,bb,2022" --no-ignore-missing-unicodes --flavor=woff2 --output-file=/z/dist/no-pk/scp.woff2 --verbose

100
scripts/fusefuzz.py Executable file
View File

@@ -0,0 +1,100 @@
#!/usr/bin/env python3
import os
import time
"""
td=/dev/shm/; [ -e $td ] || td=$HOME; mkdir -p $td/fusefuzz/{r,v}
PYTHONPATH=.. python3 -m copyparty -v $td/fusefuzz/r::r -i 127.0.0.1
../bin/copyparty-fuse.py http://127.0.0.1:3923/ $td/fusefuzz/v -cf 2 -cd 0.5
(d="$PWD"; cd $td/fusefuzz && "$d"/fusefuzz.py)
"""
def chk(fsz, rsz, ofs0, shift, ofs, rf, vf):
    """Read `rsz` bytes at offset `ofs` from both the real file (`rf`) and the
    fuse-mounted view (`vf`) and verify they match.

    Returns the two buffers on success; on mismatch, dumps them to `buf.0`
    and `buf.1` for offline diffing and raises.
    """
    # only seek when we are not already positioned at ofs;
    # both handles are kept in lockstep
    if rf.tell() != ofs:
        rf.seek(ofs)
        vf.seek(ofs)

    real_buf = rf.read(rsz)
    view_buf = vf.read(rsz)
    print(f"fsz {fsz} rsz {rsz} ofs {ofs0} shift {shift} ofs {ofs} = {len(real_buf)}")

    if real_buf == view_buf:
        return real_buf, view_buf

    # mismatch: persist both sides for inspection, then abort the fuzz run
    for idx, buf in enumerate((real_buf, view_buf)):
        with open("buf." + str(idx), "wb") as f:
            f.write(buf)

    raise Exception(f"{len(real_buf)} != {len(view_buf)}")
def main():
    # fuzz driver: writes reference files under r/ (served by copyparty)
    # and re-reads them through the fuse mount under v/, cross-checking
    # every read via chk().  Must be run from inside $td/fusefuzz with
    # the server and copyparty-fuse mount already up (see module docstring).
    v = "v"
    # a few tiny fixed files; presumably used to churn the fuse client's
    # file-handle / cache state between runs -- TODO confirm
    for n in range(5):
        with open(f"r/{n}", "wb") as f:
            f.write(b"h" * n)
    rand = os.urandom(7919)  # prime
    # sweep file sizes around 2 MiB so every fsz is coprime-ish with the
    # 7919-byte random pattern (repeating pattern, truncated to fsz)
    for fsz in range(1024 * 1024 * 2 - 3, 1024 * 1024 * 2 + 3):
        with open("r/f", "wb", fsz) as f:
            f.write((rand * int(fsz / len(rand) + 1))[:fsz])
        # sweep read sizes around 64 KiB (the typical fuse read block)
        for rsz in range(64 * 1024 - 2, 64 * 1024 + 2):
            # offsets clustered at the start and at EOF-boundary positions,
            # where off-by-one bugs in range handling are most likely
            ofslist = [0, 1, 2]
            for n in range(3):
                ofslist.append(fsz - n)
                ofslist.append(fsz - (rsz * 1 + n))
                ofslist.append(fsz - (rsz * 2 + n))
            for ofs0 in ofslist:
                # shift perturbs each successive read position by -3..2 bytes
                for shift in range(-3, 3):
                    print(f"fsz {fsz} rsz {rsz} ofs {ofs0} shift {shift}")
                    ofs = ofs0
                    if ofs < 0 or ofs >= fsz:
                        continue
                    # read the small files first; looks like this evicts /
                    # cycles cached handles in the fuse client -- verify
                    for n in range(1, 3):
                        with open(f"{v}/{n}", "rb") as f:
                            f.read()
                    prev_ofs = -99
                    with open("r/f", "rb", rsz) as rf:
                        with open(f"{v}/f", "rb", rsz) as vf:
                            # forward scan: advance by len(read) + shift until
                            # EOF, out-of-range, or no progress (shift == 0 at EOF)
                            while True:
                                ofs += shift
                                if ofs < 0 or ofs > fsz or ofs == prev_ofs:
                                    break
                                prev_ofs = ofs
                                rb, vb = chk(fsz, rsz, ofs0, shift, ofs, rf, vf)
                                if not rb:
                                    break
                                ofs += len(rb)
                    # churn the cache again, then do two backward-stepping
                    # reads with freshly-opened handles
                    for n in range(1, 3):
                        with open(f"{v}/{n}", "rb") as f:
                            f.read()
                    with open("r/f", "rb", rsz) as rf:
                        with open(f"{v}/f", "rb", rsz) as vf:
                            for n in range(2):
                                ofs += shift
                                if ofs < 0 or ofs > fsz:
                                    break
                                rb, vb = chk(fsz, rsz, ofs0, shift, ofs, rf, vf)
                                ofs -= rsz
        # bumping fsz, sleep away the dentry cache in cppf
        time.sleep(1)


if __name__ == "__main__":
    main()

View File

@@ -3,12 +3,15 @@ set -e
echo echo
# osx support # osx support
command -v gtar >/dev/null && # port install gnutar findutils gsed coreutils
command -v gfind >/dev/null && { gtar=$(command -v gtar || command -v gnutar) || true
tar() { gtar "$@"; } [ ! -z "$gtar" ] && command -v gfind >/dev/null && {
tar() { $gtar "$@"; }
sed() { gsed "$@"; } sed() { gsed "$@"; }
find() { gfind "$@"; } find() { gfind "$@"; }
sort() { gsort "$@"; } sort() { gsort "$@"; }
command -v grealpath >/dev/null &&
realpath() { grealpath "$@"; }
} }
which md5sum 2>/dev/null >/dev/null && which md5sum 2>/dev/null >/dev/null &&

View File

@@ -13,14 +13,21 @@ echo
# #
# `no-ogv` saves ~500k by removing the opus/vorbis audio codecs # `no-ogv` saves ~500k by removing the opus/vorbis audio codecs
# (only affects apple devices; everything else has native support) # (only affects apple devices; everything else has native support)
#
# `no-cm` saves ~90k by removing easymde/codemirror
# (the fancy markdown editor)
command -v gtar >/dev/null && # port install gnutar findutils gsed coreutils
command -v gfind >/dev/null && { gtar=$(command -v gtar || command -v gnutar) || true
tar() { gtar "$@"; } [ ! -z "$gtar" ] && command -v gfind >/dev/null && {
tar() { $gtar "$@"; }
sed() { gsed "$@"; } sed() { gsed "$@"; }
find() { gfind "$@"; } find() { gfind "$@"; }
sort() { gsort "$@"; } sort() { gsort "$@"; }
unexpand() { gunexpand "$@"; }
command -v grealpath >/dev/null &&
realpath() { grealpath "$@"; }
} }
[ -e copyparty/__main__.py ] || cd .. [ -e copyparty/__main__.py ] || cd ..
@@ -35,9 +42,15 @@ while [ ! -z "$1" ]; do
[ "$1" = clean ] && clean=1 && shift && continue [ "$1" = clean ] && clean=1 && shift && continue
[ "$1" = re ] && repack=1 && shift && continue [ "$1" = re ] && repack=1 && shift && continue
[ "$1" = no-ogv ] && no_ogv=1 && shift && continue [ "$1" = no-ogv ] && no_ogv=1 && shift && continue
[ "$1" = no-cm ] && no_cm=1 && shift && continue
break break
done done
tmv() {
touch -r "$1" t
mv t "$1"
}
rm -rf sfx/* rm -rf sfx/*
mkdir -p sfx build mkdir -p sfx build
cd sfx cd sfx
@@ -62,7 +75,15 @@ cd sfx
tar -zxf $f tar -zxf $f
mv Jinja2-*/jinja2 . mv Jinja2-*/jinja2 .
rm -rf Jinja2-* jinja2/testsuite rm -rf Jinja2-* jinja2/testsuite jinja2/_markupsafe/tests.py jinja2/_stringdefs.py
f=jinja2/lexer.py
sed -r '/.*föö.*/ raise SyntaxError/' <$f >t
tmv $f
f=jinja2/_markupsafe/_constants.py
awk '!/: [0-9]+,?$/ || /(amp|gt|lt|quot|apos|nbsp).:/' <$f >t
tmv $f
# msys2 tar is bad, make the best of it # msys2 tar is bad, make the best of it
echo collecting source echo collecting source
@@ -76,8 +97,39 @@ cd sfx
rm -f ../tar rm -f ../tar
} }
ver="$(awk '/^VERSION *= \(/ { ver=
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < ../copyparty/__version__.py)" git describe --tags >/dev/null 2>/dev/null && {
git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//; s/-g?/./g')";
t_ver=
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+$' && {
# short format (exact version number)
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g')";
}
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+-[0-9]+-g[0-9a-f]+$' && {
# long format (unreleased commit)
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g; s/(.*) (.*)/\1 "\2"/')"
}
[ -z "$t_ver" ] && {
printf 'unexpected git version format: [%s]\n' "$git_ver"
exit 1
}
dt="$(git log -1 --format=%cd --date=format:'%Y,%m,%d' | sed -E 's/,0?/, /g')"
printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt"
sed -ri '
s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/;
s/^(S_VERSION =)(.*)/#\1\2\n\1 "'"$ver"'"/;
s/^(BUILD_DT =)(.*)/#\1\2\n\1 ('"$dt"')/;
' copyparty/__version__.py
}
[ -z "$ver" ] &&
ver="$(awk '/^VERSION *= \(/ {
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"
ts=$(date -u +%s) ts=$(date -u +%s)
hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx) hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)
@@ -98,11 +150,28 @@ rm -f copyparty/web/deps/*.full.*
# it's fine dw # it's fine dw
grep -lE '\.full\.(js|css)' copyparty/web/* | grep -lE '\.full\.(js|css)' copyparty/web/* |
while IFS= read -r x; do sed -ri 's/\.full\.(js|css)/.\1/g' "$x"; done while IFS= read -r x; do
sed -r 's/\.full\.(js|css)/.\1/g' <"$x" >t
tmv "$x"
done
[ $no_ogv ] && [ $no_ogv ] &&
rm -rf copyparty/web/deps/{dynamicaudio,ogv}* rm -rf copyparty/web/deps/{dynamicaudio,ogv}*
[ $no_cm ] && {
rm -rf copyparty/web/mde.* copyparty/web/deps/easymde*
echo h > copyparty/web/mde.html
f=copyparty/web/md.html
sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f"
}
# up2k goes from 28k to 22k laff
echo entabbening
find | grep -E '\.(js|css|html|py)$' | while IFS= read -r f; do
unexpand -t 4 --first-only <"$f" >t
tmv "$f"
done
echo creating tar echo creating tar
args=(--owner=1000 --group=1000) args=(--owner=1000 --group=1000)
[ "$OSTYPE" = msys ] && [ "$OSTYPE" = msys ] &&
@@ -132,19 +201,5 @@ printf "done:\n"
printf " %s\n" "$(realpath $sfx_out)."{sh,py} printf " %s\n" "$(realpath $sfx_out)."{sh,py}
# rm -rf * # rm -rf *
# -rw-r--r-- 1 ed ed 811271 May 5 14:35 tar.bz2 # tar -tvf ../sfx/tar | sed -r 's/(.* ....-..-.. ..:.. )(.*)/\2 `` \1/' | sort | sed -r 's/(.*) `` (.*)/\2 \1/'| less
# -rw-r--r-- 1 ed ed 732016 May 5 14:35 tar.xz # for n in {1..9}; do tar -tf tar | grep -vE '/$' | sed -r 's/(.*)\.(.*)/\2.\1/' | sort | sed -r 's/([^\.]+)\.(.*)/\2.\1/' | tar -cT- | bzip2 -c$n | wc -c; done
# -rwxr-xr-x 1 ed ed 830425 May 5 14:35 copyparty-sfx.py*
# -rwxr-xr-x 1 ed ed 734088 May 5 14:35 copyparty-sfx.sh*
# -rwxr-xr-x 1 ed ed 799690 May 5 14:45 copyparty-sfx.py*
# -rwxr-xr-x 1 ed ed 735004 May 5 14:45 copyparty-sfx.sh*
# time pigz -11 -J 34 -I 5730 < tar > tar.gz.5730
# real 8m50.622s
# user 33m9.821s
# -rw-r--r-- 1 ed ed 1136640 May 5 14:50 tar
# -rw-r--r-- 1 ed ed 296334 May 5 14:50 tar.bz2
# -rw-r--r-- 1 ed ed 324705 May 5 15:01 tar.gz.5730
# -rw-r--r-- 1 ed ed 257208 May 5 14:50 tar.xz

View File

@@ -2,12 +2,16 @@
set -e set -e
echo echo
command -v gtar >/dev/null && # osx support
command -v gfind >/dev/null && { # port install gnutar findutils gsed coreutils
tar() { gtar "$@"; } gtar=$(command -v gtar || command -v gnutar) || true
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
tar() { $gtar "$@"; }
sed() { gsed "$@"; } sed() { gsed "$@"; }
find() { gfind "$@"; } find() { gfind "$@"; }
sort() { gsort "$@"; } sort() { gsort "$@"; }
command -v grealpath >/dev/null &&
realpath() { grealpath "$@"; }
} }
which md5sum 2>/dev/null >/dev/null && which md5sum 2>/dev/null >/dev/null &&
@@ -16,15 +20,15 @@ which md5sum 2>/dev/null >/dev/null &&
ver="$1" ver="$1"
[[ "x$ver" == x ]] && [ "x$ver" = x ] &&
{ {
echo "need argument 1: version" echo "need argument 1: version"
echo echo
exit 1 exit 1
} }
[[ -e copyparty/__main__.py ]] || cd .. [ -e copyparty/__main__.py ] || cd ..
[[ -e copyparty/__main__.py ]] || [ -e copyparty/__main__.py ] ||
{ {
echo "run me from within the project root folder" echo "run me from within the project root folder"
echo echo
@@ -35,8 +39,8 @@ mkdir -p dist
zip_path="$(pwd)/dist/copyparty-$ver.zip" zip_path="$(pwd)/dist/copyparty-$ver.zip"
tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz" tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz"
[[ -e "$zip_path" ]] || [ -e "$zip_path" ] ||
[[ -e "$tgz_path" ]] && [ -e "$tgz_path" ] &&
{ {
echo "found existing archives for this version" echo "found existing archives for this version"
echo " $zip_path" echo " $zip_path"

View File

@@ -2,7 +2,7 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import re, os, sys, stat, time, shutil, tarfile, hashlib, platform, tempfile import re, os, sys, time, shutil, signal, tarfile, hashlib, platform, tempfile
import subprocess as sp import subprocess as sp
""" """
@@ -29,6 +29,7 @@ STAMP = None
PY2 = sys.version_info[0] == 2 PY2 = sys.version_info[0] == 2
sys.dont_write_bytecode = True sys.dont_write_bytecode = True
me = os.path.abspath(os.path.realpath(__file__)) me = os.path.abspath(os.path.realpath(__file__))
cpp = None
def eprint(*args, **kwargs): def eprint(*args, **kwargs):
@@ -191,6 +192,16 @@ def makesfx(tar_src, ver, ts):
# skip 0 # skip 0
def u8(gen):
try:
for s in gen:
yield s.decode("utf-8", "ignore")
except:
yield s
for s in gen:
yield s
def get_py_win(ret): def get_py_win(ret):
tops = [] tops = []
p = str(os.getenv("LocalAppdata")) p = str(os.getenv("LocalAppdata"))
@@ -216,11 +227,11 @@ def get_py_win(ret):
# $WIRESHARK_SLOGAN # $WIRESHARK_SLOGAN
for top in tops: for top in tops:
try: try:
for name1 in sorted(os.listdir(top), reverse=True): for name1 in u8(sorted(os.listdir(top), reverse=True)):
if name1.lower().startswith("python"): if name1.lower().startswith("python"):
path1 = os.path.join(top, name1) path1 = os.path.join(top, name1)
try: try:
for name2 in os.listdir(path1): for name2 in u8(os.listdir(path1)):
if name2.lower() == "python.exe": if name2.lower() == "python.exe":
path2 = os.path.join(path1, name2) path2 = os.path.join(path1, name2)
ret[path2.lower()] = path2 ret[path2.lower()] = path2
@@ -237,7 +248,7 @@ def get_py_nix(ret):
next next
try: try:
for fn in os.listdir(bindir): for fn in u8(os.listdir(bindir)):
if ptn.match(fn): if ptn.match(fn):
fn = os.path.join(bindir, fn) fn = os.path.join(bindir, fn)
ret[fn.lower()] = fn ret[fn.lower()] = fn
@@ -260,7 +271,7 @@ def read_py(binp):
def get_pys(): def get_pys():
ver, chk = read_py(sys.executable) ver, chk = read_py(sys.executable)
if chk: if chk or PY2:
return [[chk, ver, sys.executable]] return [[chk, ver, sys.executable]]
hits = {sys.executable.lower(): sys.executable} hits = {sys.executable.lower(): sys.executable}
@@ -295,17 +306,19 @@ def hashfile(fn):
def unpack(): def unpack():
"""unpacks the tar yielded by `data`""" """unpacks the tar yielded by `data`"""
name = "pe-copyparty" name = "pe-copyparty"
tag = "v" + str(STAMP)
withpid = "{}.{}".format(name, os.getpid()) withpid = "{}.{}".format(name, os.getpid())
top = tempfile.gettempdir() top = tempfile.gettempdir()
final = os.path.join(top, name) final = os.path.join(top, name)
mine = os.path.join(top, withpid) mine = os.path.join(top, withpid)
tar = os.path.join(mine, "tar") tar = os.path.join(mine, "tar")
tag_mine = os.path.join(mine, "v" + str(STAMP))
tag_final = os.path.join(final, "v" + str(STAMP))
if os.path.exists(tag_final): try:
msg("found early") if tag in os.listdir(final):
return final msg("found early")
return final
except:
pass
nwrite = 0 nwrite = 0
os.mkdir(mine) os.mkdir(mine)
@@ -328,12 +341,15 @@ def unpack():
os.remove(tar) os.remove(tar)
with open(tag_mine, "wb") as f: with open(os.path.join(mine, tag), "wb") as f:
f.write(b"h\n") f.write(b"h\n")
if os.path.exists(tag_final): try:
msg("found late") if tag in os.listdir(final):
return final msg("found late")
return final
except:
pass
try: try:
if os.path.islink(final): if os.path.islink(final):
@@ -352,7 +368,7 @@ def unpack():
msg("reloc fail,", mine) msg("reloc fail,", mine)
return mine return mine
for fn in os.listdir(top): for fn in u8(os.listdir(top)):
if fn.startswith(name) and fn not in [name, withpid]: if fn.startswith(name) and fn not in [name, withpid]:
try: try:
old = os.path.join(top, fn) old = os.path.join(top, fn)
@@ -418,17 +434,35 @@ def get_payload():
def confirm(): def confirm():
msg() msg()
msg("*** hit enter to exit ***") msg("*** hit enter to exit ***")
raw_input() if PY2 else input() try:
raw_input() if PY2 else input()
except:
pass
def run(tmp, py): def run(tmp, py):
global cpp
msg("OK") msg("OK")
msg("will use:", py) msg("will use:", py)
msg("bound to:", tmp) msg("bound to:", tmp)
# "systemd-tmpfiles-clean.timer"?? HOW do you even come up with this shit
try:
import fcntl
fd = os.open(tmp, os.O_RDONLY)
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
tmp = os.readlink(tmp) # can't flock a symlink, even with O_NOFOLLOW
except:
pass
fp_py = os.path.join(tmp, "py") fp_py = os.path.join(tmp, "py")
with open(fp_py, "wb") as f: try:
f.write(py.encode("utf-8") + b"\n") with open(fp_py, "wb") as f:
f.write(py.encode("utf-8") + b"\n")
except:
pass
# avoid loading ./copyparty.py # avoid loading ./copyparty.py
cmd = [ cmd = [
@@ -440,16 +474,21 @@ def run(tmp, py):
] + list(sys.argv[1:]) ] + list(sys.argv[1:])
msg("\n", cmd, "\n") msg("\n", cmd, "\n")
p = sp.Popen(str(x) for x in cmd) cpp = sp.Popen(str(x) for x in cmd)
try: try:
p.wait() cpp.wait()
except: except:
p.wait() cpp.wait()
if p.returncode != 0: if cpp.returncode != 0:
confirm() confirm()
sys.exit(p.returncode) sys.exit(cpp.returncode)
def bye(sig, frame):
if cpp is not None:
cpp.terminate()
def main(): def main():
@@ -484,6 +523,8 @@ def main():
# skip 0 # skip 0
signal.signal(signal.SIGTERM, bye)
tmp = unpack() tmp = unpack()
fp_py = os.path.join(tmp, "py") fp_py = os.path.join(tmp, "py")
if os.path.exists(fp_py): if os.path.exists(fp_py):

View File

@@ -32,8 +32,12 @@ dir="$(
# detect available pythons # detect available pythons
(IFS=:; for d in $PATH; do (IFS=:; for d in $PATH; do
printf '%s\n' "$d"/python* "$d"/pypy* | tac; printf '%s\n' "$d"/python* "$d"/pypy*;
done) | grep -E '(python|pypy)[0-9\.-]*$' > $dir/pys || true done) |
(sed -E 's/(.*\/[^/0-9]+)([0-9]?[^/]*)$/\2 \1/' || cat) |
(sort -nr || cat) |
(sed -E 's/([^ ]*) (.*)/\2\1/' || cat) |
grep -E '/(python|pypy)[0-9\.-]*$' >$dir/pys || true
# see if we made a choice before # see if we made a choice before
[ -z "$pybin" ] && pybin="$(cat $dir/py 2>/dev/null || true)" [ -z "$pybin" ] && pybin="$(cat $dir/py 2>/dev/null || true)"

164
scripts/speedtest-fs.py Normal file
View File

@@ -0,0 +1,164 @@
#!/usr/bin/env python
import os
import sys
import stat
import time
import signal
import traceback
import threading
from queue import Queue
"""speedtest-fs: filesystem performance estimate"""
__author__ = "ed <copyparty@ocv.me>"
__copyright__ = 2020
__license__ = "MIT"
__url__ = "https://github.com/9001/copyparty/"
def get_spd(nbyte, nsec):
    """Render a byte count and duration as 'MB  sec  MB/s' (3 decimals each)."""
    if not nsec:
        # zero (or missing) duration: avoid division by zero
        return "0.000 MB 0.000 sec 0.000 MB/s"

    megabytes = nbyte / (1024 * 1024.0)
    return f"{megabytes:.3f} MB {nsec:.3f} sec {megabytes / nsec:.3f} MB/s"
class Inf(object):
    """Shared statistics collector for the reader threads.

    Workers push progress lines (msg), per-file results (report),
    tracebacks (err) and a completion marker (done); a daemon thread
    drains the progress buffer and prints the newest entries every 20 ms.
    """

    def __init__(self, t0):
        self.t0 = t0
        self.msgs = []
        self.errors = []
        self.reports = []
        self.n_byte = 0
        self.n_sec = 0
        self.n_done = 0
        self.mtx_msgs = threading.Lock()
        self.mtx_reports = threading.Lock()

        printer = threading.Thread(target=self.print_msgs)
        printer.daemon = True  # must not keep the interpreter alive at exit
        printer.start()

    def msg(self, fn, n_read):
        # queue one progress line for the printer thread
        with self.mtx_msgs:
            self.msgs.append(f"{fn} {n_read}")

    def err(self, fn):
        # record the active exception together with the offending path
        with self.mtx_reports:
            self.errors.append(f"{fn}\n{traceback.format_exc()}")

    def print_msgs(self):
        # daemon loop: swap out the queued lines, show at most the last 64
        while True:
            time.sleep(0.02)
            with self.mtx_msgs:
                pending, self.msgs = self.msgs, []

            if not pending:
                continue

            lines = [f"{get_spd(self.n_byte, self.n_sec)} {m}" for m in pending[-64:]]
            print("\n".join(lines))

    def report(self, fn, n_byte, n_sec):
        # fold one finished file into the running totals
        with self.mtx_reports:
            self.reports.append([n_byte, n_sec, fn])
            self.n_byte += n_byte
            self.n_sec += n_sec

    def done(self):
        # one worker thread has drained the queue and is exiting
        with self.mtx_reports:
            self.n_done += 1
def get_files(dir_path):
    """Recursively yield the path of every regular file below dir_path.

    NOTE(review): os.stat follows symlinks, so a symlink cycle would
    recurse forever — acceptable for a benchmark tool, but be aware.
    """
    for entry in os.listdir(dir_path):
        full = os.path.join(dir_path, entry)
        mode = os.stat(full).st_mode
        if stat.S_ISREG(mode):
            yield full
        elif stat.S_ISDIR(mode):
            yield from get_files(full)
def worker(q, inf, read_sz):
    """Reader thread: pull filenames off q until a falsy sentinel arrives.

    Each file is read to EOF in read_sz chunks; progress goes to inf.msg,
    the final byte count and elapsed time to inf.report, and any failure
    (open or read) to inf.err. Signals inf.done() before returning.
    """
    while True:
        fn = q.get()
        if not fn:
            break

        total = 0
        try:
            t_start = time.time()
            with open(fn, "rb") as fh:
                for buf in iter(lambda: fh.read(read_sz), b""):
                    total += len(buf)
                    inf.msg(fn, total)

            inf.report(fn, total, time.time() - t_start)
        except:
            # deliberately broad: any per-file failure is logged, not fatal
            inf.err(fn)

    inf.done()
def sighandler(signo, frame):
    """SIGINT handler: hard-exit immediately.

    os._exit skips cleanup/atexit so ^C is instant even while the
    daemon reader threads are blocked in I/O.
    """
    os._exit(0)
def main():
    """Entry point: read every file under the target directory (argv[1],
    default '.') with a pool of reader threads, then print per-file and
    aggregate throughput."""
    # hard-exit on ^C so blocked daemon threads can't keep us alive
    signal.signal(signal.SIGINT, sighandler)

    root = "."
    if len(sys.argv) > 1:
        root = sys.argv[1]

    t0 = time.time()
    q = Queue(256)  # bounded so the directory walk can't run far ahead of the readers
    inf = Inf(t0)

    num_threads = 8
    read_sz = 32 * 1024  # bytes per read() call

    for _ in range(num_threads):
        thr = threading.Thread(target=worker, args=(q, inf, read_sz,))
        thr.daemon = True
        thr.start()

    for fn in get_files(root):
        q.put(fn)

    # one falsy sentinel per worker tells it to stop
    for _ in range(num_threads):
        q.put(None)

    # poll until every worker has called inf.done()
    while inf.n_done < num_threads:
        time.sleep(0.1)

    t2 = time.time()
    print("\n")

    # reports are [n_byte, n_sec, fn]; sorting puts the biggest files last,
    # and only the last 64 entries are shown
    log = inf.reports
    log.sort()
    for nbyte, nsec, fn in log[-64:]:
        print(f"{get_spd(nbyte, nsec)} {fn}")

    print()
    print("\n".join(inf.errors))
    # aggregate speed uses wall-clock time, not the sum of per-file times
    print(get_spd(inf.n_byte, t2 - t0))


if __name__ == "__main__":
    main()

84
srv/ceditable.html Normal file
View File

@@ -0,0 +1,84 @@
<!DOCTYPE html><html><head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<style>
* {
margin: 0;
padding: 0;
outline: 0;
border: none;
font-size: 1em;
line-height: 1em;
font-family: monospace, monospace;
color: #333;
}
html, body {
width: 100%;
height: 100%;
background: #ddd;
}
html {
font-size: 1.3em;
}
li, #edit {
list-style-type: none;
white-space: pre-wrap;
word-break: break-all;
overflow-wrap: break-word;
word-wrap: break-word; /*ie*/
}
li:nth-child(even) {
background: #ddd;
}
#edit, #html, #txt1, #txt2 {
background: #eee;
position: fixed;
width: calc(50% - .8em);
height: calc(50% - .8em);
}
#txt1 { top: .5em; left: .5em }
#edit { top: .5em; right: .5em }
#html { bottom: .5em; left: .5em }
#txt2 { bottom: .5em; right: .5em }
</style></head><body>
<pre id="edit" contenteditable="true"></pre>
<textarea id="html"></textarea>
<ul id="txt1"></ul>
<ul id="txt2"></ul>
<script>
// grab the four panes: the editable source, the raw-HTML mirror,
// and the two escaped line-list renderings
var edit = document.getElementById('edit'),
    html = document.getElementById('html'),
    txt1 = document.getElementById('txt1'),
    txt2 = document.getElementById('txt2');

var oh = null;  // last innerHTML we rendered; lets us skip redundant work
function fun() {
    // poll loop: re-render the mirrors whenever the edit pane changed
    var h = edit.innerHTML;
    if (oh != h) {
        oh = h;
        html.value = h;
        var t = edit.innerText;
        // presumably compensates for innerText doubling blank lines when
        // contenteditable inserts <div><br></div> — TODO confirm per browser
        if (h.indexOf('<div><br></div>') >= 0)
            t = t.replace(/\n\n/g, "\n");
        // escape &, <, > and wrap each text line in an <li>
        t = '<li>' + t.
            replace(/&/g, "&amp;").
            replace(/</g, "&lt;").
            replace(/>/g, "&gt;").
            split('\n').join('</li>\n<li>') + '</li>';
        // keep blank lines visible (an empty <li> would collapse)
        t = t.replace(/<li><\/li>/g, '<li> </li>');
        txt1.innerHTML = t;
        txt2.innerHTML = t;
    }
    setTimeout(fun, 100);
}
fun();
</script>
</body>
</html>

141
srv/extend.md Normal file
View File

@@ -0,0 +1,141 @@
# hi
this showcases my worst idea yet; *extending markdown with inline javascript*
due to obvious reasons it's disabled by default, and can be enabled with `-emp`
the examples are by no means correct, they're as much of a joke as this feature itself
### sub-header
nothing special about this one
## except/
this one becomes a hyperlink to ./except/ thanks to
* the `copyparty_pre` plugin at the end of this file
* which is invoked as a markdown filter every time the document is modified
* which looks for headers ending with a `/` and rewrites all headers below that
it is a passthrough to the markdown extension api, see https://marked.js.org/using_pro
in addition to the markdown extension functions, `ctor` will be called on document init
### these/
and this one becomes ./except/these/
#### ones.md
finally ./except/these/ones.md
### also-this.md
which should be ./except/also-this.md
# ok
now for another extension type, `copyparty_post` which is called to manipulate the generated dom instead
`copyparty_post` can have the following functions, all optional
* `ctor` is called on document init
* `render` is called when the dom is done but still in-memory
* `render2` is called with the live browser dom as-displayed
## post example
the values in the `ex:` columns are linkified to `example.com/$value`
| ex:foo | bar | ex:baz |
| ------------ | -------- | ------ |
| asdf | nice | fgsfds |
| more one row | hi hello | aaa |
and the table can be sorted by clicking the headers
the difference is that with `copyparty_pre` you'll probably break various copyparty features but if you use `copyparty_post` then future copyparty versions will probably break you
# heres the plugins
if there is anything below this line in the preview then the plugin feature is disabled (good)
```copyparty_pre
ctor() {
md_plug['h'] = {
on: false,
lv: -1,
path: []
}
},
walkTokens(token) {
if (token.type == 'heading') {
var h = md_plug['h'],
is_dir = token.text.endsWith('/');
if (h.lv >= token.depth) {
h.on = false;
}
if (!h.on && is_dir) {
h.on = true;
h.lv = token.depth;
h.path = [token.text];
}
else if (h.on && h.lv < token.depth) {
h.path = h.path.slice(0, token.depth - h.lv);
h.path.push(token.text);
}
if (!h.on)
return false;
var path = h.path.join('');
var emoji = is_dir ? '📂' : '📜';
token.tokens[0].text = '<a href="' + path + '">' + emoji + ' ' + path + '</a>';
}
if (token.type == 'paragraph') {
//console.log(JSON.parse(JSON.stringify(token.tokens)));
for (var a = 0; a < token.tokens.length; a++) {
var t = token.tokens[a];
if (t.type == 'text' || t.type == 'strong' || t.type == 'em') {
var ret = '', text = t.text;
for (var b = 0; b < text.length; b++)
ret += (Math.random() > 0.5) ? text[b] : text[b].toUpperCase();
t.text = ret;
}
}
}
return true;
}
```
```copyparty_post
render(dom) {
var ths = dom.querySelectorAll('th');
for (var a = 0; a < ths.length; a++) {
var th = ths[a];
if (th.textContent.indexOf('ex:') === 0) {
th.textContent = th.textContent.slice(3);
var nrow = 0;
while ((th = th.previousSibling) != null)
nrow++;
var trs = ths[a].parentNode.parentNode.parentNode.querySelectorAll('tr');
for (var b = 1; b < trs.length; b++) {
var td = trs[b].childNodes[nrow];
td.innerHTML = '<a href="//example.com/' + td.innerHTML + '">' + td.innerHTML + '</a>';
}
}
}
},
render2(dom) {
window.makeSortable(dom.getElementsByTagName('table')[0]);
}
```

View File

@@ -1,3 +1,54 @@
### hello world
* qwe
* asd
* zxc
* 573
* one
* two
* |||
|--|--|
|listed|table|
```
[72....................................................................]
[80............................................................................]
```
* foo
```
[72....................................................................]
[80............................................................................]
```
* bar
```
[72....................................................................]
[80............................................................................]
```
```
l[i]=1I;(){}o0O</> var foo = "$(`bar`)"; a's'd
```
🔍🌽.📕.🍙🔎
[](#s1)
[s1](#s1)
[#s1](#s1)
a123456789b123456789c123456789d123456789e123456789f123456789g123456789h123456789i123456789j123456789k123456789l123456789m123456789n123456789o123456789p123456789q123456789r123456789s123456789t123456789u123456789v123456789w123456789x123456789y123456789z123456789
<foo> &nbsp; bar &amp; <span>baz</span>
<a href="?foo=bar&baz=qwe&amp;rty">?foo=bar&baz=qwe&amp;rty</a>
<!-- hidden -->
```
<foo> &nbsp; bar &amp; <span>baz</span>
<a href="?foo=bar&baz=qwe&amp;rty">?foo=bar&baz=qwe&amp;rty</a>
<!-- visible -->
```
*fails marked/showdown/tui/simplemde (just italics), **OK: markdown-it/simplemde:*** *fails marked/showdown/tui/simplemde (just italics), **OK: markdown-it/simplemde:***
testing just google.com and underscored _google.com_ also with _google.com,_ trailing comma and _google.com_, comma after testing just google.com and underscored _google.com_ also with _google.com,_ trailing comma and _google.com_, comma after
@@ -83,6 +134,15 @@ a newline toplevel
| a table | on the right | | a table | on the right |
| second row | foo bar | | second row | foo bar |
||
--|:-:|-:
a table | big text in this | aaakbfddd
second row | centred | bbb
||
--|--|--
foo
* list entry * list entry
* [x] yes * [x] yes
* [ ] no * [ ] no
@@ -171,3 +231,7 @@ unrelated neat stuff:
awk '/./ {printf "%s %d\n", $0, NR; next} 1' <test.md >ln.md awk '/./ {printf "%s %d\n", $0, NR; next} 1' <test.md >ln.md
gawk '{print gensub(/([a-zA-Z\.])/,NR" \\1","1")}' <test.md >ln.md gawk '{print gensub(/([a-zA-Z\.])/,NR" \\1","1")}' <test.md >ln.md
``` ```
a|b|c
--|--|--
foo

View File

@@ -3,8 +3,10 @@
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import os import os
import time
import json import json
import shutil import shutil
import tempfile
import unittest import unittest
import subprocess as sp # nosec import subprocess as sp # nosec
@@ -30,9 +32,6 @@ class TestVFS(unittest.TestCase):
response = self.unfoo(response) response = self.unfoo(response)
self.assertEqual(util.undot(query), response) self.assertEqual(util.undot(query), response)
def absify(self, root, names):
return ["{}/{}".format(root, x).replace("//", "/") for x in names]
def ls(self, vfs, vpath, uname): def ls(self, vfs, vpath, uname):
"""helper for resolving and listing a folder""" """helper for resolving and listing a folder"""
vn, rem = vfs.get(vpath, uname, True, False) vn, rem = vfs.get(vpath, uname, True, False)
@@ -59,16 +58,31 @@ class TestVFS(unittest.TestCase):
if os.path.exists("/Volumes"): if os.path.exists("/Volumes"):
devname, _ = self.chkcmd("hdiutil", "attach", "-nomount", "ram://8192") devname, _ = self.chkcmd("hdiutil", "attach", "-nomount", "ram://8192")
_, _ = self.chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname) devname = devname.strip()
return "/Volumes/cptd" print("devname: [{}]".format(devname))
for _ in range(10):
try:
_, _ = self.chkcmd(
"diskutil", "eraseVolume", "HFS+", "cptd", devname
)
return "/Volumes/cptd"
except Exception as ex:
print(repr(ex))
time.sleep(0.25)
raise Exception("TODO support windows") raise Exception("ramdisk creation failed")
ret = os.path.join(tempfile.gettempdir(), "copyparty-test")
try:
os.mkdir(ret)
finally:
return ret
def log(self, src, msg): def log(self, src, msg):
pass pass
def test(self): def test(self):
td = self.get_ramdisk() + "/vfs" td = os.path.join(self.get_ramdisk(), "vfs")
try: try:
shutil.rmtree(td) shutil.rmtree(td)
except OSError: except OSError:
@@ -99,7 +113,7 @@ class TestVFS(unittest.TestCase):
vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), self.log).vfs vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), self.log).vfs
self.assertEqual(vfs.nodes, {}) self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "") self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td + "/a/ab") self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab"))
self.assertEqual(vfs.uread, ["*"]) self.assertEqual(vfs.uread, ["*"])
self.assertEqual(vfs.uwrite, []) self.assertEqual(vfs.uwrite, [])
@@ -109,7 +123,7 @@ class TestVFS(unittest.TestCase):
).vfs ).vfs
self.assertEqual(vfs.nodes, {}) self.assertEqual(vfs.nodes, {})
self.assertEqual(vfs.vpath, "") self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td + "/a/aa") self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa"))
self.assertEqual(vfs.uread, ["*"]) self.assertEqual(vfs.uread, ["*"])
self.assertEqual(vfs.uwrite, []) self.assertEqual(vfs.uwrite, [])
@@ -138,42 +152,63 @@ class TestVFS(unittest.TestCase):
n = n.nodes["acb"] n = n.nodes["acb"]
self.assertEqual(n.nodes, {}) self.assertEqual(n.nodes, {})
self.assertEqual(n.vpath, "a/ac/acb") self.assertEqual(n.vpath, "a/ac/acb")
self.assertEqual(n.realpath, td + "/a/ac/acb") self.assertEqual(n.realpath, os.path.join(td, "a", "ac", "acb"))
self.assertEqual(n.uread, ["k"]) self.assertEqual(n.uread, ["k"])
self.assertEqual(n.uwrite, ["*", "k"]) self.assertEqual(n.uwrite, ["*", "k"])
# something funky about the windows path normalization,
# doesn't really matter but makes the test messy, TODO?
fsdir, real, virt = self.ls(vfs, "/", "*") fsdir, real, virt = self.ls(vfs, "/", "*")
self.assertEqual(fsdir, td) self.assertEqual(fsdir, td)
self.assertEqual(real, ["b", "c"]) self.assertEqual(real, ["b", "c"])
self.assertEqual(list(virt), ["a"]) self.assertEqual(list(virt), ["a"])
fsdir, real, virt = self.ls(vfs, "a", "*") fsdir, real, virt = self.ls(vfs, "a", "*")
self.assertEqual(fsdir, td + "/a") self.assertEqual(fsdir, os.path.join(td, "a"))
self.assertEqual(real, ["aa", "ab"]) self.assertEqual(real, ["aa", "ab"])
self.assertEqual(list(virt), ["ac"]) self.assertEqual(list(virt), ["ac"])
fsdir, real, virt = self.ls(vfs, "a/ab", "*") fsdir, real, virt = self.ls(vfs, "a/ab", "*")
self.assertEqual(fsdir, td + "/a/ab") self.assertEqual(fsdir, os.path.join(td, "a", "ab"))
self.assertEqual(real, ["aba", "abb", "abc"]) self.assertEqual(real, ["aba", "abb", "abc"])
self.assertEqual(list(virt), []) self.assertEqual(list(virt), [])
fsdir, real, virt = self.ls(vfs, "a/ac", "*") fsdir, real, virt = self.ls(vfs, "a/ac", "*")
self.assertEqual(fsdir, td + "/a/ac") self.assertEqual(fsdir, os.path.join(td, "a", "ac"))
self.assertEqual(real, ["aca", "acc"]) self.assertEqual(real, ["aca", "acc"])
self.assertEqual(list(virt), []) self.assertEqual(list(virt), [])
fsdir, real, virt = self.ls(vfs, "a/ac", "k") fsdir, real, virt = self.ls(vfs, "a/ac", "k")
self.assertEqual(fsdir, td + "/a/ac") self.assertEqual(fsdir, os.path.join(td, "a", "ac"))
self.assertEqual(real, ["aca", "acc"]) self.assertEqual(real, ["aca", "acc"])
self.assertEqual(list(virt), ["acb"]) self.assertEqual(list(virt), ["acb"])
self.assertRaises(util.Pebkac, vfs.get, "a/ac/acb", "*", True, False) self.assertRaises(util.Pebkac, vfs.get, "a/ac/acb", "*", True, False)
fsdir, real, virt = self.ls(vfs, "a/ac/acb", "k") fsdir, real, virt = self.ls(vfs, "a/ac/acb", "k")
self.assertEqual(fsdir, td + "/a/ac/acb") self.assertEqual(fsdir, os.path.join(td, "a", "ac", "acb"))
self.assertEqual(real, ["acba", "acbb", "acbc"]) self.assertEqual(real, ["acba", "acbb", "acbc"])
self.assertEqual(list(virt), []) self.assertEqual(list(virt), [])
# admin-only rootfs with all-read-only subfolder
vfs = AuthSrv(Namespace(c=None, a=["k:k"], v=[".::ak", "a:a:r"]), self.log,).vfs
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(vfs.vpath, "")
self.assertEqual(vfs.realpath, td)
self.assertEqual(vfs.uread, ["k"])
self.assertEqual(vfs.uwrite, ["k"])
n = vfs.nodes["a"]
self.assertEqual(len(vfs.nodes), 1)
self.assertEqual(n.vpath, "a")
self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertEqual(n.uread, ["*"])
self.assertEqual(n.uwrite, [])
self.assertEqual(vfs.can_access("/", "*"), [False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True])
self.assertEqual(vfs.can_access("/a", "*"), [True, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False])
# breadth-first construction # breadth-first construction
vfs = AuthSrv( vfs = AuthSrv(
Namespace( Namespace(
@@ -207,20 +242,20 @@ class TestVFS(unittest.TestCase):
self.assertEqual(list(v1), ["a"]) self.assertEqual(list(v1), ["a"])
fsp, r1, v1 = self.ls(vfs, "a", "*") fsp, r1, v1 = self.ls(vfs, "a", "*")
self.assertEqual(fsp, td + "/a") self.assertEqual(fsp, os.path.join(td, "a"))
self.assertEqual(r1, ["aa", "ab"]) self.assertEqual(r1, ["aa", "ab"])
self.assertEqual(list(v1), ["ac"]) self.assertEqual(list(v1), ["ac"])
fsp1, r1, v1 = self.ls(vfs, "a/ac", "*") fsp1, r1, v1 = self.ls(vfs, "a/ac", "*")
fsp2, r2, v2 = self.ls(vfs, "b", "*") fsp2, r2, v2 = self.ls(vfs, "b", "*")
self.assertEqual(fsp1, td + "/b") self.assertEqual(fsp1, os.path.join(td, "b"))
self.assertEqual(fsp2, td + "/b") self.assertEqual(fsp2, os.path.join(td, "b"))
self.assertEqual(r1, ["ba", "bb", "bc"]) self.assertEqual(r1, ["ba", "bb", "bc"])
self.assertEqual(r1, r2) self.assertEqual(r1, r2)
self.assertEqual(list(v1), list(v2)) self.assertEqual(list(v1), list(v2))
# config file parser # config file parser
cfg_path = self.get_ramdisk() + "/test.cfg" cfg_path = os.path.join(self.get_ramdisk(), "test.cfg")
with open(cfg_path, "wb") as f: with open(cfg_path, "wb") as f:
f.write( f.write(
dedent( dedent(
@@ -248,10 +283,11 @@ class TestVFS(unittest.TestCase):
self.assertEqual(len(n.nodes), 1) self.assertEqual(len(n.nodes), 1)
n = n.nodes["dst"] n = n.nodes["dst"]
self.assertEqual(n.vpath, "dst") self.assertEqual(n.vpath, "dst")
self.assertEqual(n.realpath, td + "/src") self.assertEqual(n.realpath, os.path.join(td, "src"))
self.assertEqual(n.uread, ["a", "asd"]) self.assertEqual(n.uread, ["a", "asd"])
self.assertEqual(n.uwrite, ["asd"]) self.assertEqual(n.uwrite, ["asd"])
self.assertEqual(len(n.nodes), 0) self.assertEqual(len(n.nodes), 0)
os.chdir(tempfile.gettempdir())
shutil.rmtree(td) shutil.rmtree(td)
os.unlink(cfg_path) os.unlink(cfg_path)