Mirror of https://github.com/9001/copyparty.git (synced 2025-10-24 00:24:04 +00:00)

Compare commits
211 Commits
SHA1:
0850b8ae2b 8a68a96c57 d3aae8ed6a c62ebadda8 ffcee6d390 de32838346 b9a4e47ea2 57d994422d
6ecd745323 bd769f5bdb 2381692aba 24fdada0a0 bb5169710a 9cde2352f3 482dd7a938 bddcc69438
19d4540630 4f5f6c81f5 7e4c1238ba f7196ac773 7a7c832000 2b4ccdbebb 0d16b49489 768405b691
da01413b7b 914e22c53e 43a23bf733 92bb00c6d2 b0b97a2648 2c452fe323 ad73d0c77d 7f9bf1c78c
61a6bc3a65 46e10b0e9f 8441206e26 9fdc5ee748 00ff133387 96164cb934 82fb21ae69 89d4a2b4c4
fc0c7ff374 5148c4f2e9 c3b59f7bcf 61e148202b 8a4e0739bc f75c5f2fe5 81d5859588 721886bb7a
b23c272820 cd02bfea7a 6774bd88f9 1046a4f376 8081f9ddfd fa656577d1 b14b86990f 2a6dd7b512
feebdee88b 99d9277f5d 9af64d6156 5e3775c1af 2d2e8a3da7 b2a560b76f 39397a489d ff593a0904
f12789cf44 4f8cf2fc87 fda98730ac 06c6ddffb6 d29f0c066c c9e4de3346 ca0b97f72d b38f20b408
05b1dbaf56 b8481e32ba 9c03c65e07 d8ed006b9b 63c0623a5e fd84506db0 d8bcb44e44 56a26b0916
efcf1d6b90 9f578bfec6 1f170d7d28 5ae14cf9be aaf9d53be9 75c73f7ba7 b6dba8beee 94521cdc1a
3365b1c355 6c957c4923 833997f04c 68d51e4037 ce274d2011 280778ed43 0f558ecbbf 58f9e05d93
1ec981aea7 2a90286a7c 12d25d09b2 a039fae1a4 322b9abadc 0aaf954cea c2d22aa3d1 6934c75bba
c58cf78f86 7f0de790ab d4bb4e3a73 d25612d038 116b2351b0 69b83dfdc4 3b1839c2ce 13742ebdf8
634657bea1 46e70d50b7 d64e9b85a7 fb853edbe3 cc076c1be1 98cc9a6755 7bd2b9c23a de724a1ff3
2163055dae 93ed0fc10b 0d98cefd40 d58988a033 2acfab1e3f b915dfe9a6 25bd5a823e 1c35de4716
4c00435a0a 844e3079a8 4778cb5b2c ec5d60b919 e1f4b960e8 669e46da54 ba94cc5df7 d08245c3df
5c18d12cbf 580a42dec7 29286e159b 19bcf90e9f dae9c00742 35324ceb7c 5aadd47199 7d9057cc62
c4b322b883 19b09c898a eafe2098b6 2bc6a20d71 8b502a7235 37567844af 2f6c4e0e34 1c7cc4cb2b
f83db3648e b164aa00d4 a2d866d0c2 2dfe4ac4c6 db65d05cb5 300c0194c7 37a0d2b087 a4959300ea
223657e5f8 0c53de6767 9c309b1498 1aa1b34c80 755a2ee023 69d3359e47 a90c49b8fb b1222edb27
b967a92f69 90a5cb5e59 7aba9cb76b f550a8171d 82e568d4c9 7b2a4a3d59 0265455cd1 afafc886a4
8a959f6ac4 1c3aa0d2c5 79b7d3316a fa7768583a faf49f6c15 765af31b83 b6a3c52d67 b025c2f660
e559a7c878 5c8855aafd b5fc537b89 14899d3a7c 0ea7881652 ec29b59d1e 9405597c15 82441978c6
e0e6291bdb b2b083fd0a f8a51b68e7 e0a19108e5 770ea68ca8 ce36c52baf a7da1dd233 678ef296b4
9e5627d805 5958ee4439 7127e57f0e ee9c6dc8aa 92779b3f48 2f1baf17d4 583da3d4a9 bf9ff78bcc
2cb07792cc 47bc8bb466 94ad1f5732
12  .eslintrc.json  Normal file

@@ -0,0 +1,12 @@
{
    "env": {
        "browser": true,
        "es2021": true
    },
    "extends": "eslint:recommended",
    "parserOptions": {
        "ecmaVersion": 12
    },
    "rules": {
    }
}
2  .gitattributes  vendored

@@ -1,4 +1,6 @@
* text eol=lf

*.reg text eol=crlf

*.png binary
*.gif binary
17  .vscode/launch.json  vendored

@@ -9,15 +9,26 @@
            "console": "integratedTerminal",
            "cwd": "${workspaceFolder}",
            "args": [
                "-j",
                "0",
                //"-nw",
                "-ed",
                "-emp",
                "-e2dsa",
                "-e2ts",
                "-a",
                "ed:wark",
                "-v",
                "srv::r:aed"
                "srv::r:aed:cnodupe",
                "-v",
                "dist:dist:r"
            ]
        },
        {
            "name": "No debug",
            "preLaunchTask": "no_dbg",
            "type": "python",
            //"request": "attach", "port": 42069
            // fork: nc -l 42069 </dev/null
        },
        {
            "name": "Run active unit test",
            "type": "python",
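For reference, the argument array in the debug configuration above corresponds to roughly this command line (a sketch that assumes you run it from the repo root so `copyparty` is importable; the `ed:wark` account and the volume strings are copied straight from the config):

```sh
# same flags as the "args" list above
python3 -m copyparty -j 0 -ed -emp -e2dsa -e2ts \
    -a ed:wark \
    -v srv::r:aed:cnodupe \
    -v dist:dist:r
```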
14  .vscode/settings.json  vendored

@@ -37,7 +37,7 @@
    "python.linting.banditEnabled": true,
    "python.linting.flake8Args": [
        "--max-line-length=120",
        "--ignore=E722,F405,E203,W503,W293",
        "--ignore=E722,F405,E203,W503,W293,E402",
    ],
    "python.linting.banditArgs": [
        "--ignore=B104"
@@ -50,11 +50,9 @@
    "files.associations": {
        "*.makefile": "makefile"
    },
    "editor.codeActionsOnSaveTimeout": 9001,
    "editor.formatOnSaveTimeout": 9001,
    //
    // things you may wanna edit:
    //
    "python.pythonPath": ".venv/bin/python",
    //"python.linting.enabled": true,
    "python.formatting.blackArgs": [
        "-t",
        "py27"
    ],
    "python.linting.enabled": true,
}
15  .vscode/tasks.json  vendored  Normal file

@@ -0,0 +1,15 @@
{
    "version": "2.0.0",
    "tasks": [
        {
            "label": "pre",
            "command": "true;rm -rf inc/* inc/.hist/;mkdir -p inc;",
            "type": "shell"
        },
        {
            "label": "no_dbg",
            "command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -e2ts -a ed:wark -v srv::r:aed:cnodupe -v dist:dist:r ;exit 1",
            "type": "shell"
        },
    ]
}
168  README.md

@@ -8,57 +8,169 @@

turn your phone or raspi into a portable file server with resumable uploads/downloads using IE6 or any other browser

* server runs on anything with `py2.7` or `py3.2+`
* server runs on anything with `py2.7` or `py3.3+`
* *resumable* uploads need `firefox 12+` / `chrome 6+` / `safari 6+` / `IE 10+`
* code standard: `black`


## quickstart

download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!

running the sfx without arguments (for example doubleclicking it on Windows) will let anyone access the current folder; see `-h` for help if you want accounts and volumes etc

you may also want these, especially on servers:
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for legit https)


## notes

* iPhone/iPad: use Firefox to download files
* Android-Chrome: set max "parallel uploads" for 200% upload speed (android bug)
* Android-Firefox: takes a while to select files (in order to avoid the above android-chrome issue)
* Desktop-Firefox: may use gigabytes of RAM if your connection is great and your files are massive
* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
  * because no browsers currently implement the media-query to do this properly orz


## status

* [x] sanic multipart parser
* [x] load balancer (multiprocessing)
* [x] upload (plain multipart, ie6 support)
* [x] upload (js, resumable, multithreaded)
* [x] download
* [x] browser
* [x] media player
* [ ] thumbnails
* [ ] download as zip
* [x] volumes
* [x] accounts
* [x] markdown viewer
* [x] markdown editor
* backend stuff
  * ☑ sanic multipart parser
  * ☑ load balancer (multiprocessing)
  * ☑ volumes (mountpoints)
  * ☑ accounts
* upload
  * ☑ basic: plain multipart, ie6 support
  * ☑ up2k: js, resumable, multithreaded
  * ☑ stash: simple PUT filedropper
  * ☑ symlink/discard existing files (content-matching)
* download
  * ☑ single files in browser
  * ✖ folders as zip files
  * ☑ FUSE client (read-only)
* browser
  * ☑ tree-view
  * ☑ media player
  * ✖ thumbnails
  * ✖ SPA (browse while uploading)
    * currently safe using the file-tree on the left only, not folders in the file list
* server indexing
  * ☑ locate files by contents
  * ☑ search by name/path/date/size
  * ☑ search by ID3-tags etc.
* markdown
  * ☑ viewer
  * ☑ editor (sure why not)

summary: it works! you can use it! (but technically not even close to beta)


# bugs

* Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* probably more, pls let me know


# searching

when started with `-e2dsa` copyparty will scan/index all your files. This avoids duplicates on upload, and also makes the volumes searchable through the web-ui:
* make search queries by `size`/`date`/`directory-path`/`filename`, or...
* drag/drop a local file to see if the same contents exist somewhere on the server (you get the URL if it does)

path/name queries are space-separated, AND'ed together, and words are negated with a `-` prefix, so for example:
* path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
* name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9)

add `-e2ts` to also scan/index tags from music files:


## search configuration

searching relies on two databases, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`). Configuration can be done through arguments, volume flags, or a mix of both.

through arguments:
* `-e2d` enables file indexing on upload
* `-e2ds` scans writable folders on startup
* `-e2dsa` scans all mounted volumes (including readonly ones)
* `-e2t` enables metadata indexing on upload
* `-e2ts` scans for tags in all files that don't have tags yet
* `-e2tsr` deletes all existing tags, so a full reindex

the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
* `-v ~/music::ce2dsa:ce2tsr` does a full reindex of everything on startup
* `-v ~/music::cd2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`

`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those

`-mte` decides which tags to index and display in the browser (and also the display order), this can be changed per-volume:
* `-v ~/music::cmte=title,artist` indexes and displays *title* followed by *artist*

if you add/remove a tag from `mte` you will need to run with `-e2tsr` once to rebuild the database, otherwise only new files will be affected

`-mtm` can be used to add or redefine a metadata mapping, say you have media files with `foo` and `bar` tags and you want them to display as `qux` in the browser (preferring `foo` if both are present), then do `-mtm qux=foo,bar` and now you can `-mte artist,title,qux`

see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)

`--no-mutagen` disables mutagen and uses ffprobe instead, which...
* is about 20x slower than mutagen
* catches a few tags that mutagen doesn't
* avoids pulling any GPL code into copyparty
* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve


# client examples

* javascript: dump some state into a file (two separate examples)
  * `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
  * `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`

* curl/wget: upload some files (post=file, chunk=stdin)
  * `post(){ curl -b cppwd=wark http://127.0.0.1:3923/ -F act=bput -F f=@"$1";}`
    `post movie.mkv`
  * `post(){ wget --header='Cookie: cppwd=wark' http://127.0.0.1:3923/?raw --post-file="$1" -O-;}`
    `post movie.mkv`
  * `chunk(){ curl -b cppwd=wark http://127.0.0.1:3923/ -T-;}`
    `chunk <movie.mkv`

* FUSE: mount a copyparty server as a local filesystem
  * cross-platform python client available in [./bin/](bin/)
  * [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)

copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads:

    b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
    b512 <movie.mkv


# dependencies

* `jinja2`
  * pulls in `markupsafe` as of v2.7; use jinja 2.6 on py3.2
* `jinja2` (is built into the SFX)

optional, enables thumbnails:
**optional,** enables music tags:
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
* or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)

**optional,** will eventually enable thumbnails:
* `Pillow` (requires py2.7 or py3.5+)


# sfx

currently there are two self-contained binaries:
* `copyparty-sfx.sh` for unix (linux and osx) -- smaller, more robust
* `copyparty-sfx.py` for windows (unix too) -- crossplatform, beta
* [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere
* [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos

launch either of them and it'll unpack and run copyparty, assuming you have python installed of course
launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course

pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `copyparty.py` and keep it in the same folder because `sys.path` is funky


## sfx repack

if you don't need all the features you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except for either msys2 or WSL if you're on windows)
* `724K` original size as of v0.4.0
* `256K` after `./scripts/make-sfx.sh re no-ogv`
@@ -70,16 +182,18 @@ the features you can opt to drop are

for the `re`pack to work, first run one of the sfx'es once to unpack it

**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a `no-ogv no-cm` repack; works on linux/macos (and windows with msys2 or WSL)


# install on android

install [Termux](https://termux.com/) (see [ocv.me/termux](https://ocv.me/termux/)) and then copy-paste this into Termux (long-tap) all at once:
```sh
apt update && apt -y full-upgrade && termux-setup-storage && apt -y install curl && cd && curl -L https://github.com/9001/copyparty/raw/master/scripts/copyparty-android.sh > copyparty-android.sh && chmod 755 copyparty-android.sh && ./copyparty-android.sh -h
apt update && apt -y full-upgrade && termux-setup-storage && apt -y install python && python -m ensurepip && python -m pip install -U copyparty
echo $?
```

after the initial setup (and restarting bash), you can launch copyparty at any time by running "copyparty" in Termux
after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux


# dev env setup
@@ -107,13 +221,15 @@ in the `scripts` folder:

roughly sorted by priority

* up2k handle filename too long
* up2k fails on empty files? alert then stuck
* reduce up2k roundtrips
  * start from a chunk index and just go
  * terminate client on bad data
* drop onto folders
* look into android thumbnail cache file format
* `os.copy_file_range` for up2k cloning
* up2k partials ui
* support pillow-simd
* cache sha512 chunks on client
* symlink existing files on upload
* comment field
* ~~look into android thumbnail cache file format~~ bad idea
* figure out the deal with pixel3a not being connectable as hotspot
  * pixel3a having unpredictable 3sec latency in general :||||
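Putting the search-configuration flags from the README diff above together, a typical indexing-enabled launch could look something like this (a sketch; the music folder, the `music` mountpoint and the `ed:hunter2` account are placeholders):

```sh
# scan all volumes on startup (-e2dsa), index tags too (-e2ts),
# and have this volume show title before artist in the browser
python3 copyparty-sfx.py -e2dsa -e2ts \
    -a ed:hunter2 \
    -v ~/music:music:r:aed:cmte=title,artist
```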
41  bin/README.md  Normal file

@@ -0,0 +1,41 @@
# copyparty-fuse.py
* mount a copyparty server as a local filesystem (read-only)
* **supports Windows!** -- expect `194 MiB/s` sequential read
* **supports Linux** -- expect `117 MiB/s` sequential read
* **supports macos** -- expect `85 MiB/s` sequential read

filecache is default-on for windows and macos;
* macos readsize is 64kB, so speed ~32 MiB/s without the cache
* windows readsize varies by software; explorer=1M, pv=32k

note that copyparty should run with `-ed` to enable dotfiles (hidden otherwise)

also consider using [../docs/rclone.md](../docs/rclone.md) instead for 5x performance


## to run this on windows:
* install [winfsp](https://github.com/billziss-gh/winfsp/releases/latest) and [python 3](https://www.python.org/downloads/)
  * [x] add python 3.x to PATH (it asks during install)
* `python -m pip install --user fusepy`
* `python ./copyparty-fuse.py n: http://192.168.1.69:3923/`

10% faster in [msys2](https://www.msys2.org/), 700% faster if debug prints are enabled:
* `pacman -S mingw64/mingw-w64-x86_64-python{,-pip}`
* `/mingw64/bin/python3 -m pip install --user fusepy`
* `/mingw64/bin/python3 ./copyparty-fuse.py [...]`

you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releases/latest), let me know if you [figure out how](https://github.com/dokan-dev/dokany/wiki/FUSE)
(winfsp's sshfs leaks, doesn't look like winfsp itself does, should be fine)



# copyparty-fuse🅱️.py
* mount a copyparty server as a local filesystem (read-only)
* does the same thing except more correct, `samba` approves
* **supports Linux** -- expect `18 MiB/s` (wait what)
* **supports Macos** -- probably



# copyparty-fuse-streaming.py
* pretend this doesn't exist
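A compact recap of the two halves described above, using the same example address (the server needs `-ed` so the FUSE client can see dotfiles; the mount command is the windows one from the list):

```sh
# server side
python3 copyparty-sfx.py -ed

# client side (winfsp + python 3 already installed)
python -m pip install --user fusepy
python ./copyparty-fuse.py n: http://192.168.1.69:3923/
```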
1100  bin/copyparty-fuse-streaming.py  Executable file
File diff suppressed because it is too large

742  bin/copyparty-fuse.py  Normal file → Executable file
File diff suppressed because it is too large
592
bin/copyparty-fuseb.py
Executable file
592
bin/copyparty-fuseb.py
Executable file
@@ -0,0 +1,592 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
"""copyparty-fuseb: remote copyparty as a local filesystem"""
|
||||
__author__ = "ed <copyparty@ocv.me>"
|
||||
__copyright__ = 2020
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import stat
|
||||
import errno
|
||||
import struct
|
||||
import threading
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
|
||||
try:
|
||||
import fuse
|
||||
from fuse import Fuse
|
||||
|
||||
fuse.fuse_python_api = (0, 2)
|
||||
if not hasattr(fuse, "__version__"):
|
||||
raise Exception("your fuse-python is way old")
|
||||
except:
|
||||
print(
|
||||
"\n could not import fuse; these may help:\n python3 -m pip install --user fuse-python\n apt install libfuse\n modprobe fuse\n"
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
"""
|
||||
mount a copyparty server (local or remote) as a filesystem
|
||||
|
||||
usage:
|
||||
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas
|
||||
|
||||
dependencies:
|
||||
sudo apk add fuse-dev python3-dev
|
||||
python3 -m pip install --user fuse-python
|
||||
|
||||
fork of copyparty-fuse.py based on fuse-python which
|
||||
appears to be more compliant than fusepy? since this works with samba
|
||||
(probably just my garbage code tbh)
|
||||
"""
|
||||
|
||||
|
||||
def threadless_log(msg):
|
||||
print(msg + "\n", end="")
|
||||
|
||||
|
||||
def boring_log(msg):
|
||||
msg = "\033[36m{:012x}\033[0m {}\n".format(threading.current_thread().ident, msg)
|
||||
print(msg[4:], end="")
|
||||
|
||||
|
||||
def rice_tid():
|
||||
tid = threading.current_thread().ident
|
||||
c = struct.unpack(b"B" * 5, struct.pack(b">Q", tid)[-5:])
|
||||
return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m"
|
||||
|
||||
|
||||
def fancy_log(msg):
|
||||
print("{} {}\n".format(rice_tid(), msg), end="")
|
||||
|
||||
|
||||
def null_log(msg):
|
||||
pass
|
||||
|
||||
|
||||
info = fancy_log
|
||||
log = fancy_log
|
||||
dbg = fancy_log
|
||||
log = null_log
|
||||
dbg = null_log
|
||||
|
||||
|
||||
def get_tid():
|
||||
return threading.current_thread().ident
|
||||
|
||||
|
||||
def html_dec(txt):
|
||||
return (
|
||||
txt.replace("<", "<")
|
||||
.replace(">", ">")
|
||||
.replace(""", '"')
|
||||
.replace("&", "&")
|
||||
)
|
||||
|
||||
|
||||
class CacheNode(object):
|
||||
def __init__(self, tag, data):
|
||||
self.tag = tag
|
||||
self.data = data
|
||||
self.ts = time.time()
|
||||
|
||||
|
||||
class Stat(fuse.Stat):
|
||||
def __init__(self):
|
||||
self.st_mode = 0
|
||||
self.st_ino = 0
|
||||
self.st_dev = 0
|
||||
self.st_nlink = 1
|
||||
self.st_uid = 1000
|
||||
self.st_gid = 1000
|
||||
self.st_size = 0
|
||||
self.st_atime = 0
|
||||
self.st_mtime = 0
|
||||
self.st_ctime = 0
|
||||
|
||||
|
||||
class Gateway(object):
|
||||
def __init__(self, base_url):
|
||||
self.base_url = base_url
|
||||
|
||||
ui = urllib.parse.urlparse(base_url)
|
||||
self.web_root = ui.path.strip("/")
|
||||
try:
|
||||
self.web_host, self.web_port = ui.netloc.split(":")
|
||||
self.web_port = int(self.web_port)
|
||||
except:
|
||||
self.web_host = ui.netloc
|
||||
if ui.scheme == "http":
|
||||
self.web_port = 80
|
||||
elif ui.scheme == "https":
|
||||
raise Exception("todo")
|
||||
else:
|
||||
raise Exception("bad url?")
|
||||
|
||||
self.conns = {}
|
||||
|
||||
def quotep(self, path):
|
||||
# TODO: mojibake support
|
||||
path = path.encode("utf-8", "ignore")
|
||||
return quote(path, safe="/")
|
||||
|
||||
def getconn(self, tid=None):
|
||||
tid = tid or get_tid()
|
||||
try:
|
||||
return self.conns[tid]
|
||||
except:
|
||||
info("new conn [{}] [{}]".format(self.web_host, self.web_port))
|
||||
|
||||
conn = http.client.HTTPConnection(self.web_host, self.web_port, timeout=260)
|
||||
|
||||
self.conns[tid] = conn
|
||||
return conn
|
||||
|
||||
def closeconn(self, tid=None):
|
||||
tid = tid or get_tid()
|
||||
try:
|
||||
self.conns[tid].close()
|
||||
del self.conns[tid]
|
||||
except:
|
||||
pass
|
||||
|
||||
def sendreq(self, *args, **kwargs):
|
||||
tid = get_tid()
|
||||
try:
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
return c.getresponse()
|
||||
except:
|
||||
self.closeconn(tid)
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
return c.getresponse()
|
||||
|
||||
def listdir(self, path):
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
|
||||
r = self.sendreq("GET", web_path)
|
||||
if r.status != 200:
|
||||
self.closeconn()
|
||||
raise Exception(
|
||||
"http error {} reading dir {} in {}".format(
|
||||
r.status, web_path, rice_tid()
|
||||
)
|
||||
)
|
||||
|
||||
return self.parse_html(r)
|
||||
|
||||
def download_file_range(self, path, ofs1, ofs2):
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
|
||||
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
|
||||
log("downloading {}".format(hdr_range))
|
||||
|
||||
r = self.sendreq("GET", web_path, headers={"Range": hdr_range})
|
||||
if r.status != http.client.PARTIAL_CONTENT:
|
||||
self.closeconn()
|
||||
raise Exception(
|
||||
"http error {} reading file {} range {} in {}".format(
|
||||
r.status, web_path, hdr_range, rice_tid()
|
||||
)
|
||||
)
|
||||
|
||||
return r.read()
|
||||
|
||||
def parse_html(self, datasrc):
|
||||
ret = []
|
||||
remainder = b""
|
||||
ptn = re.compile(
|
||||
r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
|
||||
)
|
||||
|
||||
while True:
|
||||
buf = remainder + datasrc.read(4096)
|
||||
# print('[{}]'.format(buf.decode('utf-8')))
|
||||
if not buf:
|
||||
break
|
||||
|
||||
remainder = b""
|
||||
endpos = buf.rfind(b"\n")
|
||||
if endpos >= 0:
|
||||
remainder = buf[endpos + 1 :]
|
||||
buf = buf[:endpos]
|
||||
|
||||
lines = buf.decode("utf-8").split("\n")
|
||||
for line in lines:
|
||||
m = ptn.match(line)
|
||||
if not m:
|
||||
# print(line)
|
||||
continue
|
||||
|
||||
ftype, fname, fsize, fdate = m.groups()
|
||||
fname = html_dec(fname)
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
sz = int(fsize)
|
||||
if ftype == "-":
|
||||
ret.append([fname, self.stat_file(ts, sz), 0])
|
||||
else:
|
||||
ret.append([fname, self.stat_dir(ts, sz), 0])
|
||||
|
||||
return ret
|
||||
|
||||
def stat_dir(self, ts, sz=4096):
|
||||
ret = Stat()
|
||||
ret.st_mode = stat.S_IFDIR | 0o555
|
||||
ret.st_nlink = 2
|
||||
ret.st_size = sz
|
||||
ret.st_atime = ts
|
||||
ret.st_mtime = ts
|
||||
ret.st_ctime = ts
|
||||
return ret
|
||||
|
||||
def stat_file(self, ts, sz):
|
||||
ret = Stat()
|
||||
ret.st_mode = stat.S_IFREG | 0o444
|
||||
ret.st_size = sz
|
||||
ret.st_atime = ts
|
||||
ret.st_mtime = ts
|
||||
ret.st_ctime = ts
|
||||
return ret
|
||||
|
||||
|
||||
class CPPF(Fuse):
|
||||
def __init__(self, *args, **kwargs):
|
||||
Fuse.__init__(self, *args, **kwargs)
|
||||
|
||||
self.url = None
|
||||
|
||||
self.dircache = []
|
||||
self.dircache_mtx = threading.Lock()
|
||||
|
||||
self.filecache = []
|
||||
self.filecache_mtx = threading.Lock()
|
||||
|
||||
def init2(self):
|
||||
# TODO figure out how python-fuse wanted this to go
|
||||
self.gw = Gateway(self.url) # .decode('utf-8'))
|
||||
info("up")
|
||||
|
||||
def clean_dircache(self):
|
||||
"""not threadsafe"""
|
||||
now = time.time()
|
||||
cutoff = 0
|
||||
for cn in self.dircache:
|
||||
if now - cn.ts > 1:
|
||||
cutoff += 1
|
||||
else:
|
||||
break
|
||||
|
||||
if cutoff > 0:
|
||||
self.dircache = self.dircache[cutoff:]
|
||||
|
||||
def get_cached_dir(self, dirpath):
|
||||
# with self.dircache_mtx:
|
||||
if True:
|
||||
self.clean_dircache()
|
||||
for cn in self.dircache:
|
||||
if cn.tag == dirpath:
|
||||
return cn
|
||||
|
||||
return None
|
||||
|
||||
"""
|
||||
,-------------------------------, g1>=c1, g2<=c2
|
||||
|cache1 cache2| buf[g1-c1:(g1-c1)+(g2-g1)]
|
||||
`-------------------------------'
|
||||
,---------------,
|
||||
|get1 get2|
|
||||
`---------------'
|
||||
__________________________________________________________________________
|
||||
|
||||
,-------------------------------, g2<=c2, (g2>=c1)
|
||||
|cache1 cache2| cdr=buf[:g2-c1]
|
||||
`-------------------------------' dl car; g1-512K:c1
|
||||
,---------------,
|
||||
|get1 get2|
|
||||
`---------------'
|
||||
__________________________________________________________________________
|
||||
|
||||
,-------------------------------, g1>=c1, (g1<=c2)
|
||||
|cache1 cache2| car=buf[c2-g1:]
|
||||
`-------------------------------' dl cdr; c2:c2+1M
|
||||
,---------------,
|
||||
|get1 get2|
|
||||
`---------------'
|
||||
"""
|
||||
|
||||
def get_cached_file(self, path, get1, get2, file_sz):
|
||||
car = None
|
||||
cdr = None
|
||||
ncn = -1
|
||||
# with self.filecache_mtx:
|
||||
if True:
|
||||
dbg("cache request from {} to {}, size {}".format(get1, get2, file_sz))
|
||||
for cn in self.filecache:
|
||||
ncn += 1
|
||||
|
||||
cache_path, cache1 = cn.tag
|
||||
if cache_path != path:
|
||||
continue
|
||||
|
||||
cache2 = cache1 + len(cn.data)
|
||||
if get2 <= cache1 or get1 >= cache2:
|
||||
continue
|
||||
|
||||
if get1 >= cache1 and get2 <= cache2:
|
||||
# keep cache entry alive by moving it to the end
|
||||
self.filecache = (
|
||||
self.filecache[:ncn] + self.filecache[ncn + 1 :] + [cn]
|
||||
)
|
||||
buf_ofs = get1 - cache1
|
||||
buf_end = buf_ofs + (get2 - get1)
|
||||
dbg(
|
||||
"found all ({}, {} to {}, len {}) [{}:{}] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
len(cn.data),
|
||||
buf_ofs,
|
||||
buf_end,
|
||||
buf_end - buf_ofs,
|
||||
)
|
||||
)
|
||||
return cn.data[buf_ofs:buf_end]
|
||||
|
||||
if get2 < cache2:
|
||||
x = cn.data[: get2 - cache1]
|
||||
if not cdr or len(cdr) < len(x):
|
||||
dbg(
|
||||
"found car ({}, {} to {}, len {}) [:{}-{}] = [:{}] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
len(cn.data),
|
||||
get2,
|
||||
cache1,
|
||||
get2 - cache1,
|
||||
len(x),
|
||||
)
|
||||
)
|
||||
cdr = x
|
||||
|
||||
continue
|
||||
|
||||
if get1 > cache1:
|
||||
x = cn.data[-(cache2 - get1) :]
|
||||
if not car or len(car) < len(x):
|
||||
dbg(
|
||||
"found cdr ({}, {} to {}, len {}) [-({}-{}):] = [-{}:] = {}".format(
|
||||
ncn,
|
||||
cache1,
|
||||
cache2,
|
||||
len(cn.data),
|
||||
cache2,
|
||||
get1,
|
||||
cache2 - get1,
|
||||
len(x),
|
||||
)
|
||||
)
|
||||
car = x
|
||||
|
||||
continue
|
||||
|
||||
raise Exception("what")
|
||||
|
||||
if car and cdr:
|
||||
dbg("<cache> have both")
|
||||
|
||||
ret = car + cdr
|
||||
if len(ret) == get2 - get1:
|
||||
return ret
|
||||
|
||||
raise Exception("{} + {} != {} - {}".format(len(car), len(cdr), get2, get1))
|
||||
|
||||
elif cdr:
|
||||
h_end = get1 + (get2 - get1) - len(cdr)
|
||||
h_ofs = h_end - 512 * 1024
|
||||
|
||||
if h_ofs < 0:
|
||||
h_ofs = 0
|
||||
|
||||
buf_ofs = (get2 - get1) - len(cdr)
|
||||
|
||||
dbg(
|
||||
"<cache> cdr {}, car {}-{}={} [-{}:]".format(
|
||||
len(cdr), h_ofs, h_end, h_end - h_ofs, buf_ofs
|
||||
)
|
||||
)
|
||||
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end)
|
||||
ret = buf[-buf_ofs:] + cdr
|
||||
|
||||
elif car:
|
||||
h_ofs = get1 + len(car)
|
||||
h_end = h_ofs + 1024 * 1024
|
||||
|
||||
if h_end > file_sz:
|
||||
h_end = file_sz
|
||||
|
||||
buf_ofs = (get2 - get1) - len(car)
|
||||
|
||||
dbg(
|
||||
"<cache> car {}, cdr {}-{}={} [:{}]".format(
|
||||
len(car), h_ofs, h_end, h_end - h_ofs, buf_ofs
|
||||
)
|
||||
)
|
||||
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end)
|
||||
ret = car + buf[:buf_ofs]
|
||||
|
||||
else:
|
||||
h_ofs = get1 - 256 * 1024
|
||||
h_end = get2 + 1024 * 1024
|
||||
|
||||
if h_ofs < 0:
|
||||
h_ofs = 0
|
||||
|
||||
if h_end > file_sz:
|
||||
h_end = file_sz
|
||||
|
||||
buf_ofs = get1 - h_ofs
|
||||
buf_end = buf_ofs + get2 - get1
|
||||
|
||||
dbg(
|
||||
"<cache> {}-{}={} [{}:{}]".format(
|
||||
h_ofs, h_end, h_end - h_ofs, buf_ofs, buf_end
|
||||
)
|
||||
)
|
||||
|
||||
buf = self.gw.download_file_range(path, h_ofs, h_end)
|
||||
ret = buf[buf_ofs:buf_end]
|
||||
|
||||
cn = CacheNode([path, h_ofs], buf)
|
||||
# with self.filecache_mtx:
|
||||
if True:
|
||||
if len(self.filecache) > 6:
|
||||
self.filecache = self.filecache[1:] + [cn]
|
||||
else:
|
||||
self.filecache.append(cn)
|
||||
|
||||
return ret
|
||||
|
||||
def _readdir(self, path):
|
||||
path = path.strip("/")
|
||||
log("readdir {}".format(path))
|
||||
|
||||
ret = self.gw.listdir(path)
|
||||
|
||||
# with self.dircache_mtx:
|
||||
if True:
|
||||
cn = CacheNode(path, ret)
|
||||
self.dircache.append(cn)
|
||||
self.clean_dircache()
|
||||
|
||||
return ret
|
||||
|
||||
def readdir(self, path, offset):
|
||||
for e in self._readdir(path)[offset:]:
|
||||
# log("yield [{}]".format(e[0]))
|
||||
yield fuse.Direntry(e[0])
|
||||
|
||||
def open(self, path, flags):
|
||||
if (flags & (os.O_RDONLY | os.O_WRONLY | os.O_RDWR)) != os.O_RDONLY:
|
||||
return -errno.EACCES
|
||||
|
||||
st = self.getattr(path)
|
||||
try:
|
||||
if st.st_nlink > 0:
|
||||
return st
|
||||
except:
|
||||
return st # -int(os.errcode)
|
||||
|
||||
def read(self, path, length, offset, fh=None, *args):
|
||||
if args:
|
||||
log("unexpected args [" + "] [".join(repr(x) for x in args) + "]")
|
||||
raise Exception()
|
||||
|
||||
path = path.strip("/")
|
||||
|
||||
ofs2 = offset + length
|
||||
log("read {} @ {} len {} end {}".format(path, offset, length, ofs2))
|
||||
|
||||
st = self.getattr(path)
|
||||
try:
|
||||
file_sz = st.st_size
|
||||
except:
|
||||
return st # -int(os.errcode)
|
||||
|
||||
if ofs2 > file_sz:
|
||||
ofs2 = file_sz
|
||||
log("truncate to len {} end {}".format(ofs2 - offset, ofs2))
|
||||
|
||||
if file_sz == 0 or offset >= ofs2:
|
||||
return b""
|
||||
|
||||
# toggle cache here i suppose
|
||||
# return self.get_cached_file(path, offset, ofs2, file_sz)
|
||||
return self.gw.download_file_range(path, offset, ofs2)
|
||||
|
||||
def getattr(self, path):
|
||||
log("getattr [{}]".format(path))
|
||||
|
||||
path = path.strip("/")
|
||||
try:
|
||||
dirpath, fname = path.rsplit("/", 1)
|
||||
except:
|
||||
dirpath = ""
|
||||
fname = path
|
||||
|
||||
if not path:
|
||||
ret = self.gw.stat_dir(time.time())
|
||||
dbg("=root")
|
||||
return ret
|
||||
|
||||
cn = self.get_cached_dir(dirpath)
|
||||
if cn:
|
||||
log("cache ok")
|
||||
dents = cn.data
|
||||
else:
|
||||
log("cache miss")
|
||||
dents = self._readdir(dirpath)
|
||||
|
||||
for cache_name, cache_stat, _ in dents:
|
||||
if cache_name == fname:
|
||||
dbg("=file")
|
||||
return cache_stat
|
||||
|
||||
log("=404")
|
||||
return -errno.ENOENT
|
||||
|
||||
|
||||
def main():
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
|
||||
server = CPPF()
|
||||
server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
|
||||
server.parse(values=server, errex=1)
|
||||
if not server.url or not str(server.url).startswith("http"):
|
||||
print("\nerror:")
|
||||
print(" need argument: -o url=<...>")
|
||||
print(" need argument: mount-path")
|
||||
print("example:")
|
||||
print(
|
||||
" ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
server.init2()
|
||||
threading.Thread(target=server.main, daemon=True).start()
|
||||
while True:
|
||||
time.sleep(9001)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -118,7 +118,7 @@ printf ']}' >> /dev/shm/$salt.hs
|
||||
|
||||
printf '\033[36m'
|
||||
|
||||
#curl "http://$target:1234$posturl/handshake.php" -H "Content-Type: text/plain;charset=UTF-8" -H "Cookie: cppwd=$passwd" --data "$(cat "/dev/shm/$salt.hs")" | tee /dev/shm/$salt.res
|
||||
#curl "http://$target:3923$posturl/handshake.php" -H "Content-Type: text/plain;charset=UTF-8" -H "Cookie: cppwd=$passwd" --data "$(cat "/dev/shm/$salt.hs")" | tee /dev/shm/$salt.res
|
||||
|
||||
{
|
||||
{
|
||||
@@ -135,7 +135,7 @@ EOF
|
||||
cat /dev/shm/$salt.hs
|
||||
} |
|
||||
tee /dev/shm/$salt.hsb |
|
||||
ncat $target 1234 |
|
||||
ncat $target 3923 |
|
||||
tee /dev/shm/$salt.hs1r
|
||||
|
||||
wark="$(cat /dev/shm/$salt.hs1r | getwark)"
|
||||
@@ -190,7 +190,7 @@ EOF
|
||||
nchunk=$((nchunk+1))
|
||||
|
||||
done |
|
||||
ncat $target 1234 |
|
||||
ncat $target 3923 |
|
||||
tee /dev/shm/$salt.pr
|
||||
|
||||
t=$(date +%s.%N)
|
||||
@@ -201,7 +201,7 @@ t=$(date +%s.%N)
|
||||
|
||||
printf '\033[36m'
|
||||
|
||||
ncat $target 1234 < /dev/shm/$salt.hsb |
|
||||
ncat $target 3923 < /dev/shm/$salt.hsb |
|
||||
tee /dev/shm/$salt.hs2r |
|
||||
grep -E '"hash": ?\[ *\]'
|
||||
|
||||
|
||||
27  contrib/README.md  Normal file

@@ -0,0 +1,27 @@
### [`copyparty.bat`](copyparty.bat)
* launches copyparty with no arguments (anon read+write within same folder)
* intended for windows machines with no python.exe in PATH
* works on windows, linux and macos
* assumes `copyparty-sfx.py` was renamed to `copyparty.py` in the same folder as `copyparty.bat`

### [`index.html`](index.html)
* drop-in redirect from an httpd to copyparty
* assumes the webserver and copyparty are running on the same server/IP
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript

### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
* disables thumbnails and folder-type detection in windows explorer
* makes it way faster (especially for slow/networked locations (such as copyparty-fuse))

### [`cfssl.sh`](cfssl.sh)
* creates CA and server certificates using cfssl
* give a 3rd argument to install it to your copyparty config

# OS integration
init-scripts to start copyparty as a service
* [`systemd/copyparty.service`](systemd/copyparty.service)
* [`openrc/copyparty`](openrc/copyparty)

# Reverse-proxy
copyparty has basic support for running behind another webserver
* [`nginx/copyparty.conf`](nginx/copyparty.conf)
72  contrib/cfssl.sh  Executable file

@@ -0,0 +1,72 @@
#!/bin/bash
set -e

# ca-name and server-name
ca_name="$1"
srv_name="$2"

[ -z "$srv_name" ] && {
    echo "need arg 1: ca name"
    echo "need arg 2: server name"
    exit 1
}


gen_ca() {
    (tee /dev/stderr <<EOF
{"CN": "$ca_name ca",
"CA": {"expiry":"87600h", "pathlen":0},
"key": {"algo":"rsa", "size":4096},
"names": [{"O":"$ca_name ca"}]}
EOF
    )|
    cfssl gencert -initca - |
    cfssljson -bare ca

    mv ca-key.pem ca.key
    rm ca.csr
}


gen_srv() {
    (tee /dev/stderr <<EOF
{"key": {"algo":"rsa", "size":4096},
"names": [{"O":"$ca_name - $srv_name"}]}
EOF
    )|
    cfssl gencert -ca ca.pem -ca-key ca.key \
        -profile=www -hostname="$srv_name.$ca_name" - |
    cfssljson -bare "$srv_name"

    mv "$srv_name-key.pem" "$srv_name.key"
    rm "$srv_name.csr"
}


# create ca if not exist
[ -e ca.key ] ||
    gen_ca

# always create server cert
gen_srv


# dump cert info
show() {
    openssl x509 -text -noout -in $1 |
    awk '!o; {o=0} /[0-9a-f:]{16}/{o=1}'
}
show ca.pem
show "$srv_name.pem"


# write cert into copyparty config
[ -z "$3" ] || {
    mkdir -p ~/.config/copyparty
    cat "$srv_name".{key,pem} ca.pem >~/.config/copyparty/cert.pem
}


# rm *.key *.pem
# cfssl print-defaults config
# cfssl print-defaults csr
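A usage sketch for the script above (assumes `cfssl` and `cfssljson` are on PATH; the CA name `home` and server name `fs` are placeholders, and any non-empty third argument triggers the copyparty install step):

```sh
# first run creates ca.pem + ca.key, then fs.pem + fs.key for the hostname "fs.home",
# and the third argument concatenates everything into ~/.config/copyparty/cert.pem
./cfssl.sh home fs y
```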
33  contrib/copyparty.bat  Normal file

@@ -0,0 +1,33 @@
exec python "$(dirname "$0")"/copyparty.py

@rem on linux, the above will execute and the script will terminate
@rem on windows, the rest of this script will run

@echo off
cls

set py=
for /f %%i in ('where python 2^>nul') do (
    set "py=%%i"
    goto c1
)
:c1

if [%py%] == [] (
    for /f %%i in ('where /r "%localappdata%\programs\python" python 2^>nul') do (
        set "py=%%i"
        goto c2
    )
)
:c2

if [%py%] == [] set "py=c:\python27\python.exe"

if not exist "%py%" (
    echo could not find python
    echo(
    pause
    exit /b
)

start cmd /c %py% "%~dp0\copyparty.py"
31  contrib/explorer-nothumbs-nofoldertypes.reg  Normal file

@@ -0,0 +1,31 @@
Windows Registry Editor Version 5.00

; this will do 3 things, all optional:
; 1) disable thumbnails
; 2) delete all existing folder type settings/detections
; 3) disable folder type detection (force default columns)
;
; this makes the file explorer way faster,
; especially on slow/networked locations


; =====================================================================
; 1) disable thumbnails

[HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Explorer\Advanced]
"IconsOnly"=dword:00000001


; =====================================================================
; 2) delete all existing folder type settings/detections

[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags]

[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\BagMRU]


; =====================================================================
; 3) disable folder type detection

[HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags\AllFolders\Shell]
"FolderType"="NotSpecified"
43  contrib/index.html  Normal file

@@ -0,0 +1,43 @@
<!DOCTYPE html>
<html lang="en">

<head>
    <meta charset="utf-8">
    <title>⇆🎉 redirect</title>
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <style>

        html, body {
            font-family: sans-serif;
        }
        body {
            padding: 1em 2em;
            font-size: 1.5em;
        }
        a {
            font-size: 1.2em;
            padding: .1em;
        }

    </style>
</head>
<body>
    <span id="desc">you probably want</span> <a id="redir" href="//10.13.1.1:3923/">copyparty</a>
    <script>

        var a = document.getElementById('redir'),
            proto = window.location.protocol.indexOf('https') === 0 ? 'https' : 'http',
            loc = window.location.hostname || '127.0.0.1',
            port = a.getAttribute('href').split(':').pop().split('/')[0],
            url = proto + '://' + loc + ':' + port + '/';

        a.setAttribute('href', url);
        document.getElementById('desc').innerHTML = 'redirecting to';

        setTimeout(function() {
            window.location.href = url;
        }, 500);

    </script>
</body>
</html>
26  contrib/nginx/copyparty.conf  Normal file

@@ -0,0 +1,26 @@
upstream cpp {
    server 127.0.0.1:3923;
    keepalive 120;
}
server {
    listen 443 ssl;
    listen [::]:443 ssl;

    server_name fs.example.com;

    location / {
        proxy_pass http://cpp;
        proxy_redirect off;
        # disable buffering (next 4 lines)
        proxy_http_version 1.1;
        client_max_body_size 0;
        proxy_buffering off;
        proxy_request_buffering off;

        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header Connection "Keep-Alive";
    }
}
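Once the site is enabled, a quick sanity check could look like this (the hostname is the placeholder from the config; `-k` only because the certificate may be from your own CA):

```sh
# should return copyparty's html through nginx on port 443
curl -sk https://fs.example.com/ | head -n 5
```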
18  contrib/openrc/copyparty  Normal file

@@ -0,0 +1,18 @@
#!/sbin/openrc-run

# this will start `/usr/local/bin/copyparty-sfx.py`
# and share '/mnt' with anonymous read+write
#
# installation:
#   cp -pv copyparty /etc/init.d && rc-update add copyparty
#
# you may want to:
#   change '/usr/bin/python' to another interpreter
#   change '/mnt::a' to another location or permission-set

name="$SVCNAME"
command_background=true
pidfile="/var/run/$SVCNAME.pid"

command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
command_args="-q -v /mnt::a"
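The install commands from the header comment, plus the usual OpenRC calls to start it and check on it (a sketch; adjust the paths as the comment suggests):

```sh
cp -pv copyparty /etc/init.d && rc-update add copyparty
rc-service copyparty start
rc-service copyparty status
```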
19  contrib/systemd/copyparty.service  Normal file

@@ -0,0 +1,19 @@
# this will start `/usr/local/bin/copyparty-sfx.py`
# and share '/mnt' with anonymous read+write
#
# installation:
#   cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
#
# you may want to:
#   change '/usr/bin/python' to another interpreter
#   change '/mnt::a' to another location or permission-set

[Unit]
Description=copyparty file server

[Service]
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'

[Install]
WantedBy=multi-user.target
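And the matching systemd workflow; the first line is the install command from the header comment above, the rest are standard systemctl/journalctl calls:

```sh
cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
systemctl status copyparty      # confirm it started
journalctl -u copyparty -f      # follow the logs
```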
@@ -8,17 +8,26 @@ __copyright__ = 2019
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import shutil
|
||||
import filecmp
|
||||
import locale
|
||||
import argparse
|
||||
from textwrap import dedent
|
||||
|
||||
from .__init__ import E, WINDOWS, VT100
|
||||
from .__init__ import E, WINDOWS, VT100, PY2
|
||||
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
|
||||
from .svchub import SvcHub
|
||||
from .util import py_desc
|
||||
from .util import py_desc, align_tab
|
||||
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
import ssl
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
|
||||
class RiceFormatter(argparse.HelpFormatter):
|
||||
@@ -44,6 +53,10 @@ class RiceFormatter(argparse.HelpFormatter):
|
||||
return "".join(indent + line + "\n" for line in text.splitlines())
|
||||
|
||||
|
||||
def warn(msg):
|
||||
print("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
|
||||
|
||||
|
||||
def ensure_locale():
|
||||
for x in [
|
||||
"en_US.UTF-8",
|
||||
@@ -84,9 +97,77 @@ def ensure_cert():
|
||||
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
|
||||
|
||||
|
||||
def configure_ssl_ver(al):
|
||||
def terse_sslver(txt):
|
||||
txt = txt.lower()
|
||||
for c in ["_", "v", "."]:
|
||||
txt = txt.replace(c, "")
|
||||
|
||||
return txt.replace("tls10", "tls1")
|
||||
|
||||
# oh man i love openssl
|
||||
# check this out
|
||||
# hold my beer
|
||||
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
|
||||
sslver = terse_sslver(al.ssl_ver).split(",")
|
||||
flags = [k for k in ssl.__dict__ if ptn.match(k)]
|
||||
# SSLv2 SSLv3 TLSv1 TLSv1_1 TLSv1_2 TLSv1_3
|
||||
if "help" in sslver:
|
||||
avail = [terse_sslver(x[6:]) for x in flags]
|
||||
avail = " ".join(sorted(avail) + ["all"])
|
||||
print("\navailable ssl/tls versions:\n " + avail)
|
||||
sys.exit(0)
|
||||
|
||||
al.ssl_flags_en = 0
|
||||
al.ssl_flags_de = 0
|
||||
for flag in sorted(flags):
|
||||
ver = terse_sslver(flag[6:])
|
||||
num = getattr(ssl, flag)
|
||||
if ver in sslver:
|
||||
al.ssl_flags_en |= num
|
||||
else:
|
||||
al.ssl_flags_de |= num
|
||||
|
||||
if sslver == ["all"]:
|
||||
x = al.ssl_flags_en
|
||||
al.ssl_flags_en = al.ssl_flags_de
|
||||
al.ssl_flags_de = x
|
||||
|
||||
for k in ["ssl_flags_en", "ssl_flags_de"]:
|
||||
num = getattr(al, k)
|
||||
print("{}: {:8x} ({})".format(k, num, num))
|
||||
|
||||
# think i need that beer now
|
||||
|
||||
|
||||
def configure_ssl_ciphers(al):
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
if al.ssl_ver:
|
||||
ctx.options &= ~al.ssl_flags_en
|
||||
ctx.options |= al.ssl_flags_de
|
||||
|
||||
is_help = al.ciphers == "help"
|
||||
|
||||
if al.ciphers and not is_help:
|
||||
try:
|
||||
ctx.set_ciphers(al.ciphers)
|
||||
except:
|
||||
print("\n\033[1;31mfailed to set ciphers\033[0m\n")
|
||||
|
||||
if not hasattr(ctx, "get_ciphers"):
|
||||
print("cannot read cipher list: openssl or python too old")
|
||||
else:
|
||||
ciphers = [x["description"] for x in ctx.get_ciphers()]
|
||||
print("\n ".join(["\nenabled ciphers:"] + align_tab(ciphers) + [""]))
|
||||
|
||||
if is_help:
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def main():
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
if WINDOWS:
|
||||
os.system("") # enables colors
|
||||
os.system("rem") # enables colors
|
||||
|
||||
desc = py_desc().replace("[", "\033[1;30m[")
|
||||
|
||||
@@ -94,7 +175,20 @@ def main():
|
||||
print(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc))
|
||||
|
||||
ensure_locale()
|
||||
ensure_cert()
|
||||
if HAVE_SSL:
|
||||
ensure_cert()
|
||||
|
||||
deprecated = [["-e2s", "-e2ds"]]
|
||||
for dk, nk in deprecated:
|
||||
try:
|
||||
idx = sys.argv.index(dk)
|
||||
except:
|
||||
continue
|
||||
|
||||
msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
|
||||
print(msg.format(dk, nk))
|
||||
sys.argv[idx] = nk
|
||||
time.sleep(2)
|
||||
|
||||
ap = argparse.ArgumentParser(
|
||||
formatter_class=RiceFormatter,
|
||||
@@ -103,38 +197,120 @@ def main():
|
||||
epilog=dedent(
|
||||
"""
|
||||
-a takes username:password,
|
||||
-v takes src:dst:permset:permset:... where "permset" is
|
||||
accesslevel followed by username (no separator)
|
||||
-v takes src:dst:permset:permset:cflag:cflag:...
|
||||
where "permset" is accesslevel followed by username (no separator)
|
||||
and "cflag" is config flags to set on this volume
|
||||
|
||||
list of cflags:
|
||||
"cnodupe" rejects existing files (instead of symlinking them)
|
||||
|
||||
example:\033[35m
|
||||
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed \033[36m
|
||||
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
|
||||
mount current directory at "/" with
|
||||
* r (read-only) for everyone
|
||||
* a (read+write) for ed
|
||||
mount ../inc at "/dump" with
|
||||
* w (write-only) for everyone
|
||||
* a (read+write) for ed \033[0m
|
||||
* a (read+write) for ed
|
||||
* reject duplicate files \033[0m
|
||||
|
||||
if no accounts or volumes are configured,
|
||||
current folder will be read/write for everyone
|
||||
|
||||
consider the config file for more flexible account/volume management,
|
||||
including dynamic reload at runtime (and being more readable w)
|
||||
|
||||
values for --urlform:
|
||||
"stash" dumps the data to file and returns length + checksum
|
||||
"save,get" dumps to file and returns the page like a GET
|
||||
"print,get" prints the data in the log and returns GET
|
||||
(leave out the ",get" to return an error instead)
|
||||
|
||||
--ciphers help = available ssl/tls ciphers,
|
||||
--ssl-ver help = available ssl/tls versions,
|
||||
default is what python considers safe, usually >= TLS1
|
||||
"""
|
||||
),
|
||||
)
|
||||
ap.add_argument(
|
||||
"-c", metavar="PATH", type=str, action="append", help="add config file"
|
||||
)
|
||||
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind")
|
||||
ap.add_argument("-p", metavar="PORT", type=int, default=1234, help="port to bind")
|
||||
ap.add_argument("-nc", metavar="NUM", type=int, default=16, help="max num clients")
|
||||
ap.add_argument("-j", metavar="CORES", type=int, help="max num cpu cores")
|
||||
# fmt: off
|
||||
ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
|
||||
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
|
||||
ap.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
|
||||
ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
|
||||
ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
|
||||
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
|
||||
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
|
||||
ap.add_argument("-q", action="store_true", help="quiet")
|
||||
ap.add_argument("-nw", action="store_true", help="benchmark: disable writing")
|
||||
ap.add_argument("-ed", action="store_true", help="enable ?dots")
|
||||
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
|
||||
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
|
||||
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
|
||||
ap.add_argument("-nih", action="store_true", help="no info hostname")
|
||||
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
|
||||
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile (for debugging)")
|
||||
ap.add_argument("--no-scandir", action="store_true", help="disable scandir (for debugging)")
|
||||
ap.add_argument("--urlform", type=str, default="print,get", help="how to handle url-forms")
|
||||
ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
|
||||
|
||||
ap2 = ap.add_argument_group('database options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
|
||||
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
|
||||
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
|
||||
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
|
||||
ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead")
|
||||
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
|
||||
ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q")
|
||||
|
||||
ap2 = ap.add_argument_group('SSL/TLS options')
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
|
||||
ap2.add_argument("--ssl-ver", type=str, help="ssl/tls versions to allow")
|
||||
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
|
||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
|
||||
|
||||
al = ap.parse_args()
|
||||
# fmt: on
|
||||
|
||||
# propagate implications
for k1, k2 in [
    ["e2dsa", "e2ds"],
    ["e2ds", "e2d"],
    ["e2tsr", "e2ts"],
    ["e2ts", "e2t"],
    ["e2t", "e2d"],
]:
    if getattr(al, k1):
        setattr(al, k2, True)

al.i = al.i.split(",")
try:
    if "-" in al.p:
        lo, hi = [int(x) for x in al.p.split("-")]
        al.p = list(range(lo, hi + 1))
    else:
        al.p = [int(x) for x in al.p.split(",")]
except:
    raise Exception("invalid value for -p")

if HAVE_SSL:
    if al.ssl_ver:
        configure_ssl_ver(al)

    if al.ciphers:
        configure_ssl_ciphers(al)
else:
    warn("ssl module does not exist; cannot enable https")

if PY2 and WINDOWS and al.e2d:
    warn(
        "windows py2 cannot do unicode filenames with -e2d\n"
        + " (if you crash with codec errors then that is why)"
    )

SvcHub(al).run()
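Below is a standalone restatement of the -p handling above, mainly to show the accepted forms; it mirrors the diff, which takes either a single lo-hi range or a comma-separated list of ports (not a mix of both):

# mirrors the -p parsing above: one "lo-hi" range, or a comma-separated list
def parse_ports(p):
    if "-" in p:
        lo, hi = [int(x) for x in p.split("-")]
        return list(range(lo, hi + 1))
    return [int(x) for x in p.split(",")]

print(parse_ports("3923"))       # [3923]
print(parse_ports("80,443"))     # [80, 443]
print(parse_ports("3920-3923"))  # [3920, 3921, 3922, 3923]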
@@ -1,8 +1,8 @@
# coding: utf-8

VERSION = (0, 4, 2)
CODENAME = "NIH"
BUILD_DT = (2020, 5, 15)
VERSION = (0, 9, 5)
CODENAME = "the strongest music server"
BUILD_DT = (2021, 3, 7)

S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
@@ -6,18 +6,24 @@ import re
|
||||
import threading
|
||||
|
||||
from .__init__ import PY2, WINDOWS
|
||||
from .util import undot, Pebkac, fsdec, fsenc
|
||||
from .util import undot, Pebkac, fsdec, fsenc, statdir, nuprint
|
||||
|
||||
|
||||
class VFS(object):
|
||||
"""single level in the virtual fs"""
|
||||
|
||||
def __init__(self, realpath, vpath, uread=[], uwrite=[]):
|
||||
def __init__(self, realpath, vpath, uread=[], uwrite=[], flags={}):
|
||||
self.realpath = realpath # absolute path on host filesystem
|
||||
self.vpath = vpath # absolute path in the virtual filesystem
|
||||
self.uread = uread # users who can read this
|
||||
self.uwrite = uwrite # users who can write this
|
||||
self.flags = flags # config switches
|
||||
self.nodes = {} # child nodes
|
||||
self.all_vols = {vpath: self} # flattened recursive
|
||||
|
||||
def _trk(self, vol):
|
||||
self.all_vols[vol.vpath] = vol
|
||||
return vol
|
||||
|
||||
def add(self, src, dst):
|
||||
"""get existing, or add new path to the vfs"""
|
||||
@@ -29,16 +35,17 @@ class VFS(object):
|
||||
name, dst = dst.split("/", 1)
|
||||
if name in self.nodes:
|
||||
# exists; do not manipulate permissions
|
||||
return self.nodes[name].add(src, dst)
|
||||
return self._trk(self.nodes[name].add(src, dst))
|
||||
|
||||
vn = VFS(
|
||||
"{}/{}".format(self.realpath, name),
|
||||
"{}/{}".format(self.vpath, name).lstrip("/"),
|
||||
self.uread,
|
||||
self.uwrite,
|
||||
self.flags,
|
||||
)
|
||||
self.nodes[name] = vn
|
||||
return vn.add(src, dst)
|
||||
return self._trk(vn.add(src, dst))
|
||||
|
||||
if dst in self.nodes:
|
||||
# leaf exists; return as-is
|
||||
@@ -48,7 +55,7 @@ class VFS(object):
|
||||
vp = "{}/{}".format(self.vpath, dst).lstrip("/")
|
||||
vn = VFS(src, vp)
|
||||
self.nodes[dst] = vn
|
||||
return vn
|
||||
return self._trk(vn)
|
||||
|
||||
def _find(self, vpath):
|
||||
"""return [vfs,remainder]"""
|
||||
@@ -95,20 +102,19 @@ class VFS(object):
|
||||
|
||||
return fsdec(os.path.realpath(fsenc(rp)))
|
||||
|
||||
def ls(self, rem, uname):
|
||||
def ls(self, rem, uname, scandir, lstat=False):
|
||||
"""return user-readable [fsdir,real,virt] items at vpath"""
|
||||
virt_vis = {} # nodes readable by user
|
||||
abspath = self.canonical(rem)
|
||||
items = os.listdir(fsenc(abspath))
|
||||
real = [fsdec(x) for x in items]
|
||||
real = list(statdir(nuprint, scandir, lstat, abspath))
|
||||
real.sort()
|
||||
if not rem:
|
||||
for name, vn2 in sorted(self.nodes.items()):
|
||||
if uname in vn2.uread:
|
||||
if uname in vn2.uread or "*" in vn2.uread:
|
||||
virt_vis[name] = vn2
|
||||
|
||||
# no vfs nodes in the list of real inodes
|
||||
real = [x for x in real if x not in self.nodes]
|
||||
real = [x for x in real if x[0] not in self.nodes]
|
||||
|
||||
return [abspath, real, virt_vis]
|
||||
|
||||
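VFS.ls now delegates directory listing to a statdir helper so callers can choose between os.scandir and a plain listdir+stat fallback. The real statdir lives in copyparty/util.py and is not shown in this diff; the sketch below is only a guess at its shape, yielding (name, stat) pairs:

import os

def statdir(logger, scandir, lstat, top):
    # hypothetical shape matching the call site above; logger is accepted
    # to mirror that call but unused in this sketch
    if scandir and hasattr(os, "scandir"):
        with os.scandir(top) as dh:
            for fh in dh:
                yield fh.name, fh.stat(follow_symlinks=not lstat)
    else:
        for name in os.listdir(top):
            abspath = os.path.join(top, name)
            yield name, os.lstat(abspath) if lstat else os.stat(abspath)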
@@ -128,22 +134,21 @@ class VFS(object):
|
||||
class AuthSrv(object):
|
||||
"""verifies users against given paths"""
|
||||
|
||||
def __init__(self, args, log_func):
|
||||
self.log_func = log_func
|
||||
def __init__(self, args, log_func, warn_anonwrite=True):
|
||||
self.args = args
|
||||
|
||||
self.warn_anonwrite = True
|
||||
self.log_func = log_func
|
||||
self.warn_anonwrite = warn_anonwrite
|
||||
|
||||
if WINDOWS:
|
||||
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)")
|
||||
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
|
||||
else:
|
||||
self.re_vol = re.compile(r"^([^:]*):([^:]*):(.*)")
|
||||
self.re_vol = re.compile(r"^([^:]*):([^:]*):(.*)$")
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.reload()
|
||||
|
||||
def log(self, msg):
|
||||
self.log_func("auth", msg)
|
||||
def log(self, msg, c=0):
|
||||
self.log_func("auth", msg, c)
|
||||
|
||||
def invert(self, orig):
|
||||
if PY2:
|
||||
@@ -161,7 +166,7 @@ class AuthSrv(object):
|
||||
|
||||
yield prev, True
|
||||
|
||||
def _parse_config_file(self, fd, user, mread, mwrite, mount):
|
||||
def _parse_config_file(self, fd, user, mread, mwrite, mflags, mount):
|
||||
vol_src = None
|
||||
vol_dst = None
|
||||
for ln in [x.decode("utf-8").strip() for x in fd]:
|
||||
@@ -191,6 +196,7 @@ class AuthSrv(object):
|
||||
mount[vol_dst] = vol_src
|
||||
mread[vol_dst] = []
|
||||
mwrite[vol_dst] = []
|
||||
mflags[vol_dst] = {}
|
||||
continue
|
||||
|
||||
lvl, uname = ln.split(" ")
|
||||
@@ -198,6 +204,12 @@ class AuthSrv(object):
|
||||
mread[vol_dst].append(uname)
|
||||
if lvl in "wa":
|
||||
mwrite[vol_dst].append(uname)
|
||||
if lvl == "c":
|
||||
cval = True
|
||||
if "=" in uname:
|
||||
uname, cval = uname.split("=", 1)
|
||||
|
||||
mflags[vol_dst][uname] = cval
|
||||
|
||||
def reload(self):
|
||||
"""
|
||||
@@ -210,6 +222,7 @@ class AuthSrv(object):
|
||||
user = {} # username:password
|
||||
mread = {} # mountpoint:[username]
|
||||
mwrite = {} # mountpoint:[username]
|
||||
mflags = {} # mountpoint:[flag]
|
||||
mount = {} # dst:src (mountpoint:realpath)
|
||||
|
||||
if self.args.a:
|
||||
@@ -220,38 +233,51 @@ class AuthSrv(object):
|
||||
if self.args.v:
|
||||
# list of src:dst:permset:permset:...
|
||||
# permset is [rwa]username
|
||||
for vol_match in [self.re_vol.match(x) for x in self.args.v]:
|
||||
try:
|
||||
src, dst, perms = vol_match.groups()
|
||||
except:
|
||||
raise Exception("invalid -v argument")
|
||||
for v_str in self.args.v:
|
||||
m = self.re_vol.match(v_str)
|
||||
if not m:
|
||||
raise Exception("invalid -v argument: [{}]".format(v_str))
|
||||
|
||||
src, dst, perms = m.groups()
|
||||
# print("\n".join([src, dst, perms]))
|
||||
src = fsdec(os.path.abspath(fsenc(src)))
|
||||
dst = dst.strip("/")
|
||||
mount[dst] = src
|
||||
mread[dst] = []
|
||||
mwrite[dst] = []
|
||||
mflags[dst] = {}
|
||||
|
||||
perms = perms.split(":")
|
||||
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
|
||||
if lvl == "c":
|
||||
cval = True
|
||||
if "=" in uname:
|
||||
uname, cval = uname.split("=", 1)
|
||||
|
||||
mflags[dst][uname] = cval
|
||||
continue
|
||||
|
||||
if uname == "":
|
||||
uname = "*"
|
||||
|
||||
if lvl in "ra":
|
||||
mread[dst].append(uname)
|
||||
|
||||
if lvl in "wa":
|
||||
mwrite[dst].append(uname)
|
||||
|
||||
if self.args.c:
|
||||
for cfg_fn in self.args.c:
|
||||
with open(cfg_fn, "rb") as f:
|
||||
self._parse_config_file(f, user, mread, mwrite, mount)
|
||||
self._parse_config_file(f, user, mread, mwrite, mflags, mount)
|
||||
|
||||
if not mount:
|
||||
# -h says our defaults are CWD at root and read/write for everyone
|
||||
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
|
||||
elif "" not in mount:
|
||||
# there's volumes but no root; make root inaccessible
|
||||
vfs = VFS(os.path.abspath("."), "", [], [])
|
||||
vfs = VFS(os.path.abspath("."), "")
|
||||
vfs.flags["d2d"] = True
|
||||
|
||||
maxdepth = 0
|
||||
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
|
||||
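For reference, the -v permset grammar parsed above can be exercised on its own. The loop below mirrors the reload() logic in the diff; the username "ed" and the flag values are made-up examples, while e2ds and mte are flags that appear elsewhere in this changeset:

def parse_permsets(perms):
    # r=read, w=write, a=all, c=config flag ("cNAME" or "cNAME=VALUE")
    uread, uwrite, flags = [], [], {}
    for lvl, uname in [[x[0], x[1:]] for x in perms.split(":")]:
        if lvl == "c":
            cval = True
            if "=" in uname:
                uname, cval = uname.split("=", 1)
            flags[uname] = cval
            continue
        if uname == "":
            uname = "*"
        if lvl in "ra":
            uread.append(uname)
        if lvl in "wa":
            uwrite.append(uname)
    return uread, uwrite, flags

print(parse_permsets("r:aed:ce2ds:cmte=artist,title"))
# (['*', 'ed'], ['ed'], {'e2ds': True, 'mte': 'artist,title'})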
@@ -261,12 +287,13 @@ class AuthSrv(object):
|
||||
|
||||
if dst == "":
|
||||
# rootfs was mapped; fully replaces the default CWD vfs
|
||||
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst])
|
||||
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst], mflags[dst])
|
||||
continue
|
||||
|
||||
v = vfs.add(mount[dst], dst)
|
||||
v.uread = mread[dst]
|
||||
v.uwrite = mwrite[dst]
|
||||
v.flags = mflags[dst]
|
||||
|
||||
missing_users = {}
|
||||
for d in [mread, mwrite]:
|
||||
@@ -277,21 +304,33 @@ class AuthSrv(object):
|
||||
|
||||
if missing_users:
|
||||
self.log(
|
||||
"\033[31myou must -a the following users: "
|
||||
+ ", ".join(k for k in sorted(missing_users))
|
||||
+ "\033[0m"
|
||||
"you must -a the following users: "
|
||||
+ ", ".join(k for k in sorted(missing_users)),
|
||||
c=1,
|
||||
)
|
||||
raise Exception("invalid config")
|
||||
|
||||
for vol in vfs.all_vols.values():
|
||||
if (self.args.e2ds and vol.uwrite) or self.args.e2dsa:
|
||||
vol.flags["e2ds"] = True
|
||||
|
||||
if self.args.e2d or "e2ds" in vol.flags:
|
||||
vol.flags["e2d"] = True
|
||||
|
||||
for k in ["e2t", "e2ts", "e2tsr"]:
|
||||
if getattr(self.args, k):
|
||||
vol.flags[k] = True
|
||||
|
||||
# default tag-list if unset
|
||||
if "mte" not in vol.flags:
|
||||
vol.flags["mte"] = self.args.mte
|
||||
|
||||
try:
|
||||
v, _ = vfs.get("/", "*", False, True)
|
||||
if self.warn_anonwrite and os.getcwd() == v.realpath:
|
||||
self.warn_anonwrite = False
|
||||
self.log(
|
||||
"\033[31manyone can read/write the current directory: {}\033[0m".format(
|
||||
v.realpath
|
||||
)
|
||||
)
|
||||
msg = "anyone can read/write the current directory: {}"
|
||||
self.log(msg.format(v.realpath), c=1)
|
||||
except Pebkac:
|
||||
self.warn_anonwrite = True
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ class BrokerMp(object):
|
||||
self.mutex = threading.Lock()
|
||||
|
||||
cores = self.args.j
|
||||
if cores is None:
|
||||
if not cores:
|
||||
cores = mp.cpu_count()
|
||||
|
||||
self.log("broker", "booting {} subprocesses".format(cores))
|
||||
|
||||
@@ -49,11 +49,11 @@ class MpWorker(object):
|
||||
# print('k')
|
||||
pass
|
||||
|
||||
def log(self, src, msg):
|
||||
self.q_yield.put([0, "log", [src, msg]])
|
||||
def log(self, src, msg, c=0):
|
||||
self.q_yield.put([0, "log", [src, msg, c]])
|
||||
|
||||
def logw(self, msg):
|
||||
self.log("mp{}".format(self.n), msg)
|
||||
def logw(self, msg, c=0):
|
||||
self.log("mp{}".format(self.n), msg, c)
|
||||
|
||||
def httpdrop(self, addr):
|
||||
self.q_yield.put([0, "httpdrop", [addr]])
|
||||
@@ -73,7 +73,7 @@ class MpWorker(object):
|
||||
if PY2:
|
||||
sck = pickle.loads(sck) # nosec
|
||||
|
||||
self.log("%s %s" % addr, "-" * 4 + "C-qpop")
|
||||
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
|
||||
self.httpsrv.accept(sck, addr)
|
||||
|
||||
with self.mutex:
|
||||
|
||||
@@ -28,7 +28,7 @@ class BrokerThr(object):
|
||||
def put(self, want_retval, dest, *args):
|
||||
if dest == "httpconn":
|
||||
sck, addr = args
|
||||
self.log("%s %s" % addr, "-" * 4 + "C-qpop")
|
||||
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
|
||||
self.httpsrv.accept(sck, addr)
|
||||
|
||||
else:
|
||||
|
||||
@@ -5,7 +5,10 @@ import os
|
||||
import stat
|
||||
import gzip
|
||||
import time
|
||||
import copy
|
||||
import json
|
||||
import socket
|
||||
import ctypes
|
||||
from datetime import datetime
|
||||
import calendar
|
||||
|
||||
@@ -14,9 +17,6 @@ from .util import * # noqa # pylint: disable=unused-wildcard-import
|
||||
|
||||
if not PY2:
|
||||
unicode = str
|
||||
from html import escape as html_escape
|
||||
else:
|
||||
from cgi import escape as html_escape # pylint: disable=no-name-in-module
|
||||
|
||||
|
||||
class HttpCli(object):
|
||||
@@ -25,24 +25,27 @@ class HttpCli(object):
|
||||
"""
|
||||
|
||||
def __init__(self, conn):
|
||||
self.t0 = time.time()
|
||||
self.conn = conn
|
||||
self.s = conn.s
|
||||
self.sr = conn.sr
|
||||
self.ip = conn.addr[0]
|
||||
self.addr = conn.addr
|
||||
self.args = conn.args
|
||||
self.auth = conn.auth
|
||||
self.log_func = conn.log_func
|
||||
self.log_src = conn.log_src
|
||||
self.tls = hasattr(self.s, "cipher")
|
||||
|
||||
self.bufsz = 1024 * 32
|
||||
self.absolute_urls = False
|
||||
self.out_headers = {}
|
||||
self.out_headers = {"Access-Control-Allow-Origin": "*"}
|
||||
|
||||
def log(self, msg):
|
||||
self.log_func(self.log_src, msg)
|
||||
def log(self, msg, c=0):
|
||||
self.log_func(self.log_src, msg, c)
|
||||
|
||||
def _check_nonfatal(self, ex):
|
||||
return ex.code in [403, 404]
|
||||
return ex.code < 400 or ex.code == 404
|
||||
|
||||
def _assert_safe_rem(self, rem):
|
||||
# sanity check to prevent any disasters
|
||||
@@ -60,7 +63,7 @@ class HttpCli(object):
|
||||
|
||||
if not headerlines[0]:
|
||||
# seen after login with IE6.0.2900.5512.xpsp.080413-2111 (xp-sp3)
|
||||
self.log("\033[1;31mBUG: trailing newline from previous request\033[0m")
|
||||
self.log("BUG: trailing newline from previous request", c="1;31")
|
||||
headerlines.pop(0)
|
||||
|
||||
try:
|
||||
@@ -71,9 +74,11 @@ class HttpCli(object):
|
||||
except Pebkac as ex:
|
||||
# self.log("pebkac at httpcli.run #1: " + repr(ex))
|
||||
self.keepalive = self._check_nonfatal(ex)
|
||||
self.loud_reply(str(ex), status=ex.code)
|
||||
self.loud_reply(unicode(ex), status=ex.code)
|
||||
return self.keepalive
|
||||
|
||||
# time.sleep(0.4)
|
||||
|
||||
# normalize incoming headers to lowercase;
|
||||
# outgoing headers however are Correct-Case
|
||||
for header_line in headerlines[1:]:
|
||||
@@ -83,11 +88,16 @@ class HttpCli(object):
|
||||
v = self.headers.get("connection", "").lower()
|
||||
self.keepalive = not v.startswith("close")
|
||||
|
||||
v = self.headers.get("x-forwarded-for", None)
|
||||
if v is not None and self.conn.addr[0] in ["127.0.0.1", "::1"]:
|
||||
self.ip = v.split(",")[0]
|
||||
self.log_src = self.conn.set_rproxy(self.ip)
|
||||
|
||||
self.uname = "*"
|
||||
if "cookie" in self.headers:
|
||||
cookies = self.headers["cookie"].split(";")
|
||||
for k, v in [x.split("=", 1) for x in cookies]:
|
||||
if k != "cppwd":
|
||||
if k.strip() != "cppwd":
|
||||
continue
|
||||
|
||||
v = unescape_cookie(v)
|
||||
@@ -118,16 +128,25 @@ class HttpCli(object):
|
||||
k, v = k.split("=", 1)
|
||||
uparam[k.lower()] = v.strip()
|
||||
else:
|
||||
uparam[k.lower()] = True
|
||||
uparam[k.lower()] = False
|
||||
|
||||
self.uparam = uparam
|
||||
self.vpath = unquotep(vpath)
|
||||
|
||||
ua = self.headers.get("user-agent", "")
|
||||
if ua.startswith("rclone/"):
|
||||
uparam["raw"] = False
|
||||
uparam["dots"] = False
|
||||
|
||||
try:
|
||||
if self.mode in ["GET", "HEAD"]:
|
||||
return self.handle_get() and self.keepalive
|
||||
elif self.mode == "POST":
|
||||
return self.handle_post() and self.keepalive
|
||||
elif self.mode == "PUT":
|
||||
return self.handle_put() and self.keepalive
|
||||
elif self.mode == "OPTIONS":
|
||||
return self.handle_options() and self.keepalive
|
||||
else:
|
||||
raise Pebkac(400, 'invalid HTTP mode "{0}"'.format(self.mode))
|
||||
|
||||
@@ -135,7 +154,7 @@ class HttpCli(object):
|
||||
try:
|
||||
# self.log("pebkac at httpcli.run #2: " + repr(ex))
|
||||
self.keepalive = self._check_nonfatal(ex)
|
||||
self.loud_reply(str(ex), status=ex.code)
|
||||
self.loud_reply("{}: {}".format(str(ex), self.vpath), status=ex.code)
|
||||
return self.keepalive
|
||||
except Pebkac:
|
||||
return False
|
||||
@@ -143,10 +162,8 @@ class HttpCli(object):
|
||||
def send_headers(self, length, status=200, mime=None, headers={}):
|
||||
response = ["HTTP/1.1 {} {}".format(status, HTTPCODE[status])]
|
||||
|
||||
if length is None:
|
||||
self.keepalive = False
|
||||
else:
|
||||
response.append("Content-Length: " + str(length))
|
||||
if length is not None:
|
||||
response.append("Content-Length: " + unicode(length))
|
||||
|
||||
# close if unknown length, otherwise take client's preference
|
||||
response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close"))
|
||||
@@ -176,7 +193,8 @@ class HttpCli(object):
|
||||
self.send_headers(len(body), status, mime, headers)
|
||||
|
||||
try:
|
||||
self.s.sendall(body)
|
||||
if self.mode != "HEAD":
|
||||
self.s.sendall(body)
|
||||
except:
|
||||
raise Pebkac(400, "client d/c while replying body")
|
||||
|
||||
@@ -184,7 +202,7 @@ class HttpCli(object):
|
||||
|
||||
def loud_reply(self, body, *args, **kwargs):
|
||||
self.log(body.rstrip())
|
||||
self.reply(b"<pre>" + body.encode("utf-8"), *list(args), **kwargs)
|
||||
self.reply(b"<pre>" + body.encode("utf-8") + b"\r\n", *list(args), **kwargs)
|
||||
|
||||
def handle_get(self):
|
||||
logmsg = "{:4} {}".format(self.mode, self.req)
|
||||
@@ -204,6 +222,9 @@ class HttpCli(object):
|
||||
static_path = os.path.join(E.mod, "web/", self.vpath[5:])
|
||||
return self.tx_file(static_path)
|
||||
|
||||
if "tree" in self.uparam:
|
||||
return self.tx_tree()
|
||||
|
||||
# conditional redirect to single volumes
|
||||
if self.vpath == "" and not self.uparam:
|
||||
nread = len(self.rvol)
|
||||
@@ -222,7 +243,7 @@ class HttpCli(object):
|
||||
)
|
||||
if not self.readable and not self.writable:
|
||||
self.log("inaccessible: [{}]".format(self.vpath))
|
||||
self.uparam = {"h": True}
|
||||
self.uparam = {"h": False}
|
||||
|
||||
if "h" in self.uparam:
|
||||
self.vpath = None
|
||||
@@ -230,6 +251,30 @@ class HttpCli(object):
|
||||
|
||||
return self.tx_browser()
|
||||
|
||||
def handle_options(self):
|
||||
self.log("OPTIONS " + self.req)
|
||||
self.send_headers(
|
||||
None,
|
||||
204,
|
||||
headers={
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Access-Control-Allow-Methods": "*",
|
||||
"Access-Control-Allow-Headers": "*",
|
||||
},
|
||||
)
|
||||
return True
|
||||
|
||||
def handle_put(self):
|
||||
self.log("PUT " + self.req)
|
||||
|
||||
if self.headers.get("expect", "").lower() == "100-continue":
|
||||
try:
|
||||
self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n")
|
||||
except:
|
||||
raise Pebkac(400, "client d/c before 100 continue")
|
||||
|
||||
return self.handle_stash()
|
||||
|
||||
def handle_post(self):
|
||||
self.log("POST " + self.req)
|
||||
|
||||
@@ -243,6 +288,9 @@ class HttpCli(object):
|
||||
if not ctype:
|
||||
raise Pebkac(400, "you can't post without a content-type header")
|
||||
|
||||
if "raw" in self.uparam:
|
||||
return self.handle_stash()
|
||||
|
||||
if "multipart/form-data" in ctype:
|
||||
return self.handle_post_multipart()
|
||||
|
||||
@@ -253,7 +301,70 @@ class HttpCli(object):
|
||||
if "application/octet-stream" in ctype:
|
||||
return self.handle_post_binary()
|
||||
|
||||
raise Pebkac(405, "don't know how to handle {} POST".format(ctype))
|
||||
if "application/x-www-form-urlencoded" in ctype:
|
||||
opt = self.args.urlform
|
||||
if "stash" in opt:
|
||||
return self.handle_stash()
|
||||
|
||||
if "save" in opt:
|
||||
post_sz, _, _, path = self.dump_to_file()
|
||||
self.log("urlform: {} bytes, {}".format(post_sz, path))
|
||||
elif "print" in opt:
|
||||
reader, _ = self.get_body_reader()
|
||||
for buf in reader:
|
||||
buf = buf.decode("utf-8", "replace")
|
||||
self.log("urlform @ {}\n {}\n".format(self.vpath, buf))
|
||||
|
||||
if "get" in opt:
|
||||
return self.handle_get()
|
||||
|
||||
raise Pebkac(405, "POST({}) is disabled".format(ctype))
|
||||
|
||||
raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
|
||||
|
||||
def get_body_reader(self):
|
||||
chunked = "chunked" in self.headers.get("transfer-encoding", "").lower()
|
||||
remains = int(self.headers.get("content-length", -1))
|
||||
if chunked:
|
||||
return read_socket_chunked(self.sr), remains
|
||||
elif remains == -1:
|
||||
self.keepalive = False
|
||||
return read_socket_unbounded(self.sr), remains
|
||||
else:
|
||||
return read_socket(self.sr, remains), remains
|
||||
|
||||
def dump_to_file(self):
|
||||
reader, remains = self.get_body_reader()
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
fdir = os.path.join(vfs.realpath, rem)
|
||||
|
||||
addr = self.ip.replace(":", ".")
|
||||
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
|
||||
path = os.path.join(fdir, fn)
|
||||
|
||||
with open(path, "wb", 512 * 1024) as f:
|
||||
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
|
||||
|
||||
self.conn.hsrv.broker.put(
|
||||
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fn
|
||||
)
|
||||
|
||||
return post_sz, sha_b64, remains, path
|
||||
|
||||
def handle_stash(self):
|
||||
post_sz, sha_b64, remains, path = self.dump_to_file()
|
||||
spd = self._spd(post_sz)
|
||||
self.log("{} wrote {}/{} bytes to {}".format(spd, post_sz, remains, path))
|
||||
self.reply("{}\n{}\n".format(post_sz, sha_b64).encode("utf-8"))
|
||||
return True
|
||||
|
||||
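A quick way to poke at the stash handler from the client side; this snippet is not part of the diff, and the /inc/ volume with anonymous write access is just an assumption for the example:

import urllib.request

data = b"hello copyparty\n"
req = urllib.request.Request(
    "http://127.0.0.1:3923/inc/", data=data, method="PUT"
)
with urllib.request.urlopen(req) as r:
    # handle_put -> handle_stash replies with "<bytes-written>\n<sha512-b64>\n"
    print(r.read().decode("utf-8"))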
def _spd(self, nbytes, add=True):
|
||||
if add:
|
||||
self.conn.nbyte += nbytes
|
||||
|
||||
spd1 = get_spd(nbytes, self.t0)
|
||||
spd2 = get_spd(self.conn.nbyte, self.conn.t0)
|
||||
return spd1 + " " + spd2
|
||||
|
||||
def handle_post_multipart(self):
|
||||
self.parser = MultipartParser(self.log, self.sr, self.headers)
|
||||
@@ -303,6 +414,9 @@ class HttpCli(object):
|
||||
except:
|
||||
raise Pebkac(422, "you POSTed invalid json")
|
||||
|
||||
if "srch" in self.uparam or "srch" in body:
|
||||
return self.handle_search(body)
|
||||
|
||||
# prefer this over undot; no reason to allow traversal
|
||||
if "/" in body["name"]:
|
||||
raise Pebkac(400, "folders verboten")
|
||||
@@ -314,9 +428,11 @@ class HttpCli(object):
|
||||
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
|
||||
body["vdir"] = self.vpath
|
||||
body["rdir"] = os.path.join(vfs.realpath, rem)
|
||||
body["addr"] = self.addr[0]
|
||||
body["vtop"] = vfs.vpath
|
||||
body["ptop"] = vfs.realpath
|
||||
body["prel"] = rem
|
||||
body["addr"] = self.ip
|
||||
body["vcfg"] = vfs.flags
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
|
||||
response = x.get()
|
||||
@@ -326,6 +442,41 @@ class HttpCli(object):
|
||||
self.reply(response.encode("utf-8"), mime="application/json")
|
||||
return True
|
||||
|
||||
def handle_search(self, body):
|
||||
vols = []
|
||||
for vtop in self.rvol:
|
||||
vfs, _ = self.conn.auth.vfs.get(vtop, self.uname, True, False)
|
||||
vols.append([vfs.vpath, vfs.realpath, vfs.flags])
|
||||
|
||||
idx = self.conn.get_u2idx()
|
||||
t0 = time.time()
|
||||
if "srch" in body:
|
||||
# search by up2k hashlist
|
||||
vbody = copy.deepcopy(body)
|
||||
vbody["hash"] = len(vbody["hash"])
|
||||
self.log("qj: " + repr(vbody))
|
||||
hits = idx.fsearch(vols, body)
|
||||
self.log("q#: {} ({:.2f}s)".format(repr(hits), time.time() - t0))
|
||||
taglist = []
|
||||
else:
|
||||
# search by query params
|
||||
self.log("qj: " + repr(body))
|
||||
hits, taglist = idx.search(vols, body)
|
||||
self.log("q#: {} ({:.2f}s)".format(len(hits), time.time() - t0))
|
||||
|
||||
order = []
|
||||
cfg = self.args.mte.split(",")
|
||||
for t in cfg:
|
||||
if t in taglist:
|
||||
order.append(t)
|
||||
for t in taglist:
|
||||
if t not in order:
|
||||
order.append(t)
|
||||
|
||||
r = json.dumps({"hits": hits, "tag_order": order}).encode("utf-8")
|
||||
self.reply(r, mime="application/json")
|
||||
return True
|
||||
|
||||
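The tag-ordering rule used above is small enough to restate on its own: tags listed in -mte come first, in -mte order, and any remaining tags from the result set are appended afterwards.

def order_tags(mte, taglist):
    # mirrors the ordering loop in handle_search above
    cfg = mte.split(",")
    order = [t for t in cfg if t in taglist]
    order += [t for t in taglist if t not in order]
    return order

print(order_tags("circle,album,title,.bpm", ["title", "comment", "album"]))
# ['album', 'title', 'comment']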
def handle_post_binary(self):
|
||||
try:
|
||||
remains = int(self.headers["content-length"])
|
||||
@@ -338,7 +489,10 @@ class HttpCli(object):
|
||||
except KeyError:
|
||||
raise Pebkac(400, "need hash and wark headers for binary POST")
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", wark, chash)
|
||||
vfs, _ = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
ptop = vfs.realpath
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", ptop, wark, chash)
|
||||
response = x.get()
|
||||
chunksize, cstart, path, lastmod = response
|
||||
|
||||
@@ -367,7 +521,7 @@ class HttpCli(object):
|
||||
if len(cstart) > 1 and path != os.devnull:
|
||||
self.log(
|
||||
"clone {} to {}".format(
|
||||
cstart[0], " & ".join(str(x) for x in cstart[1:])
|
||||
cstart[0], " & ".join(unicode(x) for x in cstart[1:])
|
||||
)
|
||||
)
|
||||
ofs = 0
|
||||
@@ -383,8 +537,13 @@ class HttpCli(object):
|
||||
|
||||
self.log("clone {} done".format(cstart[0]))
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", wark, chash)
|
||||
num_left = x.get()
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
|
||||
x = x.get()
|
||||
try:
|
||||
num_left, path = x
|
||||
except:
|
||||
self.loud_reply(x, status=500)
|
||||
return False
|
||||
|
||||
if not WINDOWS and num_left == 0:
|
||||
times = (int(time.time()), int(lastmod))
|
||||
@@ -394,7 +553,9 @@ class HttpCli(object):
|
||||
except:
|
||||
self.log("failed to utime ({}, {})".format(path, times))
|
||||
|
||||
self.loud_reply("thank")
|
||||
spd = self._spd(post_sz)
|
||||
self.log("{} thank".format(spd))
|
||||
self.reply(b"thank")
|
||||
return True
|
||||
|
||||
def handle_login(self):
|
||||
@@ -407,7 +568,7 @@ class HttpCli(object):
|
||||
msg = "naw dude"
|
||||
pwd = "x" # nosec
|
||||
|
||||
h = {"Set-Cookie": "cppwd={}; Path=/".format(pwd)}
|
||||
h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)}
|
||||
html = self.conn.tpl_msg.render(h1=msg, h2='<a href="/">ack</a>', redir="/")
|
||||
self.reply(html.encode("utf-8"), headers=h)
|
||||
return True
|
||||
@@ -438,10 +599,9 @@ class HttpCli(object):
|
||||
raise Pebkac(500, "mkdir failed, check the logs")
|
||||
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
esc_paths = [quotep(vpath), html_escape(vpath)]
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}">go to /{}</a>'.format(
|
||||
quotep(vpath), html_escape(vpath, quote=False)
|
||||
),
|
||||
h2='<a href="/{}">go to /{}</a>'.format(*esc_paths),
|
||||
pre="aight",
|
||||
click=True,
|
||||
)
|
||||
@@ -474,7 +634,7 @@ class HttpCli(object):
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}?edit">go to /{}?edit</a>'.format(
|
||||
quotep(vpath), html_escape(vpath, quote=False)
|
||||
quotep(vpath), html_escape(vpath)
|
||||
),
|
||||
pre="aight",
|
||||
click=True,
|
||||
@@ -496,38 +656,48 @@ class HttpCli(object):
|
||||
self.log("discarding incoming file without filename")
|
||||
# fallthrough
|
||||
|
||||
fn = os.devnull
|
||||
if p_file and not nullwrite:
|
||||
fdir = os.path.join(vfs.realpath, rem)
|
||||
fn = os.path.join(fdir, sanitize_fn(p_file))
|
||||
fname = sanitize_fn(p_file)
|
||||
|
||||
if not os.path.isdir(fsenc(fdir)):
|
||||
raise Pebkac(404, "that folder does not exist")
|
||||
|
||||
# TODO a broker which avoids this race and
# provides a new filename if taken (same as up2k)
|
||||
if os.path.exists(fsenc(fn)):
|
||||
fn += ".{:.6f}-{}".format(time.time(), self.addr[0])
|
||||
# using current-time instead of t0 cause clients
|
||||
# may reuse a name for multiple files in one post
|
||||
suffix = ".{:.6f}-{}".format(time.time(), self.ip)
|
||||
open_args = {"fdir": fdir, "suffix": suffix}
|
||||
else:
|
||||
open_args = {}
|
||||
fname = os.devnull
|
||||
fdir = ""
|
||||
|
||||
try:
|
||||
with open(fsenc(fn), "wb") as f:
|
||||
self.log("writing to {0}".format(fn))
|
||||
with ren_open(fname, "wb", 512 * 1024, **open_args) as f:
|
||||
f, fname = f["orz"]
|
||||
self.log("writing to {}/{}".format(fdir, fname))
|
||||
sz, sha512_hex, _ = hashcopy(self.conn, p_data, f)
|
||||
if sz == 0:
|
||||
raise Pebkac(400, "empty files in post")
|
||||
|
||||
files.append([sz, sha512_hex])
|
||||
self.conn.hsrv.broker.put(
|
||||
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fname
|
||||
)
|
||||
self.conn.nbyte += sz
|
||||
|
||||
except Pebkac:
|
||||
if fn != os.devnull:
|
||||
os.rename(fsenc(fn), fsenc(fn + ".PARTIAL"))
|
||||
if fname != os.devnull:
|
||||
fp = os.path.join(fdir, fname)
|
||||
suffix = ".PARTIAL"
|
||||
try:
|
||||
os.rename(fsenc(fp), fsenc(fp + suffix))
|
||||
except:
|
||||
fp = fp[: -len(suffix)]
|
||||
os.rename(fsenc(fp), fsenc(fp + suffix))
|
||||
|
||||
raise
|
||||
|
||||
except Pebkac as ex:
|
||||
errmsg = str(ex)
|
||||
errmsg = unicode(ex)
|
||||
|
||||
td = max(0.1, time.time() - t0)
|
||||
sz_total = sum(x[0] for x in files)
|
||||
@@ -546,7 +716,9 @@ class HttpCli(object):
|
||||
# truncated SHA-512 prevents length extension attacks;
|
||||
# using SHA-512/224, optionally SHA-512/256 = :64
|
||||
|
||||
self.log(msg)
|
||||
vspd = self._spd(sz_total, False)
|
||||
self.log("{} {}".format(vspd, msg))
|
||||
|
||||
if not nullwrite:
|
||||
# TODO this is bad
|
||||
log_fn = "up.{:.6f}.txt".format(t0)
|
||||
@@ -556,7 +728,7 @@ class HttpCli(object):
|
||||
"\n".join(
|
||||
unicode(x)
|
||||
for x in [
|
||||
":".join(unicode(x) for x in self.addr),
|
||||
":".join(unicode(x) for x in [self.ip, self.addr[1]]),
|
||||
msg.rstrip(),
|
||||
]
|
||||
)
|
||||
@@ -568,7 +740,7 @@ class HttpCli(object):
|
||||
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}">return to /{}</a>'.format(
|
||||
quotep(self.vpath), html_escape(self.vpath, quote=False)
|
||||
quotep(self.vpath), html_escape(self.vpath)
|
||||
),
|
||||
pre=msg,
|
||||
)
|
||||
@@ -605,7 +777,7 @@ class HttpCli(object):
|
||||
return True
|
||||
|
||||
fp = os.path.join(vfs.realpath, rem)
|
||||
srv_lastmod = -1
|
||||
srv_lastmod = srv_lastmod3 = -1
|
||||
try:
|
||||
st = os.stat(fsenc(fp))
|
||||
srv_lastmod = st.st_mtime
|
||||
@@ -616,7 +788,16 @@ class HttpCli(object):
|
||||
|
||||
# if file exists, check that timestamp matches the client's
|
||||
if srv_lastmod >= 0:
|
||||
if cli_lastmod3 not in [-1, srv_lastmod3]:
|
||||
same_lastmod = cli_lastmod3 in [-1, srv_lastmod3]
|
||||
if not same_lastmod:
|
||||
# some filesystems/transports limit precision to 1sec, hopefully floored
|
||||
same_lastmod = (
|
||||
srv_lastmod == int(srv_lastmod)
|
||||
and cli_lastmod3 > srv_lastmod3
|
||||
and cli_lastmod3 - srv_lastmod3 < 1000
|
||||
)
|
||||
|
||||
if not same_lastmod:
|
||||
response = json.dumps(
|
||||
{
|
||||
"ok": False,
|
||||
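The timestamp check above tolerates filesystems and transports that floor mtimes to whole seconds. A standalone restatement, assuming srv_lastmod3 (computed outside this excerpt) is the same mtime expressed in integer milliseconds:

def lastmod_matches(srv_lastmod, cli_lastmod3):
    # assumption: srv_lastmod3 = int(srv_lastmod * 1000)
    srv_lastmod3 = int(srv_lastmod * 1000)
    if cli_lastmod3 in (-1, srv_lastmod3):
        return True
    return (
        srv_lastmod == int(srv_lastmod)
        and cli_lastmod3 > srv_lastmod3
        and cli_lastmod3 - srv_lastmod3 < 1000
    )

print(lastmod_matches(1615100000.0, 1615100000678))  # True (sub-second lost)
print(lastmod_matches(1615100000.5, 1615100000678))  # False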
@@ -647,7 +828,7 @@ class HttpCli(object):
|
||||
if p_field != "body":
|
||||
raise Pebkac(400, "expected body, got {}".format(p_field))
|
||||
|
||||
with open(fp, "wb") as f:
|
||||
with open(fp, "wb", 512 * 1024) as f:
|
||||
sz, sha512, _ = hashcopy(self.conn, p_data, f)
|
||||
|
||||
new_lastmod = os.stat(fsenc(fp)).st_mtime
|
||||
@@ -672,9 +853,12 @@ class HttpCli(object):
|
||||
cli_dt = time.strptime(cli_lastmod, "%a, %d %b %Y %H:%M:%S GMT")
|
||||
cli_ts = calendar.timegm(cli_dt)
|
||||
return file_lastmod, int(file_ts) > int(cli_ts)
|
||||
except:
|
||||
self.log("bad lastmod format: {}".format(cli_lastmod))
|
||||
self.log(" expected format: {}".format(file_lastmod))
|
||||
except Exception as ex:
|
||||
self.log(
|
||||
"lastmod {}\nremote: [{}]\n local: [{}]".format(
|
||||
repr(ex), cli_lastmod, file_lastmod
|
||||
)
|
||||
)
|
||||
return file_lastmod, file_lastmod != cli_lastmod
|
||||
|
||||
return file_lastmod, True
|
||||
@@ -697,6 +881,8 @@ class HttpCli(object):
|
||||
editions[ext or "plain"] = [fs_path, st.st_size]
|
||||
except:
|
||||
pass
|
||||
if not self.vpath.startswith(".cpr/"):
|
||||
break
|
||||
|
||||
if not editions:
|
||||
raise Pebkac(404)
|
||||
@@ -777,9 +963,11 @@ class HttpCli(object):
|
||||
|
||||
except:
|
||||
err = "invalid range ({}), size={}".format(hrange, file_sz)
|
||||
self.loud_reply(err, status=416, headers={
|
||||
"Content-Range": "bytes */{}".format(file_sz)
|
||||
})
|
||||
self.loud_reply(
|
||||
err,
|
||||
status=416,
|
||||
headers={"Content-Range": "bytes */{}".format(file_sz)},
|
||||
)
|
||||
return True
|
||||
|
||||
status = 206
|
||||
@@ -789,6 +977,7 @@ class HttpCli(object):
|
||||
|
||||
logtail += " [\033[36m{}-{}\033[0m]".format(lower, upper)
|
||||
|
||||
use_sendfile = False
|
||||
if decompress:
|
||||
open_func = gzip.open
|
||||
open_args = [fsenc(fs_path), "rb"]
|
||||
@@ -798,10 +987,18 @@ class HttpCli(object):
|
||||
open_func = open
|
||||
# 512 kB is optimal for huge files, but use 64k here
|
||||
open_args = [fsenc(fs_path), "rb", 64 * 1024]
|
||||
use_sendfile = (
|
||||
not self.tls #
|
||||
and not self.args.no_sendfile
|
||||
and hasattr(os, "sendfile")
|
||||
)
|
||||
|
||||
#
|
||||
# send reply
|
||||
|
||||
if not is_compressed:
|
||||
self.out_headers["Cache-Control"] = "no-cache"
|
||||
|
||||
self.out_headers["Accept-Ranges"] = "bytes"
|
||||
self.send_headers(
|
||||
length=upper - lower,
|
||||
@@ -809,35 +1006,25 @@ class HttpCli(object):
|
||||
mime=guess_mime(req_path)[0] or "application/octet-stream",
|
||||
)
|
||||
|
||||
logmsg += str(status) + logtail
|
||||
logmsg += unicode(status) + logtail
|
||||
|
||||
if self.mode == "HEAD" or not do_send:
|
||||
self.log(logmsg)
|
||||
return True
|
||||
|
||||
ret = True
|
||||
with open_func(*open_args) as f:
|
||||
remains = upper - lower
|
||||
f.seek(lower)
|
||||
while remains > 0:
|
||||
# time.sleep(0.01)
|
||||
buf = f.read(4096)
|
||||
if not buf:
|
||||
break
|
||||
if use_sendfile:
|
||||
remains = sendfile_kern(lower, upper, f, self.s)
|
||||
else:
|
||||
remains = sendfile_py(lower, upper, f, self.s)
|
||||
|
||||
if remains < len(buf):
|
||||
buf = buf[:remains]
|
||||
if remains > 0:
|
||||
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
|
||||
|
||||
remains -= len(buf)
|
||||
|
||||
try:
|
||||
self.s.sendall(buf)
|
||||
except:
|
||||
logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
|
||||
self.log(logmsg)
|
||||
return False
|
||||
|
||||
self.log(logmsg)
|
||||
return True
|
||||
spd = self._spd((upper - lower) - remains)
|
||||
self.log("{}, {}".format(logmsg, spd))
|
||||
return ret
|
||||
|
||||
def tx_md(self, fs_path):
|
||||
logmsg = "{:4} {} ".format("", self.req)
|
||||
@@ -871,14 +1058,16 @@ class HttpCli(object):
|
||||
|
||||
targs = {
|
||||
"edit": "edit" in self.uparam,
|
||||
"title": html_escape(self.vpath, quote=False),
|
||||
"title": html_escape(self.vpath),
|
||||
"lastmod": int(ts_md * 1000),
|
||||
"md_plug": "true" if self.args.emp else "false",
|
||||
"md_chk_rate": self.args.mcr,
|
||||
"md": "",
|
||||
}
|
||||
sz_html = len(template.render(**targs).encode("utf-8"))
|
||||
self.send_headers(sz_html + sz_md, status)
|
||||
|
||||
logmsg += str(status)
|
||||
logmsg += unicode(status)
|
||||
if self.mode == "HEAD" or not do_send:
|
||||
self.log(logmsg)
|
||||
return True
|
||||
@@ -892,7 +1081,7 @@ class HttpCli(object):
|
||||
self.log(logmsg + " \033[31md/c\033[0m")
|
||||
return False
|
||||
|
||||
self.log(logmsg + " " + str(len(html)))
|
||||
self.log(logmsg + " " + unicode(len(html)))
|
||||
return True
|
||||
|
||||
def tx_mounts(self):
|
||||
@@ -902,6 +1091,61 @@ class HttpCli(object):
|
||||
self.reply(html.encode("utf-8"))
|
||||
return True
|
||||
|
||||
def tx_tree(self):
|
||||
top = self.uparam["tree"] or ""
|
||||
dst = self.vpath
|
||||
if top in [".", ".."]:
|
||||
top = undot(self.vpath + "/" + top)
|
||||
|
||||
if top == dst:
|
||||
dst = ""
|
||||
elif top:
|
||||
if not dst.startswith(top + "/"):
|
||||
raise Pebkac(400, "arg funk")
|
||||
|
||||
dst = dst[len(top) + 1 :]
|
||||
|
||||
ret = self.gen_tree(top, dst)
|
||||
ret = json.dumps(ret)
|
||||
self.reply(ret.encode("utf-8"), mime="application/json")
|
||||
return True
|
||||
|
||||
def gen_tree(self, top, target):
|
||||
ret = {}
|
||||
excl = None
|
||||
if target:
|
||||
excl, target = (target.split("/", 1) + [""])[:2]
|
||||
sub = self.gen_tree("/".join([top, excl]).strip("/"), target)
|
||||
ret["k" + quotep(excl)] = sub
|
||||
|
||||
try:
|
||||
vn, rem = self.auth.vfs.get(top, self.uname, True, False)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
|
||||
except:
|
||||
vfs_ls = []
|
||||
vfs_virt = {}
|
||||
for v in self.rvol:
|
||||
d1, d2 = v.rsplit("/", 1) if "/" in v else ["", v]
|
||||
if d1 == top:
|
||||
vfs_virt[d2] = 0
|
||||
|
||||
dirs = []
|
||||
|
||||
vfs_ls = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
||||
|
||||
if not self.args.ed or "dots" not in self.uparam:
|
||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||
|
||||
for fn in [x for x in vfs_ls if x != excl]:
|
||||
dirs.append(quotep(fn))
|
||||
|
||||
for x in vfs_virt.keys():
|
||||
if x != excl:
|
||||
dirs.append(x)
|
||||
|
||||
ret["a"] = dirs
|
||||
return ret
|
||||
|
||||
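A hedged client-side sketch (not part of the diff) for walking the ?tree endpoint one level at a time; it assumes an instance on localhost:3923 whose root is readable, and infers the URL and JSON shape ("a" = visible subfolders, "k<name>" = pre-expanded subtree) from tx_tree / gen_tree above:

import json
import urllib.parse
import urllib.request

def ls_tree(base, top=""):
    url = "{}/?tree={}".format(base.rstrip("/"), urllib.parse.quote(top))
    with urllib.request.urlopen(url) as r:
        return json.loads(r.read().decode("utf-8"))

node = ls_tree("http://127.0.0.1:3923")
print(node["a"])  # list of subfolders visible at the root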
def tx_browser(self):
|
||||
vpath = ""
|
||||
vpnodes = [["", "/"]]
|
||||
@@ -912,7 +1156,7 @@ class HttpCli(object):
|
||||
else:
|
||||
vpath += "/" + node
|
||||
|
||||
vpnodes.append([quotep(vpath) + "/", html_escape(node, quote=False)])
|
||||
vpnodes.append([quotep(vpath) + "/", html_escape(node)])
|
||||
|
||||
vn, rem = self.auth.vfs.get(
|
||||
self.vpath, self.uname, self.readable, self.writable
|
||||
@@ -927,9 +1171,14 @@ class HttpCli(object):
|
||||
if abspath.endswith(".md") and "raw" not in self.uparam:
|
||||
return self.tx_md(abspath)
|
||||
|
||||
if rem.startswith(".hist/up2k."):
|
||||
raise Pebkac(403)
|
||||
|
||||
return self.tx_file(abspath)
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
|
||||
stats = {k: v for k, v in vfs_ls}
|
||||
vfs_ls = [x[0] for x in vfs_ls]
|
||||
vfs_ls.extend(vfs_virt.keys())
|
||||
|
||||
# check for old versions of files,
|
||||
@@ -948,22 +1197,39 @@ class HttpCli(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
# show dotfiles if permitted and requested
|
||||
if not self.args.ed or "dots" not in self.uparam:
|
||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||
|
||||
hidden = []
|
||||
if rem == ".hist":
|
||||
hidden = ["up2k."]
|
||||
|
||||
is_ls = "ls" in self.uparam
|
||||
|
||||
icur = None
|
||||
if "e2t" in vn.flags:
|
||||
idx = self.conn.get_u2idx()
|
||||
icur = idx.get_cur(vn.realpath)
|
||||
|
||||
dirs = []
|
||||
files = []
|
||||
for fn in exclude_dotfiles(vfs_ls):
|
||||
for fn in vfs_ls:
|
||||
base = ""
|
||||
href = fn
|
||||
if self.absolute_urls and vpath:
|
||||
if not is_ls and self.absolute_urls and vpath:
|
||||
base = "/" + vpath + "/"
|
||||
href = base + fn
|
||||
|
||||
if fn in vfs_virt:
|
||||
fspath = vfs_virt[fn].realpath
|
||||
elif hidden and any(fn.startswith(x) for x in hidden):
|
||||
continue
|
||||
else:
|
||||
fspath = fsroot + "/" + fn
|
||||
|
||||
try:
|
||||
inf = os.stat(fsenc(fspath))
|
||||
inf = stats.get(fn) or os.stat(fsenc(fspath))
|
||||
except:
|
||||
self.log("broken symlink: {}".format(repr(fspath)))
|
||||
continue
|
||||
@@ -983,33 +1249,131 @@ class HttpCli(object):
|
||||
dt = datetime.utcfromtimestamp(inf.st_mtime)
|
||||
dt = dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
item = [margin, quotep(href), html_escape(fn, quote=False), sz, dt]
|
||||
try:
|
||||
ext = "---" if is_dir else fn.rsplit(".", 1)[1]
|
||||
except:
|
||||
ext = "%"
|
||||
|
||||
item = {
|
||||
"lead": margin,
|
||||
"href": quotep(href),
|
||||
"name": fn,
|
||||
"sz": sz,
|
||||
"ext": ext,
|
||||
"dt": dt,
|
||||
"ts": int(inf.st_mtime),
|
||||
}
|
||||
if is_dir:
|
||||
dirs.append(item)
|
||||
else:
|
||||
files.append(item)
|
||||
item["rd"] = rem
|
||||
|
||||
logues = [None, None]
|
||||
taglist = {}
|
||||
for f in files:
|
||||
fn = f["name"]
|
||||
rd = f["rd"]
|
||||
del f["rd"]
|
||||
if icur:
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
try:
|
||||
r = icur.execute(q, (rd, fn)).fetchone()
|
||||
except:
|
||||
args = s3enc(idx.mem_cur, rd, fn)
|
||||
r = icur.execute(q, args).fetchone()
|
||||
|
||||
if not r:
|
||||
continue
|
||||
|
||||
w = r[0][:16]
|
||||
tags = {}
|
||||
q = "select k, v from mt where w = ? and k != 'x'"
|
||||
for k, v in icur.execute(q, (w,)):
|
||||
taglist[k] = True
|
||||
tags[k] = v
|
||||
|
||||
f["tags"] = tags
|
||||
|
||||
if icur:
|
||||
taglist = [k for k in self.args.mte.split(",") if k in taglist]
|
||||
for f in dirs:
|
||||
f["tags"] = {}
|
||||
|
||||
srv_info = []
|
||||
|
||||
try:
|
||||
if not self.args.nih:
|
||||
srv_info.append(unicode(socket.gethostname()).split(".")[0])
|
||||
except:
|
||||
self.log("#wow #whoa")
|
||||
pass
|
||||
|
||||
try:
|
||||
# some fuses misbehave
|
||||
if not self.args.nid:
|
||||
if WINDOWS:
|
||||
bfree = ctypes.c_ulonglong(0)
|
||||
ctypes.windll.kernel32.GetDiskFreeSpaceExW(
|
||||
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
|
||||
)
|
||||
srv_info.append(humansize(bfree.value) + " free")
|
||||
else:
|
||||
sv = os.statvfs(abspath)
|
||||
free = humansize(sv.f_frsize * sv.f_bfree, True)
|
||||
total = humansize(sv.f_frsize * sv.f_blocks, True)
|
||||
|
||||
srv_info.append(free + " free")
|
||||
srv_info.append(total)
|
||||
except:
|
||||
pass
|
||||
|
||||
srv_info = "</span> /// <span>".join(srv_info)
|
||||
|
||||
perms = []
|
||||
if self.readable:
|
||||
perms.append("read")
|
||||
if self.writable:
|
||||
perms.append("write")
|
||||
|
||||
logues = ["", ""]
|
||||
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
|
||||
fn = os.path.join(abspath, fn)
|
||||
if os.path.exists(fsenc(fn)):
|
||||
with open(fsenc(fn), "rb") as f:
|
||||
logues[n] = f.read().decode("utf-8")
|
||||
|
||||
if is_ls:
|
||||
[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
|
||||
ret = {
|
||||
"dirs": dirs,
|
||||
"files": files,
|
||||
"srvinf": srv_info,
|
||||
"perms": perms,
|
||||
"logues": logues,
|
||||
"taglist": taglist,
|
||||
}
|
||||
ret = json.dumps(ret)
|
||||
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
|
||||
return True
|
||||
|
||||
ts = ""
|
||||
# ts = "?{}".format(time.time())
|
||||
|
||||
dirs.extend(files)
|
||||
|
||||
html = self.conn.tpl_browser.render(
|
||||
vdir=quotep(self.vpath),
|
||||
vpnodes=vpnodes,
|
||||
files=dirs,
|
||||
can_upload=self.writable,
|
||||
can_read=self.readable,
|
||||
ts=ts,
|
||||
prologue=logues[0],
|
||||
epilogue=logues[1],
|
||||
title=html_escape(self.vpath, quote=False),
|
||||
perms=json.dumps(perms),
|
||||
taglist=taglist,
|
||||
tag_order=json.dumps(self.args.mte.split(",")),
|
||||
have_up2k_idx=("e2d" in vn.flags),
|
||||
have_tags_idx=("e2t" in vn.flags),
|
||||
logues=logues,
|
||||
title=html_escape(self.vpath),
|
||||
srv_info=srv_info,
|
||||
)
|
||||
self.reply(html.encode("utf-8", "replace"))
|
||||
return True
|
||||
|
||||
@@ -3,9 +3,15 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
import ssl
|
||||
import time
|
||||
import socket
|
||||
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
import ssl
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
try:
|
||||
import jinja2
|
||||
except ImportError:
|
||||
@@ -14,16 +20,19 @@ except ImportError:
|
||||
you do not have jinja2 installed,\033[33m
|
||||
choose one of these:\033[0m
|
||||
* apt install python-jinja2
|
||||
* python3 -m pip install --user jinja2
|
||||
* {} -m pip install --user jinja2
|
||||
* (try another python version, if you have one)
|
||||
* (try copyparty.sfx instead)
|
||||
"""
|
||||
""".format(
|
||||
os.path.basename(sys.executable)
|
||||
)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
from .__init__ import E
|
||||
from .util import Unrecv
|
||||
from .httpcli import HttpCli
|
||||
from .u2idx import U2idx
|
||||
|
||||
|
||||
class HttpConn(object):
|
||||
@@ -41,9 +50,12 @@ class HttpConn(object):
|
||||
self.auth = hsrv.auth
|
||||
self.cert_path = hsrv.cert_path
|
||||
|
||||
self.t0 = time.time()
|
||||
self.nbyte = 0
|
||||
self.workload = 0
|
||||
self.u2idx = None
|
||||
self.log_func = hsrv.log
|
||||
self.log_src = "{} \033[36m{}".format(addr[0], addr[1]).ljust(26)
|
||||
self.set_rproxy()
|
||||
|
||||
env = jinja2.Environment()
|
||||
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
|
||||
@@ -53,15 +65,33 @@ class HttpConn(object):
|
||||
self.tpl_md = env.get_template("md.html")
|
||||
self.tpl_mde = env.get_template("mde.html")
|
||||
|
||||
def set_rproxy(self, ip=None):
|
||||
if ip is None:
|
||||
color = 36
|
||||
ip = self.addr[0]
|
||||
self.rproxy = None
|
||||
else:
|
||||
color = 34
|
||||
self.rproxy = ip
|
||||
|
||||
self.ip = ip
|
||||
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
|
||||
return self.log_src
|
||||
|
||||
def respath(self, res_name):
|
||||
return os.path.join(E.mod, "web", res_name)
|
||||
|
||||
def log(self, msg):
|
||||
self.log_func(self.log_src, msg)
|
||||
def log(self, msg, c=0):
|
||||
self.log_func(self.log_src, msg, c)
|
||||
|
||||
def run(self):
|
||||
def get_u2idx(self):
|
||||
if not self.u2idx:
|
||||
self.u2idx = U2idx(self.args, self.log_func)
|
||||
|
||||
return self.u2idx
|
||||
|
||||
def _detect_https(self):
|
||||
method = None
|
||||
self.sr = None
|
||||
if self.cert_path:
|
||||
try:
|
||||
method = self.s.recv(4, socket.MSG_PEEK)
|
||||
@@ -86,16 +116,58 @@ class HttpConn(object):
|
||||
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
||||
return
|
||||
|
||||
if method not in [None, b"GET ", b"HEAD", b"POST"]:
|
||||
return method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]
|
||||
|
||||
def run(self):
|
||||
self.sr = None
|
||||
if self.args.https_only:
|
||||
is_https = True
|
||||
elif self.args.http_only or not HAVE_SSL:
|
||||
is_https = False
|
||||
else:
|
||||
is_https = self._detect_https()
|
||||
|
||||
if is_https:
|
||||
if self.sr:
|
||||
self.log("\033[1;31mTODO: cannot do https in jython\033[0m")
|
||||
self.log("TODO: cannot do https in jython", c="1;31")
|
||||
return
|
||||
|
||||
self.log_src = self.log_src.replace("[36m", "[35m")
|
||||
try:
|
||||
self.s = ssl.wrap_socket(
|
||||
self.s, server_side=True, certfile=self.cert_path
|
||||
)
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
ctx.load_cert_chain(self.cert_path)
|
||||
if self.args.ssl_ver:
|
||||
ctx.options &= ~self.args.ssl_flags_en
|
||||
ctx.options |= self.args.ssl_flags_de
|
||||
# print(repr(ctx.options))
|
||||
|
||||
if self.args.ssl_log:
|
||||
try:
|
||||
ctx.keylog_filename = self.args.ssl_log
|
||||
except:
|
||||
self.log("keylog failed; openssl or python too old")
|
||||
|
||||
if self.args.ciphers:
|
||||
ctx.set_ciphers(self.args.ciphers)
|
||||
|
||||
self.s = ctx.wrap_socket(self.s, server_side=True)
|
||||
msg = [
|
||||
"\033[1;3{:d}m{}".format(c, s)
|
||||
for c, s in zip([0, 5, 0], self.s.cipher())
|
||||
]
|
||||
self.log(" ".join(msg) + "\033[0m")
|
||||
|
||||
if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
|
||||
overlap = [y[::-1] for y in self.s.shared_ciphers()]
|
||||
lines = [str(x) for x in (["TLS cipher overlap:"] + overlap)]
|
||||
self.log("\n".join(lines))
|
||||
for k, v in [
|
||||
["compression", self.s.compression()],
|
||||
["ALPN proto", self.s.selected_alpn_protocol()],
|
||||
["NPN proto", self.s.selected_npn_protocol()],
|
||||
]:
|
||||
self.log("TLS {}: {}".format(k, v or "nah"))
|
||||
|
||||
except Exception as ex:
|
||||
em = str(ex)
|
||||
|
||||
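After tuning --ssl-ver or --ciphers it can be handy to probe the listener from the outside. This helper is not part of the diff; it only uses the stdlib ssl client API (python 3.7+), and the host/port are assumptions:

import socket
import ssl

def probe(host, port, ver):
    # attempt a handshake pinned to exactly one TLS version
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
    ctx.minimum_version = ctx.maximum_version = ver
    try:
        with socket.create_connection((host, port), timeout=2) as s:
            with ctx.wrap_socket(s, server_hostname=host) as tls:
                return tls.version()
    except (ssl.SSLError, OSError):
        return None

for ver in (ssl.TLSVersion.TLSv1_2, ssl.TLSVersion.TLSv1_3):
    print(ver.name, "->", probe("127.0.0.1", 3923, ver))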
@@ -108,7 +180,7 @@ class HttpConn(object):
|
||||
pass
|
||||
|
||||
else:
|
||||
self.log("\033[35mhandshake\033[0m " + em)
|
||||
self.log("handshake\033[0m " + em, c=5)
|
||||
|
||||
return
|
||||
|
||||
|
||||
@@ -38,7 +38,7 @@ class HttpSrv(object):
|
||||
|
||||
def accept(self, sck, addr):
|
||||
"""takes an incoming tcp connection and creates a thread to handle it"""
|
||||
self.log("%s %s" % addr, "-" * 5 + "C-cthr")
|
||||
self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30")
|
||||
thr = threading.Thread(target=self.thr_client, args=(sck, addr))
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
@@ -66,11 +66,11 @@ class HttpSrv(object):
|
||||
thr.start()
|
||||
|
||||
try:
|
||||
self.log("%s %s" % addr, "-" * 6 + "C-crun")
|
||||
self.log("%s %s" % addr, "|%sC-crun" % ("-" * 6,), c="1;30")
|
||||
cli.run()
|
||||
|
||||
finally:
|
||||
self.log("%s %s" % addr, "-" * 7 + "C-done")
|
||||
self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30")
|
||||
try:
|
||||
sck.shutdown(socket.SHUT_RDWR)
|
||||
sck.close()
|
||||
@@ -78,7 +78,8 @@ class HttpSrv(object):
|
||||
if not MACOS:
|
||||
self.log(
|
||||
"%s %s" % addr,
|
||||
"shut_rdwr err:\n {}\n {}".format(repr(sck), ex),
|
||||
"shut({}): {}".format(sck.fileno(), ex),
|
||||
c="1;30",
|
||||
)
|
||||
if ex.errno not in [10038, 10054, 107, 57, 9]:
|
||||
# 10038 No longer considered a socket
|
||||
|
||||
314
copyparty/mtag.py
Normal file
@@ -0,0 +1,314 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
|
||||
from .__init__ import PY2, WINDOWS
|
||||
from .util import fsenc, fsdec
|
||||
|
||||
if not PY2:
|
||||
unicode = str
|
||||
|
||||
|
||||
class MTag(object):
|
||||
def __init__(self, log_func, args):
|
||||
self.log_func = log_func
|
||||
self.usable = True
|
||||
self.prefer_mt = False
|
||||
mappings = args.mtm
|
||||
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
|
||||
or_ffprobe = " or ffprobe"
|
||||
|
||||
if self.backend == "mutagen":
|
||||
self.get = self.get_mutagen
|
||||
try:
|
||||
import mutagen
|
||||
except:
|
||||
self.log("could not load mutagen, trying ffprobe instead", c=3)
|
||||
self.backend = "ffprobe"
|
||||
|
||||
if self.backend == "ffprobe":
|
||||
self.get = self.get_ffprobe
|
||||
self.prefer_mt = True
|
||||
# about 20x slower
|
||||
if PY2:
|
||||
cmd = [b"ffprobe", b"-version"]
|
||||
try:
|
||||
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
except:
|
||||
self.usable = False
|
||||
else:
|
||||
if not shutil.which("ffprobe"):
|
||||
self.usable = False
|
||||
|
||||
if self.usable and WINDOWS and sys.version_info < (3, 8):
|
||||
self.usable = False
|
||||
or_ffprobe = " or python >= 3.8"
|
||||
msg = "found ffprobe but your python is too old; need 3.8 or newer"
|
||||
self.log(msg, c=1)
|
||||
|
||||
if not self.usable:
|
||||
msg = "need mutagen{} to read media tags so please run this:\n {} -m pip install --user mutagen"
|
||||
self.log(msg.format(or_ffprobe, os.path.basename(sys.executable)), c=1)
|
||||
return
|
||||
|
||||
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||
tagmap = {
|
||||
"album": ["album", "talb", "\u00a9alb", "original-album", "toal"],
|
||||
"artist": [
|
||||
"artist",
|
||||
"tpe1",
|
||||
"\u00a9art",
|
||||
"composer",
|
||||
"performer",
|
||||
"arranger",
|
||||
"\u00a9wrt",
|
||||
"tcom",
|
||||
"tpe3",
|
||||
"original-artist",
|
||||
"tope",
|
||||
],
|
||||
"title": ["title", "tit2", "\u00a9nam"],
|
||||
"circle": [
|
||||
"album-artist",
|
||||
"tpe2",
|
||||
"aart",
|
||||
"conductor",
|
||||
"organization",
|
||||
"band",
|
||||
],
|
||||
".tn": ["tracknumber", "trck", "trkn", "track"],
|
||||
"genre": ["genre", "tcon", "\u00a9gen"],
|
||||
"date": [
|
||||
"original-release-date",
|
||||
"release-date",
|
||||
"date",
|
||||
"tdrc",
|
||||
"\u00a9day",
|
||||
"original-date",
|
||||
"original-year",
|
||||
"tyer",
|
||||
"tdor",
|
||||
"tory",
|
||||
"year",
|
||||
"creation-time",
|
||||
],
|
||||
".bpm": ["bpm", "tbpm", "tmpo", "tbp"],
|
||||
"key": ["initial-key", "tkey", "key"],
|
||||
"comment": ["comment", "comm", "\u00a9cmt", "comments", "description"],
|
||||
}
|
||||
|
||||
if mappings:
|
||||
for k, v in [x.split("=") for x in mappings]:
|
||||
tagmap[k] = v.split(",")
|
||||
|
||||
self.tagmap = {}
|
||||
for k, vs in tagmap.items():
|
||||
vs2 = []
|
||||
for v in vs:
|
||||
if "-" not in v:
|
||||
vs2.append(v)
|
||||
continue
|
||||
|
||||
vs2.append(v.replace("-", " "))
|
||||
vs2.append(v.replace("-", "_"))
|
||||
vs2.append(v.replace("-", ""))
|
||||
|
||||
self.tagmap[k] = vs2
|
||||
|
||||
self.rmap = {
|
||||
v: [n, k] for k, vs in self.tagmap.items() for n, v in enumerate(vs)
|
||||
}
|
||||
# self.get = self.compare
|
||||
|
||||
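The dash-expansion applied to the tag map above is easy to restate in isolation: every dash-separated alias also matches its space, underscore, and squashed spellings.

def expand_aliases(aliases):
    # mirrors the vs2 expansion in MTag.__init__ above
    out = []
    for v in aliases:
        if "-" not in v:
            out.append(v)
            continue
        out += [v.replace("-", " "), v.replace("-", "_"), v.replace("-", "")]
    return out

print(expand_aliases(["album-artist", "tpe2"]))
# ['album artist', 'album_artist', 'albumartist', 'tpe2']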
def log(self, msg, c=0):
|
||||
self.log_func("mtag", msg, c)
|
||||
|
||||
def normalize_tags(self, ret, md):
|
||||
for k, v in dict(md).items():
|
||||
if not v:
|
||||
continue
|
||||
|
||||
k = k.lower().split("::")[0].strip()
|
||||
mk = self.rmap.get(k)
|
||||
if not mk:
|
||||
continue
|
||||
|
||||
pref, mk = mk
|
||||
if mk not in ret or ret[mk][0] > pref:
|
||||
ret[mk] = [pref, v[0]]
|
||||
|
||||
# take first value
|
||||
ret = {k: unicode(v[1]).strip() for k, v in ret.items()}
|
||||
|
||||
# track 3/7 => track 3
|
||||
for k, v in ret.items():
|
||||
if k[0] == ".":
|
||||
v = v.split("/")[0].strip().lstrip("0")
|
||||
ret[k] = v or 0
|
||||
|
||||
return ret
|
||||
|
||||
def compare(self, abspath):
|
||||
if abspath.endswith(".au"):
|
||||
return {}
|
||||
|
||||
print("\n" + abspath)
|
||||
r1 = self.get_mutagen(abspath)
|
||||
r2 = self.get_ffprobe(abspath)
|
||||
|
||||
keys = {}
|
||||
for d in [r1, r2]:
|
||||
for k in d.keys():
|
||||
keys[k] = True
|
||||
|
||||
diffs = []
|
||||
l1 = []
|
||||
l2 = []
|
||||
for k in sorted(keys.keys()):
|
||||
if k in [".q", ".dur"]:
|
||||
continue # lenient
|
||||
|
||||
v1 = r1.get(k)
|
||||
v2 = r2.get(k)
|
||||
if v1 == v2:
|
||||
print(" ", k, v1)
|
||||
elif v1 != "0000": # ffprobe date=0
|
||||
diffs.append(k)
|
||||
print(" 1", k, v1)
|
||||
print(" 2", k, v2)
|
||||
if v1:
|
||||
l1.append(k)
|
||||
if v2:
|
||||
l2.append(k)
|
||||
|
||||
if diffs:
|
||||
raise Exception()
|
||||
|
||||
return r1
|
||||
|
||||
def get_mutagen(self, abspath):
|
||||
import mutagen
|
||||
|
||||
try:
|
||||
md = mutagen.File(abspath, easy=True)
|
||||
x = md.info.length
|
||||
except Exception as ex:
|
||||
return {}
|
||||
|
||||
ret = {}
|
||||
try:
|
||||
dur = int(md.info.length)
|
||||
try:
|
||||
q = int(md.info.bitrate / 1024)
|
||||
except:
|
||||
q = int((os.path.getsize(abspath) / dur) / 128)
|
||||
|
||||
ret[".dur"] = [0, dur]
|
||||
ret[".q"] = [0, q]
|
||||
except:
|
||||
pass
|
||||
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_ffprobe(self, abspath):
|
||||
cmd = [b"ffprobe", b"-hide_banner", b"--", fsenc(abspath)]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
r = p.communicate()
|
||||
txt = r[1].decode("utf-8", "replace")
|
||||
txt = [x.rstrip("\r") for x in txt.split("\n")]
|
||||
|
||||
"""
|
||||
note:
|
||||
tags which contain newline will be truncated on first \n,
|
||||
ffmpeg emits \n and spacepads the : to align visually
|
||||
note:
|
||||
the Stream ln always mentions Audio: if audio
|
||||
the Stream ln usually has kb/s, is more accurate
|
||||
the Duration ln always has kb/s
|
||||
the Metadata: after Chapter may contain BPM info,
|
||||
title : Tempo: 126.0
|
||||
|
||||
Input #0, wav,
|
||||
Metadata:
|
||||
date : <OK>
|
||||
Duration:
|
||||
Chapter #
|
||||
Metadata:
|
||||
title : <NG>
|
||||
|
||||
Input #0, mp3,
|
||||
Metadata:
|
||||
album : <OK>
|
||||
Duration:
|
||||
Stream #0:0: Audio:
|
||||
Stream #0:1: Video:
|
||||
Metadata:
|
||||
comment : <NG>
|
||||
"""
|
||||
|
||||
ptn_md_beg = re.compile("^( +)Metadata:$")
|
||||
ptn_md_kv = re.compile("^( +)([^:]+) *: (.*)")
|
||||
ptn_dur = re.compile("^ *Duration: ([^ ]+)(, |$)")
|
||||
ptn_br1 = re.compile("^ *Duration: .*, bitrate: ([0-9]+) kb/s(, |$)")
|
||||
ptn_br2 = re.compile("^ *Stream.*: Audio:.* ([0-9]+) kb/s(, |$)")
|
||||
ptn_audio = re.compile("^ *Stream .*: Audio: ")
|
||||
ptn_au_parent = re.compile("^ *(Input #|Stream .*: Audio: )")
|
||||
|
||||
ret = {}
|
||||
md = {}
|
||||
in_md = False
|
||||
is_audio = False
|
||||
au_parent = False
|
||||
for ln in txt:
|
||||
m = ptn_md_kv.match(ln)
|
||||
if m and in_md and len(m.group(1)) == in_md:
|
||||
_, k, v = [x.strip() for x in m.groups()]
|
||||
if k != "" and v != "":
|
||||
md[k] = [v]
|
||||
continue
|
||||
else:
|
||||
in_md = False
|
||||
|
||||
m = ptn_md_beg.match(ln)
|
||||
if m and au_parent:
|
||||
in_md = len(m.group(1)) + 2
|
||||
continue
|
||||
|
||||
au_parent = bool(ptn_au_parent.search(ln))
|
||||
|
||||
if ptn_audio.search(ln):
|
||||
is_audio = True
|
||||
|
||||
m = ptn_dur.search(ln)
|
||||
if m:
|
||||
sec = 0
|
||||
tstr = m.group(1)
|
||||
if tstr.lower() != "n/a":
|
||||
try:
|
||||
tf = tstr.split(",")[0].split(".")[0].split(":")
|
||||
for f in tf:
|
||||
sec *= 60
|
||||
sec += int(f)
|
||||
except:
|
||||
self.log("invalid timestr from ffmpeg: [{}]".format(tstr), c=3)
|
||||
|
||||
ret[".dur"] = sec
|
||||
m = ptn_br1.search(ln)
|
||||
if m:
|
||||
ret[".q"] = m.group(1)
|
||||
|
||||
m = ptn_br2.search(ln)
|
||||
if m:
|
||||
ret[".q"] = m.group(1)
|
||||
|
||||
if not is_audio:
|
||||
return {}
|
||||
|
||||
ret = {k: [0, v] for k, v in ret.items()}
|
||||
|
||||
return self.normalize_tags(ret, md)
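The timestamp handling above is a base-60 fold over the Duration field of ffprobe's banner; a minimal standalone sketch (the sample value is made up):

tstr = "00:03:41.59"  # as in "  Duration: 00:03:41.59, start: ..., bitrate: ..."
sec = 0
for f in tstr.split(",")[0].split(".")[0].split(":"):
    sec = sec * 60 + int(f)
assert sec == 221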
|
||||
@@ -65,16 +65,16 @@ class SvcHub(object):
|
||||
self.broker.shutdown()
|
||||
print("nailed it")
|
||||
|
||||
def _log_disabled(self, src, msg):
|
||||
def _log_disabled(self, src, msg, c=0):
|
||||
pass
|
||||
|
||||
def _log_enabled(self, src, msg):
|
||||
def _log_enabled(self, src, msg, c=0):
|
||||
"""handles logging from all components"""
|
||||
with self.log_mutex:
|
||||
now = time.time()
|
||||
if now >= self.next_day:
|
||||
dt = datetime.utcfromtimestamp(now)
|
||||
print("\033[36m{}\033[0m".format(dt.strftime("%Y-%m-%d")))
|
||||
print("\033[36m{}\033[0m\n".format(dt.strftime("%Y-%m-%d")), end="")
|
||||
|
||||
# unix timestamp of next 00:00:00 (leap-seconds safe)
|
||||
day_now = dt.day
|
||||
@@ -84,23 +84,30 @@ class SvcHub(object):
|
||||
dt = dt.replace(hour=0, minute=0, second=0)
|
||||
self.next_day = calendar.timegm(dt.utctimetuple())
|
||||
|
||||
fmt = "\033[36m{} \033[33m{:21} \033[0m{}"
|
||||
fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n"
|
||||
if not VT100:
|
||||
fmt = "{} {:21} {}"
|
||||
fmt = "{} {:21} {}\n"
|
||||
if "\033" in msg:
|
||||
msg = self.ansi_re.sub("", msg)
|
||||
if "\033" in src:
|
||||
src = self.ansi_re.sub("", src)
|
||||
elif c:
|
||||
if isinstance(c, int):
|
||||
msg = "\033[3{}m{}".format(c, msg)
|
||||
elif "\033" not in c:
|
||||
msg = "\033[{}m{}\033[0m".format(c, msg)
|
||||
else:
|
||||
msg = "{}{}\033[0m".format(c, msg)
|
||||
|
||||
ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3]
|
||||
msg = fmt.format(ts, src, msg)
|
||||
try:
|
||||
print(msg)
|
||||
print(msg, end="")
|
||||
except UnicodeEncodeError:
|
||||
try:
|
||||
print(msg.encode("utf-8", "replace").decode())
|
||||
print(msg.encode("utf-8", "replace").decode(), end="")
|
||||
except:
|
||||
print(msg.encode("ascii", "replace").decode())
|
||||
print(msg.encode("ascii", "replace").decode(), end="")
|
||||
|
||||
def check_mp_support(self):
|
||||
vmin = sys.version_info[1]
|
||||
@@ -129,8 +136,8 @@ class SvcHub(object):
|
||||
return None
|
||||
|
||||
def check_mp_enable(self):
|
||||
if self.args.j == 0:
|
||||
self.log("root", "multiprocessing disabled by argument -j 0;")
|
||||
if self.args.j == 1:
|
||||
self.log("root", "multiprocessing disabled by argument -j 1;")
|
||||
return False
|
||||
|
||||
if mp.cpu_count() <= 1:
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import re
|
||||
import time
|
||||
import socket
|
||||
import select
|
||||
|
||||
from .util import chkcmd, Counter
|
||||
|
||||
@@ -23,55 +24,74 @@ class TcpSrv(object):
|
||||
|
||||
ip = "127.0.0.1"
|
||||
eps = {ip: "local only"}
|
||||
if self.args.i != ip:
|
||||
eps = self.detect_interfaces(self.args.i) or {self.args.i: "external"}
|
||||
nonlocals = [x for x in self.args.i if x != ip]
|
||||
if nonlocals:
|
||||
eps = self.detect_interfaces(self.args.i)
|
||||
if not eps:
|
||||
for x in nonlocals:
|
||||
eps[x] = "external"
|
||||
|
||||
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
|
||||
self.log(
|
||||
"tcpsrv",
|
||||
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
|
||||
ip, self.args.p, desc
|
||||
),
|
||||
)
|
||||
for port in sorted(self.args.p):
|
||||
self.log(
|
||||
"tcpsrv",
|
||||
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
|
||||
ip, port, desc
|
||||
),
|
||||
)
|
||||
|
||||
self.srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
self.srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
self.srv = []
|
||||
for ip in self.args.i:
|
||||
for port in self.args.p:
|
||||
self.srv.append(self._listen(ip, port))
|
||||
|
||||
def _listen(self, ip, port):
|
||||
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
try:
|
||||
self.srv.bind((self.args.i, self.args.p))
|
||||
srv.bind((ip, port))
|
||||
return srv
|
||||
except (OSError, socket.error) as ex:
|
||||
if ex.errno == 98:
|
||||
raise Exception(
|
||||
"\033[1;31mport {} is busy on interface {}\033[0m".format(
|
||||
self.args.p, self.args.i
|
||||
)
|
||||
)
|
||||
|
||||
if ex.errno == 99:
|
||||
raise Exception(
|
||||
"\033[1;31minterface {} does not exist\033[0m".format(self.args.i)
|
||||
)
|
||||
if ex.errno in [98, 48]:
|
||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||
elif ex.errno in [99, 49]:
|
||||
e = "\033[1;31minterface {} does not exist\033[0m".format(ip)
|
||||
else:
|
||||
raise
|
||||
raise Exception(e)
|
||||
|
||||
def run(self):
|
||||
self.srv.listen(self.args.nc)
|
||||
|
||||
self.log("tcpsrv", "listening @ {0}:{1}".format(self.args.i, self.args.p))
|
||||
for srv in self.srv:
|
||||
srv.listen(self.args.nc)
|
||||
ip, port = srv.getsockname()
|
||||
self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port))
|
||||
|
||||
while True:
|
||||
self.log("tcpsrv", "-" * 1 + "C-ncli")
|
||||
self.log("tcpsrv", "|%sC-ncli" % ("-" * 1,), c="1;30")
|
||||
if self.num_clients.v >= self.args.nc:
|
||||
time.sleep(0.1)
|
||||
continue
|
||||
|
||||
self.log("tcpsrv", "-" * 2 + "C-acc1")
|
||||
sck, addr = self.srv.accept()
|
||||
self.log("%s %s" % addr, "-" * 3 + "C-acc2")
|
||||
self.num_clients.add()
|
||||
self.hub.broker.put(False, "httpconn", sck, addr)
|
||||
self.log("tcpsrv", "|%sC-acc1" % ("-" * 2,), c="1;30")
|
||||
ready, _, _ = select.select(self.srv, [], [])
|
||||
for srv in ready:
|
||||
sck, addr = srv.accept()
|
||||
sip, sport = srv.getsockname()
|
||||
self.log(
|
||||
"%s %s" % addr,
|
||||
"|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
|
||||
"-" * 3, sip, sport % 8, sport
|
||||
),
|
||||
c="1;30",
|
||||
)
|
||||
self.num_clients.add()
|
||||
self.hub.broker.put(False, "httpconn", sck, addr)
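Stripped of logging, the accept loop above multiplexes every listening socket through select(); a hedged sketch of the pattern (listeners and handle are placeholders):

import select

def accept_loop(listeners, handle):
    # listeners: bound+listening socket.socket objects; handle: worker callback
    while True:
        ready, _, _ = select.select(listeners, [], [])
        for srv in ready:
            sck, addr = srv.accept()
            handle(sck, addr)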
|
||||
|
||||
def shutdown(self):
|
||||
self.log("tcpsrv", "ok bye")
|
||||
|
||||
def detect_interfaces(self, listen_ip):
|
||||
def detect_interfaces(self, listen_ips):
|
||||
eps = {}
|
||||
|
||||
# get all ips and their interfaces
|
||||
@@ -85,8 +105,9 @@ class TcpSrv(object):
|
||||
for ln in ip_addr.split("\n"):
|
||||
try:
|
||||
ip, dev = r.match(ln.rstrip()).groups()
|
||||
if listen_ip in ["0.0.0.0", ip]:
|
||||
eps[ip] = dev
|
||||
for lip in listen_ips:
|
||||
if lip in ["0.0.0.0", ip]:
|
||||
eps[ip] = dev
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -113,11 +134,12 @@ class TcpSrv(object):
|
||||
|
||||
s.close()
|
||||
|
||||
if default_route and listen_ip in ["0.0.0.0", default_route]:
|
||||
desc = "\033[32mexternal"
|
||||
try:
|
||||
eps[default_route] += ", " + desc
|
||||
except:
|
||||
eps[default_route] = desc
|
||||
for lip in listen_ips:
|
||||
if default_route and lip in ["0.0.0.0", default_route]:
|
||||
desc = "\033[32mexternal"
|
||||
try:
|
||||
eps[default_route] += ", " + desc
|
||||
except:
|
||||
eps[default_route] = desc
|
||||
|
||||
return eps
|
||||
|
||||
copyparty/u2idx.py (new file, 200 lines)
@@ -0,0 +1,200 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
from .util import u8safe
|
||||
from .up2k import up2k_wark_from_hashlist
|
||||
|
||||
|
||||
try:
|
||||
HAVE_SQLITE3 = True
|
||||
import sqlite3
|
||||
except:
|
||||
HAVE_SQLITE3 = False
|
||||
|
||||
|
||||
class U2idx(object):
|
||||
def __init__(self, args, log_func):
|
||||
self.args = args
|
||||
self.log_func = log_func
|
||||
|
||||
if not HAVE_SQLITE3:
|
||||
self.log("could not load sqlite3; searchign wqill be disabled")
|
||||
return
|
||||
|
||||
self.cur = {}
|
||||
self.mem_cur = sqlite3.connect(":memory:")
|
||||
self.mem_cur.execute(r"create table a (b text)")
|
||||
|
||||
def log(self, msg, c=0):
|
||||
self.log_func("u2idx", msg, c)
|
||||
|
||||
def fsearch(self, vols, body):
|
||||
"""search by up2k hashlist"""
|
||||
if not HAVE_SQLITE3:
|
||||
return []
|
||||
|
||||
fsize = body["size"]
|
||||
fhash = body["hash"]
|
||||
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
|
||||
|
||||
uq = "substr(w,1,16) = ? and w = ?"
|
||||
uv = [wark[:16], wark]
|
||||
|
||||
return self.run_query(vols, uq, uv, "", [])[0]
|
||||
|
||||
def get_cur(self, ptop):
|
||||
cur = self.cur.get(ptop)
|
||||
if cur:
|
||||
return cur
|
||||
|
||||
cur = _open(ptop)
|
||||
if not cur:
|
||||
return None
|
||||
|
||||
self.cur[ptop] = cur
|
||||
return cur
|
||||
|
||||
def search(self, vols, body):
|
||||
"""search by query params"""
|
||||
if not HAVE_SQLITE3:
|
||||
return []
|
||||
|
||||
qobj = {}
|
||||
_conv_sz(qobj, body, "sz_min", "up.sz >= ?")
|
||||
_conv_sz(qobj, body, "sz_max", "up.sz <= ?")
|
||||
_conv_dt(qobj, body, "dt_min", "up.mt >= ?")
|
||||
_conv_dt(qobj, body, "dt_max", "up.mt <= ?")
|
||||
for seg, dk in [["path", "up.rd"], ["name", "up.fn"]]:
|
||||
if seg in body:
|
||||
_conv_txt(qobj, body, seg, dk)
|
||||
|
||||
uq, uv = _sqlize(qobj)
|
||||
|
||||
tq = ""
|
||||
tv = []
|
||||
qobj = {}
|
||||
if "tags" in body:
|
||||
_conv_txt(qobj, body, "tags", "mt.v")
|
||||
tq, tv = _sqlize(qobj)
|
||||
|
||||
return self.run_query(vols, uq, uv, tq, tv)
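For reference, a hedged example of the request body that search() consumes; every key is optional, sizes appear to be MiB (they get multiplied by 1024*1024) and dates accept a few ISO-ish formats:

body = {
    "sz_min": "1",            # at least 1 MiB
    "dt_min": "2020-05-01",   # also "YYYY-MM-DD HH:MM[:SS]"
    "name": "^yana .opus$",   # ^ anchors the start, $ the end, -term excludes
    "tags": "artist",
}
# hits, taglist = u2idx.search(vols, body)  # vols: [(vtop, ptop, flags), ...]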
|
||||
|
||||
def run_query(self, vols, uq, uv, tq, tv):
|
||||
self.log("qs: {} {} , {} {}".format(uq, repr(uv), tq, repr(tv)))
|
||||
|
||||
ret = []
|
||||
lim = 1000
|
||||
taglist = {}
|
||||
for (vtop, ptop, flags) in vols:
|
||||
cur = self.get_cur(ptop)
|
||||
if not cur:
|
||||
continue
|
||||
|
||||
if not tq:
|
||||
if not uq:
|
||||
q = "select * from up"
|
||||
v = ()
|
||||
else:
|
||||
q = "select * from up where " + uq
|
||||
v = tuple(uv)
|
||||
else:
|
||||
# naive assumption: tags first
|
||||
q = "select up.* from up inner join mt on substr(up.w,1,16) = mt.w where {}"
|
||||
q = q.format(" and ".join([tq, uq]) if uq else tq)
|
||||
v = tuple(tv + uv)
|
||||
|
||||
sret = []
|
||||
c = cur.execute(q, v)
|
||||
for hit in c:
|
||||
w, ts, sz, rd, fn = hit
|
||||
lim -= 1
|
||||
if lim <= 0:
|
||||
break
|
||||
|
||||
if rd.startswith("//") or fn.startswith("//"):
|
||||
rd, fn = s3dec(rd, fn)
|
||||
|
||||
rp = os.path.join(vtop, rd, fn).replace("\\", "/")
|
||||
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
|
||||
|
||||
for hit in sret:
|
||||
w = hit["w"]
|
||||
del hit["w"]
|
||||
tags = {}
|
||||
q = "select k, v from mt where w = ? and k != 'x'"
|
||||
for k, v in cur.execute(q, (w,)):
|
||||
taglist[k] = True
|
||||
tags[k] = v
|
||||
|
||||
hit["tags"] = tags
|
||||
|
||||
ret.extend(sret)
|
||||
|
||||
return ret, list(taglist.keys())
|
||||
|
||||
|
||||
def _open(ptop):
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
if os.path.exists(db_path):
|
||||
return sqlite3.connect(db_path).cursor()
|
||||
|
||||
|
||||
def _conv_sz(q, body, k, sql):
|
||||
if k in body:
|
||||
q[sql] = int(float(body[k]) * 1024 * 1024)
|
||||
|
||||
|
||||
def _conv_dt(q, body, k, sql):
|
||||
if k not in body:
|
||||
return
|
||||
|
||||
v = body[k].upper().rstrip("Z").replace(",", " ").replace("T", " ")
|
||||
while " " in v:
|
||||
v = v.replace(" ", " ")
|
||||
|
||||
for fmt in ["%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M", "%Y-%m-%d %H", "%Y-%m-%d"]:
|
||||
try:
|
||||
ts = datetime.strptime(v, fmt).timestamp()
|
||||
break
|
||||
except:
|
||||
ts = None
|
||||
|
||||
if ts:
|
||||
q[sql] = ts
|
||||
|
||||
|
||||
def _conv_txt(q, body, k, sql):
|
||||
for v in body[k].split(" "):
|
||||
inv = ""
|
||||
if v.startswith("-"):
|
||||
inv = "not"
|
||||
v = v[1:]
|
||||
|
||||
if not v:
|
||||
continue
|
||||
|
||||
head = "'%'||"
|
||||
if v.startswith("^"):
|
||||
head = ""
|
||||
v = v[1:]
|
||||
|
||||
tail = "||'%'"
|
||||
if v.endswith("$"):
|
||||
tail = ""
|
||||
v = v[:-1]
|
||||
|
||||
qk = "{} {} like {}?{}".format(sql, inv, head, tail)
|
||||
q[qk + "\n" + v] = u8safe(v)
|
||||
|
||||
|
||||
def _sqlize(qobj):
|
||||
keys = []
|
||||
values = []
|
||||
for k, v in sorted(qobj.items()):
|
||||
keys.append(k.split("\n")[0])
|
||||
values.append(v)
|
||||
|
||||
return " and ".join(keys), values
|
||||
copyparty/up2k.py (1039 lines; diff suppressed because it is too large)
@@ -2,13 +2,17 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import base64
|
||||
import select
|
||||
import struct
|
||||
import hashlib
|
||||
import platform
|
||||
import threading
|
||||
import mimetypes
|
||||
import contextlib
|
||||
import subprocess as sp # nosec
|
||||
|
||||
from .__init__ import PY2, WINDOWS
|
||||
@@ -42,6 +46,7 @@ if WINDOWS and PY2:
|
||||
|
||||
HTTPCODE = {
|
||||
200: "OK",
|
||||
204: "No Content",
|
||||
206: "Partial Content",
|
||||
304: "Not Modified",
|
||||
400: "Bad Request",
|
||||
@@ -94,6 +99,120 @@ class Unrecv(object):
|
||||
self.buf = buf + self.buf
|
||||
|
||||
|
||||
class ProgressPrinter(threading.Thread):
|
||||
"""
|
||||
periodically print progress info without linefeeds
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
threading.Thread.__init__(self)
|
||||
self.daemon = True
|
||||
self.msg = None
|
||||
self.end = False
|
||||
self.start()
|
||||
|
||||
def run(self):
|
||||
msg = None
|
||||
while not self.end:
|
||||
time.sleep(0.1)
|
||||
if msg == self.msg or self.end:
|
||||
continue
|
||||
|
||||
msg = self.msg
|
||||
uprint(" {}\033[K\r".format(msg))
|
||||
|
||||
print("\033[K", end="")
|
||||
sys.stdout.flush() # necessary on win10 even w/ stderr btw
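A hedged usage sketch of ProgressPrinter: the worker keeps assigning .msg and flips .end when it is done:

pp = ProgressPrinter()
for n in range(5):
    pp.msg = "hashing chunk {} of 5".format(n + 1)  # illustrative message
    time.sleep(0.3)
pp.end = True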
|
||||
|
||||
|
||||
def uprint(msg):
|
||||
try:
|
||||
print(msg, end="")
|
||||
except UnicodeEncodeError:
|
||||
try:
|
||||
print(msg.encode("utf-8", "replace").decode(), end="")
|
||||
except:
|
||||
print(msg.encode("ascii", "replace").decode(), end="")
|
||||
|
||||
|
||||
def nuprint(msg):
|
||||
uprint("{}\n".format(msg))
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def ren_open(fname, *args, **kwargs):
|
||||
fdir = kwargs.pop("fdir", None)
|
||||
suffix = kwargs.pop("suffix", None)
|
||||
|
||||
if fname == os.devnull:
|
||||
with open(fname, *args, **kwargs) as f:
|
||||
yield {"orz": [f, fname]}
|
||||
return
|
||||
|
||||
orig_name = fname
|
||||
bname = fname
|
||||
ext = ""
|
||||
while True:
|
||||
ofs = bname.rfind(".")
|
||||
if ofs < 0 or ofs < len(bname) - 7:
|
||||
# doesn't look like an extension anymore
|
||||
break
|
||||
|
||||
ext = bname[ofs:] + ext
|
||||
bname = bname[:ofs]
|
||||
|
||||
b64 = ""
|
||||
while True:
|
||||
try:
|
||||
if fdir:
|
||||
fpath = os.path.join(fdir, fname)
|
||||
else:
|
||||
fpath = fname
|
||||
|
||||
if suffix and os.path.exists(fpath):
|
||||
fpath += suffix
|
||||
fname += suffix
|
||||
ext += suffix
|
||||
|
||||
with open(fsenc(fpath), *args, **kwargs) as f:
|
||||
if b64:
|
||||
fp2 = "fn-trunc.{}.txt".format(b64)
|
||||
fp2 = os.path.join(fdir, fp2)
|
||||
with open(fsenc(fp2), "wb") as f2:
|
||||
f2.write(orig_name.encode("utf-8"))
|
||||
|
||||
yield {"orz": [f, fname]}
|
||||
return
|
||||
|
||||
except OSError as ex_:
|
||||
ex = ex_
|
||||
if ex.errno not in [36, 63] and (not WINDOWS or ex.errno != 22):
|
||||
raise
|
||||
|
||||
if not b64:
|
||||
b64 = (bname + ext).encode("utf-8", "replace")
|
||||
b64 = hashlib.sha512(b64).digest()[:12]
|
||||
b64 = base64.urlsafe_b64encode(b64).decode("utf-8").rstrip("=")
|
||||
|
||||
badlen = len(fname)
|
||||
while len(fname) >= badlen:
|
||||
if len(bname) < 8:
|
||||
raise ex
|
||||
|
||||
if len(bname) > len(ext):
|
||||
# drop the last letter of the filename
|
||||
bname = bname[:-1]
|
||||
else:
|
||||
try:
|
||||
# drop the leftmost sub-extension
|
||||
_, ext = ext.split(".", 1)
|
||||
except:
|
||||
# okay do the first letter then
|
||||
ext = "." + ext[2:]
|
||||
|
||||
fname = "{}~{}{}".format(bname, b64, ext)
|
||||
|
||||
|
||||
class MultipartParser(object):
|
||||
def __init__(self, log_func, sr, http_headers):
|
||||
self.sr = sr
|
||||
@@ -333,6 +452,31 @@ def read_header(sr):
|
||||
return ret[:ofs].decode("utf-8", "surrogateescape").split("\r\n")
|
||||
|
||||
|
||||
def humansize(sz, terse=False):
|
||||
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
|
||||
if sz < 1024:
|
||||
break
|
||||
|
||||
sz /= 1024.0
|
||||
|
||||
ret = " ".join([str(sz)[:4].rstrip("."), unit])
|
||||
|
||||
if not terse:
|
||||
return ret
|
||||
|
||||
return ret.replace("iB", "").replace(" ", "")
|
||||
|
||||
|
||||
def get_spd(nbyte, t0, t=None):
|
||||
if t is None:
|
||||
t = time.time()
|
||||
|
||||
bps = nbyte / ((t - t0) + 0.001)
|
||||
s1 = humansize(nbyte).replace(" ", "\033[33m").replace("iB", "")
|
||||
s2 = humansize(bps).replace(" ", "\033[35m").replace("iB", "")
|
||||
return "{} \033[0m{}/s\033[0m".format(s1, s2)
|
||||
|
||||
|
||||
def undot(path):
|
||||
ret = []
|
||||
for node in path.split("/"):
|
||||
@@ -376,10 +520,30 @@ def sanitize_fn(fn):
|
||||
return fn.strip()
|
||||
|
||||
|
||||
def u8safe(txt):
|
||||
try:
|
||||
return txt.encode("utf-8", "xmlcharrefreplace").decode("utf-8", "replace")
|
||||
except:
|
||||
return txt.encode("utf-8", "replace").decode("utf-8", "replace")
|
||||
|
||||
|
||||
def exclude_dotfiles(filepaths):
|
||||
for fpath in filepaths:
|
||||
if not fpath.split("/")[-1].startswith("."):
|
||||
yield fpath
|
||||
return [x for x in filepaths if not x.split("/")[-1].startswith(".")]
|
||||
|
||||
|
||||
def html_escape(s, quote=False):
|
||||
"""html.escape but also newlines"""
|
||||
s = (
|
||||
s.replace("&", "&")
|
||||
.replace("<", "<")
|
||||
.replace(">", ">")
|
||||
.replace("\r", " ")
|
||||
.replace("\n", " ")
|
||||
)
|
||||
if quote:
|
||||
s = s.replace('"', """).replace("'", "'")
|
||||
|
||||
return s
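For example, the escaping above (with quote=True) behaves like this on a hypothetical input:

html_escape('<b>"hi" & bye</b>\n', quote=True)
# -> '&lt;b&gt;&quot;hi&quot; &amp; bye&lt;/b&gt; '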
|
||||
|
||||
|
||||
def quotep(txt):
|
||||
@@ -396,8 +560,8 @@ def quotep(txt):
|
||||
def unquotep(txt):
|
||||
"""url unquoter which deals with bytes correctly"""
|
||||
btxt = w8enc(txt)
|
||||
unq1 = btxt.replace(b"+", b" ")
|
||||
unq2 = unquote(unq1)
|
||||
# btxt = btxt.replace(b"+", b" ")
|
||||
unq2 = unquote(btxt)
|
||||
return w8dec(unq2)
|
||||
|
||||
|
||||
@@ -417,6 +581,16 @@ def w8enc(txt):
|
||||
return txt.encode(FS_ENCODING, "surrogateescape")
|
||||
|
||||
|
||||
def w8b64dec(txt):
|
||||
"""decodes base64(filesystem-bytes) to wtf8"""
|
||||
return w8dec(base64.urlsafe_b64decode(txt.encode("ascii")))
|
||||
|
||||
|
||||
def w8b64enc(txt):
|
||||
"""encodes wtf8 to base64(filesystem-bytes)"""
|
||||
return base64.urlsafe_b64encode(w8enc(txt)).decode("ascii")
|
||||
|
||||
|
||||
if PY2 and WINDOWS:
|
||||
# moonrunes become \x3f with bytestrings,
|
||||
# losing mojibake support is worth
|
||||
@@ -430,6 +604,41 @@ else:
|
||||
fsdec = w8dec
|
||||
|
||||
|
||||
def s3enc(mem_cur, rd, fn):
|
||||
ret = []
|
||||
for v in [rd, fn]:
|
||||
try:
|
||||
mem_cur.execute("select * from a where b = ?", (v,))
|
||||
ret.append(v)
|
||||
except:
|
||||
ret.append("//" + w8b64enc(v))
|
||||
# self.log("mojien/{} [{}] {}".format(k, v, ret[-1][2:]))
|
||||
|
||||
return tuple(ret)
|
||||
|
||||
|
||||
def s3dec(rd, fn):
|
||||
ret = []
|
||||
for k, v in [["d", rd], ["f", fn]]:
|
||||
if v.startswith("//"):
|
||||
ret.append(w8b64dec(v[2:]))
|
||||
# self.log("mojide/{} [{}] {}".format(k, ret[-1], v[2:]))
|
||||
else:
|
||||
ret.append(v)
|
||||
|
||||
return tuple(ret)
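A hedged sketch of the round-trip above: names that sqlite accepts are stored verbatim, anything else is stored as "//" plus base64 and undone by s3dec:

import sqlite3
mem_cur = sqlite3.connect(":memory:")
mem_cur.execute("create table a (b text)")

rd, fn = s3enc(mem_cur, "music/ok", "ok.mp3")
assert (rd, fn) == ("music/ok", "ok.mp3")
assert s3dec(rd, fn) == ("music/ok", "ok.mp3")
# a name containing undecodable bytes (surrogates from fsdec) would instead
# come back as "//<base64>" here, and s3dec restores it losslessly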
|
||||
|
||||
|
||||
def atomic_move(src, dst):
|
||||
if not PY2:
|
||||
os.replace(src, dst)
|
||||
else:
|
||||
if os.path.exists(dst):
|
||||
os.unlink(dst)
|
||||
|
||||
os.rename(src, dst)
|
||||
|
||||
|
||||
def read_socket(sr, total_size):
|
||||
remains = total_size
|
||||
while remains > 0:
|
||||
@@ -445,6 +654,49 @@ def read_socket(sr, total_size):
|
||||
yield buf
|
||||
|
||||
|
||||
def read_socket_unbounded(sr):
|
||||
while True:
|
||||
buf = sr.recv(32 * 1024)
|
||||
if not buf:
|
||||
return
|
||||
|
||||
yield buf
|
||||
|
||||
|
||||
def read_socket_chunked(sr, log=None):
|
||||
err = "expected chunk length, got [{}] |{}| instead"
|
||||
while True:
|
||||
buf = b""
|
||||
while b"\r" not in buf:
|
||||
rbuf = sr.recv(2)
|
||||
if not rbuf or len(buf) > 16:
|
||||
err = err.format(buf.decode("utf-8", "replace"), len(buf))
|
||||
raise Pebkac(400, err)
|
||||
|
||||
buf += rbuf
|
||||
|
||||
if not buf.endswith(b"\n"):
|
||||
sr.recv(1)
|
||||
|
||||
try:
|
||||
chunklen = int(buf.rstrip(b"\r\n"), 16)
|
||||
except:
|
||||
err = err.format(buf.decode("utf-8", "replace"), len(buf))
|
||||
raise Pebkac(400, err)
|
||||
|
||||
if chunklen == 0:
|
||||
sr.recv(2) # \r\n after final chunk
|
||||
return
|
||||
|
||||
if log:
|
||||
log("receiving {} byte chunk".format(chunklen))
|
||||
|
||||
for chunk in read_socket(sr, chunklen):
|
||||
yield chunk
|
||||
|
||||
sr.recv(2) # \r\n after each chunk too
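The chunked reader above expects standard HTTP/1.1 framing: a hex length, CRLF, the payload, CRLF, repeated, then a zero-length chunk to finish; the classic two-chunk example body looks like this on the wire:

# the body "Wikipedia" split into a 4-byte and a 5-byte chunk
wire = b"4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n"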
|
||||
|
||||
|
||||
def hashcopy(actor, fin, fout):
|
||||
u32_lim = int((2 ** 31) * 0.9)
|
||||
hashobj = hashlib.sha512()
|
||||
@@ -464,6 +716,73 @@ def hashcopy(actor, fin, fout):
|
||||
return tlen, hashobj.hexdigest(), digest_b64
|
||||
|
||||
|
||||
def sendfile_py(lower, upper, f, s):
|
||||
remains = upper - lower
|
||||
f.seek(lower)
|
||||
while remains > 0:
|
||||
# time.sleep(0.01)
|
||||
buf = f.read(min(4096, remains))
|
||||
if not buf:
|
||||
return remains
|
||||
|
||||
try:
|
||||
s.sendall(buf)
|
||||
remains -= len(buf)
|
||||
except:
|
||||
return remains
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def sendfile_kern(lower, upper, f, s):
|
||||
out_fd = s.fileno()
|
||||
in_fd = f.fileno()
|
||||
ofs = lower
|
||||
while ofs < upper:
|
||||
try:
|
||||
req = min(2 ** 30, upper - ofs)
|
||||
select.select([], [out_fd], [], 10)
|
||||
n = os.sendfile(out_fd, in_fd, ofs, req)
|
||||
except Exception as ex:
|
||||
# print("sendfile: " + repr(ex))
|
||||
n = 0
|
||||
|
||||
if n <= 0:
|
||||
return upper - ofs
|
||||
|
||||
ofs += n
|
||||
# print("sendfile: ok, sent {} now, {} total, {} remains".format(n, ofs - lower, upper - ofs))
|
||||
|
||||
return 0
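A hedged note on how the two senders above are typically chosen: os.sendfile only exists on py3 unix builds, so callers can fall back to the userspace loop; the call at the bottom is illustrative only:

def pick_sender():
    # prefer the zero-copy kernel path when the platform has it
    return sendfile_kern if hasattr(os, "sendfile") else sendfile_py

# remains = pick_sender()(lower, upper, open_file, client_socket)  # 0 on success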
|
||||
|
||||
|
||||
def statdir(logger, scandir, lstat, top):
|
||||
try:
|
||||
btop = fsenc(top)
|
||||
if scandir and hasattr(os, "scandir"):
|
||||
src = "scandir"
|
||||
with os.scandir(btop) as dh:
|
||||
for fh in dh:
|
||||
try:
|
||||
yield [fsdec(fh.name), fh.stat(follow_symlinks=not lstat)]
|
||||
except Exception as ex:
|
||||
msg = "scan-stat: \033[36m{} @ {}"
|
||||
logger(msg.format(repr(ex), fsdec(fh.path)))
|
||||
else:
|
||||
src = "listdir"
|
||||
fun = os.lstat if lstat else os.stat
|
||||
for name in os.listdir(btop):
|
||||
abspath = os.path.join(btop, name)
|
||||
try:
|
||||
yield [fsdec(name), fun(abspath)]
|
||||
except Exception as ex:
|
||||
msg = "list-stat: \033[36m{} @ {}"
|
||||
logger(msg.format(repr(ex), fsdec(abspath)))
|
||||
|
||||
except Exception as ex:
|
||||
logger("{}: \033[31m{} @ {}".format(src, repr(ex), top))
|
||||
|
||||
|
||||
def unescape_cookie(orig):
|
||||
# mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn
|
||||
ret = ""
|
||||
@@ -540,7 +859,26 @@ def py_desc():
|
||||
)
|
||||
|
||||
|
||||
def align_tab(lines):
|
||||
rows = []
|
||||
ncols = 0
|
||||
for ln in lines:
|
||||
row = [x for x in ln.split(" ") if x]
|
||||
ncols = max(ncols, len(row))
|
||||
rows.append(row)
|
||||
|
||||
lens = [0] * ncols
|
||||
for row in rows:
|
||||
for n, col in enumerate(row):
|
||||
lens[n] = max(lens[n], len(col))
|
||||
|
||||
return ["".join(x.ljust(y + 2) for x, y in zip(row, lens)) for row in rows]
|
||||
|
||||
|
||||
class Pebkac(Exception):
|
||||
def __init__(self, code, msg=None):
|
||||
super(Pebkac, self).__init__(msg or HTTPCODE[code])
|
||||
self.code = code
|
||||
|
||||
def __repr__(self):
|
||||
return "Pebkac({}, {})".format(self.code, repr(self.args))
|
||||
|
||||
copyparty/web/Makefile (new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
# run me to zopfli all the static files
|
||||
# which should help on really slow connections
|
||||
# but then why are you using copyparty in the first place
|
||||
|
||||
pk: $(addsuffix .gz, $(wildcard *.js *.css))
|
||||
un: $(addsuffix .un, $(wildcard *.gz))
|
||||
|
||||
%.gz: %
|
||||
pigz -11 -J 34 -I 5730 $<
|
||||
|
||||
%.un: %
|
||||
pigz -d $<
|
||||
@@ -39,15 +39,22 @@ body {
|
||||
margin: 1.3em 0 0 0;
|
||||
font-size: 1.4em;
|
||||
}
|
||||
#path #entree {
|
||||
margin-left: -.7em;
|
||||
}
|
||||
#files {
|
||||
border-collapse: collapse;
|
||||
margin-top: 2em;
|
||||
border-spacing: 0;
|
||||
z-index: 1;
|
||||
position: relative;
|
||||
}
|
||||
#files tbody a {
|
||||
display: block;
|
||||
padding: .3em 0;
|
||||
}
|
||||
a {
|
||||
#files tbody div a {
|
||||
color: #f5a;
|
||||
}
|
||||
a, #files tbody div a:last-child {
|
||||
color: #fc5;
|
||||
padding: .2em;
|
||||
text-decoration: none;
|
||||
@@ -55,16 +62,18 @@ a {
|
||||
#files a:hover {
|
||||
color: #fff;
|
||||
background: #161616;
|
||||
text-decoration: underline;
|
||||
}
|
||||
#files thead a {
|
||||
color: #999;
|
||||
font-weight: normal;
|
||||
}
|
||||
#files tr:hover {
|
||||
#files tr+tr:hover {
|
||||
background: #1c1c1c;
|
||||
}
|
||||
#files thead th {
|
||||
padding: .5em 1.3em .3em 1.3em;
|
||||
cursor: pointer;
|
||||
}
|
||||
#files thead th:last-child {
|
||||
background: #444;
|
||||
@@ -82,6 +91,16 @@ a {
|
||||
margin: 0;
|
||||
padding: 0 .5em;
|
||||
}
|
||||
#files td {
|
||||
border-bottom: 1px solid #111;
|
||||
}
|
||||
#files td+td+td {
|
||||
max-width: 30em;
|
||||
overflow: hidden;
|
||||
}
|
||||
#files tr+tr td {
|
||||
border-top: 1px solid #383838;
|
||||
}
|
||||
#files tbody td:nth-child(3) {
|
||||
font-family: monospace;
|
||||
font-size: 1.3em;
|
||||
@@ -100,6 +119,9 @@ a {
|
||||
padding-bottom: 1.3em;
|
||||
border-bottom: .5em solid #444;
|
||||
}
|
||||
#files tbody tr td:last-child {
|
||||
white-space: nowrap;
|
||||
}
|
||||
#files thead th[style] {
|
||||
width: auto !important;
|
||||
}
|
||||
@@ -131,11 +153,34 @@ a {
|
||||
.logue {
|
||||
padding: .2em 1.5em;
|
||||
}
|
||||
a.play {
|
||||
color: #e70;
|
||||
.logue:empty {
|
||||
display: none;
|
||||
}
|
||||
a.play.act {
|
||||
color: #af0;
|
||||
#pro.logue {
|
||||
margin-bottom: .8em;
|
||||
}
|
||||
#epi.logue {
|
||||
margin: .8em 0;
|
||||
}
|
||||
#srv_info {
|
||||
opacity: .5;
|
||||
font-size: .8em;
|
||||
color: #fc5;
|
||||
position: absolute;
|
||||
top: .5em;
|
||||
left: 2em;
|
||||
}
|
||||
#srv_info span {
|
||||
color: #fff;
|
||||
}
|
||||
#files tbody a.play {
|
||||
color: #e70;
|
||||
padding: .2em;
|
||||
margin: -.2em;
|
||||
}
|
||||
#files tbody a.play.act {
|
||||
color: #840;
|
||||
text-shadow: 0 0 .3em #b80;
|
||||
}
|
||||
#blocked {
|
||||
position: fixed;
|
||||
@@ -145,7 +190,7 @@ a.play.act {
|
||||
height: 100%;
|
||||
background: #333;
|
||||
font-size: 2.5em;
|
||||
z-index:99;
|
||||
z-index: 99;
|
||||
}
|
||||
#blk_play,
|
||||
#blk_abrt {
|
||||
@@ -179,6 +224,7 @@ a.play.act {
|
||||
bottom: -6em;
|
||||
height: 6em;
|
||||
width: 100%;
|
||||
z-index: 3;
|
||||
transition: bottom 0.15s;
|
||||
}
|
||||
#widget.open {
|
||||
@@ -203,6 +249,9 @@ a.play.act {
|
||||
75% {cursor: url(/.cpr/dd/5.png), pointer}
|
||||
85% {cursor: url(/.cpr/dd/1.png), pointer}
|
||||
}
|
||||
@keyframes spin {
|
||||
100% {transform: rotate(360deg)}
|
||||
}
|
||||
#wtoggle {
|
||||
position: absolute;
|
||||
top: -1.2em;
|
||||
@@ -262,3 +311,324 @@ a.play.act {
|
||||
width: calc(100% - 10.5em);
|
||||
background: rgba(0,0,0,0.2);
|
||||
}
|
||||
@media (min-width: 90em) {
|
||||
#barpos,
|
||||
#barbuf {
|
||||
width: calc(100% - 24em);
|
||||
left: 9.8em;
|
||||
top: .7em;
|
||||
height: 1.6em;
|
||||
bottom: auto;
|
||||
}
|
||||
#widget {
|
||||
bottom: -3.2em;
|
||||
height: 3.2em;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
.opview {
|
||||
display: none;
|
||||
}
|
||||
.opview.act {
|
||||
display: block;
|
||||
}
|
||||
#ops a {
|
||||
color: #fc5;
|
||||
font-size: 1.5em;
|
||||
padding: .25em .3em;
|
||||
margin: 0;
|
||||
outline: none;
|
||||
}
|
||||
#ops a.act {
|
||||
background: #281838;
|
||||
border-radius: 0 0 .2em .2em;
|
||||
border-bottom: .3em solid #d90;
|
||||
box-shadow: 0 -.15em .2em #000 inset;
|
||||
padding-bottom: .3em;
|
||||
}
|
||||
#ops i {
|
||||
font-size: 1.5em;
|
||||
}
|
||||
#ops i:before {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #01a7e1;
|
||||
position: relative;
|
||||
}
|
||||
#ops i:after {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #ff3f1a;
|
||||
margin-left: -.35em;
|
||||
font-size: 1.05em;
|
||||
}
|
||||
#ops,
|
||||
.opbox {
|
||||
border: 1px solid #3a3a3a;
|
||||
box-shadow: 0 0 1em #222 inset;
|
||||
}
|
||||
#ops {
|
||||
background: #333;
|
||||
margin: 1.7em 1.5em 0 1.5em;
|
||||
padding: .3em .6em;
|
||||
border-radius: .3em;
|
||||
border-width: .15em 0;
|
||||
}
|
||||
.opbox {
|
||||
background: #2d2d2d;
|
||||
margin: 1.5em 0 0 0;
|
||||
padding: .5em;
|
||||
border-radius: 0 1em 1em 0;
|
||||
border-width: .15em .3em .3em 0;
|
||||
max-width: 40em;
|
||||
}
|
||||
.opbox input {
|
||||
margin: .5em;
|
||||
}
|
||||
.opview input[type=text] {
|
||||
color: #fff;
|
||||
background: #383838;
|
||||
border: none;
|
||||
box-shadow: 0 0 .3em #222;
|
||||
border-bottom: 1px solid #fc5;
|
||||
border-radius: .2em;
|
||||
padding: .2em .3em;
|
||||
}
|
||||
input[type="checkbox"]+label {
|
||||
color: #f5a;
|
||||
}
|
||||
input[type="checkbox"]:checked+label {
|
||||
color: #fc5;
|
||||
}
|
||||
|
||||
|
||||
|
||||
#op_search table {
|
||||
border: 1px solid #3a3a3a;
|
||||
box-shadow: 0 0 1em #222 inset;
|
||||
background: #2d2d2d;
|
||||
border-radius: .4em;
|
||||
margin: 1.4em;
|
||||
margin-bottom: 0;
|
||||
padding: 0 .5em .5em 0;
|
||||
}
|
||||
#srch_form td {
|
||||
padding: .6em .6em;
|
||||
}
|
||||
#op_search input {
|
||||
margin: 0;
|
||||
}
|
||||
#srch_q {
|
||||
white-space: pre;
|
||||
}
|
||||
#files td div span {
|
||||
color: #fff;
|
||||
padding: 0 .4em;
|
||||
font-weight: bold;
|
||||
font-style: italic;
|
||||
}
|
||||
#files td div a:hover {
|
||||
background: #444;
|
||||
color: #fff;
|
||||
}
|
||||
#files td div a {
|
||||
display: inline-block;
|
||||
white-space: nowrap;
|
||||
}
|
||||
#files td div a:last-child {
|
||||
width: 100%;
|
||||
}
|
||||
#files td div {
|
||||
border-collapse: collapse;
|
||||
width: 100%;
|
||||
}
|
||||
#files td div a:last-child {
|
||||
width: 100%;
|
||||
}
|
||||
#wrap {
|
||||
margin-top: 2em;
|
||||
}
|
||||
#tree {
|
||||
display: none;
|
||||
position: fixed;
|
||||
left: 0;
|
||||
bottom: 0;
|
||||
top: 7em;
|
||||
padding-top: .2em;
|
||||
overflow-y: auto;
|
||||
-ms-scroll-chaining: none;
|
||||
overscroll-behavior-y: none;
|
||||
scrollbar-color: #eb0 #333;
|
||||
}
|
||||
#thx_ff {
|
||||
padding: 5em 0;
|
||||
}
|
||||
#tree::-webkit-scrollbar-track {
|
||||
background: #333;
|
||||
}
|
||||
#tree::-webkit-scrollbar {
|
||||
background: #333;
|
||||
}
|
||||
#tree::-webkit-scrollbar-thumb {
|
||||
background: #eb0;
|
||||
}
|
||||
#tree:hover {
|
||||
z-index: 2;
|
||||
}
|
||||
#treeul {
|
||||
position: relative;
|
||||
left: -1.7em;
|
||||
width: calc(100% + 1.3em);
|
||||
}
|
||||
#tree>a+a {
|
||||
padding: .2em .4em;
|
||||
font-size: 1.2em;
|
||||
background: #2a2a2a;
|
||||
box-shadow: 0 .1em .2em #222 inset;
|
||||
border-radius: .3em;
|
||||
margin: .2em;
|
||||
position: relative;
|
||||
top: -.2em;
|
||||
}
|
||||
#tree>a+a:hover {
|
||||
background: #805;
|
||||
}
|
||||
#tree>a+a.on {
|
||||
background: #fc4;
|
||||
color: #400;
|
||||
text-shadow: none;
|
||||
}
|
||||
#detree {
|
||||
padding: .3em .5em;
|
||||
font-size: 1.5em;
|
||||
}
|
||||
#tree ul,
|
||||
#tree li {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
#tree ul {
|
||||
border-left: .2em solid #555;
|
||||
}
|
||||
#tree li {
|
||||
margin-left: 1em;
|
||||
list-style: none;
|
||||
border-top: 1px solid #4c4c4c;
|
||||
border-bottom: 1px solid #222;
|
||||
}
|
||||
#tree li:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
#treeul a.hl {
|
||||
color: #400;
|
||||
background: #fc4;
|
||||
border-radius: .3em;
|
||||
text-shadow: none;
|
||||
}
|
||||
#treeul a {
|
||||
display: inline-block;
|
||||
}
|
||||
#treeul a+a {
|
||||
width: calc(100% - 2em);
|
||||
background: #333;
|
||||
line-height: 1em;
|
||||
}
|
||||
#treeul a+a:hover {
|
||||
background: #222;
|
||||
color: #fff;
|
||||
}
|
||||
#treeul a:first-child {
|
||||
font-family: monospace, monospace;
|
||||
}
|
||||
.dumb_loader_thing {
|
||||
display: inline-block;
|
||||
margin: 1em .3em 1em 1em;
|
||||
padding: 0 1.2em 0 0;
|
||||
font-size: 4em;
|
||||
animation: spin 1s linear infinite;
|
||||
position: absolute;
|
||||
z-index: 9;
|
||||
}
|
||||
#files .cfg {
|
||||
display: none;
|
||||
font-size: 2em;
|
||||
white-space: nowrap;
|
||||
}
|
||||
#files th:hover .cfg,
|
||||
#files th.min .cfg {
|
||||
display: block;
|
||||
width: 1em;
|
||||
border-radius: .2em;
|
||||
margin: -1.3em auto 0 auto;
|
||||
background: #444;
|
||||
}
|
||||
#files th.min .cfg {
|
||||
margin: -.6em;
|
||||
}
|
||||
#files>thead>tr>th.min span {
|
||||
position: absolute;
|
||||
transform: rotate(270deg);
|
||||
background: linear-gradient(90deg, rgba(68,68,68,0), rgba(68,68,68,0.5) 70%, #444);
|
||||
margin-left: -4.6em;
|
||||
padding: .4em;
|
||||
top: 5.4em;
|
||||
width: 8em;
|
||||
text-align: right;
|
||||
letter-spacing: .04em;
|
||||
}
|
||||
#files td:nth-child(2n) {
|
||||
color: #f5a;
|
||||
}
|
||||
#files td.min a {
|
||||
display: none;
|
||||
}
|
||||
#files tr.play td {
|
||||
background: #fc4;
|
||||
border-color: transparent;
|
||||
color: #400;
|
||||
text-shadow: none;
|
||||
}
|
||||
#files tr.play a {
|
||||
color: inherit;
|
||||
}
|
||||
#files tr.play a:hover {
|
||||
color: #300;
|
||||
background: #fea;
|
||||
}
|
||||
#op_cfg {
|
||||
max-width: none;
|
||||
margin-right: 1.5em;
|
||||
}
|
||||
#key_notation>span {
|
||||
display: inline-block;
|
||||
padding: .2em .4em;
|
||||
}
|
||||
#op_cfg h3 {
|
||||
margin: .8em 0 0 .6em;
|
||||
padding: 0;
|
||||
border-bottom: 1px solid #555;
|
||||
}
|
||||
#opdesc {
|
||||
display: none;
|
||||
}
|
||||
#ops:hover #opdesc {
|
||||
display: block;
|
||||
background: linear-gradient(0deg,#555, #4c4c4c 80%, #444);
|
||||
box-shadow: 0 .3em 1em #222;
|
||||
padding: 1em;
|
||||
border-radius: .3em;
|
||||
position: absolute;
|
||||
z-index: 3;
|
||||
top: 6em;
|
||||
right: 1.5em;
|
||||
}
|
||||
#opdesc code {
|
||||
background: #3c3c3c;
|
||||
padding: .2em .3em;
|
||||
border-top: 1px solid #777;
|
||||
border-radius: .3em;
|
||||
font-family: monospace, monospace;
|
||||
line-height: 2em;
|
||||
}
|
||||
@@ -7,52 +7,104 @@
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css{{ ts }}">
|
||||
{%- if can_upload %}
|
||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css{{ ts }}">
|
||||
{%- endif %}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
{%- if can_upload %}
|
||||
<div id="ops">
|
||||
<a href="#" data-dest="" data-desc="close submenu">---</a>
|
||||
<a href="#" data-perm="read" data-dest="search" data-desc="search for files by attributes, path/name, music tags, or any combination of those.<br /><br /><code>foo bar</code> = must contain both foo and bar,<br /><code>foo -bar</code> = must contain foo but not bar,<br /><code>^yana .opus$</code> = must start with yana and have the opus extension">🔎</a>
|
||||
{%- if have_up2k_idx %}
|
||||
<a href="#" data-dest="up2k" data-desc="up2k: upload files (if you have write-access) or toggle into the search-mode and drag files onto the search button to see if they exist somewhere on the server">🚀</a>
|
||||
{%- else %}
|
||||
<a href="#" data-perm="write" data-dest="up2k" data-desc="up2k: upload files with resume support (close your browser and drop the same files in later)">🚀</a>
|
||||
{%- endif %}
|
||||
<a href="#" data-perm="write" data-dest="bup" data-desc="bup: basic uploader, even supports netscape 4.0">🎈</a>
|
||||
<a href="#" data-perm="write" data-dest="mkdir" data-desc="mkdir: create a new directory">📂</a>
|
||||
<a href="#" data-perm="write" data-dest="new_md" data-desc="new-md: create a new markdown document">📝</a>
|
||||
<a href="#" data-perm="write" data-dest="msg" data-desc="msg: send a message to the server log">📟</a>
|
||||
<a href="#" data-dest="cfg" data-desc="configuration options">⚙️</a>
|
||||
<div id="opdesc"></div>
|
||||
</div>
|
||||
|
||||
<div id="op_search" class="opview">
|
||||
{%- if have_tags_idx %}
|
||||
<table id="srch_form" class="tags"></table>
|
||||
{%- else %}
|
||||
<table id="srch_form"></table>
|
||||
{%- endif %}
|
||||
<div id="srch_q"></div>
|
||||
</div>
|
||||
|
||||
{%- include 'upload.html' %}
|
||||
{%- endif %}
|
||||
|
||||
<div id="op_cfg" class="opview opbox">
|
||||
<h3>key notation</h3>
|
||||
<div id="key_notation"></div>
|
||||
</div>
|
||||
|
||||
<h1 id="path">
|
||||
<a href="#" id="entree">🌲</a>
|
||||
{%- for n in vpnodes %}
|
||||
<a href="/{{ n[0] }}">{{ n[1] }}</a>
|
||||
{%- endfor %}
|
||||
</h1>
|
||||
|
||||
{%- if can_read %}
|
||||
{%- if prologue %}
|
||||
<div id="pro" class="logue">{{ prologue }}</div>
|
||||
{%- endif %}
|
||||
<div id="tree">
|
||||
<a href="#" id="detree">🍞...</a>
|
||||
<a href="#" step="2" id="twobytwo">+</a>
|
||||
<a href="#" step="-2" id="twig">–</a>
|
||||
<a href="#" id="dyntree">a</a>
|
||||
<ul id="treeul"></ul>
|
||||
<div id="thx_ff"> </div>
|
||||
</div>
|
||||
|
||||
<div id="wrap">
|
||||
|
||||
<div id="pro" class="logue">{{ logues[0] }}</div>
|
||||
|
||||
<table id="files">
|
||||
<thead>
|
||||
<tr>
|
||||
<th></th>
|
||||
<th>File Name</th>
|
||||
<th>File Size</th>
|
||||
<th>Date</th>
|
||||
<th><span>File Name</span></th>
|
||||
<th sort="int"><span>Size</span></th>
|
||||
{%- for k in taglist %}
|
||||
{%- if k.startswith('.') %}
|
||||
<th sort="int"><span>{{ k[1:] }}</span></th>
|
||||
{%- else %}
|
||||
<th><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
<th><span>T</span></th>
|
||||
<th><span>Date</span></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
|
||||
{%- for f in files %}
|
||||
<tr><td>{{ f[0] }}</td><td><a href="{{ f[1] }}">{{ f[2] }}</a></td><td>{{ f[3] }}</td><td>{{ f[4] }}</td></tr>
|
||||
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
|
||||
{%- if f.tags is defined %}
|
||||
{%- for k in taglist %}
|
||||
<td>{{ f.tags[k] }}</td>
|
||||
{%- endfor %}
|
||||
{%- endif %}
|
||||
<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
|
||||
{%- endfor %}
|
||||
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
{%- if epilogue %}
|
||||
<div id="epi" class="logue">{{ epilogue }}</div>
|
||||
{%- endif %}
|
||||
{%- endif %}
|
||||
<div id="epi" class="logue">{{ logues[1] }}</div>
|
||||
|
||||
<h2><a href="?h">control-panel</a></h2>
|
||||
|
||||
</div>
|
||||
|
||||
{%- if srv_info %}
|
||||
<div id="srv_info"><span>{{ srv_info }}</span></div>
|
||||
{%- endif %}
|
||||
|
||||
<div id="widget">
|
||||
<div id="wtoggle">♫</div>
|
||||
<div id="widgeti">
|
||||
@@ -62,14 +114,16 @@
|
||||
<canvas id="barbuf"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{%- if can_read %}
|
||||
|
||||
<script>
|
||||
var tag_order_cfg = {{ tag_order }};
|
||||
</script>
|
||||
<script src="/.cpr/util.js{{ ts }}"></script>
|
||||
<script src="/.cpr/browser.js{{ ts }}"></script>
|
||||
{%- endif %}
|
||||
|
||||
{%- if can_upload %}
|
||||
<script src="/.cpr/up2k.js{{ ts }}"></script>
|
||||
{%- endif %}
|
||||
<script>
|
||||
apply_perms({{ perms }});
|
||||
</script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,3 +1,7 @@
|
||||
@font-face {
|
||||
font-family: 'scp';
|
||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
|
||||
}
|
||||
html, body {
|
||||
color: #333;
|
||||
background: #eee;
|
||||
@@ -9,6 +13,7 @@ html, body {
|
||||
}
|
||||
#mw {
|
||||
margin: 0 auto;
|
||||
padding: 0 1.5em;
|
||||
}
|
||||
pre, code, a {
|
||||
color: #480;
|
||||
@@ -22,7 +27,7 @@ code {
|
||||
font-size: .96em;
|
||||
}
|
||||
pre, code {
|
||||
font-family: monospace, monospace;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
@@ -42,7 +47,7 @@ pre code {
|
||||
pre code:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
pre code:before {
|
||||
pre code::before {
|
||||
content: counter(precode);
|
||||
-webkit-user-select: none;
|
||||
display: inline-block;
|
||||
@@ -104,8 +109,12 @@ h2 a, h4 a, h6 a {
|
||||
#mp ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em {
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
@@ -168,14 +177,12 @@ small {
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
margin: 1em 0;
|
||||
}
|
||||
td {
|
||||
th, td {
|
||||
padding: .2em .5em;
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
th {
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
blink {
|
||||
animation: blinker .7s cubic-bezier(.9, 0, .1, 1) infinite;
|
||||
}
|
||||
@@ -198,13 +205,15 @@ blink {
|
||||
height: 100%;
|
||||
}
|
||||
#mw {
|
||||
padding: 0 1em;
|
||||
margin: 0 auto;
|
||||
right: 0;
|
||||
}
|
||||
#mp {
|
||||
max-width: 54em;
|
||||
max-width: 52em;
|
||||
margin-bottom: 6em;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
word-wrap: break-word; /*ie*/
|
||||
}
|
||||
a {
|
||||
color: #fff;
|
||||
@@ -238,12 +247,6 @@ blink {
|
||||
z-index: 10;
|
||||
width: calc(100% - 1em);
|
||||
}
|
||||
#mn.undocked {
|
||||
position: fixed;
|
||||
padding: 1.7em 0 1.5em 1em;
|
||||
box-shadow: 0 0 .5em rgba(0, 0, 0, 0.3);
|
||||
background: #f7f7f7;
|
||||
}
|
||||
#mn a {
|
||||
color: #444;
|
||||
background: none;
|
||||
@@ -261,7 +264,7 @@ blink {
|
||||
#mn a:last-child {
|
||||
padding-right: .5em;
|
||||
}
|
||||
#mn a:not(:last-child):after {
|
||||
#mn a:not(:last-child)::after {
|
||||
content: '';
|
||||
width: 1.05em;
|
||||
height: 1.05em;
|
||||
@@ -290,6 +293,32 @@ blink {
|
||||
text-decoration: underline;
|
||||
border: none;
|
||||
}
|
||||
#mh a:hover {
|
||||
color: #000;
|
||||
background: #ddd;
|
||||
}
|
||||
#toolsbox {
|
||||
overflow: hidden;
|
||||
display: inline-block;
|
||||
background: #eee;
|
||||
height: 1.5em;
|
||||
padding: 0 .2em;
|
||||
margin: 0 .2em;
|
||||
position: absolute;
|
||||
}
|
||||
#toolsbox.open {
|
||||
height: auto;
|
||||
overflow: visible;
|
||||
background: #eee;
|
||||
box-shadow: 0 .2em .2em #ccc;
|
||||
padding-bottom: .2em;
|
||||
}
|
||||
#toolsbox a {
|
||||
display: block;
|
||||
}
|
||||
#toolsbox a+a {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -333,8 +362,12 @@ blink {
|
||||
html.dark #m>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em {
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
@@ -355,7 +388,7 @@ blink {
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
html.dark #mn a:not(:last-child):after {
|
||||
html.dark #mn a:not(:last-child)::after {
|
||||
border-color: rgba(255,255,255,0.3);
|
||||
}
|
||||
html.dark #mn a {
|
||||
@@ -372,21 +405,32 @@ blink {
|
||||
color: #ccc;
|
||||
background: none;
|
||||
}
|
||||
html.dark #mh a:hover {
|
||||
background: #333;
|
||||
color: #fff;
|
||||
}
|
||||
html.dark #toolsbox {
|
||||
background: #222;
|
||||
}
|
||||
html.dark #toolsbox.open {
|
||||
box-shadow: 0 .2em .2em #069;
|
||||
border-radius: 0 0 .4em .4em;
|
||||
}
|
||||
}
|
||||
|
||||
@media screen and (min-width: 70em) {
|
||||
@media screen and (min-width: 66em) {
|
||||
#mw {
|
||||
position: fixed;
|
||||
overflow-y: auto;
|
||||
left: 14em;
|
||||
left: calc(100% - 57em);
|
||||
left: calc(100% - 55em);
|
||||
max-width: none;
|
||||
bottom: 0;
|
||||
scrollbar-color: #eb0 #f7f7f7;
|
||||
}
|
||||
#toc {
|
||||
width: 13em;
|
||||
width: calc(100% - 57.3em);
|
||||
width: calc(100% - 55.3em);
|
||||
max-width: 30em;
|
||||
background: #eee;
|
||||
position: fixed;
|
||||
@@ -433,34 +477,119 @@ blink {
|
||||
width: .8em;
|
||||
}
|
||||
html.dark #toc::-webkit-scrollbar-thumb {
|
||||
background: #eb0;
|
||||
}
|
||||
html.dark #mn.undocked {
|
||||
box-shadow: 0 0 .5em #555;
|
||||
border: none;
|
||||
background: #0a0a0a;
|
||||
background: #b80;
|
||||
}
|
||||
}
|
||||
@media screen and (min-width: 87.5em) {
|
||||
@media screen and (min-width: 85.5em) {
|
||||
#toc { width: 30em }
|
||||
#mw { left: 30.5em }
|
||||
}
|
||||
@media print {
|
||||
@page {
|
||||
size: A4;
|
||||
padding: 0;
|
||||
margin: .5in .6in;
|
||||
mso-header-margin: .6in;
|
||||
mso-footer-margin: .6in;
|
||||
mso-paper-source: 0;
|
||||
}
|
||||
a {
|
||||
color: #079;
|
||||
text-decoration: none;
|
||||
border-bottom: .07em solid #4ac;
|
||||
padding: 0 .3em;
|
||||
}
|
||||
#toc {
|
||||
margin: 0 !important;
|
||||
}
|
||||
#toc>ul {
|
||||
border-left: .1em solid #84c4dd;
|
||||
}
|
||||
#mn, #mh {
|
||||
display: none;
|
||||
}
|
||||
html, body, #toc, #mw {
|
||||
margin: 0 !important;
|
||||
word-break: break-word;
|
||||
width: 52em;
|
||||
}
|
||||
#toc {
|
||||
margin-left: 1em !important;
|
||||
}
|
||||
#toc a {
|
||||
color: #000 !important;
|
||||
}
|
||||
#toc a::after {
|
||||
/* hopefully supported by browsers eventually */
|
||||
content: leader('.') target-counter(attr(href), page);
|
||||
}
|
||||
a[ctr]::before {
|
||||
content: attr(ctr) '. ';
|
||||
}
|
||||
h1 {
|
||||
margin: 2em 0;
|
||||
}
|
||||
h2 {
|
||||
margin: 2em 0 0 0;
|
||||
}
|
||||
h1, h2, h3 {
|
||||
page-break-inside: avoid;
|
||||
}
|
||||
h1::after,
|
||||
h2::after,
|
||||
h3::after {
|
||||
content: 'orz';
|
||||
color: transparent;
|
||||
display: block;
|
||||
line-height: 1em;
|
||||
padding: 4em 0 0 0;
|
||||
margin: 0 0 -5em 0;
|
||||
}
|
||||
p {
|
||||
page-break-inside: avoid;
|
||||
}
|
||||
table {
|
||||
page-break-inside: auto;
|
||||
}
|
||||
tr {
|
||||
page-break-inside: avoid;
|
||||
page-break-after: auto;
|
||||
}
|
||||
thead {
|
||||
display: table-header-group;
|
||||
}
|
||||
tfoot {
|
||||
display: table-footer-group;
|
||||
}
|
||||
#mp a.vis::after {
|
||||
content: ' (' attr(href) ')';
|
||||
border-bottom: 1px solid #bbb;
|
||||
color: #444;
|
||||
}
|
||||
blockquote {
|
||||
border-color: #555;
|
||||
}
|
||||
code {
|
||||
border-color: #bbb;
|
||||
}
|
||||
pre, pre code {
|
||||
border-color: #999;
|
||||
}
|
||||
pre code::before {
|
||||
color: #058;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark a {
|
||||
color: #000;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
color: #240;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #940;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -17,15 +17,23 @@
|
||||
<a id="save" href="?edit">save</a>
|
||||
<a id="sbs" href="#">sbs</a>
|
||||
<a id="nsbs" href="#">editor</a>
|
||||
<a id="help" href="#">help</a>
|
||||
<div id="toolsbox">
|
||||
<a id="tools" href="#">tools</a>
|
||||
<a id="fmt_table" href="#">prettify table (ctrl-k)</a>
|
||||
<a id="iter_uni" href="#">non-ascii: iterate (ctrl-u)</a>
|
||||
<a id="mark_uni" href="#">non-ascii: markup</a>
|
||||
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
|
||||
<a id="help" href="#">help</a>
|
||||
</div>
|
||||
{%- else %}
|
||||
<a href="?edit">edit (basic)</a>
|
||||
<a href="?edit2">edit (fancy)</a>
|
||||
<a href="?raw">view raw</a>
|
||||
{%- endif %}
|
||||
</div>
|
||||
<div id="toc"></div>
|
||||
<div id="mtw">
|
||||
<textarea id="mt">{{ md }}</textarea>
|
||||
<textarea id="mt" autocomplete="off">{{ md }}</textarea>
|
||||
</div>
|
||||
<div id="mw">
|
||||
<div id="ml">
|
||||
@@ -39,16 +47,19 @@
|
||||
|
||||
{%- if edit %}
|
||||
<div id="helpbox">
|
||||
<textarea>
|
||||
<textarea autocomplete="off">
|
||||
|
||||
write markdown (html is permitted)
|
||||
write markdown (most html is 🙆 too)
|
||||
|
||||
### hotkey list
|
||||
## hotkey list
|
||||
* `Ctrl-S` to save
|
||||
* `Ctrl-E` to toggle mode
|
||||
* `Ctrl-K` to prettyprint a table
|
||||
* `Ctrl-U` to iterate non-ascii chars
|
||||
* `Ctrl-H` / `Ctrl-Shift-H` to create a header
|
||||
* `TAB` / `Shift-TAB` to indent/dedent a selection
|
||||
|
||||
### toolbar
|
||||
## toolbar
|
||||
1. toggle dark mode
|
||||
2. show/hide navigation bar
|
||||
3. save changes on server
|
||||
@@ -56,16 +67,68 @@ write markdown (html is permitted)
|
||||
5. toggle editor/preview
|
||||
6. this thing :^)
|
||||
|
||||
.
|
||||
## markdown
|
||||
|||
|
||||
|--|--|
|
||||
|`**bold**`|**bold**|
|
||||
|`_italic_`|_italic_|
|
||||
|`~~strike~~`|~~strike~~|
|
||||
|`` `code` ``|`code`|
|
||||
|`[](#hotkey-list)`|[](#hotkey-list)|
|
||||
|`[](/foo/bar.md#header)`|[](/foo/bar.md#header)|
|
||||
|`<blink>💯</blink>`|<blink>💯</blink>|
|
||||
|
||||
## tables
|
||||
|left-aligned|centered|right-aligned
|
||||
| ---------- | :----: | ----------:
|
||||
|one |two |three
|
||||
|
||||
|left-aligned|centered|right-aligned
|
||||
| ---------- | :----: | ----------:
|
||||
|one |two |three
|
||||
|
||||
## lists
|
||||
* one
|
||||
* two
|
||||
1. one
|
||||
1. two
|
||||
* one
|
||||
* two
|
||||
1. one
|
||||
1. two
|
||||
|
||||
## headers
|
||||
# level 1
|
||||
## level 2
|
||||
### level 3
|
||||
|
||||
## quote
|
||||
> hello
|
||||
> hello
|
||||
|
||||
## codeblock
|
||||
four spaces (no tab pls)
|
||||
|
||||
## code in lists
|
||||
* foo
|
||||
bar
|
||||
six spaces total
|
||||
* foo
|
||||
bar
|
||||
six spaces total
|
||||
.
|
||||
</textarea>
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
<script>
|
||||
|
||||
var link_md_as_html = false; // TODO (does nothing)
|
||||
var last_modified = {{ lastmod }};
|
||||
var md_opt = {
|
||||
link_md_as_html: false,
|
||||
allow_plugins: {{ md_plug }},
|
||||
modpoll_freq: {{ md_chk_rate }}
|
||||
};
|
||||
|
||||
(function () {
|
||||
var btn = document.getElementById("lightswitch");
|
||||
@@ -82,17 +145,11 @@ var last_modified = {{ lastmod }};
|
||||
toggle();
|
||||
})();
|
||||
|
||||
if (!String.startsWith) {
|
||||
String.prototype.startsWith = function(s, i) {
|
||||
i = i>0 ? i|0 : 0;
|
||||
return this.substring(i, i + s.length) === s;
|
||||
};
|
||||
}
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js"></script>
|
||||
<script src="/.cpr/deps/marked.full.js"></script>
|
||||
<script src="/.cpr/md.js"></script>
|
||||
{%- if edit %}
|
||||
<script src="/.cpr/md2.js"></script>
|
||||
<script src="/.cpr/md2.js"></script>
|
||||
{%- endif %}
|
||||
</body></html>
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
var dom_toc = document.getElementById('toc');
|
||||
var dom_wrap = document.getElementById('mw');
|
||||
var dom_hbar = document.getElementById('mh');
|
||||
var dom_nav = document.getElementById('mn');
|
||||
var dom_pre = document.getElementById('mp');
|
||||
var dom_src = document.getElementById('mt');
|
||||
var dom_navtgl = document.getElementById('navtoggle');
|
||||
"use strict";
|
||||
|
||||
var dom_toc = ebi('toc');
|
||||
var dom_wrap = ebi('mw');
|
||||
var dom_hbar = ebi('mh');
|
||||
var dom_nav = ebi('mn');
|
||||
var dom_pre = ebi('mp');
|
||||
var dom_src = ebi('mt');
|
||||
var dom_navtgl = ebi('navtoggle');
|
||||
|
||||
|
||||
// chrome 49 needs this
|
||||
@@ -18,6 +20,10 @@ var dbg = function () { };
|
||||
// dbg = console.log
|
||||
|
||||
|
||||
// plugins
|
||||
var md_plug = {};
|
||||
|
||||
|
||||
function hesc(txt) {
|
||||
return txt.replace(/&/g, "&").replace(/</g, "<").replace(/>/g, ">");
|
||||
}
|
||||
@@ -30,11 +36,24 @@ function cls(dom, name, add) {
|
||||
}
|
||||
|
||||
|
||||
function static(obj) {
|
||||
function statify(obj) {
|
||||
return JSON.parse(JSON.stringify(obj));
|
||||
}
|
||||
|
||||
|
||||
// dodge browser issues
|
||||
(function () {
|
||||
var ua = navigator.userAgent;
|
||||
if (ua.indexOf(') Gecko/') !== -1 && /Linux| Mac /.exec(ua)) {
|
||||
// necessary on ff-68.7 at least
|
||||
var s = document.createElement('style');
|
||||
s.innerHTML = '@page { margin: .5in .6in .8in .6in; }';
|
||||
console.log(s.innerHTML);
|
||||
document.head.appendChild(s);
|
||||
}
|
||||
})();
|
||||
|
||||
|
||||
// add navbar
|
||||
(function () {
|
||||
var n = document.location + '';
|
||||
@@ -46,7 +65,7 @@ function static(obj) {
|
||||
if (a > 0)
|
||||
loc.push(n[a]);
|
||||
|
||||
var dec = hesc(decodeURIComponent(n[a]));
|
||||
var dec = hesc(uricom_dec(n[a])[0]);
|
||||
|
||||
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
||||
}
|
||||
@@ -141,17 +160,125 @@ function copydom(src, dst, lv) {
|
||||
}
|
||||
|
||||
|
||||
function convert_markdown(md_text) {
|
||||
marked.setOptions({
|
||||
function md_plug_err(ex, js) {
|
||||
var errbox = ebi('md_errbox');
|
||||
if (errbox)
|
||||
errbox.parentNode.removeChild(errbox);
|
||||
|
||||
if (!ex)
|
||||
return;
|
||||
|
||||
var msg = (ex + '').split('\n')[0];
|
||||
var ln = ex.lineNumber;
|
||||
var o = null;
|
||||
if (ln) {
|
||||
msg = "Line " + ln + ", " + msg;
|
||||
var lns = js.split('\n');
|
||||
if (ln < lns.length) {
|
||||
o = document.createElement('span');
|
||||
o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block';
|
||||
o.textContent = lns[ln - 1];
|
||||
}
|
||||
}
|
||||
errbox = document.createElement('div');
|
||||
errbox.setAttribute('id', 'md_errbox');
|
||||
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
|
||||
errbox.textContent = msg;
|
||||
errbox.onclick = function () {
|
||||
alert('' + ex.stack);
|
||||
};
|
||||
if (o) {
|
||||
errbox.appendChild(o);
|
||||
errbox.style.padding = '.25em .5em';
|
||||
}
|
||||
dom_nav.appendChild(errbox);
|
||||
|
||||
try {
|
||||
console.trace();
|
||||
}
|
||||
catch (ex2) { }
|
||||
}
|
||||
|
||||
|
||||
function load_plug(md_text, plug_type) {
|
||||
if (!md_opt.allow_plugins)
|
||||
return md_text;
|
||||
|
||||
var find = '\n```copyparty_' + plug_type + '\n';
|
||||
var ofs = md_text.indexOf(find);
|
||||
if (ofs === -1)
|
||||
return md_text;
|
||||
|
||||
var ofs2 = md_text.indexOf('\n```', ofs + 1);
|
||||
if (ofs2 == -1)
|
||||
return md_text;
|
||||
|
||||
var js = md_text.slice(ofs + find.length, ofs2 + 1);
|
||||
var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
|
||||
|
||||
var old_plug = md_plug[plug_type];
|
||||
if (!old_plug || old_plug[1] != js) {
|
||||
js = 'const x = { ' + js + ' }; x;';
|
||||
try {
|
||||
var x = eval(js);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug[plug_type] = null;
|
||||
md_plug_err(ex, js);
|
||||
return md;
|
||||
}
|
||||
if (x['ctor']) {
|
||||
x['ctor']();
|
||||
delete x['ctor'];
|
||||
}
|
||||
md_plug[plug_type] = [x, js];
|
||||
}
|
||||
|
||||
return md;
|
||||
}
|
||||
|
||||
|
||||
function convert_markdown(md_text, dest_dom) {
|
||||
md_text = md_text.replace(/\r/g, '');
|
||||
|
||||
md_plug_err(null);
|
||||
md_text = load_plug(md_text, 'pre');
|
||||
md_text = load_plug(md_text, 'post');
|
||||
|
||||
var marked_opts = {
|
||||
//headerPrefix: 'h-',
|
||||
breaks: true,
|
||||
gfm: true
|
||||
});
|
||||
var md_html = marked(md_text);
|
||||
};
|
||||
|
||||
var ext = md_plug['pre'];
|
||||
if (ext)
|
||||
Object.assign(marked_opts, ext[0]);
|
||||
|
||||
try {
|
||||
var md_html = marked(md_text, marked_opts);
|
||||
}
|
||||
catch (ex) {
|
||||
if (ext)
|
||||
md_plug_err(ex, ext[1]);
|
||||
|
||||
throw ex;
|
||||
}
|
||||
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
||||
|
||||
var nodes = md_dom.getElementsByTagName('a');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
var href = nodes[a].getAttribute('href');
|
||||
var txt = nodes[a].textContent;
|
||||
|
||||
if (!txt)
|
||||
nodes[a].textContent = href;
|
||||
else if (href !== txt)
|
||||
nodes[a].setAttribute('class', 'vis');
|
||||
}
|
||||
|
||||
// todo-lists (should probably be a marked extension)
|
||||
var nodes = md_dom.getElementsByTagName('input');
|
||||
nodes = md_dom.getElementsByTagName('input');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
var dom_box = nodes[a];
|
||||
if (dom_box.getAttribute('type') !== 'checkbox')
|
||||
@@ -172,7 +299,7 @@ function convert_markdown(md_text) {
|
||||
}
|
||||
|
||||
// separate <code> for each line in <pre>
|
||||
var nodes = md_dom.getElementsByTagName('pre');
|
||||
nodes = md_dom.getElementsByTagName('pre');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
var el = nodes[a];
|
||||
|
||||
@@ -185,7 +312,7 @@ function convert_markdown(md_text) {
|
||||
continue;
|
||||
|
||||
var nline = parseInt(el.getAttribute('data-ln')) + 1;
|
||||
var lines = el.innerHTML.replace(/\r?\n<\/code>$/i, '</code>').split(/\r?\n/g);
|
||||
var lines = el.innerHTML.replace(/\n<\/code>$/i, '</code>').split(/\n/g);
|
||||
for (var b = 0; b < lines.length - 1; b++)
|
||||
lines[b] += '</code>\n<code data-ln="' + (nline + b) + '">';
|
||||
|
||||
@@ -218,18 +345,36 @@ function convert_markdown(md_text) {
|
||||
el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>';
|
||||
}
|
||||
|
||||
copydom(md_dom, dom_pre, 0);
|
||||
ext = md_plug['post'];
|
||||
if (ext && ext[0].render)
|
||||
try {
|
||||
ext[0].render(md_dom);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug_err(ex, ext[1]);
|
||||
}
|
||||
|
||||
copydom(md_dom, dest_dom, 0);
|
||||
|
||||
if (ext && ext[0].render2)
|
||||
try {
|
||||
ext[0].render2(dest_dom);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug_err(ex, ext[1]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function init_toc() {
|
||||
var loader = document.getElementById('ml');
|
||||
var loader = ebi('ml');
|
||||
loader.parentNode.removeChild(loader);
|
||||
|
||||
var anchors = []; // list of toc entries, complex objects
|
||||
var anchor = null; // current toc node
|
||||
var html = []; // generated toc html
|
||||
var lv = 0; // current indentation level in the toc html
|
||||
var ctr = [0, 0, 0, 0, 0, 0];
|
||||
|
||||
var manip_nodes_dyn = dom_pre.getElementsByTagName('*');
|
||||
var manip_nodes = [];
|
||||
@@ -250,8 +395,18 @@ function init_toc() {
|
||||
html.push('</ul>');
|
||||
lv--;
|
||||
}
|
||||
ctr[lv - 1]++;
|
||||
for (var b = lv; b < 6; b++)
|
||||
ctr[b] = 0;
|
||||
|
||||
html.push('<li>' + elm.innerHTML + '</li>');
|
||||
elm.childNodes[0].setAttribute('ctr', ctr.slice(0, lv).join('.'));
|
||||
|
||||
var elm2 = elm.cloneNode(true);
|
||||
elm2.childNodes[0].textContent = elm.textContent;
|
||||
while (elm2.childNodes.length > 1)
|
||||
elm2.removeChild(elm2.childNodes[1]);
|
||||
|
||||
html.push('<li>' + elm2.innerHTML + '</li>');
|
||||
|
||||
if (anchor != null)
|
||||
anchors.push(anchor);
|
||||
@@ -333,7 +488,7 @@ function init_toc() {
|
||||
|
||||
|
||||
// "main" :p
|
||||
convert_markdown(dom_src.value);
|
||||
convert_markdown(dom_src.value, dom_pre);
|
||||
var toc = init_toc();
|
||||
|
||||
|
||||
@@ -365,45 +520,13 @@ var redraw = (function () {
|
||||
|
||||
|
||||
dom_navtgl.onclick = function () {
|
||||
var timeout = null;
|
||||
function show_nav(e) {
|
||||
if (e && e.target == dom_hbar && e.pageX && e.pageX < dom_hbar.offsetWidth / 2)
|
||||
return;
|
||||
|
||||
clearTimeout(timeout);
|
||||
dom_nav.style.display = 'block';
|
||||
}
|
||||
function hide_nav() {
|
||||
clearTimeout(timeout);
|
||||
timeout = setTimeout(function () {
|
||||
dom_nav.style.display = 'none';
|
||||
}, 30);
|
||||
}
|
||||
var hidden = dom_navtgl.innerHTML == 'hide nav';
|
||||
dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav';
|
||||
if (hidden) {
|
||||
dom_nav.setAttribute('class', 'undocked');
|
||||
dom_nav.style.display = 'none';
|
||||
dom_nav.style.top = dom_hbar.offsetHeight + 'px';
|
||||
dom_nav.onmouseenter = show_nav;
|
||||
dom_nav.onmouseleave = hide_nav;
|
||||
dom_hbar.onmouseenter = show_nav;
|
||||
dom_hbar.onmouseleave = hide_nav;
|
||||
}
|
||||
else {
|
||||
dom_nav.setAttribute('class', '');
|
||||
dom_nav.style.display = 'block';
|
||||
dom_nav.style.top = '0';
|
||||
dom_nav.onmouseenter = null;
|
||||
dom_nav.onmouseleave = null;
|
||||
dom_hbar.onmouseenter = null;
|
||||
dom_hbar.onmouseleave = null;
|
||||
}
|
||||
if (window.localStorage)
|
||||
localStorage.setItem('hidenav', hidden ? 1 : 0);
|
||||
dom_nav.style.display = hidden ? 'none' : 'block';
|
||||
|
||||
swrite('hidenav', hidden ? 1 : 0);
|
||||
redraw();
|
||||
};
|
||||
|
||||
if (window.localStorage && localStorage.getItem('hidenav') == 1)
|
||||
if (sread('hidenav') == 1)
|
||||
dom_navtgl.onclick();
|
||||
|
||||
@@ -4,12 +4,12 @@
|
||||
#mtw {
|
||||
display: block;
|
||||
position: fixed;
|
||||
left: 0;
|
||||
left: .5em;
|
||||
bottom: 0;
|
||||
width: calc(100% - 58em);
|
||||
width: calc(100% - 56em);
|
||||
}
|
||||
#mw {
|
||||
left: calc(100% - 57em);
|
||||
left: calc(100% - 55em);
|
||||
overflow-y: auto;
|
||||
position: fixed;
|
||||
bottom: 0;
|
||||
@@ -30,11 +30,11 @@
|
||||
#mw.single {
|
||||
margin: 0;
|
||||
left: 1em;
|
||||
left: max(1em, calc((100% - 58em) / 2));
|
||||
left: max(1em, calc((100% - 56em) / 2));
|
||||
}
|
||||
#mtw.single {
|
||||
width: 57em;
|
||||
width: min(57em, calc(100% - 2em));
|
||||
width: 55em;
|
||||
width: min(55em, calc(100% - 2em));
|
||||
}
|
||||
|
||||
|
||||
@@ -43,7 +43,7 @@
|
||||
}
|
||||
#mt, #mtr {
|
||||
width: 100%;
|
||||
height: calc(100% - 5px);
|
||||
height: calc(100% - 1px);
|
||||
color: #444;
|
||||
background: #f7f7f7;
|
||||
border: 1px solid #999;
|
||||
@@ -77,32 +77,50 @@ html.dark #mt {
|
||||
background: #f97;
|
||||
border-radius: .15em;
|
||||
}
|
||||
html.dark #save.force-save {
|
||||
color: #fca;
|
||||
background: #720;
|
||||
}
|
||||
#save.disabled {
|
||||
opacity: .4;
|
||||
}
|
||||
#helpbox,
|
||||
#toast {
|
||||
background: #f7f7f7;
|
||||
border-radius: .4em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#helpbox {
|
||||
display: none;
|
||||
position: fixed;
|
||||
background: #f7f7f7;
|
||||
box-shadow: 0 .5em 2em #777;
|
||||
border-radius: .4em;
|
||||
padding: 2em;
|
||||
top: 4em;
|
||||
overflow-y: auto;
|
||||
box-shadow: 0 .5em 2em #777;
|
||||
height: calc(100% - 12em);
|
||||
left: calc(50% - 15em);
|
||||
right: 0;
|
||||
width: 30em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#helpclose {
|
||||
display: block;
|
||||
}
|
||||
html.dark #helpbox {
|
||||
background: #222;
|
||||
box-shadow: 0 .5em 2em #444;
|
||||
}
|
||||
html.dark #helpbox,
|
||||
html.dark #toast {
|
||||
background: #222;
|
||||
border: 1px solid #079;
|
||||
border-width: 1px 0;
|
||||
}
|
||||
|
||||
# mt {opacity: .5;top:1px}
|
||||
#toast {
|
||||
font-weight: bold;
|
||||
text-align: center;
|
||||
padding: .6em 0;
|
||||
position: fixed;
|
||||
z-index: 9001;
|
||||
top: 30%;
|
||||
transition: opacity 0.2s ease-in-out;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
@@ -1,16 +1,25 @@
|
||||
"use strict";
|
||||
|
||||
|
||||
// server state
|
||||
var server_md = dom_src.value;
|
||||
|
||||
|
||||
// the non-ascii whitelist
|
||||
var esc_uni_whitelist = '\\n\\t\\x20-\\x7eÆØÅæøå';
|
||||
var js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
|
||||
|
||||
|
||||
// dom nodes
|
||||
var dom_swrap = document.getElementById('mtw');
|
||||
var dom_sbs = document.getElementById('sbs');
|
||||
var dom_nsbs = document.getElementById('nsbs');
|
||||
var dom_swrap = ebi('mtw');
|
||||
var dom_sbs = ebi('sbs');
|
||||
var dom_nsbs = ebi('nsbs');
|
||||
var dom_tbox = ebi('toolsbox');
|
||||
var dom_ref = (function () {
|
||||
var d = document.createElement('div');
|
||||
d.setAttribute('id', 'mtr');
|
||||
dom_swrap.appendChild(d);
|
||||
d = document.getElementById('mtr');
|
||||
d = ebi('mtr');
|
||||
// hide behind the textarea (offsetTop is not computed if display:none)
|
||||
dom_src.style.zIndex = '4';
|
||||
d.style.zIndex = '3';
|
||||
@@ -19,14 +28,12 @@ var dom_ref = (function () {
|
||||
|
||||
|
||||
// line->scrollpos maps
|
||||
var map_src = [];
|
||||
var map_pre = [];
|
||||
function genmap(dom) {
|
||||
function genmapq(dom, query) {
|
||||
var ret = [];
|
||||
var last_y = -1;
|
||||
var parent_y = 0;
|
||||
var parent_n = null;
|
||||
var nodes = dom.querySelectorAll('*[data-ln]');
|
||||
var nodes = dom.querySelectorAll(query);
|
||||
for (var a = 0; a < nodes.length; a++) {
|
||||
var n = nodes[a];
|
||||
var ln = parseInt(n.getAttribute('data-ln'));
|
||||
@@ -35,7 +42,7 @@ function genmap(dom) {
|
||||
|
||||
var y = 0;
|
||||
var par = n.offsetParent;
|
||||
if (par != parent_n) {
|
||||
if (par && par != parent_n) {
|
||||
while (par && par != dom) {
|
||||
y += par.offsetTop;
|
||||
par = par.offsetParent;
|
||||
@@ -49,7 +56,7 @@ function genmap(dom) {
|
||||
while (ln > ret.length)
|
||||
ret.push(null);
|
||||
|
||||
var y = parent_y + n.offsetTop;
|
||||
y = parent_y + n.offsetTop;
|
||||
if (y <= last_y)
|
||||
//console.log('awawa');
|
||||
continue;
|
||||
@@ -60,6 +67,25 @@ function genmap(dom) {
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
var map_src = [];
|
||||
var map_pre = [];
|
||||
function genmap(dom, oldmap) {
|
||||
var find = nlines;
|
||||
while (oldmap && find --> 0) {
|
||||
var tmap = genmapq(dom, '*[data-ln="' + find + '"]');
|
||||
if (!tmap || !tmap.length)
|
||||
continue;
|
||||
|
||||
var cy = tmap[find];
|
||||
var oy = parseInt(oldmap[find]);
|
||||
if (cy + 24 > oy && cy - 24 < oy)
|
||||
return oldmap;
|
||||
|
||||
console.log('map regen', dom.getAttribute('id'), find, oy, cy, oy - cy);
|
||||
break;
|
||||
}
|
||||
return genmapq(dom, '*[data-ln]');
|
||||
}
|
||||
|
||||
|
||||
// input handler
|
||||
@@ -70,7 +96,7 @@ var draw_md = (function () {
|
||||
function draw_md() {
|
||||
var t0 = new Date().getTime();
|
||||
var src = dom_src.value;
|
||||
convert_markdown(src);
|
||||
convert_markdown(src, dom_pre);
|
||||
|
||||
var lines = hesc(src).replace(/\r/g, "").split('\n');
|
||||
nlines = lines.length;
|
||||
@@ -79,13 +105,13 @@ var draw_md = (function () {
|
||||
html.push('<span data-ln="' + (a + 1) + '">' + lines[a] + "</span>");
|
||||
|
||||
dom_ref.innerHTML = html.join('\n');
|
||||
map_src = genmap(dom_ref);
|
||||
map_pre = genmap(dom_pre);
|
||||
map_src = genmap(dom_ref, map_src);
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
|
||||
cls(document.getElementById('save'), 'disabled', src == server_md);
|
||||
cls(ebi('save'), 'disabled', src == server_md);
|
||||
|
||||
var t1 = new Date().getTime();
|
||||
delay = t1 - t0 > 150 ? 25 : 1;
|
||||
delay = t1 - t0 > 100 ? 25 : 1;
|
||||
}
|
||||
|
||||
var timeout = null;
|
||||
@@ -108,8 +134,8 @@ redraw = (function () {
|
||||
dom_wrap.style.top = y;
|
||||
dom_swrap.style.top = y;
|
||||
dom_ref.style.width = getComputedStyle(dom_src).offsetWidth + 'px';
|
||||
map_src = genmap(dom_ref);
|
||||
map_pre = genmap(dom_pre);
|
||||
map_src = genmap(dom_ref, map_src);
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
dbg(document.body.clientWidth + 'x' + document.body.clientHeight);
|
||||
}
|
||||
function setsbs() {
|
||||
@@ -118,7 +144,7 @@ redraw = (function () {
|
||||
onresize();
|
||||
}
|
||||
function modetoggle() {
|
||||
mode = dom_nsbs.innerHTML;
|
||||
var mode = dom_nsbs.innerHTML;
|
||||
dom_nsbs.innerHTML = mode == 'editor' ? 'preview' : 'editor';
|
||||
mode += ' single';
|
||||
dom_wrap.setAttribute('class', mode);
|
||||
@@ -147,14 +173,14 @@ redraw = (function () {
|
||||
dst.scrollTop = 0;
|
||||
return;
|
||||
}
|
||||
if (y + 8 + src.clientHeight > src.scrollHeight) {
|
||||
if (y + 48 + src.clientHeight > src.scrollHeight) {
|
||||
dst.scrollTop = dst.scrollHeight - dst.clientHeight;
|
||||
return;
|
||||
}
|
||||
y += src.clientHeight / 2;
|
||||
var sy1 = -1, sy2 = -1, dy1 = -1, dy2 = -1;
|
||||
for (var a = 1; a < nlines + 1; a++) {
|
||||
if (srcmap[a] === null || dstmap[a] === null)
|
||||
if (srcmap[a] == null || dstmap[a] == null)
|
||||
continue;
|
||||
|
||||
if (srcmap[a] > y) {
|
||||
@@ -197,14 +223,108 @@ redraw = (function () {
|
||||
})();
|
||||
|
||||
|
||||
// modification checker
|
||||
function Modpoll() {
|
||||
this.skip_one = true;
|
||||
this.disabled = false;
|
||||
|
||||
this.periodic = function () {
|
||||
var that = this;
|
||||
setTimeout(function () {
|
||||
that.periodic();
|
||||
}, 1000 * md_opt.modpoll_freq);
|
||||
|
||||
var skip = null;
|
||||
|
||||
if (ebi('toast'))
|
||||
skip = 'toast';
|
||||
|
||||
else if (this.skip_one)
|
||||
skip = 'saved';
|
||||
|
||||
else if (this.disabled)
|
||||
skip = 'disabled';
|
||||
|
||||
if (skip) {
|
||||
console.log('modpoll skip, ' + skip);
|
||||
this.skip_one = false;
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('modpoll...');
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + new Date().getTime();
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.modpoll = this;
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = this.cb;
|
||||
xhr.send();
|
||||
}
|
||||
|
||||
this.cb = function () {
|
||||
if (this.modpoll.disabled || this.modpoll.skip_one) {
|
||||
console.log('modpoll abort');
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200) {
|
||||
console.log('modpoll err ' + this.status + ": " + this.responseText);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!this.responseText)
|
||||
return;
|
||||
|
||||
var server_ref = server_md.replace(/\r/g, '');
|
||||
var server_now = this.responseText.replace(/\r/g, '');
|
||||
|
||||
if (server_ref != server_now) {
|
||||
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
|
||||
this.modpoll.disabled = true;
|
||||
var msg = [
|
||||
"The document has changed on the server.<br />" +
|
||||
"The changes will NOT be loaded into your editor automatically.",
|
||||
|
||||
"Press F5 or CTRL-R to refresh the page,<br />" +
|
||||
"replacing your document with the server copy.",
|
||||
|
||||
"You can click this message to ignore and contnue."
|
||||
];
|
||||
return toast(false, "box-shadow:0 1em 2em rgba(64,64,64,0.8);font-weight:normal",
|
||||
36, "<p>" + msg.join('</p>\n<p>') + '</p>');
|
||||
}
|
||||
|
||||
console.log('modpoll eq');
|
||||
}
|
||||
|
||||
if (md_opt.modpoll_freq > 0)
|
||||
this.periodic();
|
||||
|
||||
return this;
|
||||
}
|
||||
var modpoll = new Modpoll();
|
||||
|
||||
|
||||
window.onbeforeunload = function (e) {
|
||||
if ((ebi("save").getAttribute('class') + '').indexOf('disabled') >= 0)
|
||||
return; //nice (todo)
|
||||
|
||||
e.preventDefault(); //ff
|
||||
e.returnValue = ''; //chrome
|
||||
};
|
||||
|
||||
|
||||
// save handler
|
||||
function save(e) {
|
||||
if (e) e.preventDefault();
|
||||
var save_btn = document.getElementById("save"),
|
||||
var save_btn = ebi("save"),
|
||||
save_cls = save_btn.getAttribute('class') + '';
|
||||
|
||||
if (save_cls.indexOf('disabled') >= 0) {
|
||||
alert('there is nothing to save');
|
||||
toast(true, ";font-size:2em;color:#c90", 9, "no changes");
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -221,13 +341,15 @@ function save(e) {
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
|
||||
var url = (document.location + '').split('?')[0] + '?raw';
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_cb;
|
||||
xhr.btn = save_btn;
|
||||
xhr.txt = txt;
|
||||
|
||||
modpoll.skip_one = true; // skip one iteration while we save
|
||||
xhr.send(fd);
|
||||
}
|
||||
|
||||
@@ -272,19 +394,24 @@ function save_cb() {
|
||||
this.btn.classList.remove('force-save');
|
||||
//alert('save OK -- wrote ' + r.size + ' bytes.\n\nsha512: ' + r.sha512);
|
||||
|
||||
run_savechk(r.lastmod, this.txt, this.btn, 0);
|
||||
}
|
||||
|
||||
function run_savechk(lastmod, txt, btn, ntry) {
|
||||
// download the saved doc from the server and compare
|
||||
var url = (document.location + '').split('?')[0] + '?raw';
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + new Date().getTime();
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_chk;
|
||||
xhr.btn = this.save_btn;
|
||||
xhr.txt = this.txt;
|
||||
xhr.lastmod = r.lastmod;
|
||||
xhr.onreadystatechange = savechk_cb;
|
||||
xhr.lastmod = lastmod;
|
||||
xhr.txt = txt;
|
||||
xhr.btn = btn;
|
||||
xhr.ntry = ntry;
|
||||
xhr.send();
|
||||
}
|
||||
|
||||
function save_chk() {
|
||||
function savechk_cb() {
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
@@ -296,6 +423,14 @@ function save_chk() {
|
||||
var doc1 = this.txt.replace(/\r\n/g, "\n");
|
||||
var doc2 = this.responseText.replace(/\r\n/g, "\n");
|
||||
if (doc1 != doc2) {
|
||||
var that = this;
|
||||
if (that.ntry < 10) {
|
||||
// qnap funny, try a few more times
|
||||
setTimeout(function () {
|
||||
run_savechk(that.lastmod, that.txt, that.btn, that.ntry + 1)
|
||||
}, 100);
|
||||
return;
|
||||
}
|
||||
alert(
|
||||
'Error! The document on the server does not appear to have saved correctly (your editor contents and the server copy are not identical). Place the document on your clipboard for now and check the server logs for hints\n\n' +
|
||||
'Length: yours=' + doc1.length + ', server=' + doc2.length
|
||||
@@ -308,18 +443,44 @@ function save_chk() {
|
||||
last_modified = this.lastmod;
|
||||
server_md = this.txt;
|
||||
draw_md();
|
||||
toast(true, ";font-size:6em;font-family:serif;color:#9b4", 4,
|
||||
'OK✔️<span style="font-size:.2em;color:#999;position:absolute">' + this.ntry + '</span>');
|
||||
|
||||
var ok = document.createElement('div');
|
||||
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
|
||||
ok.innerHTML = 'OK✔️';
|
||||
var parent = document.getElementById('m');
|
||||
document.documentElement.appendChild(ok);
|
||||
setTimeout(function () {
|
||||
ok.style.opacity = 0;
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
modpoll.disabled = false;
|
||||
}
|
||||
|
||||
function toast(autoclose, style, width, msg) {
|
||||
var ok = ebi("toast");
|
||||
if (ok)
|
||||
ok.parentNode.removeChild(ok);
|
||||
}, 750);
|
||||
|
||||
style = "width:" + width + "em;left:calc(50% - " + (width / 2) + "em);" + style;
|
||||
ok = document.createElement('div');
|
||||
ok.setAttribute('id', 'toast');
|
||||
ok.setAttribute('style', style);
|
||||
ok.innerHTML = msg;
|
||||
var parent = ebi('m');
|
||||
document.documentElement.appendChild(ok);
|
||||
|
||||
var hide = function (delay) {
|
||||
delay = delay || 0;
|
||||
|
||||
setTimeout(function () {
|
||||
ok.style.opacity = 0;
|
||||
}, delay);
|
||||
|
||||
setTimeout(function () {
|
||||
if (ok.parentNode)
|
||||
ok.parentNode.removeChild(ok);
|
||||
}, delay + 250);
|
||||
}
|
||||
|
||||
ok.onclick = function () {
|
||||
hide(0);
|
||||
};
|
||||
|
||||
if (autoclose)
|
||||
hide(500);
|
||||
}
|
||||
|
||||
|
||||
@@ -392,6 +553,9 @@ function setsel(s) {
|
||||
dom_src.value = [s.pre, s.sel, s.post].join('');
|
||||
dom_src.setSelectionRange(s.car, s.cdr, dom_src.selectionDirection);
|
||||
dom_src.oninput();
|
||||
// support chrome:
|
||||
dom_src.blur();
|
||||
dom_src.focus();
|
||||
}
|
||||
|
||||
|
||||
@@ -465,28 +629,44 @@ function md_newline() {
|
||||
var s = linebounds(true),
|
||||
ln = s.md.substring(s.n1, s.n2),
|
||||
m1 = /^( *)([0-9]+)(\. +)/.exec(ln),
|
||||
m2 = /^[ \t>+-]*(\* )?/.exec(ln);
|
||||
m2 = /^[ \t>+-]*(\* )?/.exec(ln),
|
||||
drop = dom_src.selectionEnd - dom_src.selectionStart;
|
||||
|
||||
var pre = m2[0];
|
||||
if (m1 !== null)
|
||||
pre = m1[1] + (parseInt(m1[2]) + 1) + m1[3];
|
||||
|
||||
if (pre.length > s.car - s.n1)
|
||||
// in gutter, do nothing
|
||||
return true;
|
||||
|
||||
s.pre = s.md.substring(0, s.car) + '\n' + pre;
|
||||
s.sel = '';
|
||||
s.post = s.md.substring(s.car);
|
||||
s.post = s.md.substring(s.car + drop);
|
||||
s.car = s.cdr = s.pre.length;
|
||||
setsel(s);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
// backspace
|
||||
function md_backspace() {
|
||||
var s = linebounds(true),
|
||||
ln = s.md.substring(s.n1, s.n2),
|
||||
m = /^[ \t>+-]*(\* )?([0-9]+\. +)?/.exec(ln);
|
||||
o0 = dom_src.selectionStart,
|
||||
left = s.md.slice(s.n1, o0),
|
||||
m = /^[ \t>+-]*(\* )?([0-9]+\. +)?/.exec(left);
|
||||
|
||||
// if car is in whitespace area, do nothing
|
||||
if (/^\s*$/.test(left))
|
||||
return true;
|
||||
|
||||
// same if selection
|
||||
if (o0 != dom_src.selectionEnd)
|
||||
return true;
|
||||
|
||||
// same if line is all-whitespace or non-markup
|
||||
var v = m[0].replace(/[^ ]/g, " ");
|
||||
if (v === m[0] || v.length !== ln.length)
|
||||
if (v === m[0] || v.length !== left.length)
|
||||
return true;
|
||||
|
||||
s.pre = s.md.substring(0, s.n1) + v;
|
||||
@@ -498,6 +678,248 @@ function md_backspace() {
|
||||
}
|
||||
|
||||
|
||||
// paragraph jump
|
||||
function md_p_jump(down) {
|
||||
var txt = dom_src.value,
|
||||
ofs = dom_src.selectionStart;
|
||||
|
||||
if (down) {
|
||||
while (txt[ofs] == '\n' && --ofs > 0);
|
||||
ofs = txt.indexOf("\n\n", ofs);
|
||||
if (ofs < 0)
|
||||
ofs = txt.length - 1;
|
||||
|
||||
while (txt[ofs] == '\n' && ++ofs < txt.length - 1);
|
||||
}
|
||||
else {
|
||||
txt += '\n\n';
|
||||
while (ofs > 1 && txt[ofs - 1] == '\n') ofs--;
|
||||
ofs = Math.max(0, txt.lastIndexOf("\n\n", ofs - 1));
|
||||
while (txt[ofs] == '\n' && ++ofs < txt.length - 1);
|
||||
}
|
||||
|
||||
dom_src.setSelectionRange(ofs, ofs, "none");
|
||||
}
|
||||
|
||||
|
||||
function reLastIndexOf(txt, ptn, end) {
|
||||
var ofs = (typeof end !== 'undefined') ? end : txt.length;
|
||||
end = ofs;
|
||||
while (ofs >= 0) {
|
||||
var sub = txt.slice(ofs, end);
|
||||
if (ptn.test(sub))
|
||||
return ofs;
|
||||
|
||||
ofs--;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
// table formatter
|
||||
function fmt_table(e) {
|
||||
if (e) e.preventDefault();
|
||||
//dom_tbox.setAttribute('class', '');
|
||||
|
||||
var txt = dom_src.value,
|
||||
ofs = dom_src.selectionStart,
|
||||
//o0 = txt.lastIndexOf('\n\n', ofs),
|
||||
//o1 = txt.indexOf('\n\n', ofs);
|
||||
o0 = reLastIndexOf(txt, /\n\s*\n/m, ofs),
|
||||
o1 = txt.slice(ofs).search(/\n\s*\n|\n\s*$/m);
|
||||
// note \s contains \n but it's fine
|
||||
|
||||
if (o0 < 0)
|
||||
o0 = 0;
|
||||
else {
|
||||
// seek past the hit
|
||||
var m = /\n\s*\n/m.exec(txt.slice(o0));
|
||||
o0 += m[0].length;
|
||||
}
|
||||
|
||||
o1 = o1 < 0 ? txt.length : o1 + ofs;
|
||||
|
||||
var err = 'cannot format table due to ',
|
||||
tab = txt.slice(o0, o1).split(/\s*\n/),
|
||||
re_ind = /^\s*/,
|
||||
ind = tab[1].match(re_ind)[0],
|
||||
r0_ind = tab[0].slice(0, ind.length),
|
||||
lpipe = tab[1].indexOf('|') < tab[1].indexOf('-'),
|
||||
rpipe = tab[1].lastIndexOf('|') > tab[1].lastIndexOf('-'),
|
||||
re_lpipe = lpipe ? /^\s*\|\s*/ : /^\s*/,
|
||||
re_rpipe = rpipe ? /\s*\|\s*$/ : /\s*$/,
|
||||
ncols;
|
||||
|
||||
// the second row defines the table,
|
||||
// need to process that first
|
||||
var tmp = tab[0];
|
||||
tab[0] = tab[1];
|
||||
tab[1] = tmp;
|
||||
|
||||
for (var a = 0; a < tab.length; a++) {
|
||||
var row_name = (a == 1) ? 'header' : 'row#' + (a + 1);
|
||||
|
||||
var ind2 = tab[a].match(re_ind)[0];
|
||||
if (ind != ind2 && a != 1) // the table can be a list entry or something, ignore [0]
|
||||
return alert(err + 'indentation mismatch on row#2 and ' + row_name + ',\n' + tab[a]);
|
||||
|
||||
var t = tab[a].slice(ind.length);
|
||||
t = t.replace(re_lpipe, "");
|
||||
t = t.replace(re_rpipe, "");
|
||||
tab[a] = t.split(/\s*\|\s*/g);
|
||||
|
||||
if (a == 0)
|
||||
ncols = tab[a].length;
|
||||
else if (ncols < tab[a].length)
|
||||
return alert(err + 'num.columns(' + row_name + ') exceeding row#2; ' + ncols + ' < ' + tab[a].length);
|
||||
|
||||
// if row has less columns than row2, fill them in
|
||||
while (tab[a].length < ncols)
|
||||
tab[a].push('');
|
||||
}
|
||||
|
||||
// aight now swap em back
|
||||
tmp = tab[0];
|
||||
tab[0] = tab[1];
|
||||
tab[1] = tmp;
|
||||
|
||||
var re_align = /^ *(:?)-+(:?) *$/;
|
||||
var align = [];
|
||||
for (var col = 0; col < tab[1].length; col++) {
|
||||
var m = tab[1][col].match(re_align);
|
||||
if (!m)
|
||||
return alert(err + 'invalid column specification, row#2, col ' + (col + 1) + ', [' + tab[1][col] + ']');
|
||||
|
||||
if (m[2]) {
|
||||
if (m[1])
|
||||
align.push('c');
|
||||
else
|
||||
align.push('r');
|
||||
}
|
||||
else
|
||||
align.push('l');
|
||||
}
|
||||
|
||||
var pad = [];
|
||||
var tmax = 0;
|
||||
for (var col = 0; col < ncols; col++) {
|
||||
var max = 0;
|
||||
for (var row = 0; row < tab.length; row++)
|
||||
if (row != 1)
|
||||
max = Math.max(max, tab[row][col].length);
|
||||
|
||||
var s = '';
|
||||
for (var n = 0; n < max; n++)
|
||||
s += ' ';
|
||||
|
||||
pad.push(s);
|
||||
tmax = Math.max(max, tmax);
|
||||
}
|
||||
|
||||
var dashes = '';
|
||||
for (var a = 0; a < tmax; a++)
|
||||
dashes += '-';
|
||||
|
||||
var ret = [];
|
||||
for (var row = 0; row < tab.length; row++) {
|
||||
var ln = [];
|
||||
for (var col = 0; col < tab[row].length; col++) {
|
||||
var p = pad[col];
|
||||
var s = tab[row][col];
|
||||
|
||||
if (align[col] == 'l') {
|
||||
s = (s + p).slice(0, p.length);
|
||||
}
|
||||
else if (align[col] == 'r') {
|
||||
s = (p + s).slice(-p.length);
|
||||
}
|
||||
else {
|
||||
var pt = p.length - s.length;
|
||||
var pl = p.slice(0, Math.floor(pt / 2));
|
||||
var pr = p.slice(0, pt - pl.length);
|
||||
s = pl + s + pr;
|
||||
}
|
||||
|
||||
if (row == 1) {
|
||||
if (align[col] == 'l')
|
||||
s = dashes.slice(0, p.length);
|
||||
else if (align[col] == 'r')
|
||||
s = dashes.slice(0, p.length - 1) + ':';
|
||||
else
|
||||
s = ':' + dashes.slice(0, p.length - 2) + ':';
|
||||
}
|
||||
ln.push(s);
|
||||
}
|
||||
ret.push(ind + '| ' + ln.join(' | ') + ' |');
|
||||
}
|
||||
|
||||
// restore any markup in the row0 gutter
|
||||
ret[0] = r0_ind + ret[0].slice(ind.length);
|
||||
|
||||
ret = {
|
||||
"pre": txt.slice(0, o0),
|
||||
"sel": ret.join('\n'),
|
||||
"post": txt.slice(o1),
|
||||
"car": o0,
|
||||
"cdr": o0
|
||||
};
|
||||
setsel(ret);
|
||||
}
|
||||
|
||||
|
||||
// show unicode
|
||||
function mark_uni(e) {
|
||||
if (e) e.preventDefault();
|
||||
dom_tbox.setAttribute('class', '');
|
||||
|
||||
var txt = dom_src.value,
|
||||
ptn = new RegExp('([^' + js_uni_whitelist + ']+)', 'g'),
|
||||
mod = txt.replace(/\r/g, "").replace(ptn, "\u2588\u2770$1\u2771");
|
||||
|
||||
if (txt == mod) {
|
||||
alert('no results; no modifications were made');
|
||||
return;
|
||||
}
|
||||
dom_src.value = mod;
|
||||
}
|
||||
|
||||
|
||||
// iterate unicode
|
||||
function iter_uni(e) {
|
||||
if (e) e.preventDefault();
|
||||
|
||||
var txt = dom_src.value,
|
||||
ofs = dom_src.selectionDirection == "forward" ? dom_src.selectionEnd : dom_src.selectionStart,
|
||||
re = new RegExp('([^' + js_uni_whitelist + ']+)'),
|
||||
m = re.exec(txt.slice(ofs));
|
||||
|
||||
if (!m) {
|
||||
alert('no more hits from cursor onwards');
|
||||
return;
|
||||
}
|
||||
ofs += m.index;
|
||||
|
||||
dom_src.setSelectionRange(ofs, ofs + m[0].length, "forward");
|
||||
dom_src.oninput();
|
||||
// support chrome:
|
||||
dom_src.blur();
|
||||
dom_src.focus();
|
||||
}
|
||||
|
||||
|
||||
// configure whitelist
|
||||
function cfg_uni(e) {
|
||||
if (e) e.preventDefault();
|
||||
|
||||
var reply = prompt("unicode whitelist", esc_uni_whitelist);
|
||||
if (reply === null)
|
||||
return;
|
||||
|
||||
esc_uni_whitelist = reply;
|
||||
js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
|
||||
}
|
||||
|
||||
|
||||
// hotkeys / toolbar
|
||||
(function () {
|
||||
function keydown(ev) {
|
||||
@@ -509,6 +931,11 @@ function md_backspace() {
|
||||
save();
|
||||
return false;
|
||||
}
|
||||
if (ev.code == "Escape" || kc == 27) {
|
||||
var d = ebi('helpclose');
|
||||
if (d)
|
||||
d.click();
|
||||
}
|
||||
if (document.activeElement == dom_src) {
|
||||
if (ev.code == "Tab" || kc == 9) {
|
||||
md_indent(ev.shiftKey);
|
||||
@@ -523,8 +950,7 @@ function md_backspace() {
|
||||
return false;
|
||||
}
|
||||
if (!ctrl && !ev.shiftKey && (ev.code == "Enter" || kc == 13)) {
|
||||
md_newline();
|
||||
return false;
|
||||
return md_newline();
|
||||
}
|
||||
if (ctrl && (ev.code == "KeyZ" || kc == 90)) {
|
||||
if (ev.shiftKey)
|
||||
@@ -541,27 +967,63 @@ function md_backspace() {
|
||||
if (!ctrl && !ev.shiftKey && kc == 8) {
|
||||
return md_backspace();
|
||||
}
|
||||
if (ctrl && (ev.code == "KeyK")) {
|
||||
fmt_table();
|
||||
return false;
|
||||
}
|
||||
if (ctrl && (ev.code == "KeyU")) {
|
||||
iter_uni();
|
||||
return false;
|
||||
}
|
||||
if (ctrl && (ev.code == "KeyE")) {
|
||||
dom_nsbs.click();
|
||||
//fmt_table();
|
||||
return false;
|
||||
}
|
||||
var up = ev.code == "ArrowUp" || kc == 38;
|
||||
var dn = ev.code == "ArrowDown" || kc == 40;
|
||||
if (ctrl && (up || dn)) {
|
||||
md_p_jump(dn);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
document.onkeydown = keydown;
|
||||
document.getElementById('save').onclick = save;
|
||||
ebi('save').onclick = save;
|
||||
})();
|
||||
|
||||
|
||||
document.getElementById('help').onclick = function (e) {
|
||||
ebi('tools').onclick = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
var dom = document.getElementById('helpbox');
|
||||
var is_open = dom_tbox.getAttribute('class') != 'open';
|
||||
dom_tbox.setAttribute('class', is_open ? 'open' : '');
|
||||
};
|
||||
|
||||
|
||||
ebi('help').onclick = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
dom_tbox.setAttribute('class', '');
|
||||
|
||||
var dom = ebi('helpbox');
|
||||
var dtxt = dom.getElementsByTagName('textarea');
|
||||
if (dtxt.length > 0)
|
||||
dom.innerHTML = '<a href="#" id="helpclose">close</a>' + marked(dtxt[0].value);
|
||||
if (dtxt.length > 0) {
|
||||
convert_markdown(dtxt[0].value, dom);
|
||||
dom.innerHTML = '<a href="#" id="helpclose">close</a>' + dom.innerHTML;
|
||||
}
|
||||
|
||||
dom.style.display = 'block';
|
||||
document.getElementById('helpclose').onclick = function () {
|
||||
ebi('helpclose').onclick = function () {
|
||||
dom.style.display = 'none';
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
ebi('fmt_table').onclick = fmt_table;
|
||||
ebi('mark_uni').onclick = mark_uni;
|
||||
ebi('iter_uni').onclick = iter_uni;
|
||||
ebi('cfg_uni').onclick = cfg_uni;
|
||||
|
||||
|
||||
// blame steen
|
||||
action_stack = (function () {
|
||||
var hist = {
|
||||
@@ -631,7 +1093,7 @@ action_stack = (function () {
|
||||
dom_src.value = ref;
|
||||
dom_src.setSelectionRange(cpos, cpos);
|
||||
ignore = true; // all browsers
|
||||
draw_md();
|
||||
dom_src.oninput();
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -667,13 +1129,12 @@ action_stack = (function () {
|
||||
ref = newtxt;
|
||||
dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length);
|
||||
if (hist.un.length > 0)
|
||||
dbg(static(hist.un.slice(-1)[0]));
|
||||
dbg(statify(hist.un.slice(-1)[0]));
|
||||
if (hist.re.length > 0)
|
||||
dbg(static(hist.re.slice(-1)[0]));
|
||||
dbg(statify(hist.re.slice(-1)[0]));
|
||||
}
|
||||
|
||||
return {
|
||||
push: push,
|
||||
undo: undo,
|
||||
redo: redo,
|
||||
push: schedule_push,
|
||||
@@ -683,7 +1144,7 @@ action_stack = (function () {
|
||||
})();
|
||||
|
||||
/*
|
||||
document.getElementById('help').onclick = function () {
|
||||
ebi('help').onclick = function () {
|
||||
var c1 = getComputedStyle(dom_src).cssText.split(';');
|
||||
var c2 = getComputedStyle(dom_ref).cssText.split(';');
|
||||
var max = Math.min(c1.length, c2.length);
|
||||
@@ -691,4 +1152,4 @@ document.getElementById('help').onclick = function () {
|
||||
if (c1[a] !== c2[a])
|
||||
console.log(c1[a] + '\n' + c2[a]);
|
||||
}
|
||||
*/
|
||||
*/
|
||||
|
||||
@@ -160,8 +160,12 @@ h2 {
|
||||
.mdo ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em {
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
@@ -253,8 +257,12 @@ html.dark .mdo>ul,
|
||||
html.dark .mdo>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em {
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
|
||||
@@ -17,13 +17,17 @@
|
||||
</div>
|
||||
</div>
|
||||
<div id="m">
|
||||
<textarea id="mt" style="display:none">{{ md }}</textarea>
|
||||
<textarea id="mt" style="display:none" autocomplete="off">{{ md }}</textarea>
|
||||
</div>
|
||||
</div>
|
||||
<script>
|
||||
|
||||
var link_md_as_html = false; // TODO (does nothing)
|
||||
var last_modified = {{ lastmod }};
|
||||
var md_opt = {
|
||||
link_md_as_html: false,
|
||||
allow_plugins: {{ md_plug }},
|
||||
modpoll_freq: {{ md_chk_rate }}
|
||||
};
|
||||
|
||||
var lightswitch = (function () {
|
||||
var fun = function () {
|
||||
@@ -39,6 +43,7 @@ var lightswitch = (function () {
|
||||
})();
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/deps/easymde.full.js"></script>
|
||||
<script src="/.cpr/util.js"></script>
|
||||
<script src="/.cpr/deps/easymde.js"></script>
|
||||
<script src="/.cpr/mde.js"></script>
|
||||
</body></html>
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
var dom_wrap = document.getElementById('mw');
|
||||
var dom_nav = document.getElementById('mn');
|
||||
var dom_doc = document.getElementById('m');
|
||||
var dom_md = document.getElementById('mt');
|
||||
"use strict";
|
||||
|
||||
var dom_wrap = ebi('mw');
|
||||
var dom_nav = ebi('mn');
|
||||
var dom_doc = ebi('m');
|
||||
var dom_md = ebi('mt');
|
||||
|
||||
(function () {
|
||||
var n = document.location + '';
|
||||
@@ -13,7 +15,7 @@ var dom_md = document.getElementById('mt');
|
||||
if (a > 0)
|
||||
loc.push(n[a]);
|
||||
|
||||
var dec = decodeURIComponent(n[a]).replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
|
||||
var dec = uricom_dec(n[a])[0].replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
|
||||
|
||||
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
||||
}
|
||||
@@ -63,7 +65,7 @@ var mde = (function () {
|
||||
mde.codemirror.on("change", function () {
|
||||
md_changed(mde);
|
||||
});
|
||||
var loader = document.getElementById('ml');
|
||||
var loader = ebi('ml');
|
||||
loader.parentNode.removeChild(loader);
|
||||
return mde;
|
||||
})();
|
||||
@@ -121,7 +123,7 @@ function save(mde) {
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
|
||||
var url = (document.location + '').split('?')[0] + '?raw';
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
@@ -213,7 +215,7 @@ function save_chk() {
|
||||
var ok = document.createElement('div');
|
||||
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
|
||||
ok.innerHTML = 'OK✔️';
|
||||
var parent = document.getElementById('m');
|
||||
var parent = ebi('m');
|
||||
document.documentElement.appendChild(ok);
|
||||
setTimeout(function () {
|
||||
ok.style.opacity = 0;
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,92 +1,4 @@
|
||||
.opview {
|
||||
display: none;
|
||||
}
|
||||
.opview.act {
|
||||
display: block;
|
||||
}
|
||||
#ops a {
|
||||
color: #fc5;
|
||||
font-size: 1.5em;
|
||||
padding: 0 .3em;
|
||||
margin: 0;
|
||||
outline: none;
|
||||
}
|
||||
#ops a.act {
|
||||
text-decoration: underline;
|
||||
}
|
||||
/*
|
||||
#ops a+a:after,
|
||||
#ops a:first-child:after {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #01a7e1;
|
||||
margin-left: .3em;
|
||||
position: relative;
|
||||
}
|
||||
#ops a+a:before {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #ff3f1a;
|
||||
margin-right: .3em;
|
||||
margin-left: -.3em;
|
||||
}
|
||||
#ops a:last-child:after {
|
||||
content: '';
|
||||
}
|
||||
#ops a.act:before,
|
||||
#ops a.act:after {
|
||||
text-decoration: none !important;
|
||||
}
|
||||
*/
|
||||
#ops i {
|
||||
font-size: 1.5em;
|
||||
}
|
||||
#ops i:before {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #01a7e1;
|
||||
position: relative;
|
||||
}
|
||||
#ops i:after {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #ff3f1a;
|
||||
margin-left: -.35em;
|
||||
font-size: 1.05em;
|
||||
}
|
||||
#ops,
|
||||
.opbox {
|
||||
border: 1px solid #3a3a3a;
|
||||
box-shadow: 0 0 1em #222 inset;
|
||||
}
|
||||
#ops {
|
||||
display: none;
|
||||
background: #333;
|
||||
margin: 1.7em 1.5em 0 1.5em;
|
||||
padding: .3em .6em;
|
||||
border-radius: .3em;
|
||||
border-width: .15em 0;
|
||||
}
|
||||
.opbox {
|
||||
background: #2d2d2d;
|
||||
margin: 1.5em 0 0 0;
|
||||
padding: .5em;
|
||||
border-radius: 0 1em 1em 0;
|
||||
border-width: .15em .3em .3em 0;
|
||||
max-width: 40em;
|
||||
}
|
||||
.opbox input {
|
||||
margin: .5em;
|
||||
}
|
||||
.opbox input[type=text] {
|
||||
color: #fff;
|
||||
background: #383838;
|
||||
border: none;
|
||||
box-shadow: 0 0 .3em #222;
|
||||
border-bottom: 1px solid #fc5;
|
||||
border-radius: .2em;
|
||||
padding: .2em .3em;
|
||||
}
|
||||
|
||||
#op_up2k {
|
||||
padding: 0 1em 1em 1em;
|
||||
}
|
||||
@@ -94,6 +6,9 @@
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 2px;
|
||||
height: 2px;
|
||||
overflow: hidden;
|
||||
}
|
||||
#u2form input {
|
||||
background: #444;
|
||||
@@ -104,11 +19,6 @@
|
||||
color: #f87;
|
||||
padding: .5em;
|
||||
}
|
||||
#u2form {
|
||||
width: 2px;
|
||||
height: 2px;
|
||||
overflow: hidden;
|
||||
}
|
||||
#u2btn {
|
||||
color: #eee;
|
||||
background: #555;
|
||||
@@ -117,17 +27,27 @@
|
||||
background: linear-gradient(to bottom, #367 0%, #489 50%, #38788a 51%, #367 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#489', endColorstr='#38788a', GradientType=0);
|
||||
text-decoration: none;
|
||||
line-height: 1.5em;
|
||||
line-height: 1.3em;
|
||||
border: 1px solid #222;
|
||||
border-radius: .4em;
|
||||
text-align: center;
|
||||
font-size: 2em;
|
||||
margin: 1em auto;
|
||||
padding: 1em 0;
|
||||
width: 12em;
|
||||
font-size: 1.5em;
|
||||
margin: .5em auto;
|
||||
padding: .8em 0;
|
||||
width: 16em;
|
||||
cursor: pointer;
|
||||
box-shadow: .4em .4em 0 #111;
|
||||
}
|
||||
#op_up2k.srch #u2btn {
|
||||
background: linear-gradient(to bottom, #ca3 0%, #fd8 50%, #fc6 51%, #b92 100%);
|
||||
text-shadow: 1px 1px 1px #fc6;
|
||||
color: #333;
|
||||
}
|
||||
#u2conf #u2btn {
|
||||
margin: -1.5em 0;
|
||||
padding: .8em 0;
|
||||
width: 100%;
|
||||
}
|
||||
#u2notbtn {
|
||||
display: none;
|
||||
text-align: center;
|
||||
@@ -142,6 +62,9 @@
|
||||
width: calc(100% - 2em);
|
||||
max-width: 100em;
|
||||
}
|
||||
#op_up2k.srch #u2tab {
|
||||
max-width: none;
|
||||
}
|
||||
#u2tab td {
|
||||
border: 1px solid #ccc;
|
||||
border-width: 0 0px 1px 0;
|
||||
@@ -153,12 +76,19 @@
|
||||
#u2tab td:nth-child(3) {
|
||||
width: 40%;
|
||||
}
|
||||
#op_up2k.srch #u2tab td:nth-child(3) {
|
||||
font-family: sans-serif;
|
||||
width: auto;
|
||||
}
|
||||
#u2tab tr+tr:hover td {
|
||||
background: #222;
|
||||
}
|
||||
#u2conf {
|
||||
margin: 1em auto;
|
||||
width: 26em;
|
||||
width: 30em;
|
||||
}
|
||||
#u2conf.has_btn {
|
||||
width: 46em;
|
||||
}
|
||||
#u2conf * {
|
||||
text-align: center;
|
||||
@@ -194,10 +124,72 @@
|
||||
#u2conf input+a {
|
||||
background: #d80;
|
||||
}
|
||||
#u2conf label {
|
||||
font-size: 1.6em;
|
||||
width: 2em;
|
||||
height: 1em;
|
||||
padding: .4em 0;
|
||||
display: block;
|
||||
user-select: none;
|
||||
border-radius: .25em;
|
||||
}
|
||||
#u2conf input[type="checkbox"] {
|
||||
position: relative;
|
||||
opacity: .02;
|
||||
top: 2em;
|
||||
}
|
||||
#u2conf input[type="checkbox"]+label {
|
||||
position: relative;
|
||||
background: #603;
|
||||
border-bottom: .2em solid #a16;
|
||||
box-shadow: 0 .1em .3em #a00 inset;
|
||||
}
|
||||
#u2conf input[type="checkbox"]:checked+label {
|
||||
background: #6a1;
|
||||
border-bottom: .2em solid #efa;
|
||||
box-shadow: 0 .1em .5em #0c0;
|
||||
}
|
||||
#u2conf input[type="checkbox"]+label:hover {
|
||||
box-shadow: 0 .1em .3em #fb0;
|
||||
border-color: #fb0;
|
||||
}
|
||||
#op_up2k.srch #u2conf td:nth-child(1)>*,
|
||||
#op_up2k.srch #u2conf td:nth-child(2)>*,
|
||||
#op_up2k.srch #u2conf td:nth-child(3)>* {
|
||||
background: #777;
|
||||
border-color: #ccc;
|
||||
box-shadow: none;
|
||||
opacity: .2;
|
||||
}
|
||||
#u2cdesc {
|
||||
position: absolute;
|
||||
width: 34em;
|
||||
left: calc(50% - 15em);
|
||||
background: #222;
|
||||
border: 0 solid #555;
|
||||
text-align: center;
|
||||
overflow: hidden;
|
||||
margin: 0 -2em;
|
||||
height: 0;
|
||||
padding: 0 1em;
|
||||
opacity: .1;
|
||||
transition: all 0.14s ease-in-out;
|
||||
border-radius: .4em;
|
||||
box-shadow: 0 .2em .5em #222;
|
||||
}
|
||||
#u2cdesc.show {
|
||||
padding: 1em;
|
||||
height: auto;
|
||||
border-width: .2em 0;
|
||||
opacity: 1;
|
||||
}
|
||||
#u2foot {
|
||||
color: #fff;
|
||||
font-style: italic;
|
||||
}
|
||||
#u2footfoot {
|
||||
margin-bottom: -1em;
|
||||
}
|
||||
.prog {
|
||||
font-family: monospace;
|
||||
}
|
||||
@@ -219,3 +211,13 @@
|
||||
bottom: 0;
|
||||
background: #0a0;
|
||||
}
|
||||
#u2tab a>span {
|
||||
font-weight: bold;
|
||||
font-style: italic;
|
||||
color: #fff;
|
||||
padding-left: .2em;
|
||||
}
|
||||
#u2cleanup {
|
||||
float: right;
|
||||
margin-bottom: -.3em;
|
||||
}
|
||||
|
||||
@@ -1,13 +1,7 @@
|
||||
<div id="ops"><a
|
||||
href="#" data-dest="">---</a><i></i><a
|
||||
href="#" data-dest="up2k">up2k</a><i></i><a
|
||||
href="#" data-dest="bup">bup</a><i></i><a
|
||||
href="#" data-dest="mkdir">mkdir</a><i></i><a
|
||||
href="#" data-dest="new_md">new.md</a></div>
|
||||
|
||||
<div id="op_bup" class="opview opbox act">
|
||||
<div id="u2err"></div>
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8">
|
||||
<input type="hidden" name="act" value="bput" />
|
||||
<input type="file" name="f" multiple><br />
|
||||
<input type="submit" value="start upload">
|
||||
@@ -15,7 +9,7 @@
|
||||
</div>
|
||||
|
||||
<div id="op_mkdir" class="opview opbox act">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8">
|
||||
<input type="hidden" name="act" value="mkdir" />
|
||||
<input type="text" name="name" size="30">
|
||||
<input type="submit" value="mkdir">
|
||||
@@ -23,19 +17,45 @@
|
||||
</div>
|
||||
|
||||
<div id="op_new_md" class="opview opbox">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8">
|
||||
<input type="hidden" name="act" value="new_md" />
|
||||
<input type="text" name="name" size="30">
|
||||
<input type="submit" value="create doc">
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="op_msg" class="opview opbox">
|
||||
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8">
|
||||
<input type="text" name="msg" size="30">
|
||||
<input type="submit" value="send msg">
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="op_up2k" class="opview">
|
||||
<form id="u2form" method="post" enctype="multipart/form-data" onsubmit="return false;"></form>
|
||||
|
||||
<table id="u2conf">
|
||||
<tr>
|
||||
<td>parallel uploads</td>
|
||||
<td rowspan="2">
|
||||
<input type="checkbox" id="multitask" />
|
||||
<label for="multitask" alt="continue hashing other files while uploading">🏃</label>
|
||||
</td>
|
||||
<td rowspan="2">
|
||||
<input type="checkbox" id="ask_up" />
|
||||
<label for="ask_up" alt="ask for confirmation befofre upload starts">💭</label>
|
||||
</td>
|
||||
<td rowspan="2">
|
||||
<input type="checkbox" id="flag_en" />
|
||||
<label for="flag_en" alt="ensure only one tab is uploading at a time $N (other tabs must have this enabled too)">💤</label>
|
||||
</td>
|
||||
{%- if have_up2k_idx %}
|
||||
<td data-perm="read" rowspan="2">
|
||||
<input type="checkbox" id="fsearch" />
|
||||
<label for="fsearch" alt="don't actually upload, instead check if the files already $N exist on the server (will scan all folders you can read)">🔎</label>
|
||||
</td>
|
||||
{%- endif %}
|
||||
<td data-perm="read" rowspan="2" id="u2btn_cw"></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
@@ -43,28 +63,29 @@
|
||||
<input class="txtbox" id="nthread" value="2" />
|
||||
<a href="#" id="nthread_add">+</a>
|
||||
</td>
|
||||
<td rowspan="2">
|
||||
<input type="checkbox" id="multitask" />
|
||||
<label for="multitask">hash while<br />uploading</label>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<div id="u2cdesc"></div>
|
||||
|
||||
<div id="u2notbtn"></div>
|
||||
|
||||
<div id="u2btn">
|
||||
drop files here<br />
|
||||
(or click me)
|
||||
<div id="u2btn_ct">
|
||||
<div id="u2btn">
|
||||
<span id="u2bm"></span><br />
|
||||
drop files here<br />
|
||||
(or click me)
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<table id="u2tab">
|
||||
<tr>
|
||||
<td>filename</td>
|
||||
<td>status</td>
|
||||
<td>progress</td>
|
||||
<td>progress<a href="#" id="u2cleanup">cleanup</a></td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<p id="u2foot"></p>
|
||||
<p>( if you don't need lastmod timestamps, resumable uploads or progress bars just use the <a href="#" id="u2nope">basic uploader</a>)</p>
|
||||
<p id="u2footfoot">( if you don't need lastmod timestamps, resumable uploads or progress bars just use the <a href="#" id="u2nope">basic uploader</a>)</p>
|
||||
</div>
|
||||
|
||||
375
copyparty/web/util.js
Normal file
@@ -0,0 +1,375 @@
|
||||
"use strict";
|
||||
|
||||
// error handler for mobile devices
|
||||
function hcroak(msg) {
|
||||
document.body.innerHTML = msg;
|
||||
window.onerror = undefined;
|
||||
throw 'fatal_err';
|
||||
}
|
||||
function croak(msg) {
|
||||
document.body.textContent = msg;
|
||||
window.onerror = undefined;
|
||||
throw msg;
|
||||
}
|
||||
function esc(txt) {
|
||||
return txt.replace(/[&"<>]/g, function (c) {
|
||||
return {
|
||||
'&': '&',
|
||||
'"': '"',
|
||||
'<': '<',
|
||||
'>': '>'
|
||||
}[c];
|
||||
});
|
||||
}
|
||||
function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
window.onerror = undefined;
|
||||
window['vis_exh'] = null;
|
||||
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
|
||||
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
|
||||
|
||||
if (error) {
|
||||
var find = ['desc', 'stack', 'trace'];
|
||||
for (var a = 0; a < find.length; a++)
|
||||
if (String(error[find[a]]) !== 'undefined')
|
||||
html.push('<h2>' + find[a] + '</h2>' +
|
||||
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
|
||||
}
|
||||
document.body.style.fontSize = '0.8em';
|
||||
document.body.style.padding = '0 1em 1em 1em';
|
||||
hcroak(html.join('\n'));
|
||||
}
|
||||
|
||||
|
||||
function ebi(id) {
|
||||
return document.getElementById(id);
|
||||
}
|
||||
|
||||
function ev(e) {
|
||||
e = e || window.event;
|
||||
if (!e)
|
||||
return;
|
||||
|
||||
if (e.preventDefault)
|
||||
e.preventDefault()
|
||||
|
||||
if (e.stopPropagation)
|
||||
e.stopPropagation();
|
||||
|
||||
e.returnValue = false;
|
||||
return e;
|
||||
}
|
||||
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
||||
if (!String.prototype.endsWith) {
|
||||
String.prototype.endsWith = function (search, this_len) {
|
||||
if (this_len === undefined || this_len > this.length) {
|
||||
this_len = this.length;
|
||||
}
|
||||
return this.substring(this_len - search.length, this_len) === search;
|
||||
};
|
||||
}
|
||||
if (!String.startsWith) {
|
||||
String.prototype.startsWith = function (s, i) {
|
||||
i = i > 0 ? i | 0 : 0;
|
||||
return this.substring(i, i + s.length) === s;
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var script = document.createElement('script');
|
||||
script.type = 'text/javascript';
|
||||
script.src = url;
|
||||
|
||||
script.onreadystatechange = cb;
|
||||
script.onload = cb;
|
||||
|
||||
head.appendChild(script);
|
||||
}
|
||||
|
||||
|
||||
function sortTable(table, col) {
|
||||
var tb = table.tBodies[0],
|
||||
th = table.tHead.rows[0].cells,
|
||||
tr = Array.prototype.slice.call(tb.rows, 0),
|
||||
i, reverse = th[col].className.indexOf('sort1') !== -1 ? -1 : 1;
|
||||
for (var a = 0, thl = th.length; a < thl; a++)
|
||||
th[a].className = th[a].className.replace(/ *sort-?1 */, " ");
|
||||
th[col].className += ' sort' + reverse;
|
||||
var stype = th[col].getAttribute('sort');
|
||||
var vl = [];
|
||||
for (var a = 0; a < tr.length; a++) {
|
||||
var cell = tr[a].cells[col];
|
||||
if (!cell) {
|
||||
vl.push([null, a]);
|
||||
continue;
|
||||
}
|
||||
var v = cell.getAttribute('sortv') || cell.textContent.trim();
|
||||
if (stype == 'int') {
|
||||
v = parseInt(v.replace(/[, ]/g, '')) || 0;
|
||||
}
|
||||
vl.push([v, a]);
|
||||
}
|
||||
vl.sort(function (a, b) {
|
||||
a = a[0];
|
||||
b = b[0];
|
||||
if (a === null)
|
||||
return -1;
|
||||
if (b === null)
|
||||
return 1;
|
||||
|
||||
if (stype == 'int') {
|
||||
return reverse * (a - b);
|
||||
}
|
||||
return reverse * (a.localeCompare(b));
|
||||
});
|
||||
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[vl[i][1]]);
|
||||
}
|
||||
function makeSortable(table) {
|
||||
var th = table.tHead, i;
|
||||
th && (th = th.rows[0]) && (th = th.cells);
|
||||
if (th) i = th.length;
|
||||
else return; // if no `<thead>` then do nothing
|
||||
while (--i >= 0) (function (i) {
|
||||
th[i].onclick = function (e) {
|
||||
ev(e);
|
||||
sortTable(table, i);
|
||||
};
|
||||
}(i));
|
||||
}
|
||||
|
||||
|
||||
|
||||
(function () {
|
||||
var ops = document.querySelectorAll('#ops>a');
|
||||
for (var a = 0; a < ops.length; a++) {
|
||||
ops[a].onclick = opclick;
|
||||
}
|
||||
})();
|
||||
|
||||
|
||||
function opclick(e) {
|
||||
ev(e);
|
||||
|
||||
var dest = this.getAttribute('data-dest');
|
||||
goto(dest);
|
||||
|
||||
swrite('opmode', dest || null);
|
||||
|
||||
var input = document.querySelector('.opview.act input:not([type="hidden"])')
|
||||
if (input)
|
||||
input.focus();
|
||||
}
|
||||
|
||||
|
||||
function goto(dest) {
|
||||
var obj = document.querySelectorAll('.opview.act');
|
||||
for (var a = obj.length - 1; a >= 0; a--)
|
||||
obj[a].classList.remove('act');
|
||||
|
||||
obj = document.querySelectorAll('#ops>a');
|
||||
for (var a = obj.length - 1; a >= 0; a--)
|
||||
obj[a].classList.remove('act');
|
||||
|
||||
if (dest) {
|
||||
var ui = ebi('op_' + dest);
|
||||
ui.classList.add('act');
|
||||
document.querySelector('#ops>a[data-dest=' + dest + ']').classList.add('act');
|
||||
|
||||
var fn = window['goto_' + dest];
|
||||
if (fn)
|
||||
fn();
|
||||
}
|
||||
|
||||
if (window['treectl'])
|
||||
treectl.onscroll();
|
||||
}
|
||||
|
||||
|
||||
(function () {
|
||||
goto();
|
||||
var op = sread('opmode');
|
||||
if (op !== null && op !== '.')
|
||||
goto(op);
|
||||
})();
|
||||
|
||||
|
||||
function linksplit(rp) {
|
||||
var ret = [];
|
||||
var apath = '/';
|
||||
if (rp && rp.charAt(0) == '/')
|
||||
rp = rp.slice(1);
|
||||
|
||||
while (rp) {
|
||||
var link = rp;
|
||||
var ofs = rp.indexOf('/');
|
||||
if (ofs === -1) {
|
||||
rp = null;
|
||||
}
|
||||
else {
|
||||
link = rp.slice(0, ofs + 1);
|
||||
rp = rp.slice(ofs + 1);
|
||||
}
|
||||
var vlink = link;
|
||||
if (link.indexOf('/') !== -1)
|
||||
vlink = link.slice(0, -1) + '<span>/</span>';
|
||||
|
||||
ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
|
||||
apath += link;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
function uricom_enc(txt, do_fb_enc) {
|
||||
try {
|
||||
return encodeURIComponent(txt);
|
||||
}
|
||||
catch (ex) {
|
||||
console.log("uce-err [" + txt + "]");
|
||||
if (do_fb_enc)
|
||||
return esc(txt);
|
||||
|
||||
return txt;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function uricom_dec(txt) {
|
||||
try {
|
||||
return [decodeURIComponent(txt), true];
|
||||
}
|
||||
catch (ex) {
|
||||
console.log("ucd-err [" + txt + "]");
|
||||
return [txt, false];
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function get_evpath() {
|
||||
var ret = document.location.pathname;
|
||||
|
||||
if (ret.indexOf('/') !== 0)
|
||||
ret = '/' + ret;
|
||||
|
||||
if (ret.lastIndexOf('/') !== ret.length - 1)
|
||||
ret += '/';
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
function get_vpath() {
|
||||
return uricom_dec(get_evpath())[0];
|
||||
}
|
||||
|
||||
|
||||
function unix2iso(ts) {
|
||||
return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
|
||||
}
|
||||
|
||||
|
||||
function s2ms(s) {
|
||||
var m = Math.floor(s / 60);
|
||||
return m + ":" + ("0" + (s - m * 60)).slice(-2);
|
||||
}
|
||||
|
||||
|
||||
function has(haystack, needle) {
|
||||
for (var a = 0; a < haystack.length; a++)
|
||||
if (haystack[a] == needle)
|
||||
return true;
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
function sread(key) {
|
||||
if (window.localStorage)
|
||||
return localStorage.getItem(key);
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function swrite(key, val) {
|
||||
if (window.localStorage) {
|
||||
if (val === undefined || val === null)
|
||||
localStorage.removeItem(key);
|
||||
else
|
||||
localStorage.setItem(key, val);
|
||||
}
|
||||
}
|
||||
|
||||
function jread(key, fb) {
|
||||
var str = sread(key);
|
||||
if (!str)
|
||||
return fb;
|
||||
|
||||
return JSON.parse(str);
|
||||
}
|
||||
|
||||
function jwrite(key, val) {
|
||||
if (!val)
|
||||
swrite(key);
|
||||
else
|
||||
swrite(key, JSON.stringify(val));
|
||||
}
|
||||
|
||||
function icfg_get(name, defval) {
|
||||
var o = ebi(name);
|
||||
|
||||
var val = parseInt(sread(name));
|
||||
if (isNaN(val))
|
||||
return parseInt(o ? o.value : defval);
|
||||
|
||||
if (o)
|
||||
o.value = val;
|
||||
|
||||
return val;
|
||||
}
|
||||
|
||||
function bcfg_get(name, defval) {
|
||||
var o = ebi(name);
|
||||
if (!o)
|
||||
return defval;
|
||||
|
||||
var val = sread(name);
|
||||
if (val === null)
|
||||
val = defval;
|
||||
else
|
||||
val = (val == '1');
|
||||
|
||||
bcfg_upd_ui(name, val);
|
||||
return val;
|
||||
}
|
||||
|
||||
function bcfg_set(name, val) {
|
||||
swrite(name, val ? '1' : '0');
|
||||
bcfg_upd_ui(name, val);
|
||||
return val;
|
||||
}
|
||||
|
||||
function bcfg_upd_ui(name, val) {
|
||||
var o = ebi(name);
|
||||
if (!o)
|
||||
return;
|
||||
|
||||
if (o.getAttribute('type') == 'checkbox')
|
||||
o.checked = val;
|
||||
else if (o)
|
||||
o.setAttribute('class', val ? 'on' : '');
|
||||
}
|
||||
|
||||
|
||||
function hist_push(url) {
|
||||
console.log("h-push " + url);
|
||||
history.pushState(url, url, url);
|
||||
}
|
||||
|
||||
function hist_replace(url) {
|
||||
console.log("h-repl " + url);
|
||||
history.replaceState(url, url, url);
|
||||
}
|
||||
@@ -3,6 +3,21 @@ echo not a script
|
||||
exit 1
|
||||
|
||||
|
||||
##
|
||||
## delete all partial uploads
|
||||
## (supports linux/macos, probably windows+msys2)
|
||||
|
||||
gzip -d < .hist/up2k.snap | jq -r '.[].tnam' | while IFS= read -r f; do rm -f -- "$f"; done
|
||||
gzip -d < .hist/up2k.snap | jq -r '.[].name' | while IFS= read -r f; do wc -c -- "$f" | grep -qiE '^[^0-9a-z]*0' && rm -f -- "$f"; done
|
||||


##
## detect partial uploads based on file contents
## (in case of context loss or old copyparties)

echo; find -type f | while IFS= read -r x; do printf '\033[A\033[36m%s\033[K\033[0m\n' "$x"; tail -c$((1024*1024)) <"$x" | xxd -a | awk 'NR==1&&/^[0: ]+.{16}$/{next} NR==2&&/^\*$/{next} NR==3&&/^[0f]+: [0 ]+65 +.{16}$/{next} {e=1} END {exit e}' || continue; printf '\033[A\033[31msus:\033[33m %s \033[0m\n\n' "$x"; done


##
## create a test payload

@@ -13,7 +28,7 @@ head -c $((2*1024*1024*1024)) /dev/zero | openssl enc -aes-256-ctr -pass pass:hu
## testing multiple parallel uploads
## usage: para | tee log

para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}; do for ((n=0;n<s;n++)); do curl -sF "act=bput" -F "f=@garbage.file" http://127.0.0.1:1234/ 2>&1 & done; wait; echo; done; done; }
para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4}; do for ((n=0;n<s;n++)); do curl -sF "act=bput" -F "f=@garbage.file" http://127.0.0.1:3923/ 2>&1 & done; wait; echo; done; done; }


##
@@ -36,13 +51,13 @@ for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qwe,rty;asd

fn=$(printf '\xba\xdc\xab.cab')
echo asdf > "$fn"
curl --cookie cppwd=wark -sF "act=bput" -F "f=@$fn" http://127.0.0.1:1234/moji/%ED%91/
curl --cookie cppwd=wark -sF "act=bput" -F "f=@$fn" http://127.0.0.1:3923/moji/%ED%91/


##
## test compression

wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:1234/.cpr/deps/ogv.js -O- | md5sum; p=~ed/dev/copyparty/copyparty/web/deps/ogv.js.gz; md5sum $p; gzip -d < $p | md5sum
wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:3923/.cpr/deps/ogv.js -O- | md5sum; p=~ed/dev/copyparty/copyparty/web/deps/ogv.js.gz; md5sum $p; gzip -d < $p | md5sum


##
@@ -99,3 +114,26 @@ Range: bytes=24- "yz" Content-Range: bytes 24-25/26
Range: bytes=25-29 "z" Content-Range: bytes 25-25/26
Range: bytes=26- Content-Range: bytes */26
HTTP/1.1 416 Requested Range Not Satisfiable


##
## md perf

var tsh = [];
function convert_markdown(md_text, dest_dom) {
    tsh.push(new Date().getTime());
    while (tsh.length > 10)
        tsh.shift();
    if (tsh.length > 1) {
        var end = tsh.slice(-2);
        console.log("render", end.pop() - end.pop(), (tsh[tsh.length - 1] - tsh[0]) / (tsh.length - 1));
    }


##
## tmpfiles.d meme

mk() { rm -rf /tmp/foo; sudo -u ed bash -c 'mkdir /tmp/foo; echo hi > /tmp/foo/bar'; }
mk && t0="$(date)" && while true; do date -s "$(date '+ 1 hour')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
mk && sudo -u ed flock /tmp/foo sleep 40 & sleep 1; ps aux | grep -E 'sleep 40$' && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
mk && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; tar -cf/dev/null /tmp/foo; done; echo "$t0"

docs/pretend-youre-qnap.patch  (new file, 35 lines)
@@ -0,0 +1,35 @@
diff --git a/copyparty/httpcli.py b/copyparty/httpcli.py
index 2d3c1ad..e1e85a0 100644
--- a/copyparty/httpcli.py
+++ b/copyparty/httpcli.py
@@ -864,6 +864,30 @@ class HttpCli(object):
         #
         # send reply
 
+        try:
+            fakefn = self.conn.hsrv.fakefn
+            fakectr = self.conn.hsrv.fakectr
+            fakedata = self.conn.hsrv.fakedata
+        except:
+            fakefn = b''
+            fakectr = 0
+            fakedata = b''
+
+        self.log('\n{} {}\n{}'.format(fakefn, fakectr, open_args[0]))
+        if fakefn == open_args[0] and fakectr > 0:
+            self.reply(fakedata, mime=guess_mime(req_path)[0])
+            self.conn.hsrv.fakectr = fakectr - 1
+        else:
+            with open_func(*open_args) as f:
+                fakedata = f.read()
+
+            self.conn.hsrv.fakefn = open_args[0]
+            self.conn.hsrv.fakedata = fakedata
+            self.conn.hsrv.fakectr = 15
+            self.reply(fakedata, mime=guess_mime(req_path)[0])
+
+        return True
+
         self.out_headers["Accept-Ranges"] = "bytes"
         self.send_headers(
             length=upper - lower,
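
to actually try the patch above (my assumption, not stated in the file itself): apply it from the repo root and restart copyparty; `git apply` should work just as well as plain patch:

    patch -p1 < docs/pretend-youre-qnap.patch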
docs/rclone.md  (new file, 62 lines)
@@ -0,0 +1,62 @@
# using rclone to mount a remote copyparty server as a local filesystem

speed estimates with server and client on the same win10 machine:
* `1070 MiB/s` with rclone as both server and client
* `570 MiB/s` with rclone-client and `copyparty -ed -j16` as server
* `220 MiB/s` with rclone-client and `copyparty -ed` as server
* `100 MiB/s` with [../bin/copyparty-fuse.py](../bin/copyparty-fuse.py) as client

when server is on another machine (1gbit LAN),
* `75 MiB/s` with [../bin/copyparty-fuse.py](../bin/copyparty-fuse.py) as client
* `92 MiB/s` with rclone-client and `copyparty -ed` as server
* `103 MiB/s` (connection max) with `copyparty -ed -j16` and all the others


# creating the config file

if you want to use password auth, add `headers = Cookie,cppwd=fgsfds` below


### on windows clients:
```
(
echo [cpp]
echo type = http
echo url = http://127.0.0.1:3923/
) > %userprofile%\.config\rclone\rclone.conf
```

also install the windows dependencies: [winfsp](https://github.com/billziss-gh/winfsp/releases/latest)


### on unix clients:
```
cat > ~/.config/rclone/rclone.conf <<'EOF'
[cpp]
type = http
url = http://127.0.0.1:3923/
EOF
```
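
so with password auth enabled, the unix config would look something like this (reusing the `fgsfds` placeholder password from above):
```
cat > ~/.config/rclone/rclone.conf <<'EOF'
[cpp]
type = http
url = http://127.0.0.1:3923/
headers = Cookie,cppwd=fgsfds
EOF
```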


# mounting the copyparty server locally
```
rclone.exe mount --vfs-cache-max-age 5s --attr-timeout 5s --dir-cache-time 5s cpp: Z:
```
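
that is the windows syntax; on linux/macos the same mount would presumably be along these lines (the mountpoint is just an example):
```
mkdir -p ~/mnt/cpp
rclone mount --vfs-cache-max-age 5s --attr-timeout 5s --dir-cache-time 5s cpp: ~/mnt/cpp
```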


# use rclone as server too, replacing copyparty

feels out of place but is too good not to mention

```
rclone.exe serve http --read-only .
```

* `webdav` gives write-access but `http` is twice as fast (sketch below)
* `ftp` is buggy, avoid
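
the webdav variant mentioned above would be something like this (stock rclone flags; add `--read-only` back if you don't actually want writes):
```
rclone.exe serve webdav .
```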


# bugs

* rclone-client throws an exception if you try to read an empty file (should return zero bytes)
docs/unirange.py  (new file, 10 lines)
@@ -0,0 +1,10 @@
v = "U+0000-00FF, U+0131, U+0152-0153, U+02BB-02BC, U+02C6, U+02DA, U+02DC, U+2000-206F, U+2074, U+20AC, U+2122, U+2191, U+2193, U+2212, U+2215, U+FEFF, U+FFFD"
for v in v.split(","):
    if "+" in v:
        v = v.split("+")[1]
    if "-" in v:
        lo, hi = v.split("-")
    else:
        lo = hi = v
    for v in range(int(lo, 16), int(hi, 16) + 1):
        print("{:4x} [{}]".format(v, chr(v)))
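
the script just expands that unicode-range string into one codepoint per line, so a quick sanity-check (example only) could be:

    python3 docs/unirange.py | grep -i 2212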
scripts/copyparty-repack.sh  (new executable file, 129 lines)
@@ -0,0 +1,129 @@
#!/bin/bash
repacker=1
set -e

# -- download latest copyparty (source.tgz and sfx),
# -- build minimal sfx versions,
# -- create a .tar.gz bundle
#
# convenient for deploying updates to inconvenient locations
# (and those are usually linux so bash is good inaff)
# (but that said this even has macos support)
#
# bundle will look like:
# -rwxr-xr-x  0 ed  ed  183808 Nov 19 00:43 copyparty
# -rw-r--r--  0 ed  ed  491318 Nov 19 00:40 copyparty-extras/copyparty-0.5.4.tar.gz
# -rwxr-xr-x  0 ed  ed   30254 Nov 17 23:58 copyparty-extras/copyparty-fuse.py
# -rwxr-xr-x  0 ed  ed  481403 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.sh
# -rwxr-xr-x  0 ed  ed  506043 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.py
# -rwxr-xr-x  0 ed  ed  167699 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.sh
# -rwxr-xr-x  0 ed  ed  183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py


command -v gtar && tar() { gtar "$@"; }
command -v gsed && sed() { gsed "$@"; }
td="$(mktemp -d)"
od="$(pwd)"
cd "$td"
pwd


dl_text() {
    command -v curl && exec curl "$@"
    exec wget -O- "$@"
}
dl_files() {
    command -v curl && exec curl -L --remote-name-all "$@"
    exec wget "$@"
}
export -f dl_files


# if cache exists, use that instead of bothering github
cache="$od/.copyparty-repack.cache"
[ -e "$cache" ] &&
    tar -xf "$cache" ||
{
    # get download links from github
    dl_text https://api.github.com/repos/9001/copyparty/releases/latest |
    (
        # prefer jq if available
        jq -r '.assets[]|select(.name|test("-sfx|tar.gz")).browser_download_url' ||

        # fallback to awk (sorry)
        awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
    ) |
    tee /dev/stderr |
    tr -d '\r' | tr '\n' '\0' |
    xargs -0 bash -c 'dl_files "$@"' _

    tar -czf "$cache" *
}


# move src into copyparty-extras/,
# move sfx into copyparty-extras/sfx-full/
mkdir -p copyparty-extras/sfx-{full,lite}
mv copyparty-sfx.* copyparty-extras/sfx-full/
mv copyparty-*.tar.gz copyparty-extras/


# unpack the source code
( cd copyparty-extras/
  tar -xf *.tar.gz
)


# use repacker from release if that is newer
p_other=copyparty-extras/copyparty-*/scripts/copyparty-repack.sh
other=$(awk -F= 'BEGIN{v=-1} NR<10&&/^repacker=/{v=$NF} END{print v}' <$p_other)
[ $repacker -lt $other ] &&
    cat $p_other >"$od/$0" && cd "$od" && rm -rf "$td" && exec "$0" "$@"


# now drop the cache
rm -f "$cache"


# fix permissions
chmod 755 \
    copyparty-extras/sfx-full/* \
    copyparty-extras/copyparty-*/{scripts,bin}/*


# extract and repack the sfx with less features enabled
( cd copyparty-extras/sfx-full/
  ./copyparty-sfx.py -h
  cd ../copyparty-*/
  ./scripts/make-sfx.sh re no-ogv no-cm
)


# put new sfx into copyparty-extras/sfx-lite/,
# fuse client into copyparty-extras/,
# copy lite-sfx.py to ./copyparty,
# delete extracted source code
( cd copyparty-extras/
  mv copyparty-*/dist/* sfx-lite/
  mv copyparty-*/bin/copyparty-fuse.py .
  cp -pv sfx-lite/copyparty-sfx.py ../copyparty
  rm -rf copyparty-{0..9}*.*.*{0..9}
)


# and include the repacker itself too
cp -av "$od/$0" copyparty-extras/ ||
cp -av "$0" copyparty-extras/ ||
true


# create the bundle
fn=copyparty-$(date +%Y-%m%d-%H%M%S).tgz
tar -czvf "$od/$fn" *
cd "$od"
rm -rf "$td"


echo
echo "done, here's your bundle:"
ls -al "$fn"
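
(using the resulting bundle on the target box is probably as simple as copying the .tgz over and running, for example:

    tar -xzf copyparty-*.tgz
    ./copyparty

where ./copyparty is the bundled sfx-lite copy listed in the header comment above)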
@@ -1,20 +1,21 @@
|
||||
FROM alpine:3.11
|
||||
FROM alpine:3.13
|
||||
WORKDIR /z
|
||||
ENV ver_asmcrypto=2821dd1dedd1196c378f5854037dda5c869313f3 \
|
||||
ver_markdownit=10.0.0 \
|
||||
ver_showdown=1.9.1 \
|
||||
ver_marked=1.0.0 \
|
||||
ver_ogvjs=1.6.1 \
|
||||
ver_mde=2.10.1 \
|
||||
ver_codemirror=5.53.2 \
|
||||
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
|
||||
ver_marked=1.1.0 \
|
||||
ver_ogvjs=1.8.0 \
|
||||
ver_mde=2.14.0 \
|
||||
ver_codemirror=5.59.3 \
|
||||
ver_fontawesome=5.13.0 \
|
||||
ver_zopfli=1.0.3
|
||||
|
||||
|
||||
# download
|
||||
RUN apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev \
|
||||
# download;
|
||||
# the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
|
||||
RUN mkdir -p /z/dist/no-pk \
|
||||
&& wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \
|
||||
&& apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \
|
||||
&& wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \
|
||||
&& wget https://github.com/asmcrypto/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
|
||||
&& wget https://github.com/openpgpjs/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
|
||||
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
|
||||
&& wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \
|
||||
&& wget https://github.com/codemirror/CodeMirror/archive/$ver_codemirror.tar.gz -O codemirror.tgz \
|
||||
@@ -36,23 +37,7 @@ RUN apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzi
|
||||
&& npm install \
|
||||
&& npm i gulp-cli -g ) \
|
||||
&& unzip fontawesome.zip \
|
||||
&& tar -xf zopfli.tgz \
|
||||
&& mkdir -p /z/dist/no-pk
|
||||
|
||||
|
||||
# uncomment if you wanna test the abandoned markdown converters
|
||||
#ENV build_abandoned=1
|
||||
|
||||
|
||||
RUN [ $build_abandoned ] || exit 0; \
|
||||
git clone --depth 1 --branch $ver_showdown https://github.com/showdownjs/showdown/ \
|
||||
&& wget https://github.com/markdown-it/markdown-it/archive/$ver_markdownit.tar.gz -O markdownit.tgz \
|
||||
&& (cd showdown \
|
||||
&& npm install \
|
||||
&& npm i grunt -g ) \
|
||||
&& (tar -xf markdownit.tgz \
|
||||
&& cd markdown-it-$ver_markdownit \
|
||||
&& npm install )
|
||||
&& tar -xf zopfli.tgz
|
||||
|
||||
|
||||
# build fonttools (which needs zopfli)
|
||||
@@ -65,6 +50,7 @@ RUN tar -xf zopfli.tgz \
|
||||
-S . \
|
||||
&& make -C build \
|
||||
&& make -C build install \
|
||||
&& python3 -m ensurepip \
|
||||
&& python3 -m pip install fonttools zopfli
|
||||
|
||||
|
||||
@@ -80,31 +66,27 @@ RUN cd ogvjs-$ver_ogvjs \
|
||||
&& cp -pv \
|
||||
ogv.js \
|
||||
ogv-worker-audio.js \
|
||||
ogv-demuxer-ogg.js \
|
||||
ogv-demuxer-ogg-wasm.js \
|
||||
ogv-demuxer-ogg-wasm.wasm \
|
||||
ogv-demuxer-webm.js \
|
||||
ogv-demuxer-webm-wasm.js \
|
||||
ogv-demuxer-webm-wasm.wasm \
|
||||
ogv-decoder-audio-opus.js \
|
||||
ogv-decoder-audio-opus-wasm.js \
|
||||
ogv-decoder-audio-opus-wasm.wasm \
|
||||
ogv-decoder-audio-vorbis.js \
|
||||
ogv-decoder-audio-vorbis-wasm.js \
|
||||
ogv-decoder-audio-vorbis-wasm.wasm \
|
||||
dynamicaudio.swf \
|
||||
/z/dist
|
||||
|
||||
# ogv-demuxer-ogg.js \
|
||||
# ogv-demuxer-webm.js \
|
||||
# ogv-decoder-audio-opus.js \
|
||||
# ogv-decoder-audio-vorbis.js \
|
||||
# dynamicaudio.swf \
|
||||
|
||||
|
||||
# build marked
|
||||
RUN wget https://github.com/markedjs/marked/commit/5c166d4164791f643693478e4ac094d63d6e0c9a.patch -O marked-git-1.patch \
|
||||
&& wget https://patch-diff.githubusercontent.com/raw/markedjs/marked/pull/1652.patch -O marked-git-2.patch
|
||||
|
||||
COPY marked.patch /z/
|
||||
COPY marked-ln.patch /z/
|
||||
RUN cd marked-$ver_marked \
|
||||
&& patch -p1 < /z/marked-git-1.patch \
|
||||
&& patch -p1 < /z/marked-git-2.patch \
|
||||
&& patch -p1 < /z/marked-ln.patch \
|
||||
&& patch -p1 < /z/marked.patch \
|
||||
&& npm run build \
|
||||
@@ -138,57 +120,10 @@ RUN cd easy-markdown-editor-$ver_mde \
|
||||
&& patch -p1 < /z/easymde-ln.patch \
|
||||
&& gulp \
|
||||
&& cp -pv dist/easymde.min.css /z/dist/easymde.css \
|
||||
&& cp -pv dist/easymde.min.js /z/dist/easymde.js \
|
||||
&& sed -ri '/pipe.terser/d; /cleanCSS/d' gulpfile.js \
|
||||
&& gulp \
|
||||
&& cp -pv dist/easymde.min.css /z/dist/easymde.full.css \
|
||||
&& cp -pv dist/easymde.min.js /z/dist/easymde.full.js
|
||||
&& cp -pv dist/easymde.min.js /z/dist/easymde.js
|
||||
|
||||
|
||||
# build showdown (abandoned; disabled by default)
|
||||
COPY showdown.patch /z/
|
||||
RUN [ $build_abandoned ] || exit 0; \
|
||||
cd showdown \
|
||||
&& rm -rf bin dist \
|
||||
# # remove ellipsis plugin \
|
||||
&& rm \
|
||||
src/subParsers/ellipsis.js \
|
||||
test/cases/ellipsis* \
|
||||
# # remove html-to-md converter \
|
||||
&& rm \
|
||||
test/node/testsuite.makemd.js \
|
||||
test/node/showdown.Converter.makeMarkdown.js \
|
||||
# # remove emojis \
|
||||
&& rm src/subParsers/emoji.js \
|
||||
&& awk '/^showdown.helper.emojis/ {o=1} !o; /^\}/ {o=0}' \
|
||||
>f <src/helpers.js \
|
||||
&& mv f src/helpers.js \
|
||||
&& rm -rf test/features/emojis \
|
||||
# # remove ghmentions \
|
||||
&& rm test/features/ghMentions.* \
|
||||
# # remove option descriptions \
|
||||
&& sed -ri '/descri(ption|be): /d' src/options.js \
|
||||
&& patch -p1 < /z/showdown.patch
|
||||
|
||||
RUN [ $build_abandoned ] || exit 0; \
|
||||
cd showdown \
|
||||
&& grunt build \
|
||||
&& sed -ri '/sourceMappingURL=showdown.min.js.map/d' dist/showdown.min.js \
|
||||
&& mv dist/showdown.min.js /z/dist/showdown.js \
|
||||
&& ls -al /z/dist/showdown.js
|
||||
|
||||
|
||||
# build markdownit (abandoned; disabled by default)
|
||||
COPY markdown-it.patch /z/
|
||||
RUN [ $build_abandoned ] || exit 0; \
|
||||
cd markdown-it-$ver_markdownit \
|
||||
&& patch -p1 < /z/markdown-it.patch \
|
||||
&& make browserify \
|
||||
&& cp -pv dist/markdown-it.min.js /z/dist/markdown-it.js \
|
||||
&& cp -pv dist/markdown-it.js /z/dist/markdown-it-full.js
|
||||
|
||||
|
||||
# build fontawesome
|
||||
# build fontawesome and scp
|
||||
COPY mini-fa.sh /z
|
||||
COPY mini-fa.css /z
|
||||
RUN /bin/ash /z/mini-fa.sh
|
||||
@@ -203,38 +138,6 @@ RUN cd /z/dist \
|
||||
&& rmdir no-pk
|
||||
|
||||
|
||||
# showdown: abandoned due to code-blocks in lists failing
|
||||
# 22770 orig
|
||||
# 12154 no-emojis
|
||||
# 12134 no-srcmap
|
||||
# 11189 no-descriptions
|
||||
# 11152 no-ellipsis
|
||||
# 10617 no-this.makeMd
|
||||
# 9569 no-extensions
|
||||
# 9537 no-extensions
|
||||
# 9410 no-mentions
|
||||
|
||||
|
||||
# markdown-it: abandoned because no header anchors (and too big)
|
||||
# 32322 107754 orig (wowee)
|
||||
# 19619 21392 71540 less entities
|
||||
|
||||
|
||||
# marked:
|
||||
# 9253 29773 orig
|
||||
# 9159 29633 no copyright (reverted)
|
||||
# 9040 29057 no sanitize
|
||||
# 8870 28631 no email-mangle
|
||||
# so really not worth it, just drop the patch when that stops working
|
||||
|
||||
|
||||
# easymde:
|
||||
# 91836 orig
|
||||
# 88635 no spellcheck
|
||||
# 88392 no urlRE
|
||||
# 85651 less bidi
|
||||
# 82855 less mode meta
|
||||
|
||||
|
||||
# d=/home/ed/dev/copyparty/scripts/deps-docker/; tar -cf ../x . && ssh root@$bip "cd $d && tar -xv >&2 && make >&2 && tar -cC ../../copyparty/web deps" <../x | (cd ../../copyparty/web/; cat > the.tgz; tar -xvf the.tgz)
|
||||
# git diff -U2 --no-index marked-1.1.0-orig/ marked-1.1.0-edit/ -U2 | sed -r '/^index /d;s`^(diff --git a/)[^/]+/(.* b/)[^/]+/`\1\2`; s`^(---|\+\+\+) ([ab]/)[^/]+/`\1 \2`' > ../dev/copyparty/scripts/deps-docker/marked-ln.patch
|
||||
# d=/home/ed/dev/copyparty/scripts/deps-docker/; tar -cf ../x . && ssh root@$bip "cd $d && tar -xv >&2 && make >&2 && tar -cC ../../copyparty/web deps" <../x | (cd ../../copyparty/web/; cat > the.tgz; tar -xvf the.tgz; rm the.tgz)
|
||||
# gzip -dkf ../dev/copyparty/copyparty/web/deps/deps/marked.full.js.gz && diff -NarU2 ../dev/copyparty/copyparty/web/deps/{,deps/}marked.full.js
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
diff -NarU2 CodeMirror-orig/mode/gfm/gfm.js CodeMirror-edit/mode/gfm/gfm.js
|
||||
--- CodeMirror-orig/mode/gfm/gfm.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/mode/gfm/gfm.js 2020-05-02 02:13:32.142131800 +0200
|
||||
diff -NarU2 codemirror-5.59.3-orig/mode/gfm/gfm.js codemirror-5.59.3/mode/gfm/gfm.js
|
||||
--- codemirror-5.59.3-orig/mode/gfm/gfm.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/mode/gfm/gfm.js 2021-02-21 20:42:02.166174775 +0000
|
||||
@@ -97,5 +97,5 @@
|
||||
}
|
||||
}
|
||||
@@ -15,9 +15,9 @@ diff -NarU2 CodeMirror-orig/mode/gfm/gfm.js CodeMirror-edit/mode/gfm/gfm.js
|
||||
+ }*/
|
||||
stream.next();
|
||||
return null;
|
||||
diff -NarU2 CodeMirror-orig/mode/meta.js CodeMirror-edit/mode/meta.js
|
||||
--- CodeMirror-orig/mode/meta.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/mode/meta.js 2020-05-02 03:56:58.852408400 +0200
|
||||
diff -NarU2 codemirror-5.59.3-orig/mode/meta.js codemirror-5.59.3/mode/meta.js
|
||||
--- codemirror-5.59.3-orig/mode/meta.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/mode/meta.js 2021-02-21 20:42:54.798742821 +0000
|
||||
@@ -13,4 +13,5 @@
|
||||
|
||||
CodeMirror.modeInfo = [
|
||||
@@ -28,7 +28,7 @@ diff -NarU2 CodeMirror-orig/mode/meta.js CodeMirror-edit/mode/meta.js
|
||||
{name: "Gas", mime: "text/x-gas", mode: "gas", ext: ["s"]},
|
||||
{name: "Gherkin", mime: "text/x-feature", mode: "gherkin", ext: ["feature"]},
|
||||
+ */
|
||||
{name: "GitHub Flavored Markdown", mime: "text/x-gfm", mode: "gfm", file: /^(readme|contributing|history).md$/i},
|
||||
{name: "GitHub Flavored Markdown", mime: "text/x-gfm", mode: "gfm", file: /^(readme|contributing|history)\.md$/i},
|
||||
+ /*
|
||||
{name: "Go", mime: "text/x-go", mode: "go", ext: ["go"]},
|
||||
{name: "Groovy", mime: "text/x-groovy", mode: "groovy", ext: ["groovy", "gradle"], file: /^Jenkinsfile$/},
|
||||
@@ -56,16 +56,16 @@ diff -NarU2 CodeMirror-orig/mode/meta.js CodeMirror-edit/mode/meta.js
|
||||
+ /*
|
||||
{name: "XQuery", mime: "application/xquery", mode: "xquery", ext: ["xy", "xquery"]},
|
||||
{name: "Yacas", mime: "text/x-yacas", mode: "yacas", ext: ["ys"]},
|
||||
@@ -171,4 +180,5 @@
|
||||
{name: "xu", mime: "text/x-xu", mode: "mscgen", ext: ["xu"]},
|
||||
{name: "msgenny", mime: "text/x-msgenny", mode: "mscgen", ext: ["msgenny"]}
|
||||
@@ -172,4 +181,5 @@
|
||||
{name: "msgenny", mime: "text/x-msgenny", mode: "mscgen", ext: ["msgenny"]},
|
||||
{name: "WebAssembly", mime: "text/webassembly", mode: "wast", ext: ["wat", "wast"]},
|
||||
+ */
|
||||
];
|
||||
// Ensure all modes have a mime property for backwards compatibility
|
||||
diff -NarU2 CodeMirror-orig/src/display/selection.js CodeMirror-edit/src/display/selection.js
|
||||
--- CodeMirror-orig/src/display/selection.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/display/selection.js 2020-05-02 03:27:30.144662800 +0200
|
||||
@@ -83,29 +83,21 @@
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/display/selection.js codemirror-5.59.3/src/display/selection.js
|
||||
--- codemirror-5.59.3-orig/src/display/selection.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/display/selection.js 2021-02-21 20:44:14.860894328 +0000
|
||||
@@ -84,29 +84,21 @@
|
||||
let order = getOrder(lineObj, doc.direction)
|
||||
iterateBidiSections(order, fromArg || 0, toArg == null ? lineLen : toArg, (from, to, dir, i) => {
|
||||
- let ltr = dir == "ltr"
|
||||
@@ -105,24 +105,24 @@ diff -NarU2 CodeMirror-orig/src/display/selection.js CodeMirror-edit/src/display
|
||||
+ botRight = openEnd && last ? rightSide : toPos.right
|
||||
add(topLeft, fromPos.top, topRight - topLeft, fromPos.bottom)
|
||||
if (fromPos.bottom < toPos.top) add(leftSide, fromPos.bottom, null, toPos.top)
|
||||
diff -NarU2 CodeMirror-orig/src/input/ContentEditableInput.js CodeMirror-edit/src/input/ContentEditableInput.js
|
||||
--- CodeMirror-orig/src/input/ContentEditableInput.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/input/ContentEditableInput.js 2020-05-02 03:33:05.707995500 +0200
|
||||
@@ -391,4 +391,5 @@
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/input/ContentEditableInput.js codemirror-5.59.3/src/input/ContentEditableInput.js
|
||||
--- codemirror-5.59.3-orig/src/input/ContentEditableInput.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/input/ContentEditableInput.js 2021-02-21 20:44:33.273953867 +0000
|
||||
@@ -399,4 +399,5 @@
|
||||
let info = mapFromLineView(view, line, pos.line)
|
||||
|
||||
+ /*
|
||||
let order = getOrder(line, cm.doc.direction), side = "left"
|
||||
if (order) {
|
||||
@@ -396,4 +397,5 @@
|
||||
@@ -404,4 +405,5 @@
|
||||
side = partPos % 2 ? "right" : "left"
|
||||
}
|
||||
+ */
|
||||
let result = nodeAndOffsetInLineMap(info.map, pos.ch, side)
|
||||
result.offset = result.collapse == "right" ? result.end : result.start
|
||||
diff -NarU2 CodeMirror-orig/src/input/movement.js CodeMirror-edit/src/input/movement.js
|
||||
--- CodeMirror-orig/src/input/movement.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/input/movement.js 2020-05-02 03:31:19.710773500 +0200
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/input/movement.js codemirror-5.59.3/src/input/movement.js
|
||||
--- codemirror-5.59.3-orig/src/input/movement.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/input/movement.js 2021-02-21 20:45:12.763093671 +0000
|
||||
@@ -15,4 +15,5 @@
|
||||
|
||||
export function endOfLine(visually, cm, lineObj, lineNo, dir) {
|
||||
@@ -146,9 +146,9 @@ diff -NarU2 CodeMirror-orig/src/input/movement.js CodeMirror-edit/src/input/move
|
||||
return null
|
||||
+ */
|
||||
}
|
||||
diff -NarU2 CodeMirror-orig/src/line/line_data.js CodeMirror-edit/src/line/line_data.js
|
||||
--- CodeMirror-orig/src/line/line_data.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/line/line_data.js 2020-05-02 03:17:02.785065000 +0200
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/line/line_data.js codemirror-5.59.3/src/line/line_data.js
|
||||
--- codemirror-5.59.3-orig/src/line/line_data.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/line/line_data.js 2021-02-21 20:45:36.472549599 +0000
|
||||
@@ -79,6 +79,6 @@
|
||||
// Optionally wire in some hacks into the token-rendering
|
||||
// algorithm, to deal with browser quirks.
|
||||
@@ -158,9 +158,9 @@ diff -NarU2 CodeMirror-orig/src/line/line_data.js CodeMirror-edit/src/line/line_
|
||||
+ // builder.addToken = buildTokenBadBidi(builder.addToken, order)
|
||||
builder.map = []
|
||||
let allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line)
|
||||
diff -NarU2 CodeMirror-orig/src/measurement/position_measurement.js CodeMirror-edit/src/measurement/position_measurement.js
|
||||
--- CodeMirror-orig/src/measurement/position_measurement.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/measurement/position_measurement.js 2020-05-02 03:35:20.674159600 +0200
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/measurement/position_measurement.js codemirror-5.59.3/src/measurement/position_measurement.js
|
||||
--- codemirror-5.59.3-orig/src/measurement/position_measurement.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/measurement/position_measurement.js 2021-02-21 20:50:52.372945293 +0000
|
||||
@@ -380,5 +380,6 @@
|
||||
sticky = "after"
|
||||
}
|
||||
@@ -199,9 +199,9 @@ diff -NarU2 CodeMirror-orig/src/measurement/position_measurement.js CodeMirror-e
|
||||
+*/
|
||||
|
||||
let measureText
|
||||
diff -NarU2 CodeMirror-orig/src/util/bidi.js CodeMirror-edit/src/util/bidi.js
|
||||
--- CodeMirror-orig/src/util/bidi.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/util/bidi.js 2020-05-02 03:12:44.418649800 +0200
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/util/bidi.js codemirror-5.59.3/src/util/bidi.js
|
||||
--- codemirror-5.59.3-orig/src/util/bidi.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/util/bidi.js 2021-02-21 20:52:18.168092225 +0000
|
||||
@@ -4,5 +4,5 @@
|
||||
|
||||
export function iterateBidiSections(order, from, to, f) {
|
||||
@@ -239,20 +239,19 @@ diff -NarU2 CodeMirror-orig/src/util/bidi.js CodeMirror-edit/src/util/bidi.js
|
||||
+ var fun = function(str, direction) {
|
||||
let outerType = direction == "ltr" ? "L" : "R"
|
||||
|
||||
@@ -204,12 +210,16 @@
|
||||
@@ -204,5 +210,11 @@
|
||||
return direction == "rtl" ? order.reverse() : order
|
||||
}
|
||||
-})()
|
||||
|
||||
+ return function(str, direction) {
|
||||
+ var ret = fun(str, direction);
|
||||
+ console.log("bidiOrdering inner ([%s], %s) => [%s]", str, direction, ret);
|
||||
+ return ret;
|
||||
+ }
|
||||
+})()
|
||||
})()
|
||||
+*/
|
||||
|
||||
// Get the bidi ordering for the given line (and cache it). Returns
|
||||
// false for lines that are fully left-to-right, and an array of
|
||||
@@ -210,6 +222,4 @@
|
||||
// BidiSpan objects otherwise.
|
||||
export function getOrder(line, direction) {
|
||||
- let order = line.order
|
||||
@@ -260,9 +259,9 @@ diff -NarU2 CodeMirror-orig/src/util/bidi.js CodeMirror-edit/src/util/bidi.js
|
||||
- return order
|
||||
+ return false;
|
||||
}
|
||||
diff -NarU2 CodeMirror-orig/src/util/feature_detection.js CodeMirror-edit/src/util/feature_detection.js
|
||||
--- CodeMirror-orig/src/util/feature_detection.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/util/feature_detection.js 2020-05-02 03:16:21.085621400 +0200
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/util/feature_detection.js codemirror-5.59.3/src/util/feature_detection.js
|
||||
--- codemirror-5.59.3-orig/src/util/feature_detection.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/util/feature_detection.js 2021-02-21 20:49:22.191269270 +0000
|
||||
@@ -25,4 +25,5 @@
|
||||
}
|
||||
|
||||
|
||||
@@ -1,33 +1,57 @@
|
||||
diff -NarU2 easymde-orig/gulpfile.js easymde-mod1/gulpfile.js
|
||||
--- easymde-orig/gulpfile.js 2020-04-06 14:09:36.000000000 +0200
|
||||
+++ easymde-mod1/gulpfile.js 2020-05-01 14:33:52.260175200 +0200
|
||||
diff -NarU2 easy-markdown-editor-2.14.0-orig/gulpfile.js easy-markdown-editor-2.14.0/gulpfile.js
|
||||
--- easy-markdown-editor-2.14.0-orig/gulpfile.js 2021-02-14 12:11:48.000000000 +0000
|
||||
+++ easy-markdown-editor-2.14.0/gulpfile.js 2021-02-21 20:55:37.134701007 +0000
|
||||
@@ -25,5 +25,4 @@
|
||||
'./node_modules/codemirror/lib/codemirror.css',
|
||||
'./src/css/*.css',
|
||||
- './node_modules/codemirror-spell-checker/src/css/spell-checker.css',
|
||||
];
|
||||
|
||||
diff -NarU2 easymde-orig/package.json easymde-mod1/package.json
|
||||
--- easymde-orig/package.json 2020-04-06 14:09:36.000000000 +0200
|
||||
+++ easymde-mod1/package.json 2020-05-01 14:33:57.189975800 +0200
|
||||
diff -NarU2 easy-markdown-editor-2.14.0-orig/package.json easy-markdown-editor-2.14.0/package.json
|
||||
--- easy-markdown-editor-2.14.0-orig/package.json 2021-02-14 12:11:48.000000000 +0000
|
||||
+++ easy-markdown-editor-2.14.0/package.json 2021-02-21 20:55:47.761190082 +0000
|
||||
@@ -21,5 +21,4 @@
|
||||
"dependencies": {
|
||||
"codemirror": "^5.52.2",
|
||||
"codemirror": "^5.59.2",
|
||||
- "codemirror-spell-checker": "1.1.2",
|
||||
"marked": "^0.8.2"
|
||||
"marked": "^2.0.0"
|
||||
},
|
||||
diff -NarU2 easymde-orig/src/js/easymde.js easymde-mod1/src/js/easymde.js
|
||||
--- easymde-orig/src/js/easymde.js 2020-04-06 14:09:36.000000000 +0200
|
||||
+++ easymde-mod1/src/js/easymde.js 2020-05-01 14:34:19.878774400 +0200
|
||||
@@ -11,5 +11,4 @@
|
||||
diff -NarU2 easy-markdown-editor-2.14.0-orig/src/js/easymde.js easy-markdown-editor-2.14.0/src/js/easymde.js
|
||||
--- easy-markdown-editor-2.14.0-orig/src/js/easymde.js 2021-02-14 12:11:48.000000000 +0000
|
||||
+++ easy-markdown-editor-2.14.0/src/js/easymde.js 2021-02-21 20:57:09.143171536 +0000
|
||||
@@ -12,5 +12,4 @@
|
||||
require('codemirror/mode/gfm/gfm.js');
|
||||
require('codemirror/mode/xml/xml.js');
|
||||
-var CodeMirrorSpellChecker = require('codemirror-spell-checker');
|
||||
var marked = require('marked/lib/marked');
|
||||
|
||||
@@ -1889,18 +1888,7 @@
|
||||
@@ -1762,9 +1761,4 @@
|
||||
options.autosave.uniqueId = options.autosave.unique_id;
|
||||
|
||||
- // If overlay mode is specified and combine is not provided, default it to true
|
||||
- if (options.overlayMode && options.overlayMode.combine === undefined) {
|
||||
- options.overlayMode.combine = true;
|
||||
- }
|
||||
-
|
||||
// Update this options
|
||||
this.options = options;
|
||||
@@ -2003,28 +1997,7 @@
|
||||
var mode, backdrop;
|
||||
|
||||
- // CodeMirror overlay mode
|
||||
- if (options.overlayMode) {
|
||||
- CodeMirror.defineMode('overlay-mode', function(config) {
|
||||
- return CodeMirror.overlayMode(CodeMirror.getMode(config, options.spellChecker !== false ? 'spell-checker' : 'gfm'), options.overlayMode.mode, options.overlayMode.combine);
|
||||
- });
|
||||
-
|
||||
- mode = 'overlay-mode';
|
||||
- backdrop = options.parsingConfig;
|
||||
- backdrop.gitHubSpice = false;
|
||||
- } else {
|
||||
mode = options.parsingConfig;
|
||||
mode.name = 'gfm';
|
||||
mode.gitHubSpice = false;
|
||||
- }
|
||||
- if (options.spellChecker !== false) {
|
||||
- mode = 'spell-checker';
|
||||
- backdrop = options.parsingConfig;
|
||||
@@ -37,16 +61,28 @@ diff -NarU2 easymde-orig/src/js/easymde.js easymde-mod1/src/js/easymde.js
|
||||
- CodeMirrorSpellChecker({
|
||||
- codeMirrorInstance: CodeMirror,
|
||||
- });
|
||||
- } else {
|
||||
mode = options.parsingConfig;
|
||||
mode.name = 'gfm';
|
||||
mode.gitHubSpice = false;
|
||||
- }
|
||||
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
@@ -1927,5 +1915,4 @@
|
||||
configureMouse: configureMouse,
|
||||
inputStyle: (options.inputStyle != undefined) ? options.inputStyle : isMobile() ? 'contenteditable' : 'textarea',
|
||||
- spellcheck: (options.nativeSpellcheck != undefined) ? options.nativeSpellcheck : true,
|
||||
});
|
||||
diff -NarU2 easy-markdown-editor-2.14.0-orig/types/easymde.d.ts easy-markdown-editor-2.14.0/types/easymde.d.ts
|
||||
--- easy-markdown-editor-2.14.0-orig/types/easymde.d.ts 2021-02-14 12:11:48.000000000 +0000
|
||||
+++ easy-markdown-editor-2.14.0/types/easymde.d.ts 2021-02-21 20:57:42.492620979 +0000
|
||||
@@ -160,9 +160,4 @@
|
||||
}
|
||||
|
||||
- interface OverlayModeOptions {
|
||||
- mode: CodeMirror.Mode<any>
|
||||
- combine?: boolean
|
||||
- }
|
||||
-
|
||||
interface Options {
|
||||
autoDownloadFontAwesome?: boolean;
|
||||
@@ -214,7 +209,5 @@
|
||||
|
||||
promptTexts?: PromptTexts;
|
||||
- syncSideBySidePreviewScroll?: boolean;
|
||||
-
|
||||
- overlayMode?: OverlayModeOptions
|
||||
+ syncSideBySidePreviewScroll?: boolean
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,7 +35,7 @@ add data-ln="%d" to most tags, %d is the source markdown line
|
||||
+ // this.ln will be bumped by recursive calls into this func;
|
||||
+ // reset the count and rely on the outermost token's raw only
|
||||
+ ln = this.ln;
|
||||
+
|
||||
+
|
||||
// newline
|
||||
if (token = this.tokenizer.space(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
@@ -234,7 +234,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
- return '<pre><code>'
|
||||
+ return '<pre' + this.ln + '><code>'
|
||||
+ (escaped ? code : escape(code, true))
|
||||
+ '</code></pre>';
|
||||
+ '</code></pre>\n';
|
||||
}
|
||||
|
||||
- return '<pre><code class="'
|
||||
|
||||
@@ -1,7 +1,141 @@
|
||||
diff -NarU1 marked-1.0.0-orig/src/defaults.js marked-1.0.0-edit/src/defaults.js
|
||||
--- marked-1.0.0-orig/src/defaults.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/src/defaults.js 2020-04-25 19:16:56.124621393 +0000
|
||||
@@ -9,10 +9,6 @@
|
||||
diff --git a/src/Lexer.js b/src/Lexer.js
|
||||
--- a/src/Lexer.js
|
||||
+++ b/src/Lexer.js
|
||||
@@ -5,5 +5,5 @@ const { block, inline } = require('./rules.js');
|
||||
/**
|
||||
* smartypants text replacement
|
||||
- */
|
||||
+ *
|
||||
function smartypants(text) {
|
||||
return text
|
||||
@@ -26,5 +26,5 @@ function smartypants(text) {
|
||||
/**
|
||||
* mangle email addresses
|
||||
- */
|
||||
+ *
|
||||
function mangle(text) {
|
||||
let out = '',
|
||||
@@ -439,5 +439,5 @@ module.exports = class Lexer {
|
||||
|
||||
// autolink
|
||||
- if (token = this.tokenizer.autolink(src, mangle)) {
|
||||
+ if (token = this.tokenizer.autolink(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
@@ -446,5 +446,5 @@ module.exports = class Lexer {
|
||||
|
||||
// url (gfm)
|
||||
- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
|
||||
+ if (!inLink && (token = this.tokenizer.url(src))) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
@@ -453,5 +453,5 @@ module.exports = class Lexer {
|
||||
|
||||
// text
|
||||
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
|
||||
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
--- a/src/Renderer.js
|
||||
+++ b/src/Renderer.js
|
||||
@@ -140,5 +140,5 @@ module.exports = class Renderer {
|
||||
|
||||
link(href, title, text) {
|
||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
+ href = cleanUrl(this.options.baseUrl, href);
|
||||
if (href === null) {
|
||||
return text;
|
||||
@@ -153,5 +153,5 @@ module.exports = class Renderer {
|
||||
|
||||
image(href, title, text) {
|
||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
+ href = cleanUrl(this.options.baseUrl, href);
|
||||
if (href === null) {
|
||||
return text;
|
||||
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
--- a/src/Tokenizer.js
|
||||
+++ b/src/Tokenizer.js
|
||||
@@ -287,11 +287,8 @@ module.exports = class Tokenizer {
|
||||
if (cap) {
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
- ? 'paragraph'
|
||||
- : 'html',
|
||||
+ type: 'html',
|
||||
raw: cap[0],
|
||||
- pre: !this.options.sanitizer
|
||||
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
|
||||
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
|
||||
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
|
||||
+ text: cap[0]
|
||||
};
|
||||
}
|
||||
@@ -421,15 +418,9 @@ module.exports = class Tokenizer {
|
||||
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
- ? 'text'
|
||||
- : 'html',
|
||||
+ type: 'html',
|
||||
raw: cap[0],
|
||||
inLink,
|
||||
inRawBlock,
|
||||
- text: this.options.sanitize
|
||||
- ? (this.options.sanitizer
|
||||
- ? this.options.sanitizer(cap[0])
|
||||
- : escape(cap[0]))
|
||||
- : cap[0]
|
||||
+ text: cap[0]
|
||||
};
|
||||
}
|
||||
@@ -550,10 +541,10 @@ module.exports = class Tokenizer {
|
||||
}
|
||||
|
||||
- autolink(src, mangle) {
|
||||
+ autolink(src) {
|
||||
const cap = this.rules.inline.autolink.exec(src);
|
||||
if (cap) {
|
||||
let text, href;
|
||||
if (cap[2] === '@') {
|
||||
- text = escape(this.options.mangle ? mangle(cap[1]) : cap[1]);
|
||||
+ text = escape(cap[1]);
|
||||
href = 'mailto:' + text;
|
||||
} else {
|
||||
@@ -578,10 +569,10 @@ module.exports = class Tokenizer {
|
||||
}
|
||||
|
||||
- url(src, mangle) {
|
||||
+ url(src) {
|
||||
let cap;
|
||||
if (cap = this.rules.inline.url.exec(src)) {
|
||||
let text, href;
|
||||
if (cap[2] === '@') {
|
||||
- text = escape(this.options.mangle ? mangle(cap[0]) : cap[0]);
|
||||
+ text = escape(cap[0]);
|
||||
href = 'mailto:' + text;
|
||||
} else {
|
||||
@@ -615,12 +606,12 @@ module.exports = class Tokenizer {
|
||||
}
|
||||
|
||||
- inlineText(src, inRawBlock, smartypants) {
|
||||
+ inlineText(src, inRawBlock) {
|
||||
const cap = this.rules.inline.text.exec(src);
|
||||
if (cap) {
|
||||
let text;
|
||||
if (inRawBlock) {
|
||||
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
|
||||
+ text = cap[0];
|
||||
} else {
|
||||
- text = escape(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
|
||||
+ text = escape(cap[0]);
|
||||
}
|
||||
return {
|
||||
diff --git a/src/defaults.js b/src/defaults.js
|
||||
--- a/src/defaults.js
|
||||
+++ b/src/defaults.js
|
||||
@@ -8,12 +8,8 @@ function getDefaults() {
|
||||
highlight: null,
|
||||
langPrefix: 'language-',
|
||||
- mangle: true,
|
||||
pedantic: false,
|
||||
@@ -12,10 +146,12 @@ diff -NarU1 marked-1.0.0-orig/src/defaults.js marked-1.0.0-edit/src/defaults.js
|
||||
smartLists: false,
|
||||
- smartypants: false,
|
||||
tokenizer: null,
|
||||
diff -NarU1 marked-1.0.0-orig/src/helpers.js marked-1.0.0-edit/src/helpers.js
|
||||
--- marked-1.0.0-orig/src/helpers.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/src/helpers.js 2020-04-25 18:58:43.001320210 +0000
|
||||
@@ -65,16 +65,3 @@
|
||||
walkTokens: null,
|
||||
diff --git a/src/helpers.js b/src/helpers.js
|
||||
--- a/src/helpers.js
|
||||
+++ b/src/helpers.js
|
||||
@@ -64,18 +64,5 @@ function edit(regex, opt) {
|
||||
const nonWordAndColonTest = /[^\w:]/g;
|
||||
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
|
||||
-function cleanUrl(sanitize, base, href) {
|
||||
- if (sanitize) {
|
||||
@@ -33,7 +169,9 @@ diff -NarU1 marked-1.0.0-orig/src/helpers.js marked-1.0.0-edit/src/helpers.js
|
||||
- }
|
||||
+function cleanUrl(base, href) {
|
||||
if (base && !originIndependentUrl.test(href)) {
|
||||
@@ -224,8 +211,2 @@
|
||||
href = resolveUrl(base, href);
|
||||
@@ -223,10 +210,4 @@ function findClosingBracket(str, b) {
|
||||
}
|
||||
|
||||
-function checkSanitizeDeprecation(opt) {
|
||||
- if (opt && opt.sanitize && !opt.silent) {
|
||||
@@ -42,228 +180,161 @@ diff -NarU1 marked-1.0.0-orig/src/helpers.js marked-1.0.0-edit/src/helpers.js
|
||||
-}
|
||||
-
|
||||
module.exports = {
|
||||
@@ -240,4 +221,3 @@
|
||||
escape,
|
||||
@@ -239,5 +220,4 @@ module.exports = {
|
||||
splitCells,
|
||||
rtrim,
|
||||
- findClosingBracket,
|
||||
- checkSanitizeDeprecation
|
||||
+ findClosingBracket
|
||||
};
|
||||
diff -NarU1 marked-1.0.0-orig/src/Lexer.js marked-1.0.0-edit/src/Lexer.js
|
||||
--- marked-1.0.0-orig/src/Lexer.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/src/Lexer.js 2020-04-25 22:46:54.107584066 +0000
|
||||
@@ -6,3 +6,3 @@
|
||||
* smartypants text replacement
|
||||
- */
|
||||
+ *
|
||||
function smartypants(text) {
|
||||
@@ -27,3 +27,3 @@
|
||||
* mangle email addresses
|
||||
- */
|
||||
+ *
|
||||
function mangle(text) {
|
||||
@@ -388,3 +388,3 @@
|
||||
// autolink
|
||||
- if (token = this.tokenizer.autolink(src, mangle)) {
|
||||
+ if (token = this.tokenizer.autolink(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
@@ -395,3 +395,3 @@
|
||||
// url (gfm)
|
||||
- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
|
||||
+ if (!inLink && (token = this.tokenizer.url(src))) {
|
||||
src = src.substring(token.raw.length);
|
||||
@@ -402,3 +402,3 @@
|
||||
// text
|
||||
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
|
||||
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
|
||||
src = src.substring(token.raw.length);
|
||||
diff -NarU1 marked-1.0.0-orig/src/marked.js marked-1.0.0-edit/src/marked.js
|
||||
--- marked-1.0.0-orig/src/marked.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/src/marked.js 2020-04-25 22:42:55.140924439 +0000
|
||||
@@ -8,3 +8,2 @@
|
||||
diff --git a/src/marked.js b/src/marked.js
|
||||
--- a/src/marked.js
|
||||
+++ b/src/marked.js
|
||||
@@ -7,5 +7,4 @@ const Slugger = require('./Slugger.js');
|
||||
const {
|
||||
merge,
|
||||
- checkSanitizeDeprecation,
|
||||
escape
|
||||
@@ -37,3 +36,2 @@
|
||||
opt = merge({}, marked.defaults, opt || {});
|
||||
- checkSanitizeDeprecation(opt);
|
||||
const highlight = opt.highlight;
|
||||
@@ -101,6 +99,5 @@
|
||||
opt = merge({}, marked.defaults, opt || {});
|
||||
- checkSanitizeDeprecation(opt);
|
||||
return Parser.parse(Lexer.lex(src, opt), opt);
|
||||
} = require('./helpers.js');
|
||||
@@ -35,5 +34,4 @@ function marked(src, opt, callback) {
|
||||
|
||||
opt = merge({}, marked.defaults, opt || {});
|
||||
- checkSanitizeDeprecation(opt);
|
||||
|
||||
if (callback) {
|
||||
@@ -108,5 +106,5 @@ function marked(src, opt, callback) {
|
||||
return Parser.parse(tokens, opt);
|
||||
} catch (e) {
|
||||
- e.message += '\nPlease report this to https://github.com/markedjs/marked.';
|
||||
+ e.message += '\nmake issue @ https://github.com/9001/copyparty';
|
||||
if ((opt || marked.defaults).silent) {
|
||||
diff -NarU1 marked-1.0.0-orig/src/Renderer.js marked-1.0.0-edit/src/Renderer.js
|
||||
--- marked-1.0.0-orig/src/Renderer.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/src/Renderer.js 2020-04-25 18:59:15.091319265 +0000
|
||||
@@ -134,3 +134,3 @@
|
||||
link(href, title, text) {
|
||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
+ href = cleanUrl(this.options.baseUrl, href);
|
||||
if (href === null) {
|
||||
@@ -147,3 +147,3 @@
|
||||
image(href, title, text) {
|
||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
+ href = cleanUrl(this.options.baseUrl, href);
|
||||
if (href === null) {
|
||||
diff -NarU1 marked-1.0.0-orig/src/Tokenizer.js marked-1.0.0-edit/src/Tokenizer.js
|
||||
--- marked-1.0.0-orig/src/Tokenizer.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/src/Tokenizer.js 2020-04-25 22:47:07.610917004 +0000
|
||||
@@ -256,9 +256,6 @@
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
- ? 'paragraph'
|
||||
- : 'html',
|
||||
- raw: cap[0],
|
||||
- pre: !this.options.sanitizer
|
||||
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
|
||||
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
|
||||
+ type: 'html',
|
||||
+ raw: cap[0],
|
||||
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
|
||||
+ text: cap[0]
|
||||
};
|
||||
@@ -382,5 +379,3 @@
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
- ? 'text'
|
||||
- : 'html',
|
||||
+ type: 'html',
|
||||
raw: cap[0],
|
||||
@@ -388,7 +383,3 @@
|
||||
inRawBlock,
|
||||
- text: this.options.sanitize
|
||||
- ? (this.options.sanitizer
|
||||
- ? this.options.sanitizer(cap[0])
|
||||
- : escape(cap[0]))
|
||||
- : cap[0]
|
||||
+ text: cap[0]
|
||||
};
|
||||
@@ -504,3 +495,3 @@
|
||||
|
||||
- autolink(src, mangle) {
|
||||
+ autolink(src) {
|
||||
const cap = this.rules.inline.autolink.exec(src);
|
||||
@@ -509,3 +500,3 @@
|
||||
if (cap[2] === '@') {
|
||||
- text = escape(this.options.mangle ? mangle(cap[1]) : cap[1]);
|
||||
+ text = escape(cap[1]);
|
||||
href = 'mailto:' + text;
|
||||
@@ -532,3 +523,3 @@
|
||||
|
||||
- url(src, mangle) {
|
||||
+ url(src) {
|
||||
let cap;
|
||||
@@ -537,3 +528,3 @@
|
||||
if (cap[2] === '@') {
|
||||
- text = escape(this.options.mangle ? mangle(cap[0]) : cap[0]);
|
||||
+ text = escape(cap[0]);
|
||||
href = 'mailto:' + text;
|
||||
@@ -569,3 +560,3 @@
|
||||
|
||||
- inlineText(src, inRawBlock, smartypants) {
|
||||
+ inlineText(src, inRawBlock) {
|
||||
const cap = this.rules.inline.text.exec(src);
|
||||
@@ -574,5 +565,5 @@
|
||||
if (inRawBlock) {
|
||||
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
|
||||
+ text = cap[0];
|
||||
} else {
|
||||
- text = escape(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
|
||||
+ text = escape(cap[0]);
|
||||
}
|
||||
diff -NarU1 marked-1.0.0-orig/test/bench.js marked-1.0.0-edit/test/bench.js
|
||||
--- marked-1.0.0-orig/test/bench.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/test/bench.js 2020-04-25 19:02:27.227980287 +0000
|
||||
@@ -34,3 +34,2 @@
|
||||
if (opt.silent) {
|
||||
return '<p>An error occurred:</p><pre>'
|
||||
diff --git a/test/bench.js b/test/bench.js
|
||||
--- a/test/bench.js
|
||||
+++ b/test/bench.js
|
||||
@@ -33,5 +33,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
@@ -46,3 +45,2 @@
|
||||
});
|
||||
@@ -45,5 +44,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
@@ -59,3 +57,2 @@
|
||||
});
|
||||
@@ -58,5 +56,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
@@ -71,3 +68,2 @@
|
||||
});
|
||||
@@ -70,5 +67,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
@@ -84,3 +80,2 @@
|
||||
});
|
||||
@@ -83,5 +79,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: true,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
@@ -96,3 +91,2 @@
|
||||
});
|
||||
@@ -95,5 +90,4 @@ async function runBench(options) {
|
||||
breaks: false,
|
||||
pedantic: true,
|
||||
- sanitize: false,
|
||||
smartLists: false
|
||||
diff -NarU1 marked-1.0.0-orig/test/specs/run-spec.js marked-1.0.0-edit/test/specs/run-spec.js
|
||||
--- marked-1.0.0-orig/test/specs/run-spec.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/test/specs/run-spec.js 2020-04-25 19:05:24.321308408 +0000
|
||||
@@ -21,6 +21,2 @@
|
||||
});
|
||||
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
|
||||
--- a/test/specs/run-spec.js
|
||||
+++ b/test/specs/run-spec.js
|
||||
@@ -22,8 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
|
||||
}
|
||||
|
||||
- if (spec.options.sanitizer) {
|
||||
- // eslint-disable-next-line no-eval
|
||||
- spec.options.sanitizer = eval(spec.options.sanitizer);
|
||||
- }
|
||||
|
||||
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
|
||||
@@ -49,2 +45 @@
|
||||
@@ -53,3 +49,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
|
||||
runSpecs('New', './new');
|
||||
runSpecs('ReDOS', './redos');
|
||||
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
|
||||
diff -NarU1 marked-1.0.0-orig/test/unit/Lexer-spec.js marked-1.0.0-edit/test/unit/Lexer-spec.js
|
||||
--- marked-1.0.0-orig/test/unit/Lexer-spec.js 2020-04-21 01:03:48.000000000 +0000
|
||||
+++ marked-1.0.0-edit/test/unit/Lexer-spec.js 2020-04-25 22:47:27.170916427 +0000
|
||||
@@ -464,3 +464,3 @@
|
||||
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
|
||||
--- a/test/unit/Lexer-spec.js
|
||||
+++ b/test/unit/Lexer-spec.js
|
||||
@@ -465,5 +465,5 @@ a | b
|
||||
});
|
||||
|
||||
- it('sanitize', () => {
|
||||
+ /*it('sanitize', () => {
|
||||
expectTokens({
|
||||
@@ -482,3 +482,3 @@
|
||||
md: '<div>html</div>',
|
||||
@@ -483,5 +483,5 @@ a | b
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
});
|
||||
@@ -586,3 +586,3 @@
|
||||
|
||||
@@ -587,5 +587,5 @@ a | b
|
||||
});
|
||||
|
||||
- it('html sanitize', () => {
|
||||
+ /*it('html sanitize', () => {
|
||||
expectInlineTokens({
|
||||
@@ -596,3 +596,3 @@
|
||||
md: '<div>html</div>',
|
||||
@@ -597,5 +597,5 @@ a | b
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
|
||||
@@ -825,3 +825,3 @@
|
||||
it('link', () => {
|
||||
@@ -909,5 +909,5 @@ a | b
|
||||
});
|
||||
|
||||
- it('autolink mangle email', () => {
|
||||
+ /*it('autolink mangle email', () => {
|
||||
expectInlineTokens({
|
||||
@@ -845,3 +845,3 @@
|
||||
md: '<test@example.com>',
|
||||
@@ -929,5 +929,5 @@ a | b
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
|
||||
@@ -882,3 +882,3 @@
|
||||
it('url', () => {
|
||||
@@ -966,5 +966,5 @@ a | b
|
||||
});
|
||||
|
||||
- it('url mangle email', () => {
|
||||
+ /*it('url mangle email', () => {
|
||||
expectInlineTokens({
|
||||
@@ -902,3 +902,3 @@
|
||||
md: 'test@example.com',
|
||||
@@ -986,5 +986,5 @@ a | b
|
||||
]
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
});
|
||||
@@ -918,3 +918,3 @@
|
||||
|
||||
@@ -1002,5 +1002,5 @@ a | b
|
||||
});
|
||||
|
||||
- describe('smartypants', () => {
|
||||
+ /*describe('smartypants', () => {
|
||||
it('single quotes', () => {
|
||||
@@ -988,3 +988,3 @@
|
||||
expectInlineTokens({
|
||||
@@ -1072,5 +1072,5 @@ a | b
|
||||
});
|
||||
});
|
||||
- });
|
||||
+ });*/
|
||||
});
|
||||
});
|
||||
|
||||
@@ -26,3 +26,6 @@ awk '/:before .content:"\\/ {sub(/[^"]+"./,""); sub(/".*/,""); print}' </z/dist/
|
||||
|
||||
# and finally create a woff with just our icons
|
||||
pyftsubset "$orig_woff" --unicodes-file=/z/icon.list --no-ignore-missing-unicodes --flavor=woff --with-zopfli --output-file=/z/dist/no-pk/mini-fa.woff --verbose
|
||||
|
||||
# scp is easier, just want basic latin
|
||||
pyftsubset /z/scp.woff2 --unicodes="20-7e,ab,b7,bb,2022" --no-ignore-missing-unicodes --flavor=woff2 --output-file=/z/dist/no-pk/scp.woff2 --verbose
|
||||
|
||||
scripts/fusefuzz.py  (new executable file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
"""
|
||||
td=/dev/shm/; [ -e $td ] || td=$HOME; mkdir -p $td/fusefuzz/{r,v}
|
||||
PYTHONPATH=.. python3 -m copyparty -v $td/fusefuzz/r::r -i 127.0.0.1
|
||||
../bin/copyparty-fuse.py http://127.0.0.1:3923/ $td/fusefuzz/v -cf 2 -cd 0.5
|
||||
(d="$PWD"; cd $td/fusefuzz && "$d"/fusefuzz.py)
|
||||
"""
|
||||
|
||||
|
||||
def chk(fsz, rsz, ofs0, shift, ofs, rf, vf):
|
||||
if ofs != rf.tell():
|
||||
rf.seek(ofs)
|
||||
vf.seek(ofs)
|
||||
|
||||
rb = rf.read(rsz)
|
||||
vb = vf.read(rsz)
|
||||
|
||||
print(f"fsz {fsz} rsz {rsz} ofs {ofs0} shift {shift} ofs {ofs} = {len(rb)}")
|
||||
|
||||
if rb != vb:
|
||||
for n, buf in enumerate([rb, vb]):
|
||||
with open("buf." + str(n), "wb") as f:
|
||||
f.write(buf)
|
||||
|
||||
raise Exception(f"{len(rb)} != {len(vb)}")
|
||||
|
||||
return rb, vb
|
||||
|
||||
|
||||
def main():
|
||||
v = "v"
|
||||
for n in range(5):
|
||||
with open(f"r/{n}", "wb") as f:
|
||||
f.write(b"h" * n)
|
||||
|
||||
rand = os.urandom(7919) # prime
|
||||
for fsz in range(1024 * 1024 * 2 - 3, 1024 * 1024 * 2 + 3):
|
||||
with open("r/f", "wb", fsz) as f:
|
||||
f.write((rand * int(fsz / len(rand) + 1))[:fsz])
|
||||
|
||||
for rsz in range(64 * 1024 - 2, 64 * 1024 + 2):
|
||||
ofslist = [0, 1, 2]
|
||||
for n in range(3):
|
||||
ofslist.append(fsz - n)
|
||||
ofslist.append(fsz - (rsz * 1 + n))
|
||||
ofslist.append(fsz - (rsz * 2 + n))
|
||||
|
||||
for ofs0 in ofslist:
|
||||
for shift in range(-3, 3):
|
||||
print(f"fsz {fsz} rsz {rsz} ofs {ofs0} shift {shift}")
|
||||
ofs = ofs0
|
||||
if ofs < 0 or ofs >= fsz:
|
||||
continue
|
||||
|
||||
for n in range(1, 3):
|
||||
with open(f"{v}/{n}", "rb") as f:
|
||||
f.read()
|
||||
|
||||
prev_ofs = -99
|
||||
with open("r/f", "rb", rsz) as rf:
|
||||
with open(f"{v}/f", "rb", rsz) as vf:
|
||||
while True:
|
||||
ofs += shift
|
||||
if ofs < 0 or ofs > fsz or ofs == prev_ofs:
|
||||
break
|
||||
|
||||
prev_ofs = ofs
|
||||
|
||||
rb, vb = chk(fsz, rsz, ofs0, shift, ofs, rf, vf)
|
||||
|
||||
if not rb:
|
||||
break
|
||||
|
||||
ofs += len(rb)
|
||||
|
||||
for n in range(1, 3):
|
||||
with open(f"{v}/{n}", "rb") as f:
|
||||
f.read()
|
||||
|
||||
with open("r/f", "rb", rsz) as rf:
|
||||
with open(f"{v}/f", "rb", rsz) as vf:
|
||||
for n in range(2):
|
||||
ofs += shift
|
||||
if ofs < 0 or ofs > fsz:
|
||||
break
|
||||
|
||||
rb, vb = chk(fsz, rsz, ofs0, shift, ofs, rf, vf)
|
||||
|
||||
ofs -= rsz
|
||||
|
||||
# bumping fsz, sleep away the dentry cache in cppf
|
||||
time.sleep(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -3,12 +3,15 @@ set -e
|
||||
echo
|
||||
|
||||
# osx support
|
||||
command -v gtar >/dev/null &&
|
||||
command -v gfind >/dev/null && {
|
||||
tar() { gtar "$@"; }
|
||||
# port install gnutar findutils gsed coreutils
|
||||
gtar=$(command -v gtar || command -v gnutar) || true
|
||||
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
|
||||
tar() { $gtar "$@"; }
|
||||
sed() { gsed "$@"; }
|
||||
find() { gfind "$@"; }
|
||||
sort() { gsort "$@"; }
|
||||
command -v grealpath >/dev/null &&
|
||||
realpath() { grealpath "$@"; }
|
||||
}
|
||||
|
||||
which md5sum 2>/dev/null >/dev/null &&
|
||||
@@ -83,6 +86,8 @@ function have() {
|
||||
python -c "import $1; $1; $1.__version__"
|
||||
}
|
||||
|
||||
mv copyparty/web/deps/marked.full.js.gz srv/ || true
|
||||
|
||||
. buildenv/bin/activate
|
||||
have setuptools
|
||||
have wheel
|
||||
|
||||
@@ -18,13 +18,16 @@ echo
# (the fancy markdown editor)


command -v gtar >/dev/null &&
command -v gfind >/dev/null && {
    tar() { gtar "$@"; }
# port install gnutar findutils gsed coreutils
gtar=$(command -v gtar || command -v gnutar) || true
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
    tar() { $gtar "$@"; }
    sed() { gsed "$@"; }
    find() { gfind "$@"; }
    sort() { gsort "$@"; }
    unexpand() { gunexpand "$@"; }
    command -v grealpath >/dev/null &&
        realpath() { grealpath "$@"; }
}

[ -e copyparty/__main__.py ] || cd ..

@@ -59,28 +62,32 @@ cd sfx
    )/pe-copyparty"

    echo "repack of files in $old"
    cp -pR "$old/"*{jinja2,copyparty} .
    mv {x.,}jinja2 2>/dev/null || true
    cp -pR "$old/"*{dep-j2,copyparty} .
}

[ $repack ] || {
    echo collecting jinja2
    f="../build/Jinja2-2.6.tar.gz"
    f="../build/Jinja2-2.11.3.tar.gz"
    [ -e "$f" ] ||
        (url=https://files.pythonhosted.org/packages/25/c8/212b1c2fd6df9eaf536384b6c6619c4e70a3afd2dffdd00e5296ffbae940/Jinja2-2.6.tar.gz;
        (url=https://files.pythonhosted.org/packages/4f/e7/65300e6b32e69768ded990494809106f87da1d436418d5f1367ed3966fd7/Jinja2-2.11.3.tar.gz;
        wget -O$f "$url" || curl -L "$url" >$f)

    tar -zxf $f
    mv Jinja2-*/jinja2 .
    rm -rf Jinja2-* jinja2/testsuite jinja2/_markupsafe/tests.py jinja2/_stringdefs.py
    mv Jinja2-*/src/jinja2 .
    rm -rf Jinja2-*

    f=jinja2/lexer.py
    sed -r '/.*föö.*/ raise SyntaxError/' <$f >t
    tmv $f

    f=jinja2/_markupsafe/_constants.py
    awk '!/: [0-9]+,?$/ || /(amp|gt|lt|quot|apos|nbsp).:/' <$f >t
    tmv $f
    echo collecting markupsafe
    f="../build/MarkupSafe-1.1.1.tar.gz"
    [ -e "$f" ] ||
        (url=https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz;
        wget -O$f "$url" || curl -L "$url" >$f)

    tar -zxf $f
    mv MarkupSafe-*/src/markupsafe .
    rm -rf MarkupSafe-* markupsafe/_speedups.c

    mkdir dep-j2/
    mv {markupsafe,jinja2} dep-j2/

    # msys2 tar is bad, make the best of it
    echo collecting source

@@ -94,8 +101,39 @@ cd sfx
    rm -f ../tar
}

ver="$(awk '/^VERSION *= \(/ {
    gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < ../copyparty/__version__.py)"
ver=
git describe --tags >/dev/null 2>/dev/null && {
    git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
    ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//; s/-g?/./g')";
    t_ver=

    printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+$' && {
        # short format (exact version number)
        t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g')";
    }

    printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+-[0-9]+-g[0-9a-f]+$' && {
        # long format (unreleased commit)
        t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g; s/(.*) (.*)/\1 "\2"/')"
    }

    [ -z "$t_ver" ] && {
        printf 'unexpected git version format: [%s]\n' "$git_ver"
        exit 1
    }

    dt="$(git log -1 --format=%cd --date=short | sed -E 's/-0?/, /g')"
    printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt"
    sed -ri '
        s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/;
        s/^(S_VERSION =)(.*)/#\1\2\n\1 "'"$ver"'"/;
        s/^(BUILD_DT =)(.*)/#\1\2\n\1 ('"$dt"')/;
    ' copyparty/__version__.py
}

[ -z "$ver" ] &&
    ver="$(awk '/^VERSION *= \(/ {
        gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"

ts=$(date -u +%s)
hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)
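Not part of the diff: as a reading aid, here is roughly what the version logic above computes. It takes the `git describe --tags` output, derives the `S_VERSION` string and the body of the `VERSION` tuple that get patched into `copyparty/__version__.py`; the sample tag is the one from the script's own comment, and everything else in this sketch is illustrative.

```
import re

def tag_to_version(git_ver):
    # "v0.5.5-2-gb164aa0" -> "0.5.5.2.b164aa0"
    ver = re.sub(r"-g?", ".", git_ver.lstrip("v"))
    parts = ver.split(".")
    if re.match(r"^v[0-9\.]+$", git_ver):
        t_ver = ", ".join(parts)  # short format (exact version number)
    elif re.match(r"^v[0-9\.]+-[0-9]+-g[0-9a-f]+$", git_ver):
        # long format (unreleased commit); the trailing hash becomes a string
        t_ver = ", ".join(parts[:-1]) + ', "{}"'.format(parts[-1])
    else:
        raise ValueError("unexpected git version format: " + git_ver)
    return ver, t_ver

print(tag_to_version("v0.5.5-2-gb164aa0"))  # ('0.5.5.2.b164aa0', '0, 5, 5, 2, "b164aa0"')
print(tag_to_version("v0.5.5"))             # ('0.5.5', '0, 5, 5')
```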
@@ -131,6 +169,15 @@ done
    sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f"
}

find | grep -E '\.py$' |
    grep -vE '__version__' |
    tr '\n' '\0' |
    xargs -0 python ../scripts/uncomment.py

f=dep-j2/jinja2/constants.py
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
tmv "$f"

# up2k goes from 28k to 22k laff
echo entabbening
find | grep -E '\.(js|css|html|py)$' | while IFS= read -r f; do

@@ -143,7 +190,7 @@ args=(--owner=1000 --group=1000)
[ "$OSTYPE" = msys ] &&
    args=()

tar -cf tar "${args[@]}" --numeric-owner copyparty jinja2
tar -cf tar "${args[@]}" --numeric-owner copyparty dep-j2

echo compressing tar
# detect best level; bzip2 -7 is usually better than -9
@@ -2,12 +2,16 @@
set -e
echo

command -v gtar >/dev/null &&
command -v gfind >/dev/null && {
    tar() { gtar "$@"; }
# osx support
# port install gnutar findutils gsed coreutils
gtar=$(command -v gtar || command -v gnutar) || true
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
    tar() { $gtar "$@"; }
    sed() { gsed "$@"; }
    find() { gfind "$@"; }
    sort() { gsort "$@"; }
    command -v grealpath >/dev/null &&
        realpath() { grealpath "$@"; }
}

which md5sum 2>/dev/null >/dev/null &&

@@ -16,27 +20,29 @@ which md5sum 2>/dev/null >/dev/null &&

ver="$1"

[[ "x$ver" == x ]] &&
[ "x$ver" = x ] &&
{
    echo "need argument 1: version"
    echo
    exit 1
}

[[ -e copyparty/__main__.py ]] || cd ..
[[ -e copyparty/__main__.py ]] ||
[ -e copyparty/__main__.py ] || cd ..
[ -e copyparty/__main__.py ] ||
{
    echo "run me from within the project root folder"
    echo
    exit 1
}

mv copyparty/web/deps/marked.full.js.gz srv/ || true

mkdir -p dist
zip_path="$(pwd)/dist/copyparty-$ver.zip"
tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz"

[[ -e "$zip_path" ]] ||
[[ -e "$tgz_path" ]] &&
[ -e "$zip_path" ] ||
[ -e "$tgz_path" ] &&
{
    echo "found existing archives for this version"
    echo " $zip_path"
226  scripts/sfx.py

@@ -2,7 +2,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals

import re, os, sys, stat, time, shutil, tarfile, hashlib, platform, tempfile
import os, sys, time, shutil, signal, tarfile, hashlib, platform, tempfile
import subprocess as sp

"""

@@ -29,6 +29,7 @@ STAMP = None
PY2 = sys.version_info[0] == 2
sys.dont_write_bytecode = True
me = os.path.abspath(os.path.realpath(__file__))
cpp = None


def eprint(*args, **kwargs):

@@ -191,91 +192,14 @@ def makesfx(tar_src, ver, ts):
    # skip 0


def get_py_win(ret):
    tops = []
    p = str(os.getenv("LocalAppdata"))
    if p:
        tops.append(os.path.join(p, "Programs", "Python"))

    progfiles = {}
    for p in ["ProgramFiles", "ProgramFiles(x86)"]:
        p = str(os.getenv(p))
        if p:
            progfiles[p] = 1
            # 32bit apps get x86 for both
            if p.endswith(" (x86)"):
                progfiles[p[:-6]] = 1

    tops += list(progfiles.keys())

    for sysroot in [me, sys.executable]:
        sysroot = sysroot[:3].upper()
        if sysroot[1] == ":" and sysroot not in tops:
            tops.append(sysroot)

    # $WIRESHARK_SLOGAN
    for top in tops:
        try:
            for name1 in sorted(os.listdir(top), reverse=True):
                if name1.lower().startswith("python"):
                    path1 = os.path.join(top, name1)
                    try:
                        for name2 in os.listdir(path1):
                            if name2.lower() == "python.exe":
                                path2 = os.path.join(path1, name2)
                                ret[path2.lower()] = path2
                    except:
                        pass
        except:
            pass


def get_py_nix(ret):
    ptn = re.compile(r"^(python|pypy)[0-9\.-]*$")
    for bindir in os.getenv("PATH").split(":"):
        if not bindir:
            next

        try:
            for fn in os.listdir(bindir):
                if ptn.match(fn):
                    fn = os.path.join(bindir, fn)
                    ret[fn.lower()] = fn
        except:
            pass


def read_py(binp):
    cmd = [
        binp,
        "-c",
        "import sys; sys.stdout.write(' '.join(str(x) for x in sys.version_info)); import jinja2",
    ]
    p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
    ver, _ = p.communicate()
    ver = ver.decode("utf-8").split(" ")[:3]
    ver = [int(x) if x.isdigit() else 0 for x in ver]
    return ver, p.returncode == 0


def get_pys():
    ver, chk = read_py(sys.executable)
    if chk or PY2:
        return [[chk, ver, sys.executable]]

    hits = {sys.executable.lower(): sys.executable}
    if platform.system() == "Windows":
        get_py_win(hits)
    else:
        get_py_nix(hits)

    ret = []
    for binp in hits.values():
        ver, chk = read_py(binp)
        ret.append([chk, ver, binp])
        msg("\t".join(str(x) for x in ret[-1]))

    return ret
def u8(gen):
    try:
        for s in gen:
            yield s.decode("utf-8", "ignore")
    except:
        yield s
        for s in gen:
            yield s


def yieldfile(fn):

@@ -295,17 +219,19 @@ def hashfile(fn):
def unpack():
    """unpacks the tar yielded by `data`"""
    name = "pe-copyparty"
    tag = "v" + str(STAMP)
    withpid = "{}.{}".format(name, os.getpid())
    top = tempfile.gettempdir()
    final = os.path.join(top, name)
    mine = os.path.join(top, withpid)
    tar = os.path.join(mine, "tar")
    tag_mine = os.path.join(mine, "v" + str(STAMP))
    tag_final = os.path.join(final, "v" + str(STAMP))

    if os.path.exists(tag_final):
        msg("found early")
        return final
    try:
        if tag in os.listdir(final):
            msg("found early")
            return final
    except:
        pass

    nwrite = 0
    os.mkdir(mine)

@@ -328,12 +254,15 @@ def unpack():

    os.remove(tar)

    with open(tag_mine, "wb") as f:
    with open(os.path.join(mine, tag), "wb") as f:
        f.write(b"h\n")

    if os.path.exists(tag_final):
        msg("found late")
        return final
    try:
        if tag in os.listdir(final):
            msg("found late")
            return final
    except:
        pass

    try:
        if os.path.islink(final):

@@ -352,7 +281,7 @@ def unpack():
        msg("reloc fail,", mine)
        return mine

    for fn in os.listdir(top):
    for fn in u8(os.listdir(top)):
        if fn.startswith(name) and fn not in [name, withpid]:
            try:
                old = os.path.join(top, fn)

@@ -418,44 +347,61 @@ def get_payload():
def confirm():
    msg()
    msg("*** hit enter to exit ***")
    raw_input() if PY2 else input()


def run(tmp, py):
    msg("OK")
    msg("will use:", py)
    msg("bound to:", tmp)

    fp_py = os.path.join(tmp, "py")
    with open(fp_py, "wb") as f:
        f.write(py.encode("utf-8") + b"\n")

    # avoid loading ./copyparty.py
    cmd = [
        py,
        "-c",
        'import sys, runpy; sys.path.insert(0, r"'
        + tmp
        + '"); runpy.run_module("copyparty", run_name="__main__")',
    ] + list(sys.argv[1:])

    msg("\n", cmd, "\n")
    p = sp.Popen(str(x) for x in cmd)
    try:
        p.wait()
        raw_input() if PY2 else input()
    except:
        p.wait()
        pass

    if p.returncode != 0:

def run(tmp, j2ver):
    global cpp

    msg("jinja2:", j2ver or "bundled")
    msg("sfxdir:", tmp)

    # "systemd-tmpfiles-clean.timer"?? HOW do you even come up with this shit
    try:
        import fcntl

        fd = os.open(tmp, os.O_RDONLY)
        fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        tmp = os.readlink(tmp) # can't flock a symlink, even with O_NOFOLLOW
    except:
        pass

    ld = [tmp, os.path.join(tmp, "dep-j2")]
    if j2ver:
        del ld[-1]

    cmd = (
        "import sys, runpy; "
        + "".join(['sys.path.insert(0, r"' + x + '"); ' for x in ld])
        + 'runpy.run_module("copyparty", run_name="__main__")'
    )
    cmd = [sys.executable, "-c", cmd] + list(sys.argv[1:])

    cmd = [str(x) for x in cmd]
    msg("\n", cmd, "\n")
    cpp = sp.Popen(cmd)
    try:
        cpp.wait()
    except:
        cpp.wait()

    if cpp.returncode != 0:
        confirm()

    sys.exit(p.returncode)
    sys.exit(cpp.returncode)
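Not part of the diff: for readers, the `-c` bootstrap string that the new `run()` assembles evaluates to roughly the following. The unpack directory below is illustrative; at runtime it is whatever `unpack()` returns, and the `dep-j2` entry is dropped when the host already has jinja2.

```
import sys, runpy

tmp = "/tmp/pe-copyparty"  # illustrative; the real path comes from unpack()

sys.path.insert(0, tmp)              # bundled copyparty source
sys.path.insert(0, tmp + "/dep-j2")  # bundled jinja2 + markupsafe (skipped if j2ver is set)
runpy.run_module("copyparty", run_name="__main__")
```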
def bye(sig, frame):
    if cpp is not None:
        cpp.terminate()


def main():
    sysver = str(sys.version).replace("\n", "\n" + " " * 18)
    pktime = time.strftime("%Y-%m-%d, %H:%M:%S", time.gmtime(STAMP))
    os.system("")
    msg()
    msg(" this is: copyparty", VER)
    msg(" packed at:", pktime, "UTC,", STAMP)

@@ -484,34 +430,16 @@ def main():

    # skip 0

    signal.signal(signal.SIGTERM, bye)

    tmp = unpack()
    fp_py = os.path.join(tmp, "py")
    if os.path.exists(fp_py):
        with open(fp_py, "rb") as f:
            py = f.read().decode("utf-8").rstrip()

        return run(tmp, py)
    try:
        from jinja2 import __version__ as j2ver
    except:
        j2ver = None

    pys = get_pys()
    pys.sort(reverse=True)
    j2, ver, py = pys[0]
    if j2:
        try:
            os.rename(os.path.join(tmp, "jinja2"), os.path.join(tmp, "x.jinja2"))
        except:
            pass

        return run(tmp, py)

    msg("\n could not find jinja2; will use py2 + the bundled version\n")
    for _, ver, py in pys:
        if ver > [2, 7] and ver < [3, 0]:
            return run(tmp, py)

    m = "\033[1;31m\n\n\ncould not find a python with jinja2 installed; please do one of these:\n\n pip install --user jinja2\n\n install python2\n\n\033[0m"
    msg(m)
    confirm()
    sys.exit(1)
    return run(tmp, j2ver)


if __name__ == "__main__":

@@ -32,8 +32,12 @@ dir="$(

# detect available pythons
(IFS=:; for d in $PATH; do
    printf '%s\n' "$d"/python* "$d"/pypy* | tac;
    done) | grep -E '(python|pypy)[0-9\.-]*$' > $dir/pys || true
    printf '%s\n' "$d"/python* "$d"/pypy*;
    done) |
    (sed -E 's/(.*\/[^/0-9]+)([0-9]?[^/]*)$/\2 \1/' || cat) |
    (sort -nr || cat) |
    (sed -E 's/([^ ]*) (.*)/\2\1/' || cat) |
    grep -E '/(python|pypy)[0-9\.-]*$' >$dir/pys || true

# see if we made a choice before
[ -z "$pybin" ] && pybin="$(cat $dir/py 2>/dev/null || true)"
164  scripts/speedtest-fs.py  (new file)

@@ -0,0 +1,164 @@
#!/usr/bin/env python

import os
import sys
import stat
import time
import signal
import traceback
import threading
from queue import Queue


"""speedtest-fs: filesystem performance estimate"""
__author__ = "ed <copyparty@ocv.me>"
__copyright__ = 2020
__license__ = "MIT"
__url__ = "https://github.com/9001/copyparty/"


def get_spd(nbyte, nsec):
    if not nsec:
        return "0.000 MB 0.000 sec 0.000 MB/s"

    mb = nbyte / (1024 * 1024.0)
    spd = mb / nsec

    return f"{mb:.3f} MB {nsec:.3f} sec {spd:.3f} MB/s"


class Inf(object):
    def __init__(self, t0):
        self.msgs = []
        self.errors = []
        self.reports = []
        self.mtx_msgs = threading.Lock()
        self.mtx_reports = threading.Lock()

        self.n_byte = 0
        self.n_sec = 0
        self.n_done = 0
        self.t0 = t0

        thr = threading.Thread(target=self.print_msgs)
        thr.daemon = True
        thr.start()

    def msg(self, fn, n_read):
        with self.mtx_msgs:
            self.msgs.append(f"{fn} {n_read}")

    def err(self, fn):
        with self.mtx_reports:
            self.errors.append(f"{fn}\n{traceback.format_exc()}")

    def print_msgs(self):
        while True:
            time.sleep(0.02)
            with self.mtx_msgs:
                msgs = self.msgs
                self.msgs = []

            if not msgs:
                continue

            msgs = msgs[-64:]
            msgs = [f"{get_spd(self.n_byte, self.n_sec)} {x}" for x in msgs]
            print("\n".join(msgs))

    def report(self, fn, n_byte, n_sec):
        with self.mtx_reports:
            self.reports.append([n_byte, n_sec, fn])
            self.n_byte += n_byte
            self.n_sec += n_sec

    def done(self):
        with self.mtx_reports:
            self.n_done += 1


def get_files(dir_path):
    for fn in os.listdir(dir_path):
        fn = os.path.join(dir_path, fn)
        st = os.stat(fn).st_mode

        if stat.S_ISDIR(st):
            yield from get_files(fn)

        if stat.S_ISREG(st):
            yield fn


def worker(q, inf, read_sz):
    while True:
        fn = q.get()
        if not fn:
            break

        n_read = 0
        try:
            t0 = time.time()
            with open(fn, "rb") as f:
                while True:
                    buf = f.read(read_sz)
                    if not buf:
                        break

                    n_read += len(buf)
                    inf.msg(fn, n_read)

            inf.report(fn, n_read, time.time() - t0)
        except:
            inf.err(fn)

    inf.done()


def sighandler(signo, frame):
    os._exit(0)


def main():
    signal.signal(signal.SIGINT, sighandler)

    root = "."
    if len(sys.argv) > 1:
        root = sys.argv[1]

    t0 = time.time()
    q = Queue(256)
    inf = Inf(t0)

    num_threads = 8
    read_sz = 32 * 1024
    for _ in range(num_threads):
        thr = threading.Thread(target=worker, args=(q, inf, read_sz,))
        thr.daemon = True
        thr.start()

    for fn in get_files(root):
        q.put(fn)

    for _ in range(num_threads):
        q.put(None)

    while inf.n_done < num_threads:
        time.sleep(0.1)

    t2 = time.time()
    print("\n")

    log = inf.reports
    log.sort()
    for nbyte, nsec, fn in log[-64:]:
        print(f"{get_spd(nbyte, nsec)} {fn}")

    print()
    print("\n".join(inf.errors))

    print(get_spd(inf.n_byte, t2 - t0))


if __name__ == "__main__":
    main()
77  scripts/uncomment.py  (new file)

@@ -0,0 +1,77 @@
#!/usr/bin/env python
# coding: utf-8
from __future__ import print_function, unicode_literals

import io
import sys
import tokenize


def uncomment(fpath):
    """ modified https://stackoverflow.com/a/62074206 """

    with open(fpath, "rb") as f:
        orig = f.read().decode("utf-8")

    out = ""
    for ln in orig.split("\n"):
        if not ln.startswith("#"):
            break

        out += ln + "\n"

    io_obj = io.StringIO(orig)
    prev_toktype = tokenize.INDENT
    last_lineno = -1
    last_col = 0
    for tok in tokenize.generate_tokens(io_obj.readline):
        # print(repr(tok))
        token_type = tok[0]
        token_string = tok[1]
        start_line, start_col = tok[2]
        end_line, end_col = tok[3]

        if start_line > last_lineno:
            last_col = 0

        if start_col > last_col:
            out += " " * (start_col - last_col)

        is_legalese = (
            "copyright" in token_string.lower() or "license" in token_string.lower()
        )

        if token_type == tokenize.STRING:
            if (
                prev_toktype != tokenize.INDENT
                and prev_toktype != tokenize.NEWLINE
                and start_col > 0
                or is_legalese
            ):
                out += token_string
            else:
                out += '"a"'
        elif token_type != tokenize.COMMENT or is_legalese:
            out += token_string

        prev_toktype = token_type
        last_lineno = end_line
        last_col = end_col

    # out = "\n".join(x for x in out.splitlines() if x.strip())

    with open(fpath, "wb") as f:
        f.write(out.encode("utf-8"))


def main():
    print("uncommenting", end="")
    for f in sys.argv[1:]:
        print(".", end="")
        uncomment(f)

    print("k")


if __name__ == "__main__":
    main()
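Not part of the diff: make-sfx.sh drives this stripper through a `find | xargs -0` pipeline (see the hunk above); a minimal Python-side sketch of the same idea, skipping `__version__.py` like the pipeline does, could look as follows. `strip_tree` is hypothetical; only `uncomment()` comes from the file above, and the import assumes you run it from the scripts/ folder.

```
import os
import sys

from uncomment import uncomment  # scripts/uncomment.py, defined above

def strip_tree(root):
    # strip comments and docstrings from every .py file under root,
    # except the version file which the build patches separately
    for dirpath, _, filenames in os.walk(root):
        for fn in filenames:
            if fn.endswith(".py") and fn != "__version__.py":
                uncomment(os.path.join(dirpath, fn))

if __name__ == "__main__":
    strip_tree(sys.argv[1] if len(sys.argv) > 1 else ".")
```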
6  setup.py

@@ -2,10 +2,8 @@
# coding: utf-8
from __future__ import print_function

import io
import os
import sys
from glob import glob
from shutil import rmtree

setuptools_available = True

@@ -49,7 +47,7 @@ with open(here + "/README.md", "rb") as f:
about = {}
if not VERSION:
    with open(os.path.join(here, NAME, "__version__.py"), "rb") as f:
        exec(f.read().decode("utf-8").split("\n\n", 1)[1], about)
        exec (f.read().decode("utf-8").split("\n\n", 1)[1], about)
else:
    about["__version__"] = VERSION

@@ -110,13 +108,13 @@ args = {
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Environment :: Console",
141  srv/extend.md  (new file)

@@ -0,0 +1,141 @@
# hi
this showcases my worst idea yet; *extending markdown with inline javascript*

due to obvious reasons it's disabled by default, and can be enabled with `-emp`

the examples are by no means correct, they're as much of a joke as this feature itself


### sub-header
nothing special about this one


## except/
this one becomes a hyperlink to ./except/ thanks to
* the `copyparty_pre` plugin at the end of this file
* which is invoked as a markdown filter every time the document is modified
* which looks for headers ending with a `/` and rewrites all headers below that

it is a passthrough to the markdown extension api, see https://marked.js.org/using_pro

in addition to the markdown extension functions, `ctor` will be called on document init


### these/
and this one becomes ./except/these/


#### ones.md
finally ./except/these/ones.md


### also-this.md
which should be ./except/also-this.md




# ok
now for another extension type, `copyparty_post`, which is called to manipulate the generated dom instead

`copyparty_post` can have the following functions, all optional
* `ctor` is called on document init
* `render` is called when the dom is done but still in-memory
* `render2` is called with the live browser dom as-displayed

## post example

the values in the `ex:` columns are linkified to `example.com/$value`

| ex:foo | bar | ex:baz |
| ------------ | -------- | ------ |
| asdf | nice | fgsfds |
| more one row | hi hello | aaa |

and the table can be sorted by clicking the headers

the difference is that with `copyparty_pre` you'll probably break various copyparty features, but if you use `copyparty_post` then future copyparty versions will probably break you




# heres the plugins
if there is anything below this line in the preview then the plugin feature is disabled (good)




```copyparty_pre
ctor() {
    md_plug['h'] = {
        on: false,
        lv: -1,
        path: []
    }
},
walkTokens(token) {
    if (token.type == 'heading') {
        var h = md_plug['h'],
            is_dir = token.text.endsWith('/');

        if (h.lv >= token.depth) {
            h.on = false;
        }
        if (!h.on && is_dir) {
            h.on = true;
            h.lv = token.depth;
            h.path = [token.text];
        }
        else if (h.on && h.lv < token.depth) {
            h.path = h.path.slice(0, token.depth - h.lv);
            h.path.push(token.text);
        }
        if (!h.on)
            return false;

        var path = h.path.join('');
        var emoji = is_dir ? '📂' : '📜';
        token.tokens[0].text = '<a href="' + path + '">' + emoji + ' ' + path + '</a>';
    }
    if (token.type == 'paragraph') {
        //console.log(JSON.parse(JSON.stringify(token.tokens)));
        for (var a = 0; a < token.tokens.length; a++) {
            var t = token.tokens[a];
            if (t.type == 'text' || t.type == 'strong' || t.type == 'em') {
                var ret = '', text = t.text;
                for (var b = 0; b < text.length; b++)
                    ret += (Math.random() > 0.5) ? text[b] : text[b].toUpperCase();

                t.text = ret;
            }
        }
    }
    return true;
}
```


```copyparty_post
render(dom) {
    var ths = dom.querySelectorAll('th');
    for (var a = 0; a < ths.length; a++) {
        var th = ths[a];
        if (th.textContent.indexOf('ex:') === 0) {
            th.textContent = th.textContent.slice(3);
            var nrow = 0;
            while ((th = th.previousSibling) != null)
                nrow++;

            var trs = ths[a].parentNode.parentNode.parentNode.querySelectorAll('tr');
            for (var b = 1; b < trs.length; b++) {
                var td = trs[b].childNodes[nrow];
                td.innerHTML = '<a href="//example.com/' + td.innerHTML + '">' + td.innerHTML + '</a>';
            }
        }
    }
},
render2(dom) {
    window.makeSortable(dom.getElementsByTagName('table')[0]);
}
```
34  srv/test.md

@@ -1,5 +1,16 @@
### hello world

* qwe
* asd
* zxc
* 573
* one
* two

* |||
|--|--|
|listed|table|

```
[72....................................................................]
[80............................................................................]

@@ -17,6 +28,16 @@
[80............................................................................]
```

```
l[i]=1I;(){}o0O</> var foo = "$(`bar`)"; a's'd
```

🔍🌽.📕.🍙🔎

[](#s1)
[s1](#s1)
[#s1](#s1)

a123456789b123456789c123456789d123456789e123456789f123456789g123456789h123456789i123456789j123456789k123456789l123456789m123456789n123456789o123456789p123456789q123456789r123456789s123456789t123456789u123456789v123456789w123456789x123456789y123456789z123456789

<foo> bar & <span>baz</span>

@@ -113,6 +134,15 @@ a newline toplevel
| a table | on the right |
| second row | foo bar |

||
--|:-:|-:
a table | big text in this | aaakbfddd
second row | centred | bbb

||
--|--|--
foo

* list entry
* [x] yes
* [ ] no

@@ -201,3 +231,7 @@ unrelated neat stuff:
awk '/./ {printf "%s %d\n", $0, NR; next} 1' <test.md >ln.md
gawk '{print gensub(/([a-zA-Z\.])/,NR" \\1","1")}' <test.md >ln.md
```

a|b|c
--|--|--
foo
@@ -3,8 +3,10 @@
from __future__ import print_function, unicode_literals

import os
import time
import json
import shutil
import tempfile
import unittest
import subprocess as sp # nosec

@@ -14,6 +16,12 @@ from copyparty.authsrv import AuthSrv
from copyparty import util


class Cfg(Namespace):
    def __init__(self, a=[], v=[], c=None):
        ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr mte".split()}
        super(Cfg, self).__init__(a=a, v=v, c=c, **ex)


class TestVFS(unittest.TestCase):
    def dump(self, vfs):
        print(json.dumps(vfs, indent=4, sort_keys=True, default=lambda o: o.__dict__))

@@ -30,13 +38,16 @@ class TestVFS(unittest.TestCase):
        response = self.unfoo(response)
        self.assertEqual(util.undot(query), response)

    def absify(self, root, names):
        return ["{}/{}".format(root, x).replace("//", "/") for x in names]

    def ls(self, vfs, vpath, uname):
        """helper for resolving and listing a folder"""
        vn, rem = vfs.get(vpath, uname, True, False)
        return vn.ls(rem, uname)
        r1 = vn.ls(rem, uname, False)
        r2 = vn.ls(rem, uname, False)
        self.assertEqual(r1, r2)

        fsdir, real, virt = r1
        real = [x[0] for x in real]
        return fsdir, real, virt

    def runcmd(self, *argv):
        p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)

@@ -59,16 +70,31 @@ class TestVFS(unittest.TestCase):

        if os.path.exists("/Volumes"):
            devname, _ = self.chkcmd("hdiutil", "attach", "-nomount", "ram://8192")
            _, _ = self.chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
            return "/Volumes/cptd"
            devname = devname.strip()
            print("devname: [{}]".format(devname))
            for _ in range(10):
                try:
                    _, _ = self.chkcmd(
                        "diskutil", "eraseVolume", "HFS+", "cptd", devname
                    )
                    return "/Volumes/cptd"
                except Exception as ex:
                    print(repr(ex))
                    time.sleep(0.25)

        raise Exception("TODO support windows")
            raise Exception("ramdisk creation failed")

    def log(self, src, msg):
        ret = os.path.join(tempfile.gettempdir(), "copyparty-test")
        try:
            os.mkdir(ret)
        finally:
            return ret

    def log(self, src, msg, c=0):
        pass

    def test(self):
        td = self.get_ramdisk() + "/vfs"
        td = os.path.join(self.get_ramdisk(), "vfs")
        try:
            shutil.rmtree(td)
        except OSError:

@@ -88,7 +114,7 @@ class TestVFS(unittest.TestCase):
            f.write(fn)

        # defaults
        vfs = AuthSrv(Namespace(c=None, a=[], v=[]), self.log).vfs
        vfs = AuthSrv(Cfg(), self.log).vfs
        self.assertEqual(vfs.nodes, {})
        self.assertEqual(vfs.vpath, "")
        self.assertEqual(vfs.realpath, td)

@@ -96,26 +122,24 @@ class TestVFS(unittest.TestCase):
        self.assertEqual(vfs.uwrite, ["*"])

        # single read-only rootfs (relative path)
        vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), self.log).vfs
        vfs = AuthSrv(Cfg(v=["a/ab/::r"]), self.log).vfs
        self.assertEqual(vfs.nodes, {})
        self.assertEqual(vfs.vpath, "")
        self.assertEqual(vfs.realpath, td + "/a/ab")
        self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab"))
        self.assertEqual(vfs.uread, ["*"])
        self.assertEqual(vfs.uwrite, [])

        # single read-only rootfs (absolute path)
        vfs = AuthSrv(
            Namespace(c=None, a=[], v=[td + "//a/ac/../aa//::r"]), self.log
        ).vfs
        vfs = AuthSrv(Cfg(v=[td + "//a/ac/../aa//::r"]), self.log).vfs
        self.assertEqual(vfs.nodes, {})
        self.assertEqual(vfs.vpath, "")
        self.assertEqual(vfs.realpath, td + "/a/aa")
        self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa"))
        self.assertEqual(vfs.uread, ["*"])
        self.assertEqual(vfs.uwrite, [])

        # read-only rootfs with write-only subdirectory (read-write for k)
        vfs = AuthSrv(
            Namespace(c=None, a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]),
            Cfg(a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]),
            self.log,
        ).vfs
        self.assertEqual(len(vfs.nodes), 1)

@@ -138,47 +162,69 @@ class TestVFS(unittest.TestCase):
        n = n.nodes["acb"]
        self.assertEqual(n.nodes, {})
        self.assertEqual(n.vpath, "a/ac/acb")
        self.assertEqual(n.realpath, td + "/a/ac/acb")
        self.assertEqual(n.realpath, os.path.join(td, "a", "ac", "acb"))
        self.assertEqual(n.uread, ["k"])
        self.assertEqual(n.uwrite, ["*", "k"])

        # something funky about the windows path normalization,
        # doesn't really matter but makes the test messy, TODO?

        fsdir, real, virt = self.ls(vfs, "/", "*")
        self.assertEqual(fsdir, td)
        self.assertEqual(real, ["b", "c"])
        self.assertEqual(list(virt), ["a"])

        fsdir, real, virt = self.ls(vfs, "a", "*")
        self.assertEqual(fsdir, td + "/a")
        self.assertEqual(fsdir, os.path.join(td, "a"))
        self.assertEqual(real, ["aa", "ab"])
        self.assertEqual(list(virt), ["ac"])

        fsdir, real, virt = self.ls(vfs, "a/ab", "*")
        self.assertEqual(fsdir, td + "/a/ab")
        self.assertEqual(fsdir, os.path.join(td, "a", "ab"))
        self.assertEqual(real, ["aba", "abb", "abc"])
        self.assertEqual(list(virt), [])

        fsdir, real, virt = self.ls(vfs, "a/ac", "*")
        self.assertEqual(fsdir, td + "/a/ac")
        self.assertEqual(fsdir, os.path.join(td, "a", "ac"))
        self.assertEqual(real, ["aca", "acc"])
        self.assertEqual(list(virt), [])

        fsdir, real, virt = self.ls(vfs, "a/ac", "k")
        self.assertEqual(fsdir, td + "/a/ac")
        self.assertEqual(fsdir, os.path.join(td, "a", "ac"))
        self.assertEqual(real, ["aca", "acc"])
        self.assertEqual(list(virt), ["acb"])

        self.assertRaises(util.Pebkac, vfs.get, "a/ac/acb", "*", True, False)

        fsdir, real, virt = self.ls(vfs, "a/ac/acb", "k")
        self.assertEqual(fsdir, td + "/a/ac/acb")
        self.assertEqual(fsdir, os.path.join(td, "a", "ac", "acb"))
        self.assertEqual(real, ["acba", "acbb", "acbc"])
        self.assertEqual(list(virt), [])

        # admin-only rootfs with all-read-only subfolder
        vfs = AuthSrv(
            Cfg(a=["k:k"], v=[".::ak", "a:a:r"]),
            self.log,
        ).vfs
        self.assertEqual(len(vfs.nodes), 1)
        self.assertEqual(vfs.vpath, "")
        self.assertEqual(vfs.realpath, td)
        self.assertEqual(vfs.uread, ["k"])
        self.assertEqual(vfs.uwrite, ["k"])
        n = vfs.nodes["a"]
        self.assertEqual(len(vfs.nodes), 1)
        self.assertEqual(n.vpath, "a")
        self.assertEqual(n.realpath, os.path.join(td, "a"))
        self.assertEqual(n.uread, ["*"])
        self.assertEqual(n.uwrite, [])
        self.assertEqual(vfs.can_access("/", "*"), [False, False])
        self.assertEqual(vfs.can_access("/", "k"), [True, True])
        self.assertEqual(vfs.can_access("/a", "*"), [True, False])
        self.assertEqual(vfs.can_access("/a", "k"), [True, False])

        # breadth-first construction
        vfs = AuthSrv(
            Namespace(
                c=None,
                a=[],
            Cfg(
                v=[
                    "a/ac/acb:a/ac/acb:w",
                    "a:a:w",

@@ -199,7 +245,7 @@ class TestVFS(unittest.TestCase):
        self.undot(vfs, "./.././foo/..", "")

        # shadowing
        vfs = AuthSrv(Namespace(c=None, a=[], v=[".::r", "b:a/ac:r"]), self.log).vfs
        vfs = AuthSrv(Cfg(v=[".::r", "b:a/ac:r"]), self.log).vfs

        fsp, r1, v1 = self.ls(vfs, "", "*")
        self.assertEqual(fsp, td)

@@ -207,20 +253,20 @@ class TestVFS(unittest.TestCase):
        self.assertEqual(list(v1), ["a"])

        fsp, r1, v1 = self.ls(vfs, "a", "*")
        self.assertEqual(fsp, td + "/a")
        self.assertEqual(fsp, os.path.join(td, "a"))
        self.assertEqual(r1, ["aa", "ab"])
        self.assertEqual(list(v1), ["ac"])

        fsp1, r1, v1 = self.ls(vfs, "a/ac", "*")
        fsp2, r2, v2 = self.ls(vfs, "b", "*")
        self.assertEqual(fsp1, td + "/b")
        self.assertEqual(fsp2, td + "/b")
        self.assertEqual(fsp1, os.path.join(td, "b"))
        self.assertEqual(fsp2, os.path.join(td, "b"))
        self.assertEqual(r1, ["ba", "bb", "bc"])
        self.assertEqual(r1, r2)
        self.assertEqual(list(v1), list(v2))

        # config file parser
        cfg_path = self.get_ramdisk() + "/test.cfg"
        cfg_path = os.path.join(self.get_ramdisk(), "test.cfg")
        with open(cfg_path, "wb") as f:
            f.write(
                dedent(

@@ -236,7 +282,7 @@ class TestVFS(unittest.TestCase):
                ).encode("utf-8")
            )

        au = AuthSrv(Namespace(c=[cfg_path], a=[], v=[]), self.log)
        au = AuthSrv(Cfg(c=[cfg_path]), self.log)
        self.assertEqual(au.user["a"], "123")
        self.assertEqual(au.user["asd"], "fgh:jkl")
        n = au.vfs

@@ -248,10 +294,11 @@ class TestVFS(unittest.TestCase):
        self.assertEqual(len(n.nodes), 1)
        n = n.nodes["dst"]
        self.assertEqual(n.vpath, "dst")
        self.assertEqual(n.realpath, td + "/src")
        self.assertEqual(n.realpath, os.path.join(td, "src"))
        self.assertEqual(n.uread, ["a", "asd"])
        self.assertEqual(n.uwrite, ["asd"])
        self.assertEqual(len(n.nodes), 0)

        os.chdir(tempfile.gettempdir())
        shutil.rmtree(td)
        os.unlink(cfg_path)