mirror of https://github.com/9001/copyparty.git, synced 2025-10-23 16:14:10 +00:00
Compare commits
242 Commits
cd3e0afad2 … 7b2a4a3d59
12  .eslintrc.json  Normal file
@@ -0,0 +1,12 @@
{
    "env": {
        "browser": true,
        "es2021": true
    },
    "extends": "eslint:recommended",
    "parserOptions": {
        "ecmaVersion": 12
    },
    "rules": {
    }
}
2  .gitattributes  vendored
@@ -1,4 +1,6 @@
* text eol=lf

*.reg text eol=crlf

*.png binary
*.gif binary
15  .vscode/launch.json  vendored
@@ -10,12 +10,25 @@
            "cwd": "${workspaceFolder}",
            "args": [
                //"-nw",
                "-ed",
                "-emp",
                "-e2dsa",
                "-e2ts",
                "-a",
                "ed:wark",
                "-v",
                "srv::r:aed"
                "srv::r:aed:cnodupe",
                "-v",
                "dist:dist:r"
            ]
        },
        {
            "name": "No debug",
            "preLaunchTask": "no_dbg",
            "type": "python",
            //"request": "attach", "port": 42069
            // fork: nc -l 42069 </dev/null
        },
        {
            "name": "Run active unit test",
            "type": "python",
12  .vscode/settings.json  vendored
@@ -50,11 +50,9 @@
    "files.associations": {
        "*.makefile": "makefile"
    },
    "editor.codeActionsOnSaveTimeout": 9001,
    "editor.formatOnSaveTimeout": 9001,
    //
    // things you may wanna edit:
    //
    "python.pythonPath": "/usr/bin/python3",
    //"python.linting.enabled": true,
    "python.formatting.blackArgs": [
        "-t",
        "py27"
    ],
    "python.linting.enabled": true,
}
17  .vscode/tasks.json  vendored  Normal file
@@ -0,0 +1,17 @@
{
    "version": "2.0.0",
    "tasks": [
        {
            "label": "pre",
            "command": "true;rm -rf inc/* inc/.hist/;mkdir -p inc;",
            "type": "shell"
        },
        {
            "label": "no_dbg",
            "type": "shell",
            "command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -e2ts -a ed:wark -v srv::r:aed:cnodupe -v dist:dist:r ;exit 1"
            // -v ~/Music/mt:mt:r:cmtp=.bpm=~/dev/copyparty/bin/mtag/audio-bpm.py:cmtp=key=~/dev/copyparty/bin/mtag/audio-key.py:ce2tsr
            // -v ~/Music/mt:mt:r:cmtp=.bpm=~/dev/copyparty/bin/mtag/audio-bpm.py:ce2tsr
        }
    ]
}
217  README.md
@@ -8,11 +8,46 @@

turn your phone or raspi into a portable file server with resumable uploads/downloads using IE6 or any other browser

* server runs on anything with `py2.7` or `py3.2+`
* server runs on anything with `py2.7` or `py3.3+`
* *resumable* uploads need `firefox 12+` / `chrome 6+` / `safari 6+` / `IE 10+`
* code standard: `black`


## readme toc

* top
* [quickstart](#quickstart)
* [notes](#notes)
* [status](#status)
* [bugs](#bugs)
* [usage](#usage)
* [searching](#searching)
* [search configuration](#search-configuration)
* [metadata from audio files](#metadata-from-audio-files)
* [file parser plugins](#file-parser-plugins)
* [complete examples](#complete-examples)
* [client examples](#client-examples)
* [dependencies](#dependencies)
* [optional gpl stuff](#optional-gpl-stuff)
* [sfx](#sfx)
* [sfx repack](#sfx-repack)
* [install on android](#install-on-android)
* [dev env setup](#dev-env-setup)
* [how to release](#how-to-release)
* [todo](#todo)


## quickstart

download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!

running the sfx without arguments (for example doubleclicking it on Windows) will let anyone access the current folder; see `-h` for help if you want accounts and volumes etc

you may also want these, especially on servers:
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for legit https)


## notes

* iPhone/iPad: use Firefox to download files
@@ -25,47 +60,170 @@ turn your phone or raspi into a portable file server with resumable uploads/down

## status

* [x] sanic multipart parser
* [x] load balancer (multiprocessing)
* [x] upload (plain multipart, ie6 support)
* [x] upload (js, resumable, multithreaded)
* [x] download
* [x] browser
* [x] media player
* [ ] thumbnails
* [ ] download as zip
* [x] volumes
* [x] accounts
* [x] markdown viewer
* [x] markdown editor
* [x] FUSE client
* backend stuff
  * ☑ sanic multipart parser
  * ☑ load balancer (multiprocessing)
  * ☑ volumes (mountpoints)
  * ☑ accounts
* upload
  * ☑ basic: plain multipart, ie6 support
  * ☑ up2k: js, resumable, multithreaded
  * ☑ stash: simple PUT filedropper
  * ☑ symlink/discard existing files (content-matching)
* download
  * ☑ single files in browser
  * ✖ folders as zip files
  * ☑ FUSE client (read-only)
* browser
  * ☑ tree-view
  * ☑ media player
  * ✖ thumbnails
  * ✖ SPA (browse while uploading)
    * currently safe using the file-tree on the left only, not folders in the file list
* server indexing
  * ☑ locate files by contents
  * ☑ search by name/path/date/size
  * ☑ search by ID3-tags etc.
* markdown
  * ☑ viewer
  * ☑ editor (sure why not)

summary: it works! you can use it! (but technically not even close to beta)


# bugs

* Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* probably more, pls let me know


# usage

the browser has the following hotkeys
* `0..9` jump to 10%..90%
* `U/O` skip 10sec back/forward
* `J/L` prev/next song
* `I/K` prev/next folder
* `P` parent folder


# searching

when started with `-e2dsa` copyparty will scan/index all your files. This avoids duplicates on upload, and also makes the volumes searchable through the web-ui:
* make search queries by `size`/`date`/`directory-path`/`filename`, or...
* drag/drop a local file to see if the same contents exist somewhere on the server (you get the URL if it does)

path/name queries are space-separated, AND'ed together, and words are negated with a `-` prefix, so for example:
* path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
* name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9)

add `-e2ts` to also scan/index tags from music files:


## search configuration

searching relies on two databases, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`). Configuration can be done through arguments, volume flags, or a mix of both.

through arguments:
* `-e2d` enables file indexing on upload
* `-e2ds` scans writable folders on startup
* `-e2dsa` scans all mounted volumes (including readonly ones)
* `-e2t` enables metadata indexing on upload
* `-e2ts` scans for tags in all files that don't have tags yet
* `-e2tsr` deletes all existing tags, so a full reindex

the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
* `-v ~/music::r:ce2dsa:ce2tsr` does a full reindex of everything on startup
* `-v ~/music::r:cd2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::r:cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`

`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those


## metadata from audio files

`-mte` decides which tags to index and display in the browser (and also the display order), this can be changed per-volume:
* `-v ~/music::r:cmte=title,artist` indexes and displays *title* followed by *artist*

if you add/remove a tag from `mte` you will need to run with `-e2tsr` once to rebuild the database, otherwise only new files will be affected

`-mtm` can be used to add or redefine a metadata mapping, say you have media files with `foo` and `bar` tags and you want them to display as `qux` in the browser (preferring `foo` if both are present), then do `-mtm qux=foo,bar` and now you can `-mte artist,title,qux`

tags that start with a `.` such as `.bpm` and `.dur`(ation) indicate numeric value

see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)

`--no-mutagen` disables mutagen and uses ffprobe instead, which...
* is about 20x slower than mutagen
* catches a few tags that mutagen doesn't
* avoids pulling any GPL code into copyparty
* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve


## file parser plugins

copyparty can invoke external programs to collect additional metadata for files using `mtp` (as argument or volume flag), there is a default timeout of 30sec

* `-mtp .bpm=~/bin/audio-bpm.py` will execute `~/bin/audio-bpm.py` with the audio file as argument 1 to provide the `.bpm` tag, if that does not exist in the audio metadata
* `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
* `-v ~/music::r:cmtp=.bpm=~/bin/audio-bpm.py:cmtp=key=f,t5,~/bin/audio-key.py` both as a per-volume config wow this is getting ugly


## complete examples

* read-only music server with bpm and key scanning
  `python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts -mtp .bpm=f,audio-bpm.py -mtp key=f,audio-key.py`


# client examples

* javascript: dump some state into a file (two separate examples)
  * `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
  * `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`

* FUSE: mount a copyparty server as a local filesystem (see [./bin/](bin/))
* curl/wget: upload some files (post=file, chunk=stdin)
  * `post(){ curl -b cppwd=wark http://127.0.0.1:3923/ -F act=bput -F f=@"$1";}`
    `post movie.mkv`
  * `post(){ wget --header='Cookie: cppwd=wark' http://127.0.0.1:3923/?raw --post-file="$1" -O-;}`
    `post movie.mkv`
  * `chunk(){ curl -b cppwd=wark http://127.0.0.1:3923/ -T-;}`
    `chunk <movie.mkv`

* FUSE: mount a copyparty server as a local filesystem
  * cross-platform python client available in [./bin/](bin/)
  * [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)

copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads:

    b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
    b512 <movie.mkv


# dependencies

* `jinja2`
  * pulls in `markupsafe` as of v2.7; use jinja 2.6 on py3.2
* `jinja2` (is built into the SFX)

optional, enables thumbnails:
**optional,** enables music tags:
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
* or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)

**optional,** will eventually enable thumbnails:
* `Pillow` (requires py2.7 or py3.5+)


## optional gpl stuff

some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag)

these are standalone and will never be imported / evaluated by copyparty


# sfx

currently there are two self-contained binaries:
* `copyparty-sfx.sh` for unix (linux and osx) -- smaller, more robust
* `copyparty-sfx.py` for windows (unix too) -- crossplatform, beta
* [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere
* [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos

launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course

@@ -85,16 +243,18 @@ the features you can opt to drop are

for the `re`pack to work, first run one of the sfx'es once to unpack it

**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a `no-ogv no-cm` repack; works on linux/macos (and windows with msys2 or WSL)


# install on android

install [Termux](https://termux.com/) (see [ocv.me/termux](https://ocv.me/termux/)) and then copy-paste this into Termux (long-tap) all at once:
```sh
apt update && apt -y full-upgrade && termux-setup-storage && apt -y install curl && cd && curl -L https://github.com/9001/copyparty/raw/master/scripts/copyparty-android.sh > copyparty-android.sh && chmod 755 copyparty-android.sh && ./copyparty-android.sh -h
apt update && apt -y full-upgrade && termux-setup-storage && apt -y install python && python -m ensurepip && python -m pip install -U copyparty
echo $?
```

after the initial setup (and restarting bash), you can launch copyparty at any time by running "copyparty" in Termux
after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux


# dev env setup
@@ -113,6 +273,7 @@ pip install black bandit pylint flake8 # vscode tooling
in the `scripts` folder:

* run `make -C deps-docker` to build all dependencies
* `git tag v1.2.3 && git push origin --tags`
* create github release with `make-tgz-release.sh`
* upload to pypi with `make-pypi-release.(sh|bat)`
* create sfx with `make-sfx.sh`
@@ -122,13 +283,15 @@ in the `scripts` folder:

roughly sorted by priority

* up2k handle filename too long
* up2k fails on empty files? alert then stuck
* reduce up2k roundtrips
  * start from a chunk index and just go
  * terminate client on bad data
* drop onto folders
* look into android thumbnail cache file format
* `os.copy_file_range` for up2k cloning
* up2k partials ui
* support pillow-simd
* cache sha512 chunks on client
* symlink existing files on upload
* comment field
* ~~look into android thumbnail cache file format~~ bad idea
* figure out the deal with pixel3a not being connectable as hotspot
* pixel3a having unpredictable 3sec latency in general :||||
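side note on the client-examples section above: the `b512()` one-liner just takes the raw sha512 digest of the file, base64-encodes it, and keeps the first 43 characters. a rough python equivalent (an illustration only, not part of this changeset):

```python
import base64
import hashlib
import sys

def b512(path):
    # same recipe as the b512() shell helper in the README:
    # base64 of the raw sha512 digest, truncated to 43 chars
    h = hashlib.sha512()
    with open(path, "rb") as f:
        for buf in iter(lambda: f.read(1024 * 1024), b""):
            h.update(buf)
    return base64.b64encode(h.digest()).decode("ascii")[:43]

if __name__ == "__main__":
    print(b512(sys.argv[1]))
```

comparing its output against the checksum the server returns is one way to verify an upload.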
@@ -1,4 +1,4 @@
# copyparty-fuse.py
# [`copyparty-fuse.py`](copyparty-fuse.py)
* mount a copyparty server as a local filesystem (read-only)
* **supports Windows!** -- expect `194 MiB/s` sequential read
* **supports Linux** -- expect `117 MiB/s` sequential read
@@ -10,6 +10,8 @@ filecache is default-on for windows and macos;

note that copyparty should run with `-ed` to enable dotfiles (hidden otherwise)

also consider using [../docs/rclone.md](../docs/rclone.md) instead for 5x performance


## to run this on windows:
* install [winfsp](https://github.com/billziss-gh/winfsp/releases/latest) and [python 3](https://www.python.org/downloads/)
@@ -27,8 +29,19 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas



# copyparty-fuse🅱️.py
# [`copyparty-fuse🅱️.py`](copyparty-fuseb.py)
* mount a copyparty server as a local filesystem (read-only)
* does the same thing except more correct, `samba` approves
* **supports Linux** -- expect `18 MiB/s` (wait what)
* **supports Macos** -- probably



# [`copyparty-fuse-streaming.py`](copyparty-fuse-streaming.py)
* pretend this doesn't exist



# [`mtag/`](mtag/)
* standalone programs which perform misc. file analysis
* copyparty can Popen programs like these during file indexing to collect additional metadata
1100  bin/copyparty-fuse-streaming.py  Executable file
File diff suppressed because it is too large
@@ -12,13 +12,20 @@ __url__ = "https://github.com/9001/copyparty/"
|
||||
mount a copyparty server (local or remote) as a filesystem
|
||||
|
||||
usage:
|
||||
python copyparty-fuse.py ./music http://192.168.1.69:3923/
|
||||
python copyparty-fuse.py http://192.168.1.69:3923/ ./music
|
||||
|
||||
dependencies:
|
||||
python3 -m pip install --user fusepy
|
||||
+ on Linux: sudo apk add fuse
|
||||
+ on Macos: https://osxfuse.github.io/
|
||||
+ on Windows: https://github.com/billziss-gh/winfsp/releases/latest
|
||||
|
||||
note:
|
||||
you probably want to run this on windows clients:
|
||||
https://github.com/9001/copyparty/blob/master/contrib/explorer-nothumbs-nofoldertypes.reg
|
||||
|
||||
get server cert:
|
||||
awk '/-BEGIN CERTIFICATE-/ {a=1} a; /-END CERTIFICATE-/{exit}' <(openssl s_client -connect 127.0.0.1:3923 </dev/null 2>/dev/null) >cert.pem
|
||||
"""
|
||||
|
||||
|
||||
@@ -26,24 +33,25 @@ import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import json
|
||||
import stat
|
||||
import errno
|
||||
import struct
|
||||
import codecs
|
||||
import builtins
|
||||
import platform
|
||||
import argparse
|
||||
import threading
|
||||
import traceback
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
|
||||
|
||||
DEBUG = False # ctrl-f this to configure logging
|
||||
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
WINDOWS = sys.platform == "win32"
|
||||
MACOS = platform.system() == "Darwin"
|
||||
info = log = dbg = None
|
||||
|
||||
|
||||
try:
|
||||
@@ -97,13 +105,54 @@ def rice_tid():
|
||||
|
||||
|
||||
def fancy_log(msg):
|
||||
print("{} {}\n".format(rice_tid(), msg), end="")
|
||||
print("{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg), end="")
|
||||
|
||||
|
||||
def null_log(msg):
|
||||
pass
|
||||
|
||||
|
||||
def hexler(binary):
|
||||
return binary.replace("\r", "\\r").replace("\n", "\\n")
|
||||
return " ".join(["{}\033[36m{:02x}\033[0m".format(b, ord(b)) for b in binary])
|
||||
return " ".join(map(lambda b: format(ord(b), "02x"), binary))
|
||||
|
||||
|
||||
def register_wtf8():
|
||||
def wtf8_enc(text):
|
||||
return str(text).encode("utf-8", "surrogateescape"), len(text)
|
||||
|
||||
def wtf8_dec(binary):
|
||||
return bytes(binary).decode("utf-8", "surrogateescape"), len(binary)
|
||||
|
||||
def wtf8_search(encoding_name):
|
||||
return codecs.CodecInfo(wtf8_enc, wtf8_dec, name="wtf-8")
|
||||
|
||||
codecs.register(wtf8_search)
|
||||
|
||||
|
||||
bad_good = {}
|
||||
good_bad = {}
|
||||
|
||||
|
||||
def enwin(txt):
|
||||
return "".join([bad_good.get(x, x) for x in txt])
|
||||
|
||||
for bad, good in bad_good.items():
|
||||
txt = txt.replace(bad, good)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
def dewin(txt):
|
||||
return "".join([good_bad.get(x, x) for x in txt])
|
||||
|
||||
for bad, good in bad_good.items():
|
||||
txt = txt.replace(good, bad)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
class RecentLog(object):
|
||||
def __init__(self):
|
||||
self.mtx = threading.Lock()
|
||||
@@ -115,7 +164,7 @@ class RecentLog(object):
|
||||
thr.start()
|
||||
|
||||
def put(self, msg):
|
||||
msg = "{} {}\n".format(rice_tid(), msg)
|
||||
msg = "{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg)
|
||||
if self.f:
|
||||
fmsg = " ".join([datetime.utcnow().strftime("%H%M%S.%f"), str(msg)])
|
||||
self.f.write(fmsg.encode("utf-8"))
|
||||
@@ -138,22 +187,6 @@ class RecentLog(object):
|
||||
print("".join(q), end="")
|
||||
|
||||
|
||||
if DEBUG:
|
||||
# debug=on,
|
||||
# windows terminals are slow (cmd.exe, mintty)
|
||||
# otoh fancy_log beats RecentLog on linux
|
||||
logger = RecentLog().put if WINDOWS else fancy_log
|
||||
|
||||
info = logger
|
||||
log = logger
|
||||
dbg = logger
|
||||
else:
|
||||
# debug=off, speed is dontcare
|
||||
info = fancy_log
|
||||
log = null_log
|
||||
dbg = null_log
|
||||
|
||||
|
||||
# [windows/cmd/cpy3] python dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/
|
||||
# [windows/cmd/msys2] C:\msys64\mingw64\bin\python3 dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/
|
||||
# [windows/mty/msys2] /mingw64/bin/python3 /c/Users/ed/dev/copyparty/bin/copyparty-fuse.py q: http://192.168.1.159:1234/
|
||||
@@ -183,6 +216,8 @@ def html_dec(txt):
|
||||
txt.replace("<", "<")
|
||||
.replace(">", ">")
|
||||
.replace(""", '"')
|
||||
.replace(" ", "\r")
|
||||
.replace(" ", "\n")
|
||||
.replace("&", "&")
|
||||
)
|
||||
|
||||
@@ -195,10 +230,11 @@ class CacheNode(object):
|
||||
|
||||
|
||||
class Gateway(object):
|
||||
def __init__(self, base_url):
|
||||
self.base_url = base_url
|
||||
def __init__(self, ar):
|
||||
self.base_url = ar.base_url
|
||||
self.password = ar.a
|
||||
|
||||
ui = urllib.parse.urlparse(base_url)
|
||||
ui = urllib.parse.urlparse(self.base_url)
|
||||
self.web_root = ui.path.strip("/")
|
||||
try:
|
||||
self.web_host, self.web_port = ui.netloc.split(":")
|
||||
@@ -208,15 +244,25 @@ class Gateway(object):
|
||||
if ui.scheme == "http":
|
||||
self.web_port = 80
|
||||
elif ui.scheme == "https":
|
||||
raise Exception("todo")
|
||||
self.web_port = 443
|
||||
else:
|
||||
raise Exception("bad url?")
|
||||
|
||||
self.ssl_context = None
|
||||
self.use_tls = ui.scheme.lower() == "https"
|
||||
if self.use_tls:
|
||||
import ssl
|
||||
|
||||
if ar.td:
|
||||
self.ssl_context = ssl._create_unverified_context()
|
||||
elif ar.te:
|
||||
self.ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS)
|
||||
self.ssl_context.load_verify_locations(ar.te)
|
||||
|
||||
self.conns = {}
|
||||
|
||||
def quotep(self, path):
|
||||
# TODO: mojibake support
|
||||
path = path.encode("utf-8", "ignore")
|
||||
path = path.encode("wtf-8")
|
||||
return quote(path, safe="/")
|
||||
|
||||
def getconn(self, tid=None):
|
||||
@@ -226,7 +272,15 @@ class Gateway(object):
|
||||
except:
|
||||
info("new conn [{}] [{}]".format(self.web_host, self.web_port))
|
||||
|
||||
conn = http.client.HTTPConnection(self.web_host, self.web_port, timeout=260)
|
||||
args = {}
|
||||
if not self.use_tls:
|
||||
C = http.client.HTTPConnection
|
||||
else:
|
||||
C = http.client.HTTPSConnection
|
||||
if self.ssl_context:
|
||||
args = {"context": self.ssl_context}
|
||||
|
||||
conn = C(self.web_host, self.web_port, timeout=260, **args)
|
||||
|
||||
self.conns[tid] = conn
|
||||
return conn
|
||||
@@ -239,41 +293,72 @@ class Gateway(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
def sendreq(self, *args, **kwargs):
|
||||
def sendreq(self, *args, headers={}, **kwargs):
|
||||
tid = get_tid()
|
||||
if self.password:
|
||||
headers["Cookie"] = "=".join(["cppwd", self.password])
|
||||
|
||||
try:
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
c.request(*list(args), headers=headers, **kwargs)
|
||||
return c.getresponse()
|
||||
except:
|
||||
self.closeconn(tid)
|
||||
dbg("bad conn")
|
||||
|
||||
self.closeconn(tid)
|
||||
try:
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **kwargs)
|
||||
c.request(*list(args), headers=headers, **kwargs)
|
||||
return c.getresponse()
|
||||
except:
|
||||
info("http connection failed:\n" + traceback.format_exc())
|
||||
if self.use_tls and not self.ssl_context:
|
||||
import ssl
|
||||
|
||||
cert = ssl.get_server_certificate((self.web_host, self.web_port))
|
||||
info("server certificate probably not trusted:\n" + cert)
|
||||
|
||||
raise
|
||||
|
||||
def listdir(self, path):
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
|
||||
if bad_good:
|
||||
path = dewin(path)
|
||||
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots&ls"
|
||||
r = self.sendreq("GET", web_path)
|
||||
if r.status != 200:
|
||||
self.closeconn()
|
||||
raise Exception(
|
||||
log(
|
||||
"http error {} reading dir {} in {}".format(
|
||||
r.status, web_path, rice_tid()
|
||||
)
|
||||
)
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
ctype = r.getheader("Content-Type", "")
|
||||
if ctype == "application/json":
|
||||
parser = self.parse_jls
|
||||
elif ctype.startswith("text/html"):
|
||||
parser = self.parse_html
|
||||
else:
|
||||
log("listdir on file: {}".format(path))
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
try:
|
||||
return self.parse_html(r)
|
||||
return parser(r)
|
||||
except:
|
||||
traceback.print_exc()
|
||||
info(repr(path) + "\n" + traceback.format_exc())
|
||||
raise
|
||||
|
||||
def download_file_range(self, path, ofs1, ofs2):
|
||||
if bad_good:
|
||||
path = dewin(path)
|
||||
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
|
||||
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
|
||||
info(
|
||||
"DL {:4.0f}K\033[36m{:>9}-{:<9}\033[0m{}".format(
|
||||
(ofs2 - ofs1) / 1024.0, ofs1, ofs2 - 1, path
|
||||
(ofs2 - ofs1) / 1024.0, ofs1, ofs2 - 1, hexler(path)
|
||||
)
|
||||
)
|
||||
|
||||
@@ -288,11 +373,34 @@ class Gateway(object):
|
||||
|
||||
return r.read()
|
||||
|
||||
def parse_jls(self, datasrc):
|
||||
rsp = b""
|
||||
while True:
|
||||
buf = datasrc.read(1024 * 32)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
rsp += buf
|
||||
|
||||
rsp = json.loads(rsp.decode("utf-8"))
|
||||
ret = []
|
||||
for is_dir, nodes in [[True, rsp["dirs"]], [False, rsp["files"]]]:
|
||||
for n in nodes:
|
||||
fname = unquote(n["href"]).rstrip(b"/")
|
||||
fname = fname.decode("wtf-8")
|
||||
if bad_good:
|
||||
fname = enwin(fname)
|
||||
|
||||
fun = self.stat_dir if is_dir else self.stat_file
|
||||
ret.append([fname, fun(n["ts"], n["sz"]), 0])
|
||||
|
||||
return ret
|
||||
|
||||
def parse_html(self, datasrc):
|
||||
ret = []
|
||||
remainder = b""
|
||||
ptn = re.compile(
|
||||
r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
|
||||
r'^<tr><td>(-|DIR|<a [^<]+</a>)</td><td><a[^>]* href="([^"]+)"[^>]*>([^<]+)</a></td><td>([^<]+)</td><td>[^<]+</td><td>([^<]+)</td></tr>$'
|
||||
)
|
||||
|
||||
while True:
|
||||
@@ -314,8 +422,13 @@ class Gateway(object):
|
||||
# print(line)
|
||||
continue
|
||||
|
||||
ftype, fname, fsize, fdate = m.groups()
|
||||
fname = html_dec(fname)
|
||||
ftype, furl, fname, fsize, fdate = m.groups()
|
||||
fname = furl.rstrip("/").split("/")[-1]
|
||||
fname = unquote(fname)
|
||||
fname = fname.decode("wtf-8")
|
||||
if bad_good:
|
||||
fname = enwin(fname)
|
||||
|
||||
sz = 1
|
||||
ts = 60 * 60 * 24 * 2
|
||||
try:
|
||||
@@ -325,7 +438,7 @@ class Gateway(object):
|
||||
info("bad HTML or OS [{}] [{}]".format(fdate, fsize))
|
||||
# python cannot strptime(1959-01-01) on windows
|
||||
|
||||
if ftype == "-":
|
||||
if ftype != "DIR":
|
||||
ret.append([fname, self.stat_file(ts, sz), 0])
|
||||
else:
|
||||
ret.append([fname, self.stat_dir(ts, sz), 0])
|
||||
@@ -358,11 +471,11 @@ class Gateway(object):
|
||||
|
||||
|
||||
class CPPF(Operations):
|
||||
def __init__(self, base_url, dircache, filecache):
|
||||
self.gw = Gateway(base_url)
|
||||
def __init__(self, ar):
|
||||
self.gw = Gateway(ar)
|
||||
self.junk_fh_ctr = 3
|
||||
self.n_dircache = dircache
|
||||
self.n_filecache = filecache
|
||||
self.n_dircache = ar.cd
|
||||
self.n_filecache = ar.cf
|
||||
|
||||
self.dircache = []
|
||||
self.dircache_mtx = threading.Lock()
|
||||
@@ -379,7 +492,11 @@ class CPPF(Operations):
|
||||
cache_path, cache1 = cn.tag
|
||||
cache2 = cache1 + len(cn.data)
|
||||
msg += "\n{:<2} {:>7} {:>10}:{:<9} {}".format(
|
||||
n, len(cn.data), cache1, cache2, cache_path
|
||||
n,
|
||||
len(cn.data),
|
||||
cache1,
|
||||
cache2,
|
||||
cache_path.replace("\r", "\\r").replace("\n", "\\n"),
|
||||
)
|
||||
return msg
|
||||
|
||||
@@ -574,8 +691,18 @@ class CPPF(Operations):
|
||||
|
||||
else:
|
||||
if get2 - get1 <= 1024 * 1024:
|
||||
h_ofs = get1 - 256 * 1024
|
||||
h_end = get2 + 1024 * 1024
|
||||
# unless the request is for the last n bytes of the file,
|
||||
# grow the start to cache some stuff around the range
|
||||
if get2 < file_sz - 1:
|
||||
h_ofs = get1 - 1024 * 256
|
||||
else:
|
||||
h_ofs = get1 - 1024 * 32
|
||||
|
||||
# likewise grow the end unless start is 0
|
||||
if get1 > 0:
|
||||
h_end = get2 + 1024 * 1024
|
||||
else:
|
||||
h_end = get2 + 1024 * 64
|
||||
else:
|
||||
# big enough, doesn't need pads
|
||||
h_ofs = get1
|
||||
@@ -610,7 +737,7 @@ class CPPF(Operations):
|
||||
|
||||
def _readdir(self, path, fh=None):
|
||||
path = path.strip("/")
|
||||
log("readdir [{}] [{}]".format(path, fh))
|
||||
log("readdir [{}] [{}]".format(hexler(path), fh))
|
||||
|
||||
ret = self.gw.listdir(path)
|
||||
if not self.n_dircache:
|
||||
@@ -621,6 +748,7 @@ class CPPF(Operations):
|
||||
self.dircache.append(cn)
|
||||
self.clean_dircache()
|
||||
|
||||
# import pprint; pprint.pprint(ret)
|
||||
return ret
|
||||
|
||||
def readdir(self, path, fh=None):
|
||||
@@ -637,7 +765,11 @@ class CPPF(Operations):
|
||||
path = path.strip("/")
|
||||
ofs2 = offset + length
|
||||
file_sz = self.getattr(path)["st_size"]
|
||||
log("read {} |{}| {}:{} max {}".format(path, length, offset, ofs2, file_sz))
|
||||
log(
|
||||
"read {} |{}| {}:{} max {}".format(
|
||||
hexler(path), length, offset, ofs2, file_sz
|
||||
)
|
||||
)
|
||||
if ofs2 > file_sz:
|
||||
ofs2 = file_sz
|
||||
log("truncate to |{}| :{}".format(ofs2 - offset, ofs2))
|
||||
@@ -676,7 +808,9 @@ class CPPF(Operations):
|
||||
return ret
|
||||
|
||||
def getattr(self, path, fh=None):
|
||||
log("getattr [{}]".format(path))
|
||||
log("getattr [{}]".format(hexler(path)))
|
||||
if WINDOWS:
|
||||
path = enwin(path) # windows occasionally decodes f0xx to xx
|
||||
|
||||
path = path.strip("/")
|
||||
try:
|
||||
@@ -699,11 +833,24 @@ class CPPF(Operations):
|
||||
dents = self._readdir(dirpath)
|
||||
|
||||
for cache_name, cache_stat, _ in dents:
|
||||
# if "qw" in cache_name and "qw" in fname:
|
||||
# info(
|
||||
# "cmp\n [{}]\n [{}]\n\n{}\n".format(
|
||||
# hexler(cache_name),
|
||||
# hexler(fname),
|
||||
# "\n".join(traceback.format_stack()[:-1]),
|
||||
# )
|
||||
# )
|
||||
|
||||
if cache_name == fname:
|
||||
# dbg("=" + repr(cache_stat))
|
||||
return cache_stat
|
||||
|
||||
info("=ENOENT ({})".format(path))
|
||||
fun = info
|
||||
if MACOS and path.split("/")[-1].startswith("._"):
|
||||
fun = dbg
|
||||
|
||||
fun("=ENOENT ({})".format(hexler(path)))
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
access = None
|
||||
@@ -773,24 +920,24 @@ class CPPF(Operations):
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
def open(self, path, flags):
|
||||
dbg("open [{}] [{}]".format(path, flags))
|
||||
dbg("open [{}] [{}]".format(hexler(path), flags))
|
||||
return self._open(path)
|
||||
|
||||
def opendir(self, path):
|
||||
dbg("opendir [{}]".format(path))
|
||||
dbg("opendir [{}]".format(hexler(path)))
|
||||
return self._open(path)
|
||||
|
||||
def flush(self, path, fh):
|
||||
dbg("flush [{}] [{}]".format(path, fh))
|
||||
dbg("flush [{}] [{}]".format(hexler(path), fh))
|
||||
|
||||
def release(self, ino, fi):
|
||||
dbg("release [{}] [{}]".format(ino, fi))
|
||||
dbg("release [{}] [{}]".format(hexler(ino), fi))
|
||||
|
||||
def releasedir(self, ino, fi):
|
||||
dbg("releasedir [{}] [{}]".format(ino, fi))
|
||||
dbg("releasedir [{}] [{}]".format(hexler(ino), fi))
|
||||
|
||||
def access(self, path, mode):
|
||||
dbg("access [{}] [{}]".format(path, mode))
|
||||
dbg("access [{}] [{}]".format(hexler(path), mode))
|
||||
try:
|
||||
x = self.getattr(path)
|
||||
if x["st_mode"] <= 0:
|
||||
@@ -799,41 +946,90 @@ class CPPF(Operations):
|
||||
raise FuseOSError(errno.ENOENT)
|
||||
|
||||
|
||||
class TheArgparseFormatter(
|
||||
argparse.RawTextHelpFormatter, argparse.ArgumentDefaultsHelpFormatter
|
||||
):
|
||||
pass
|
||||
|
||||
|
||||
def main():
|
||||
global info, log, dbg
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
|
||||
# filecache helps for reads that are ~64k or smaller;
|
||||
# linux generally does 128k so the cache is a slowdown,
|
||||
# windows likes to use 4k and 64k so cache is required,
|
||||
# value is numChunks (1~3M each) to keep in the cache
|
||||
nf = 24 if WINDOWS or MACOS else 0
|
||||
nf = 24
|
||||
|
||||
# dircache is always a boost,
|
||||
# only want to disable it for tests etc,
|
||||
# value is numSec until an entry goes stale
|
||||
nd = 1
|
||||
|
||||
try:
|
||||
local, remote = sys.argv[1:3]
|
||||
filecache = nf if len(sys.argv) <= 3 else int(sys.argv[3])
|
||||
dircache = nd if len(sys.argv) <= 4 else float(sys.argv[4])
|
||||
except:
|
||||
where = "local directory"
|
||||
if WINDOWS:
|
||||
where += " or DRIVE:"
|
||||
where = "local directory"
|
||||
if WINDOWS:
|
||||
where += " or DRIVE:"
|
||||
|
||||
print("need arg 1: " + where)
|
||||
print("need arg 2: root url")
|
||||
print("optional 3: num files in filecache ({})".format(nf))
|
||||
print("optional 4: num seconds / dircache ({})".format(nd))
|
||||
print()
|
||||
print("example:")
|
||||
print(" copyparty-fuse.py ./music http://192.168.1.69:3923/music/")
|
||||
if WINDOWS:
|
||||
print(" copyparty-fuse.py M: http://192.168.1.69:3923/music/")
|
||||
ex_pre = "\n " + os.path.basename(__file__) + " "
|
||||
examples = ["http://192.168.1.69:3923/music/ ./music"]
|
||||
if WINDOWS:
|
||||
examples.append("http://192.168.1.69:3923/music/ M:")
|
||||
|
||||
return
|
||||
ap = argparse.ArgumentParser(
|
||||
formatter_class=TheArgparseFormatter,
|
||||
epilog="example:" + ex_pre + ex_pre.join(examples),
|
||||
)
|
||||
ap.add_argument(
|
||||
"-cd", metavar="NUM_SECONDS", type=float, default=nd, help="directory cache"
|
||||
)
|
||||
ap.add_argument(
|
||||
"-cf", metavar="NUM_BLOCKS", type=int, default=nf, help="file cache"
|
||||
)
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password")
|
||||
ap.add_argument("-d", action="store_true", help="enable debug")
|
||||
ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
|
||||
ap.add_argument("-td", action="store_true", help="disable certificate check")
|
||||
ap.add_argument("base_url", type=str, help="remote copyparty URL to mount")
|
||||
ap.add_argument("local_path", type=str, help=where + " to mount it on")
|
||||
ar = ap.parse_args()
|
||||
|
||||
if ar.d:
|
||||
# windows terminals are slow (cmd.exe, mintty)
|
||||
# otoh fancy_log beats RecentLog on linux
|
||||
logger = RecentLog().put if WINDOWS else fancy_log
|
||||
|
||||
info = logger
|
||||
log = logger
|
||||
dbg = logger
|
||||
else:
|
||||
# debug=off, speed is dontcare
|
||||
info = fancy_log
|
||||
log = null_log
|
||||
dbg = null_log
|
||||
|
||||
if ar.a and ar.a.startswith("$"):
|
||||
fn = ar.a[1:]
|
||||
log("reading password from file [{}]".format(fn))
|
||||
with open(fn, "rb") as f:
|
||||
ar.a = f.read().decode("utf-8").strip()
|
||||
|
||||
if WINDOWS:
|
||||
os.system("")
|
||||
os.system("rem")
|
||||
|
||||
for ch in '<>:"\\|?*':
|
||||
# microsoft maps illegal characters to f0xx
|
||||
# (e000 to f8ff is basic-plane private-use)
|
||||
bad_good[ch] = chr(ord(ch) + 0xF000)
|
||||
|
||||
for n in range(0, 0x100):
|
||||
# map surrogateescape to another private-use area
|
||||
bad_good[chr(n + 0xDC00)] = chr(n + 0xF100)
|
||||
|
||||
for k, v in bad_good.items():
|
||||
good_bad[v] = k
|
||||
|
||||
register_wtf8()
|
||||
|
||||
try:
|
||||
with open("/etc/fuse.conf", "rb") as f:
|
||||
@@ -845,7 +1041,7 @@ def main():
|
||||
if not MACOS:
|
||||
args["nonempty"] = True
|
||||
|
||||
FUSE(CPPF(remote, dircache, filecache), local, **args)
|
||||
FUSE(CPPF(ar), ar.local_path, encoding="wtf-8", **args)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
@@ -567,6 +567,8 @@ class CPPF(Fuse):


def main():
    time.strptime("19970815", "%Y%m%d") # python#7980

    server = CPPF()
    server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
    server.parse(values=server, errex=1)
34  bin/mtag/README.md  Normal file
@@ -0,0 +1,34 @@
standalone programs which take an audio file as argument

some of these rely on libraries which are not MIT-compatible

* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3


# dependencies

run [`install-deps.sh`](install-deps.sh) to build/install most dependencies required by these programs (supports windows/linux/macos)

*alternatively* (or preferably) use packages from your distro instead, then you'll need at least these:

* from distro: `numpy vamp-plugin-sdk beatroot-vamp mixxx-keyfinder ffmpeg`
* from pypi: `keyfinder vamp`


# usage from copyparty

`copyparty -e2dsa -e2ts -mtp key=f,audio-key.py -mtp .bpm=f,audio-bpm.py`

* `f,` makes the detected value replace any existing values
* the `.` in `.bpm` indicates numeric value
* assumes the python files are in the folder you're launching copyparty from, replace the filename with a relative/absolute path if that's not the case
* `mtp` modules will not run if a file has existing tags in the db, so clear out the tags with `-e2tsr` the first time you launch with new `mtp` options


## usage with volume-flags

instead of affecting all volumes, you can set the options for just one volume like so:
```
copyparty -v /mnt/nas/music:/music:r:cmtp=key=f,audio-key.py:cmtp=.bpm=f,audio-bpm.py:ce2dsa:ce2ts
```
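for a sense of scale, an `mtp` plugin does not have to do audio analysis at all; the hypothetical `file-kib.py` below (not part of this changeset) follows the same contract as the plugins in this folder: copyparty hands it the file path as argument 1 and reads the tag value from stdout.

```python
#!/usr/bin/env python

import os
import sys

# hypothetical example plugin; would be registered as e.g.
#   -mtp .kib=f,file-kib.py
# prints the file size in KiB as a numeric tag value
print(f"{os.path.getsize(sys.argv[1]) / 1024:.2f}")
```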
69  bin/mtag/audio-bpm.py  Executable file
@@ -0,0 +1,69 @@
#!/usr/bin/env python

import os
import sys
import vamp
import tempfile
import numpy as np
import subprocess as sp

from copyparty.util import fsenc

"""
dep: vamp
dep: beatroot-vamp
dep: ffmpeg
"""


def det(tf):
    # fmt: off
    sp.check_call([
        "ffmpeg",
        "-nostdin",
        "-hide_banner",
        "-v", "fatal",
        "-ss", "13",
        "-y", "-i", fsenc(sys.argv[1]),
        "-ac", "1",
        "-ar", "22050",
        "-t", "300",
        "-f", "f32le",
        tf
    ])
    # fmt: on

    with open(tf, "rb") as f:
        d = np.fromfile(f, dtype=np.float32)
    try:
        # 98% accuracy on jcore
        c = vamp.collect(d, 22050, "beatroot-vamp:beatroot")
        cl = c["list"]
    except:
        # fallback; 73% accuracy
        plug = "vamp-example-plugins:fixedtempo"
        c = vamp.collect(d, 22050, plug, parameters={"maxdflen": 40})
        print(c["list"][0]["label"].split(" ")[0])
        return

    # throws if detection failed:
    bpm = float(cl[-1]["timestamp"] - cl[1]["timestamp"])
    bpm = round(60 * ((len(cl) - 1) / bpm), 2)
    print(f"{bpm:.2f}")


def main():
    with tempfile.NamedTemporaryFile(suffix=".pcm", delete=False) as f:
        f.write(b"h")
        tf = f.name

    try:
        det(tf)
    except:
        pass
    finally:
        os.unlink(tf)


if __name__ == "__main__":
    main()
18  bin/mtag/audio-key.py  Executable file
@@ -0,0 +1,18 @@
#!/usr/bin/env python

import sys
import keyfinder

"""
dep: github/mixxxdj/libkeyfinder
dep: pypi/keyfinder
dep: ffmpeg

note: cannot fsenc
"""


try:
    print(keyfinder.key(sys.argv[1]).camelot())
except:
    pass
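for context on how plugins like `audio-key.py` get called: per the readmes above, copyparty Popens the program with the media file as argument 1, applies a default 30-second timeout, and takes stdout as the tag value. a standalone sketch of that calling convention (not copyparty's actual implementation):

```python
import subprocess as sp
import sys

def run_mtp_plugin(plugin, media_path, timeout=30):
    # invoke the plugin the way the readme describes: file path as
    # argument 1, tag value read from stdout, 30s default timeout
    out = sp.check_output([sys.executable, plugin, media_path], timeout=timeout)
    return out.decode("utf-8").strip()
```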
265  bin/mtag/install-deps.sh  Executable file
@@ -0,0 +1,265 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
|
||||
# install dependencies for audio-*.py
|
||||
#
|
||||
# linux: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf
|
||||
# win64: requires msys2-mingw64 environment
|
||||
# macos: requires macports
|
||||
#
|
||||
# has the following manual dependencies, especially on mac:
|
||||
# https://www.vamp-plugins.org/pack.html
|
||||
#
|
||||
# installs stuff to the following locations:
|
||||
# ~/pe/
|
||||
# whatever your python uses for --user packages
|
||||
#
|
||||
# does the following terrible things:
|
||||
# modifies the keyfinder python lib to load the .so in ~/pe
|
||||
|
||||
|
||||
linux=1
|
||||
|
||||
win=
|
||||
[ ! -z "$MSYSTEM" ] || [ -e /msys2.exe ] && {
|
||||
[ "$MSYSTEM" = MINGW64 ] || {
|
||||
echo windows detected, msys2-mingw64 required
|
||||
exit 1
|
||||
}
|
||||
pacman -S --needed mingw-w64-x86_64-{ffmpeg,python,python-pip,vamp-plugin-sdk}
|
||||
win=1
|
||||
linux=
|
||||
}
|
||||
|
||||
mac=
|
||||
[ $(uname -s) = Darwin ] && {
|
||||
#pybin="$(printf '%s\n' /opt/local/bin/python* | (sed -E 's/(.*\/[^/0-9]+)([0-9]?[^/]*)$/\2 \1/' || cat) | (sort -nr || cat) | (sed -E 's/([^ ]*) (.*)/\2\1/' || cat) | grep -E '/(python|pypy)[0-9\.-]*$' | head -n 1)"
|
||||
pybin=/opt/local/bin/python3.9
|
||||
[ -e "$pybin" ] || {
|
||||
echo mac detected, python3 from macports required
|
||||
exit 1
|
||||
}
|
||||
pkgs='ffmpeg python39 py39-wheel'
|
||||
ninst=$(port installed | awk '/^ /{print$1}' | sort | uniq | grep -E '^('"$(echo "$pkgs" | tr ' ' '|')"')$' | wc -l)
|
||||
[ $ninst -eq 3 ] || {
|
||||
sudo port install $pkgs
|
||||
}
|
||||
mac=1
|
||||
linux=
|
||||
}
|
||||
|
||||
hash -r
|
||||
|
||||
[ $mac ] || {
|
||||
command -v python3 && pybin=python3 || pybin=python
|
||||
}
|
||||
|
||||
$pybin -m pip install --user numpy
|
||||
|
||||
|
||||
command -v gnutar && tar() { gnutar "$@"; }
|
||||
command -v gtar && tar() { gtar "$@"; }
|
||||
command -v gsed && sed() { gsed "$@"; }
|
||||
|
||||
|
||||
need() {
|
||||
command -v $1 >/dev/null || {
|
||||
echo need $1
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
need cmake
|
||||
need ffmpeg
|
||||
need $pybin
|
||||
#need patchelf
|
||||
|
||||
|
||||
td="$(mktemp -d)"
|
||||
cln() {
|
||||
rm -rf "$td"
|
||||
}
|
||||
trap cln EXIT
|
||||
cd "$td"
|
||||
pwd
|
||||
|
||||
|
||||
dl_text() {
|
||||
command -v curl >/dev/null && exec curl "$@"
|
||||
exec wget -O- "$@"
|
||||
}
|
||||
dl_files() {
|
||||
local yolo= ex=
|
||||
[ $1 = "yolo" ] && yolo=1 && ex=k && shift
|
||||
command -v curl >/dev/null && exec curl -${ex}JOL "$@"
|
||||
|
||||
[ $yolo ] && ex=--no-check-certificate
|
||||
exec wget --trust-server-names $ex "$@"
|
||||
}
|
||||
export -f dl_files
|
||||
|
||||
|
||||
github_tarball() {
|
||||
dl_text "$1" |
|
||||
tee json |
|
||||
(
|
||||
# prefer jq if available
|
||||
jq -r '.tarball_url' ||
|
||||
|
||||
# fallback to awk (sorry)
|
||||
awk -F\" '/"tarball_url": "/ {print$4}'
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
}
|
||||
|
||||
|
||||
gitlab_tarball() {
|
||||
dl_text "$1" |
|
||||
tee json |
|
||||
(
|
||||
# prefer jq if available
|
||||
jq -r '.[0].assets.sources[]|select(.format|test("tar.gz")).url' ||
|
||||
|
||||
# fallback to abomination
|
||||
tr \" '\n' | grep -E '\.tar\.gz$' | head -n 1
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
tee links |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
}
|
||||
|
||||
|
||||
install_keyfinder() {
|
||||
# windows support:
|
||||
# use msys2 in mingw-w64 mode
|
||||
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python}
|
||||
|
||||
github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest
|
||||
|
||||
tar -xf mixxxdj-libkeyfinder-*
|
||||
rm -- *.tar.gz
|
||||
cd mixxxdj-libkeyfinder*
|
||||
|
||||
h="$HOME"
|
||||
so="lib/libkeyfinder.so"
|
||||
memes=()
|
||||
|
||||
[ $win ] &&
|
||||
so="bin/libkeyfinder.dll" &&
|
||||
h="$(printf '%s\n' "$USERPROFILE" | tr '\\' '/')" &&
|
||||
memes+=(-G "MinGW Makefiles" -DBUILD_TESTING=OFF)
|
||||
|
||||
[ $mac ] &&
|
||||
so="lib/libkeyfinder.dylib"
|
||||
|
||||
cmake -DCMAKE_INSTALL_PREFIX="$h/pe/keyfinder" "${memes[@]}" -S . -B build
|
||||
cmake --build build --parallel $(nproc || echo 4)
|
||||
cmake --install build
|
||||
|
||||
libpath="$h/pe/keyfinder/$so"
|
||||
[ $linux ] && [ ! -e "$libpath" ] &&
|
||||
so=lib64/libkeyfinder.so
|
||||
|
||||
libpath="$h/pe/keyfinder/$so"
|
||||
[ -e "$libpath" ] || {
|
||||
echo "so not found at $sop"
|
||||
exit 1
|
||||
}
|
||||
|
||||
# rm -rf /Users/ed/Library/Python/3.9/lib/python/site-packages/*keyfinder*
|
||||
CFLAGS="-I$h/pe/keyfinder/include -I/opt/local/include" \
|
||||
LDFLAGS="-L$h/pe/keyfinder/lib -L$h/pe/keyfinder/lib64 -L/opt/local/lib" \
|
||||
PKG_CONFIG_PATH=/c/msys64/mingw64/lib/pkgconfig \
|
||||
$pybin -m pip install --user keyfinder
|
||||
|
||||
pypath="$($pybin -c 'import keyfinder; print(keyfinder.__file__)')"
|
||||
for pyso in "${pypath%/*}"/*.so; do
|
||||
[ -e "$pyso" ] || break
|
||||
patchelf --set-rpath "${libpath%/*}" "$pyso" ||
|
||||
echo "WARNING: patchelf failed (only fatal on musl-based distros)"
|
||||
done
|
||||
|
||||
mv "$pypath"{,.bak}
|
||||
(
|
||||
printf 'import ctypes\nctypes.cdll.LoadLibrary("%s")\n' "$libpath"
|
||||
cat "$pypath.bak"
|
||||
) >"$pypath"
|
||||
|
||||
echo
|
||||
echo libkeyfinder successfully installed to the following locations:
|
||||
echo " $libpath"
|
||||
echo " $pypath"
|
||||
}
|
||||
|
||||
|
||||
have_beatroot() {
|
||||
$pybin -c 'import vampyhost, sys; plugs = vampyhost.list_plugins(); sys.exit(0 if "beatroot-vamp:beatroot" in plugs else 1)'
|
||||
}
|
||||
|
||||
|
||||
install_vamp() {
|
||||
# windows support:
|
||||
# use msys2 in mingw-w64 mode
|
||||
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python,python-pip,vamp-plugin-sdk}
|
||||
|
||||
$pybin -m pip install --user vamp
|
||||
|
||||
have_beatroot || {
|
||||
printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n'
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/885/beatroot-vamp-v1.0.tar.gz)
|
||||
sha512sum -c <(
|
||||
echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -"
|
||||
) <beatroot-vamp-v1.0.tar.gz
|
||||
tar -xf beatroot-vamp-v1.0.tar.gz
|
||||
cd beatroot-vamp-v1.0
|
||||
make -f Makefile.linux -j4
|
||||
# /home/ed/vamp /home/ed/.vamp /usr/local/lib/vamp
|
||||
mkdir ~/vamp
|
||||
cp -pv beatroot-vamp.* ~/vamp/
|
||||
}
|
||||
|
||||
have_beatroot &&
|
||||
printf '\033[32mfound the vamp beatroot plugin, nice\033[0m\n' ||
|
||||
printf '\033[31mWARNING: could not find the vamp beatroot plugin, please install it for optimal results\033[0m\n'
|
||||
}
|
||||
|
||||
|
||||
# not in use because it kinda segfaults, also no windows support
|
||||
install_soundtouch() {
|
||||
gitlab_tarball https://gitlab.com/api/v4/projects/soundtouch%2Fsoundtouch/releases
|
||||
|
||||
tar -xvf soundtouch-*
|
||||
rm -- *.tar.gz
|
||||
cd soundtouch-*
|
||||
|
||||
# https://github.com/jrising/pysoundtouch
|
||||
./bootstrap
|
||||
./configure --enable-integer-samples CXXFLAGS="-fPIC" --prefix="$HOME/pe/soundtouch"
|
||||
make -j$(nproc || echo 4)
|
||||
make install
|
||||
|
||||
CFLAGS=-I$HOME/pe/soundtouch/include/ \
|
||||
LDFLAGS=-L$HOME/pe/soundtouch/lib \
|
||||
$pybin -m pip install --user git+https://github.com/snowxmas/pysoundtouch.git
|
||||
|
||||
pypath="$($pybin -c 'import importlib; print(importlib.util.find_spec("soundtouch").origin)')"
|
||||
libpath="$(echo "$HOME/pe/soundtouch/lib/")"
|
||||
patchelf --set-rpath "$libpath" "$pypath"
|
||||
|
||||
echo
|
||||
echo soundtouch successfully installed to the following locations:
|
||||
echo " $libpath"
|
||||
echo " $pypath"
|
||||
}
|
||||
|
||||
|
||||
[ "$1" = keyfinder ] && { install_keyfinder; exit $?; }
|
||||
[ "$1" = soundtouch ] && { install_soundtouch; exit $?; }
|
||||
[ "$1" = vamp ] && { install_vamp; exit $?; }
|
||||
|
||||
echo no args provided, installing keyfinder and vamp
|
||||
install_keyfinder
|
||||
install_vamp
|
8  bin/mtag/sleep.py  Normal file
@@ -0,0 +1,8 @@
#!/usr/bin/env python

import time
import random

v = random.random() * 6
time.sleep(v)
print(f"{v:.2f}")
27  contrib/README.md  Normal file
@@ -0,0 +1,27 @@
### [`copyparty.bat`](copyparty.bat)
* launches copyparty with no arguments (anon read+write within same folder)
* intended for windows machines with no python.exe in PATH
* works on windows, linux and macos
* assumes `copyparty-sfx.py` was renamed to `copyparty.py` in the same folder as `copyparty.bat`

### [`index.html`](index.html)
* drop-in redirect from an httpd to copyparty
* assumes the webserver and copyparty is running on the same server/IP
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript

### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
* disables thumbnails and folder-type detection in windows explorer
* makes it way faster (especially for slow/networked locations (such as copyparty-fuse))

### [`cfssl.sh`](cfssl.sh)
* creates CA and server certificates using cfssl
* give a 3rd argument to install it to your copyparty config

# OS integration
init-scripts to start copyparty as a service
* [`systemd/copyparty.service`](systemd/copyparty.service)
* [`openrc/copyparty`](openrc/copyparty)

# Reverse-proxy
copyparty has basic support for running behind another webserver
* [`nginx/copyparty.conf`](nginx/copyparty.conf)
72
contrib/cfssl.sh
Executable file
72
contrib/cfssl.sh
Executable file
@@ -0,0 +1,72 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# ca-name and server-name
|
||||
ca_name="$1"
|
||||
srv_name="$2"
|
||||
|
||||
[ -z "$srv_name" ] && {
|
||||
echo "need arg 1: ca name"
|
||||
echo "need arg 2: server name"
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
gen_ca() {
|
||||
(tee /dev/stderr <<EOF
|
||||
{"CN": "$ca_name ca",
|
||||
"CA": {"expiry":"87600h", "pathlen":0},
|
||||
"key": {"algo":"rsa", "size":4096},
|
||||
"names": [{"O":"$ca_name ca"}]}
|
||||
EOF
|
||||
)|
|
||||
cfssl gencert -initca - |
|
||||
cfssljson -bare ca
|
||||
|
||||
mv ca-key.pem ca.key
|
||||
rm ca.csr
|
||||
}
|
||||
|
||||
|
||||
gen_srv() {
|
||||
(tee /dev/stderr <<EOF
|
||||
{"key": {"algo":"rsa", "size":4096},
|
||||
"names": [{"O":"$ca_name - $srv_name"}]}
|
||||
EOF
|
||||
)|
|
||||
cfssl gencert -ca ca.pem -ca-key ca.key \
|
||||
-profile=www -hostname="$srv_name.$ca_name" - |
|
||||
cfssljson -bare "$srv_name"
|
||||
|
||||
mv "$srv_name-key.pem" "$srv_name.key"
|
||||
rm "$srv_name.csr"
|
||||
}
|
||||
|
||||
|
||||
# create ca if not exist
|
||||
[ -e ca.key ] ||
|
||||
gen_ca
|
||||
|
||||
# always create server cert
|
||||
gen_srv
|
||||
|
||||
|
||||
# dump cert info
|
||||
show() {
|
||||
openssl x509 -text -noout -in $1 |
|
||||
awk '!o; {o=0} /[0-9a-f:]{16}/{o=1}'
|
||||
}
|
||||
show ca.pem
|
||||
show "$srv_name.pem"
|
||||
|
||||
|
||||
# write cert into copyparty config
|
||||
[ -z "$3" ] || {
|
||||
mkdir -p ~/.config/copyparty
|
||||
cat "$srv_name".{key,pem} ca.pem >~/.config/copyparty/cert.pem
|
||||
}
|
||||
|
||||
|
||||
# rm *.key *.pem
|
||||
# cfssl print-defaults config
|
||||
# cfssl print-defaults csr
|
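A usage sketch for cfssl.sh as defined above; the names are placeholders, and any non-empty third argument makes the script append key+cert+ca into ~/.config/copyparty/cert.pem:

    # ca "partyserver", server "fs" -> cert for the hostname fs.partyserver;
    # produces ca.pem/ca.key and fs.pem/fs.key in the current directory
    ./cfssl.sh partyserver fs
    # same, but also install the bundle into the copyparty config dir
    ./cfssl.sh partyserver fs y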
33
contrib/copyparty.bat
Normal file
@@ -0,0 +1,33 @@
|
||||
exec python "$(dirname "$0")"/copyparty.py
|
||||
|
||||
@rem on linux, the above will execute and the script will terminate
|
||||
@rem on windows, the rest of this script will run
|
||||
|
||||
@echo off
|
||||
cls
|
||||
|
||||
set py=
|
||||
for /f %%i in ('where python 2^>nul') do (
|
||||
set "py=%%i"
|
||||
goto c1
|
||||
)
|
||||
:c1
|
||||
|
||||
if [%py%] == [] (
|
||||
for /f %%i in ('where /r "%localappdata%\programs\python" python 2^>nul') do (
|
||||
set "py=%%i"
|
||||
goto c2
|
||||
)
|
||||
)
|
||||
:c2
|
||||
|
||||
if [%py%] == [] set "py=c:\python27\python.exe"
|
||||
|
||||
if not exist "%py%" (
|
||||
echo could not find python
|
||||
echo(
|
||||
pause
|
||||
exit /b
|
||||
)
|
||||
|
||||
start cmd /c %py% "%~dp0\copyparty.py"
|
31
contrib/explorer-nothumbs-nofoldertypes.reg
Normal file
@@ -0,0 +1,31 @@
|
||||
Windows Registry Editor Version 5.00
|
||||
|
||||
; this will do 3 things, all optional:
|
||||
; 1) disable thumbnails
|
||||
; 2) delete all existing folder type settings/detections
|
||||
; 3) disable folder type detection (force default columns)
|
||||
;
|
||||
; this makes the file explorer way faster,
|
||||
; especially on slow/networked locations
|
||||
|
||||
|
||||
; =====================================================================
|
||||
; 1) disable thumbnails
|
||||
|
||||
[HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Explorer\Advanced]
|
||||
"IconsOnly"=dword:00000001
|
||||
|
||||
|
||||
; =====================================================================
|
||||
; 2) delete all existing folder type settings/detections
|
||||
|
||||
[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags]
|
||||
|
||||
[-HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\BagMRU]
|
||||
|
||||
|
||||
; =====================================================================
|
||||
; 3) disable folder type detection
|
||||
|
||||
[HKEY_CURRENT_USER\Software\Classes\Local Settings\Software\Microsoft\Windows\Shell\Bags\AllFolders\Shell]
|
||||
"FolderType"="NotSpecified"
|
43
contrib/index.html
Normal file
@@ -0,0 +1,43 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>⇆🎉 redirect</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<style>
|
||||
|
||||
html, body {
|
||||
font-family: sans-serif;
|
||||
}
|
||||
body {
|
||||
padding: 1em 2em;
|
||||
font-size: 1.5em;
|
||||
}
|
||||
a {
|
||||
font-size: 1.2em;
|
||||
padding: .1em;
|
||||
}
|
||||
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<span id="desc">you probably want</span> <a id="redir" href="//10.13.1.1:3923/">copyparty</a>
|
||||
<script>
|
||||
|
||||
var a = document.getElementById('redir'),
|
||||
proto = window.location.protocol.indexOf('https') === 0 ? 'https' : 'http',
|
||||
loc = window.location.hostname || '127.0.0.1',
|
||||
port = a.getAttribute('href').split(':').pop().split('/')[0],
|
||||
url = proto + '://' + loc + ':' + port + '/';
|
||||
|
||||
a.setAttribute('href', url);
|
||||
document.getElementById('desc').innerHTML = 'redirecting to';
|
||||
|
||||
setTimeout(function() {
|
||||
window.location.href = url;
|
||||
}, 500);
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
26
contrib/nginx/copyparty.conf
Normal file
@@ -0,0 +1,26 @@
|
||||
upstream cpp {
|
||||
server 127.0.0.1:3923;
|
||||
keepalive 120;
|
||||
}
|
||||
server {
|
||||
listen 443 ssl;
|
||||
listen [::]:443 ssl;
|
||||
|
||||
server_name fs.example.com;
|
||||
|
||||
location / {
|
||||
proxy_pass http://cpp;
|
||||
proxy_redirect off;
|
||||
# disable buffering (next 4 lines)
|
||||
proxy_http_version 1.1;
|
||||
client_max_body_size 0;
|
||||
proxy_buffering off;
|
||||
proxy_request_buffering off;
|
||||
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Connection "Keep-Alive";
|
||||
}
|
||||
}
|
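Assuming the config above is active and copyparty is listening on 127.0.0.1:3923, a quick smoke-test of the proxy could look like this (fs.example.com is the placeholder from the config); note that copyparty only trusts X-Forwarded-For when the connection arrives from 127.0.0.1 or ::1, which is exactly the situation here:

    # should return the copyparty landing page via nginx
    curl -sk https://fs.example.com/ | head -n 5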
18
contrib/openrc/copyparty
Normal file
@@ -0,0 +1,18 @@
|
||||
#!/sbin/openrc-run
|
||||
|
||||
# this will start `/usr/local/bin/copyparty-sfx.py`
|
||||
# and share '/mnt' with anonymous read+write
|
||||
#
|
||||
# installation:
|
||||
# cp -pv copyparty /etc/init.d && rc-update add copyparty
|
||||
#
|
||||
# you may want to:
|
||||
# change '/usr/bin/python' to another interpreter
|
||||
# change '/mnt::a' to another location or permission-set
|
||||
|
||||
name="$SVCNAME"
|
||||
command_background=true
|
||||
pidfile="/var/run/$SVCNAME.pid"
|
||||
|
||||
command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
|
||||
command_args="-q -v /mnt::a"
|
19
contrib/systemd/copyparty.service
Normal file
@@ -0,0 +1,19 @@
|
||||
# this will start `/usr/local/bin/copyparty-sfx.py`
|
||||
# and share '/mnt' with anonymous read+write
|
||||
#
|
||||
# installation:
|
||||
# cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
|
||||
#
|
||||
# you may want to:
|
||||
# change '/usr/bin/python' to another interpreter
|
||||
# change '/mnt::a' to another location or permission-set
|
||||
|
||||
[Unit]
|
||||
Description=copyparty file server
|
||||
|
||||
[Service]
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
|
||||
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
@@ -8,17 +8,29 @@ __copyright__ = 2019
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import signal
|
||||
import shutil
|
||||
import filecmp
|
||||
import locale
|
||||
import argparse
|
||||
import threading
|
||||
import traceback
|
||||
from textwrap import dedent
|
||||
|
||||
from .__init__ import E, WINDOWS, VT100
|
||||
from .__init__ import E, WINDOWS, VT100, PY2
|
||||
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
|
||||
from .svchub import SvcHub
|
||||
from .util import py_desc
|
||||
from .util import py_desc, align_tab, IMPLICATIONS
|
||||
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
import ssl
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
|
||||
class RiceFormatter(argparse.HelpFormatter):
|
||||
@@ -44,6 +56,10 @@ class RiceFormatter(argparse.HelpFormatter):
|
||||
return "".join(indent + line + "\n" for line in text.splitlines())
|
||||
|
||||
|
||||
def warn(msg):
|
||||
print("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
|
||||
|
||||
|
||||
def ensure_locale():
|
||||
for x in [
|
||||
"en_US.UTF-8",
|
||||
@@ -84,9 +100,87 @@ def ensure_cert():
|
||||
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
|
||||
|
||||
|
||||
def configure_ssl_ver(al):
|
||||
def terse_sslver(txt):
|
||||
txt = txt.lower()
|
||||
for c in ["_", "v", "."]:
|
||||
txt = txt.replace(c, "")
|
||||
|
||||
return txt.replace("tls10", "tls1")
|
||||
|
||||
# oh man i love openssl
|
||||
# check this out
|
||||
# hold my beer
|
||||
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
|
||||
sslver = terse_sslver(al.ssl_ver).split(",")
|
||||
flags = [k for k in ssl.__dict__ if ptn.match(k)]
|
||||
# SSLv2 SSLv3 TLSv1 TLSv1_1 TLSv1_2 TLSv1_3
|
||||
if "help" in sslver:
|
||||
avail = [terse_sslver(x[6:]) for x in flags]
|
||||
avail = " ".join(sorted(avail) + ["all"])
|
||||
print("\navailable ssl/tls versions:\n " + avail)
|
||||
sys.exit(0)
|
||||
|
||||
al.ssl_flags_en = 0
|
||||
al.ssl_flags_de = 0
|
||||
for flag in sorted(flags):
|
||||
ver = terse_sslver(flag[6:])
|
||||
num = getattr(ssl, flag)
|
||||
if ver in sslver:
|
||||
al.ssl_flags_en |= num
|
||||
else:
|
||||
al.ssl_flags_de |= num
|
||||
|
||||
if sslver == ["all"]:
|
||||
x = al.ssl_flags_en
|
||||
al.ssl_flags_en = al.ssl_flags_de
|
||||
al.ssl_flags_de = x
|
||||
|
||||
for k in ["ssl_flags_en", "ssl_flags_de"]:
|
||||
num = getattr(al, k)
|
||||
print("{}: {:8x} ({})".format(k, num, num))
|
||||
|
||||
# think i need that beer now
|
||||
|
||||
|
||||
def configure_ssl_ciphers(al):
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
if al.ssl_ver:
|
||||
ctx.options &= ~al.ssl_flags_en
|
||||
ctx.options |= al.ssl_flags_de
|
||||
|
||||
is_help = al.ciphers == "help"
|
||||
|
||||
if al.ciphers and not is_help:
|
||||
try:
|
||||
ctx.set_ciphers(al.ciphers)
|
||||
except:
|
||||
print("\n\033[1;31mfailed to set ciphers\033[0m\n")
|
||||
|
||||
if not hasattr(ctx, "get_ciphers"):
|
||||
print("cannot read cipher list: openssl or python too old")
|
||||
else:
|
||||
ciphers = [x["description"] for x in ctx.get_ciphers()]
|
||||
print("\n ".join(["\nenabled ciphers:"] + align_tab(ciphers) + [""]))
|
||||
|
||||
if is_help:
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def sighandler(signal=None, frame=None):
|
||||
msg = [""] * 5
|
||||
for th in threading.enumerate():
|
||||
msg.append(str(th))
|
||||
msg.extend(traceback.format_stack(sys._current_frames()[th.ident]))
|
||||
|
||||
msg.append("\n")
|
||||
print("\n".join(msg))
|
||||
|
||||
|
||||
def main():
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
if WINDOWS:
|
||||
os.system("") # enables colors
|
||||
os.system("rem") # enables colors
|
||||
|
||||
desc = py_desc().replace("[", "\033[1;30m[")
|
||||
|
||||
@@ -94,7 +188,20 @@ def main():
|
||||
print(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc))
|
||||
|
||||
ensure_locale()
|
||||
ensure_cert()
|
||||
if HAVE_SSL:
|
||||
ensure_cert()
|
||||
|
||||
deprecated = [["-e2s", "-e2ds"]]
|
||||
for dk, nk in deprecated:
|
||||
try:
|
||||
idx = sys.argv.index(dk)
|
||||
except:
|
||||
continue
|
||||
|
||||
msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
|
||||
print(msg.format(dk, nk))
|
||||
sys.argv[idx] = nk
|
||||
time.sleep(2)
|
||||
|
||||
ap = argparse.ArgumentParser(
|
||||
formatter_class=RiceFormatter,
|
||||
@@ -103,43 +210,119 @@ def main():
|
||||
epilog=dedent(
|
||||
"""
|
||||
-a takes username:password,
|
||||
-v takes src:dst:permset:permset:... where "permset" is
|
||||
accesslevel followed by username (no separator)
|
||||
-v takes src:dst:permset:permset:cflag:cflag:...
|
||||
where "permset" is accesslevel followed by username (no separator)
|
||||
and "cflag" is config flags to set on this volume
|
||||
|
||||
list of cflags:
|
||||
"cnodupe" rejects existing files (instead of symlinking them)
|
||||
|
||||
example:\033[35m
|
||||
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed \033[36m
|
||||
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
|
||||
mount current directory at "/" with
|
||||
* r (read-only) for everyone
|
||||
* a (read+write) for ed
|
||||
mount ../inc at "/dump" with
|
||||
* w (write-only) for everyone
|
||||
* a (read+write) for ed \033[0m
|
||||
* a (read+write) for ed
|
||||
* reject duplicate files \033[0m
|
||||
|
||||
if no accounts or volumes are configured,
|
||||
current folder will be read/write for everyone
|
||||
|
||||
consider the config file for more flexible account/volume management,
|
||||
including dynamic reload at runtime (and being more readable w)
|
||||
|
||||
values for --urlform:
|
||||
"stash" dumps the data to file and returns length + checksum
|
||||
"save,get" dumps to file and returns the page like a GET
|
||||
"print,get" prints the data in the log and returns GET
|
||||
(leave out the ",get" to return an error instead)
|
||||
|
||||
--ciphers help = available ssl/tls ciphers,
|
||||
--ssl-ver help = available ssl/tls versions,
|
||||
default is what python considers safe, usually >= TLS1
|
||||
"""
|
||||
),
|
||||
)
|
||||
ap.add_argument(
|
||||
"-c", metavar="PATH", type=str, action="append", help="add config file"
|
||||
)
|
||||
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind")
|
||||
ap.add_argument("-p", metavar="PORT", type=int, default=3923, help="port to bind")
|
||||
ap.add_argument("-nc", metavar="NUM", type=int, default=16, help="max num clients")
|
||||
ap.add_argument(
|
||||
"-j", metavar="CORES", type=int, default=1, help="max num cpu cores"
|
||||
)
|
||||
# fmt: off
|
||||
ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
|
||||
ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
|
||||
ap.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
|
||||
ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
|
||||
ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
|
||||
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
|
||||
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
|
||||
ap.add_argument("-q", action="store_true", help="quiet")
|
||||
ap.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
|
||||
ap.add_argument("-ed", action="store_true", help="enable ?dots")
|
||||
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
|
||||
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
|
||||
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
|
||||
ap.add_argument("-nih", action="store_true", help="no info hostname")
|
||||
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
|
||||
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile (for debugging)")
|
||||
ap.add_argument("--no-scandir", action="store_true", help="disable scandir (for debugging)")
|
||||
ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms")
|
||||
ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
|
||||
|
||||
ap2 = ap.add_argument_group('database options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
|
||||
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
|
||||
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
|
||||
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
|
||||
ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead")
|
||||
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
|
||||
ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q")
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]bin", action="append", type=str, help="read tag M using bin")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
|
||||
|
||||
ap2 = ap.add_argument_group('SSL/TLS options')
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
|
||||
ap2.add_argument("--ssl-ver", metavar="LIST", type=str, help="ssl/tls versions to allow")
|
||||
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
|
||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
|
||||
|
||||
al = ap.parse_args()
|
||||
# fmt: on
|
||||
|
||||
# propagate implications
|
||||
for k1, k2 in IMPLICATIONS:
|
||||
if getattr(al, k1):
|
||||
setattr(al, k2, True)
|
||||
|
||||
al.i = al.i.split(",")
|
||||
try:
|
||||
if "-" in al.p:
|
||||
lo, hi = [int(x) for x in al.p.split("-")]
|
||||
al.p = list(range(lo, hi + 1))
|
||||
else:
|
||||
al.p = [int(x) for x in al.p.split(",")]
|
||||
except:
|
||||
raise Exception("invalid value for -p")
|
||||
|
||||
if HAVE_SSL:
|
||||
if al.ssl_ver:
|
||||
configure_ssl_ver(al)
|
||||
|
||||
if al.ciphers:
|
||||
configure_ssl_ciphers(al)
|
||||
else:
|
||||
warn("ssl module does not exist; cannot enable https")
|
||||
|
||||
if PY2 and WINDOWS and al.e2d:
|
||||
warn(
|
||||
"windows py2 cannot do unicode filenames with -e2d\n"
|
||||
+ " (if you crash with codec errors then that is why)"
|
||||
)
|
||||
|
||||
# signal.signal(signal.SIGINT, sighandler)
|
||||
|
||||
SvcHub(al).run()
|
||||
|
||||
|
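A hedged example pulling together the options introduced in this hunk (comma/range ports, the up2k/metadata switches, volume cflags and the tls selectors); the account, paths and tag names are purely illustrative:

    # listen on two ports, scan everything into the up2k db, index tags,
    # and only allow tls 1.2/1.3 ("--ssl-ver help" lists the valid names)
    python copyparty-sfx.py -p 3923,3924 -e2dsa -e2ts \
        -a ed:hunter2 -v ./music:music:r:aed:cnodupe \
        --ssl-ver tls12,tls13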
@@ -1,8 +1,8 @@
|
||||
# coding: utf-8
|
||||
|
||||
VERSION = (0, 5, 1)
|
||||
CODENAME = "fuse jelly"
|
||||
BUILD_DT = (2020, 8, 17)
|
||||
VERSION = (0, 9, 13)
|
||||
CODENAME = "the strongest music server"
|
||||
BUILD_DT = (2021, 3, 23)
|
||||
|
||||
S_VERSION = ".".join(map(str, VERSION))
|
||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
|
@@ -1,23 +1,38 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import threading
|
||||
|
||||
from .__init__ import PY2, WINDOWS
|
||||
from .util import undot, Pebkac, fsdec, fsenc
|
||||
from .util import IMPLICATIONS, undot, Pebkac, fsdec, fsenc, statdir, nuprint
|
||||
|
||||
|
||||
class VFS(object):
|
||||
"""single level in the virtual fs"""
|
||||
|
||||
def __init__(self, realpath, vpath, uread=[], uwrite=[]):
|
||||
def __init__(self, realpath, vpath, uread=[], uwrite=[], flags={}):
|
||||
self.realpath = realpath # absolute path on host filesystem
|
||||
self.vpath = vpath # absolute path in the virtual filesystem
|
||||
self.uread = uread # users who can read this
|
||||
self.uwrite = uwrite # users who can write this
|
||||
self.flags = flags # config switches
|
||||
self.nodes = {} # child nodes
|
||||
self.all_vols = {vpath: self} # flattened recursive
|
||||
|
||||
def __repr__(self):
|
||||
return "VFS({})".format(
|
||||
", ".join(
|
||||
"{}={!r}".format(k, self.__dict__[k])
|
||||
for k in "realpath vpath uread uwrite flags".split()
|
||||
)
|
||||
)
|
||||
|
||||
def _trk(self, vol):
|
||||
self.all_vols[vol.vpath] = vol
|
||||
return vol
|
||||
|
||||
def add(self, src, dst):
|
||||
"""get existing, or add new path to the vfs"""
|
||||
@@ -29,16 +44,17 @@ class VFS(object):
|
||||
name, dst = dst.split("/", 1)
|
||||
if name in self.nodes:
|
||||
# exists; do not manipulate permissions
|
||||
return self.nodes[name].add(src, dst)
|
||||
return self._trk(self.nodes[name].add(src, dst))
|
||||
|
||||
vn = VFS(
|
||||
"{}/{}".format(self.realpath, name),
|
||||
"{}/{}".format(self.vpath, name).lstrip("/"),
|
||||
self.uread,
|
||||
self.uwrite,
|
||||
self.flags,
|
||||
)
|
||||
self.nodes[name] = vn
|
||||
return vn.add(src, dst)
|
||||
return self._trk(vn.add(src, dst))
|
||||
|
||||
if dst in self.nodes:
|
||||
# leaf exists; return as-is
|
||||
@@ -48,7 +64,7 @@ class VFS(object):
|
||||
vp = "{}/{}".format(self.vpath, dst).lstrip("/")
|
||||
vn = VFS(src, vp)
|
||||
self.nodes[dst] = vn
|
||||
return vn
|
||||
return self._trk(vn)
|
||||
|
||||
def _find(self, vpath):
|
||||
"""return [vfs,remainder]"""
|
||||
@@ -95,20 +111,19 @@ class VFS(object):
|
||||
|
||||
return fsdec(os.path.realpath(fsenc(rp)))
|
||||
|
||||
def ls(self, rem, uname):
|
||||
def ls(self, rem, uname, scandir, lstat=False):
|
||||
"""return user-readable [fsdir,real,virt] items at vpath"""
|
||||
virt_vis = {} # nodes readable by user
|
||||
abspath = self.canonical(rem)
|
||||
items = os.listdir(fsenc(abspath))
|
||||
real = [fsdec(x) for x in items]
|
||||
real = list(statdir(nuprint, scandir, lstat, abspath))
|
||||
real.sort()
|
||||
if not rem:
|
||||
for name, vn2 in sorted(self.nodes.items()):
|
||||
if uname in vn2.uread:
|
||||
if uname in vn2.uread or "*" in vn2.uread:
|
||||
virt_vis[name] = vn2
|
||||
|
||||
# no vfs nodes in the list of real inodes
|
||||
real = [x for x in real if x not in self.nodes]
|
||||
real = [x for x in real if x[0] not in self.nodes]
|
||||
|
||||
return [abspath, real, virt_vis]
|
||||
|
||||
@@ -128,11 +143,10 @@ class VFS(object):
|
||||
class AuthSrv(object):
|
||||
"""verifies users against given paths"""
|
||||
|
||||
def __init__(self, args, log_func):
|
||||
self.log_func = log_func
|
||||
def __init__(self, args, log_func, warn_anonwrite=True):
|
||||
self.args = args
|
||||
|
||||
self.warn_anonwrite = True
|
||||
self.log_func = log_func
|
||||
self.warn_anonwrite = warn_anonwrite
|
||||
|
||||
if WINDOWS:
|
||||
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
|
||||
@@ -142,8 +156,8 @@ class AuthSrv(object):
|
||||
self.mutex = threading.Lock()
|
||||
self.reload()
|
||||
|
||||
def log(self, msg):
|
||||
self.log_func("auth", msg)
|
||||
def log(self, msg, c=0):
|
||||
self.log_func("auth", msg, c)
|
||||
|
||||
def invert(self, orig):
|
||||
if PY2:
|
||||
@@ -161,7 +175,7 @@ class AuthSrv(object):
|
||||
|
||||
yield prev, True
|
||||
|
||||
def _parse_config_file(self, fd, user, mread, mwrite, mount):
|
||||
def _parse_config_file(self, fd, user, mread, mwrite, mflags, mount):
|
||||
vol_src = None
|
||||
vol_dst = None
|
||||
for ln in [x.decode("utf-8").strip() for x in fd]:
|
||||
@@ -191,13 +205,43 @@ class AuthSrv(object):
|
||||
mount[vol_dst] = vol_src
|
||||
mread[vol_dst] = []
|
||||
mwrite[vol_dst] = []
|
||||
mflags[vol_dst] = {}
|
||||
continue
|
||||
|
||||
lvl, uname = ln.split(" ")
|
||||
if lvl in "ra":
|
||||
mread[vol_dst].append(uname)
|
||||
if lvl in "wa":
|
||||
mwrite[vol_dst].append(uname)
|
||||
self._read_vol_str(
|
||||
lvl, uname, mread[vol_dst], mwrite[vol_dst], mflags[vol_dst]
|
||||
)
|
||||
|
||||
def _read_vol_str(self, lvl, uname, mr, mw, mf):
|
||||
if lvl == "c":
|
||||
cval = True
|
||||
if "=" in uname:
|
||||
uname, cval = uname.split("=", 1)
|
||||
|
||||
self._read_volflag(mf, uname, cval, False)
|
||||
return
|
||||
|
||||
if uname == "":
|
||||
uname = "*"
|
||||
|
||||
if lvl in "ra":
|
||||
mr.append(uname)
|
||||
|
||||
if lvl in "wa":
|
||||
mw.append(uname)
|
||||
|
||||
def _read_volflag(self, flags, name, value, is_list):
|
||||
if name not in ["mtp"]:
|
||||
flags[name] = value
|
||||
return
|
||||
|
||||
if not is_list:
|
||||
value = [value]
|
||||
elif not value:
|
||||
return
|
||||
|
||||
flags[name] = flags.get(name, []) + value
|
||||
|
||||
def reload(self):
|
||||
"""
|
||||
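As _read_vol_str above shows, a `c` permset may carry a value after `=`, so volume flags can be strings as well as booleans; for example (values invented):

    # boolean flag: reject duplicate uploads into this volume
    -v ./inc:dump:w:cnodupe
    # valued flag: per-volume tag list, overriding the global -mte
    -v ./music:music:r:cmte=artist,title,.bpm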
@@ -210,6 +254,7 @@ class AuthSrv(object):
|
||||
user = {} # username:password
|
||||
mread = {} # mountpoint:[username]
|
||||
mwrite = {} # mountpoint:[username]
|
||||
mflags = {} # mountpoint:[flag]
|
||||
mount = {} # dst:src (mountpoint:realpath)
|
||||
|
||||
if self.args.a:
|
||||
@@ -219,7 +264,7 @@ class AuthSrv(object):
|
||||
|
||||
if self.args.v:
|
||||
# list of src:dst:permset:permset:...
|
||||
# permset is [rwa]username
|
||||
# permset is [rwa]username or [c]flag
|
||||
for v_str in self.args.v:
|
||||
m = self.re_vol.match(v_str)
|
||||
if not m:
|
||||
@@ -232,27 +277,24 @@ class AuthSrv(object):
|
||||
mount[dst] = src
|
||||
mread[dst] = []
|
||||
mwrite[dst] = []
|
||||
mflags[dst] = {}
|
||||
|
||||
perms = perms.split(":")
|
||||
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
|
||||
if uname == "":
|
||||
uname = "*"
|
||||
if lvl in "ra":
|
||||
mread[dst].append(uname)
|
||||
if lvl in "wa":
|
||||
mwrite[dst].append(uname)
|
||||
self._read_vol_str(lvl, uname, mread[dst], mwrite[dst], mflags[dst])
|
||||
|
||||
if self.args.c:
|
||||
for cfg_fn in self.args.c:
|
||||
with open(cfg_fn, "rb") as f:
|
||||
self._parse_config_file(f, user, mread, mwrite, mount)
|
||||
self._parse_config_file(f, user, mread, mwrite, mflags, mount)
|
||||
|
||||
if not mount:
|
||||
# -h says our defaults are CWD at root and read/write for everyone
|
||||
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
|
||||
elif "" not in mount:
|
||||
# there's volumes but no root; make root inaccessible
|
||||
vfs = VFS(os.path.abspath("."), "", [], [])
|
||||
vfs = VFS(os.path.abspath("."), "")
|
||||
vfs.flags["d2d"] = True
|
||||
|
||||
maxdepth = 0
|
||||
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
|
||||
@@ -262,12 +304,13 @@ class AuthSrv(object):
|
||||
|
||||
if dst == "":
|
||||
# rootfs was mapped; fully replaces the default CWD vfs
|
||||
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst])
|
||||
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst], mflags[dst])
|
||||
continue
|
||||
|
||||
v = vfs.add(mount[dst], dst)
|
||||
v.uread = mread[dst]
|
||||
v.uwrite = mwrite[dst]
|
||||
v.flags = mflags[dst]
|
||||
|
||||
missing_users = {}
|
||||
for d in [mread, mwrite]:
|
||||
@@ -278,21 +321,100 @@ class AuthSrv(object):
|
||||
|
||||
if missing_users:
|
||||
self.log(
|
||||
"\033[31myou must -a the following users: "
|
||||
+ ", ".join(k for k in sorted(missing_users))
|
||||
+ "\033[0m"
|
||||
"you must -a the following users: "
|
||||
+ ", ".join(k for k in sorted(missing_users)),
|
||||
c=1,
|
||||
)
|
||||
raise Exception("invalid config")
|
||||
|
||||
all_mte = {}
|
||||
errors = False
|
||||
for vol in vfs.all_vols.values():
|
||||
if (self.args.e2ds and vol.uwrite) or self.args.e2dsa:
|
||||
vol.flags["e2ds"] = True
|
||||
|
||||
if self.args.e2d or "e2ds" in vol.flags:
|
||||
vol.flags["e2d"] = True
|
||||
|
||||
for k in ["e2t", "e2ts", "e2tsr"]:
|
||||
if getattr(self.args, k):
|
||||
vol.flags[k] = True
|
||||
|
||||
for k1, k2 in IMPLICATIONS:
|
||||
if k1 in vol.flags:
|
||||
vol.flags[k2] = True
|
||||
|
||||
# default tag-list if unset
|
||||
if "mte" not in vol.flags:
|
||||
vol.flags["mte"] = self.args.mte
|
||||
|
||||
# append parsers from argv to volume-flags
|
||||
self._read_volflag(vol.flags, "mtp", self.args.mtp, True)
|
||||
|
||||
# d2d drops all database features for a volume
|
||||
for grp, rm in [["d2d", "e2d"], ["d2t", "e2t"]]:
|
||||
if not vol.flags.get(grp, False):
|
||||
continue
|
||||
|
||||
vol.flags["d2t"] = True
|
||||
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
|
||||
|
||||
# mt* needs e2t so drop those too
|
||||
for grp, rm in [["e2t", "mt"]]:
|
||||
if vol.flags.get(grp, False):
|
||||
continue
|
||||
|
||||
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
|
||||
|
||||
# verify tags mentioned by -mt[mp] are used by -mte
|
||||
local_mtp = {}
|
||||
local_only_mtp = {}
|
||||
for a in vol.flags.get("mtp", []) + vol.flags.get("mtm", []):
|
||||
a = a.split("=")[0]
|
||||
local_mtp[a] = True
|
||||
local = True
|
||||
for b in self.args.mtp or []:
|
||||
b = b.split("=")[0]
|
||||
if a == b:
|
||||
local = False
|
||||
|
||||
if local:
|
||||
local_only_mtp[a] = True
|
||||
|
||||
local_mte = {}
|
||||
for a in vol.flags.get("mte", "").split(","):
|
||||
local = True
|
||||
all_mte[a] = True
|
||||
local_mte[a] = True
|
||||
for b in self.args.mte.split(","):
|
||||
if not a or not b:
|
||||
continue
|
||||
|
||||
if a == b:
|
||||
local = False
|
||||
|
||||
for mtp in local_only_mtp.keys():
|
||||
if mtp not in local_mte:
|
||||
m = 'volume "/{}" defines metadata tag "{}", but doesnt use it in "-mte" (or with "cmte" in its volume-flags)'
|
||||
self.log(m.format(vol.vpath, mtp), 1)
|
||||
errors = True
|
||||
|
||||
for mtp in self.args.mtp or []:
|
||||
mtp = mtp.split("=")[0]
|
||||
if mtp not in all_mte:
|
||||
m = 'metadata tag "{}" is defined by "-mtm" or "-mtp", but is not used by "-mte" (or by any "cmte" volume-flag)'
|
||||
self.log(m.format(mtp), 1)
|
||||
errors = True
|
||||
|
||||
if errors:
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
v, _ = vfs.get("/", "*", False, True)
|
||||
if self.warn_anonwrite and os.getcwd() == v.realpath:
|
||||
self.warn_anonwrite = False
|
||||
self.log(
|
||||
"\033[31manyone can read/write the current directory: {}\033[0m".format(
|
||||
v.realpath
|
||||
)
|
||||
)
|
||||
msg = "anyone can read/write the current directory: {}"
|
||||
self.log(msg.format(v.realpath), c=1)
|
||||
except Pebkac:
|
||||
self.warn_anonwrite = True
|
||||
|
||||
|
@@ -49,11 +49,11 @@ class MpWorker(object):
|
||||
# print('k')
|
||||
pass
|
||||
|
||||
def log(self, src, msg):
|
||||
self.q_yield.put([0, "log", [src, msg]])
|
||||
def log(self, src, msg, c=0):
|
||||
self.q_yield.put([0, "log", [src, msg, c]])
|
||||
|
||||
def logw(self, msg):
|
||||
self.log("mp{}".format(self.n), msg)
|
||||
def logw(self, msg, c=0):
|
||||
self.log("mp{}".format(self.n), msg, c)
|
||||
|
||||
def httpdrop(self, addr):
|
||||
self.q_yield.put([0, "httpdrop", [addr]])
|
||||
@@ -73,7 +73,9 @@ class MpWorker(object):
|
||||
if PY2:
|
||||
sck = pickle.loads(sck) # nosec
|
||||
|
||||
self.log("%s %s" % addr, "-" * 4 + "C-qpop")
|
||||
if self.args.log_conn:
|
||||
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
|
||||
|
||||
self.httpsrv.accept(sck, addr)
|
||||
|
||||
with self.mutex:
|
||||
|
@@ -28,7 +28,9 @@ class BrokerThr(object):
|
||||
def put(self, want_retval, dest, *args):
|
||||
if dest == "httpconn":
|
||||
sck, addr = args
|
||||
self.log("%s %s" % addr, "-" * 4 + "C-qpop")
|
||||
if self.args.log_conn:
|
||||
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
|
||||
|
||||
self.httpsrv.accept(sck, addr)
|
||||
|
||||
else:
|
||||
|
@@ -5,6 +5,7 @@ import os
|
||||
import stat
|
||||
import gzip
|
||||
import time
|
||||
import copy
|
||||
import json
|
||||
import socket
|
||||
import ctypes
|
||||
@@ -16,9 +17,6 @@ from .util import * # noqa # pylint: disable=unused-wildcard-import
|
||||
|
||||
if not PY2:
|
||||
unicode = str
|
||||
from html import escape as html_escape
|
||||
else:
|
||||
from cgi import escape as html_escape # pylint: disable=no-name-in-module
|
||||
|
||||
|
||||
class HttpCli(object):
|
||||
@@ -27,24 +25,27 @@ class HttpCli(object):
|
||||
"""
|
||||
|
||||
def __init__(self, conn):
|
||||
self.t0 = time.time()
|
||||
self.conn = conn
|
||||
self.s = conn.s
|
||||
self.sr = conn.sr
|
||||
self.ip = conn.addr[0]
|
||||
self.addr = conn.addr
|
||||
self.args = conn.args
|
||||
self.auth = conn.auth
|
||||
self.log_func = conn.log_func
|
||||
self.log_src = conn.log_src
|
||||
self.tls = hasattr(self.s, "cipher")
|
||||
|
||||
self.bufsz = 1024 * 32
|
||||
self.absolute_urls = False
|
||||
self.out_headers = {"Access-Control-Allow-Origin": "*"}
|
||||
|
||||
def log(self, msg):
|
||||
self.log_func(self.log_src, msg)
|
||||
def log(self, msg, c=0):
|
||||
self.log_func(self.log_src, msg, c)
|
||||
|
||||
def _check_nonfatal(self, ex):
|
||||
return ex.code in [404]
|
||||
return ex.code < 400 or ex.code in [404, 429]
|
||||
|
||||
def _assert_safe_rem(self, rem):
|
||||
# sanity check to prevent any disasters
|
||||
@@ -62,7 +63,7 @@ class HttpCli(object):
|
||||
|
||||
if not headerlines[0]:
|
||||
# seen after login with IE6.0.2900.5512.xpsp.080413-2111 (xp-sp3)
|
||||
self.log("\033[1;31mBUG: trailing newline from previous request\033[0m")
|
||||
self.log("BUG: trailing newline from previous request", c="1;31")
|
||||
headerlines.pop(0)
|
||||
|
||||
try:
|
||||
@@ -73,9 +74,11 @@ class HttpCli(object):
|
||||
except Pebkac as ex:
|
||||
# self.log("pebkac at httpcli.run #1: " + repr(ex))
|
||||
self.keepalive = self._check_nonfatal(ex)
|
||||
self.loud_reply(str(ex), status=ex.code)
|
||||
self.loud_reply(unicode(ex), status=ex.code)
|
||||
return self.keepalive
|
||||
|
||||
# time.sleep(0.4)
|
||||
|
||||
# normalize incoming headers to lowercase;
|
||||
# outgoing headers however are Correct-Case
|
||||
for header_line in headerlines[1:]:
|
||||
@@ -85,11 +88,16 @@ class HttpCli(object):
|
||||
v = self.headers.get("connection", "").lower()
|
||||
self.keepalive = not v.startswith("close")
|
||||
|
||||
v = self.headers.get("x-forwarded-for", None)
|
||||
if v is not None and self.conn.addr[0] in ["127.0.0.1", "::1"]:
|
||||
self.ip = v.split(",")[0]
|
||||
self.log_src = self.conn.set_rproxy(self.ip)
|
||||
|
||||
self.uname = "*"
|
||||
if "cookie" in self.headers:
|
||||
cookies = self.headers["cookie"].split(";")
|
||||
for k, v in [x.split("=", 1) for x in cookies]:
|
||||
if k != "cppwd":
|
||||
if k.strip() != "cppwd":
|
||||
continue
|
||||
|
||||
v = unescape_cookie(v)
|
||||
@@ -120,11 +128,16 @@ class HttpCli(object):
|
||||
k, v = k.split("=", 1)
|
||||
uparam[k.lower()] = v.strip()
|
||||
else:
|
||||
uparam[k.lower()] = True
|
||||
uparam[k.lower()] = False
|
||||
|
||||
self.uparam = uparam
|
||||
self.vpath = unquotep(vpath)
|
||||
|
||||
ua = self.headers.get("user-agent", "")
|
||||
if ua.startswith("rclone/"):
|
||||
uparam["raw"] = False
|
||||
uparam["dots"] = False
|
||||
|
||||
try:
|
||||
if self.mode in ["GET", "HEAD"]:
|
||||
return self.handle_get() and self.keepalive
|
||||
@@ -141,7 +154,7 @@ class HttpCli(object):
|
||||
try:
|
||||
# self.log("pebkac at httpcli.run #2: " + repr(ex))
|
||||
self.keepalive = self._check_nonfatal(ex)
|
||||
self.loud_reply(str(ex), status=ex.code)
|
||||
self.loud_reply("{}: {}".format(str(ex), self.vpath), status=ex.code)
|
||||
return self.keepalive
|
||||
except Pebkac:
|
||||
return False
|
||||
@@ -150,7 +163,7 @@ class HttpCli(object):
|
||||
response = ["HTTP/1.1 {} {}".format(status, HTTPCODE[status])]
|
||||
|
||||
if length is not None:
|
||||
response.append("Content-Length: " + str(length))
|
||||
response.append("Content-Length: " + unicode(length))
|
||||
|
||||
# close if unknown length, otherwise take client's preference
|
||||
response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close"))
|
||||
@@ -180,7 +193,8 @@ class HttpCli(object):
|
||||
self.send_headers(len(body), status, mime, headers)
|
||||
|
||||
try:
|
||||
self.s.sendall(body)
|
||||
if self.mode != "HEAD":
|
||||
self.s.sendall(body)
|
||||
except:
|
||||
raise Pebkac(400, "client d/c while replying body")
|
||||
|
||||
@@ -188,7 +202,7 @@ class HttpCli(object):
|
||||
|
||||
def loud_reply(self, body, *args, **kwargs):
|
||||
self.log(body.rstrip())
|
||||
self.reply(b"<pre>" + body.encode("utf-8"), *list(args), **kwargs)
|
||||
self.reply(b"<pre>" + body.encode("utf-8") + b"\r\n", *list(args), **kwargs)
|
||||
|
||||
def handle_get(self):
|
||||
logmsg = "{:4} {}".format(self.mode, self.req)
|
||||
@@ -208,6 +222,9 @@ class HttpCli(object):
|
||||
static_path = os.path.join(E.mod, "web/", self.vpath[5:])
|
||||
return self.tx_file(static_path)
|
||||
|
||||
if "tree" in self.uparam:
|
||||
return self.tx_tree()
|
||||
|
||||
# conditional redirect to single volumes
|
||||
if self.vpath == "" and not self.uparam:
|
||||
nread = len(self.rvol)
|
||||
@@ -226,7 +243,7 @@ class HttpCli(object):
|
||||
)
|
||||
if not self.readable and not self.writable:
|
||||
self.log("inaccessible: [{}]".format(self.vpath))
|
||||
self.uparam = {"h": True}
|
||||
self.uparam = {"h": False}
|
||||
|
||||
if "h" in self.uparam:
|
||||
self.vpath = None
|
||||
@@ -284,30 +301,71 @@ class HttpCli(object):
|
||||
if "application/octet-stream" in ctype:
|
||||
return self.handle_post_binary()
|
||||
|
||||
raise Pebkac(405, "don't know how to handle {} POST".format(ctype))
|
||||
if "application/x-www-form-urlencoded" in ctype:
|
||||
opt = self.args.urlform
|
||||
if "stash" in opt:
|
||||
return self.handle_stash()
|
||||
|
||||
def handle_stash(self):
|
||||
remains = int(self.headers.get("content-length", None))
|
||||
if remains is None:
|
||||
reader = read_socket_unbounded(self.sr)
|
||||
if "save" in opt:
|
||||
post_sz, _, _, path = self.dump_to_file()
|
||||
self.log("urlform: {} bytes, {}".format(post_sz, path))
|
||||
elif "print" in opt:
|
||||
reader, _ = self.get_body_reader()
|
||||
for buf in reader:
|
||||
buf = buf.decode("utf-8", "replace")
|
||||
self.log("urlform @ {}\n {}\n".format(self.vpath, buf))
|
||||
|
||||
if "get" in opt:
|
||||
return self.handle_get()
|
||||
|
||||
raise Pebkac(405, "POST({}) is disabled".format(ctype))
|
||||
|
||||
raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
|
||||
|
||||
def get_body_reader(self):
|
||||
chunked = "chunked" in self.headers.get("transfer-encoding", "").lower()
|
||||
remains = int(self.headers.get("content-length", -1))
|
||||
if chunked:
|
||||
return read_socket_chunked(self.sr), remains
|
||||
elif remains == -1:
|
||||
self.keepalive = False
|
||||
return read_socket_unbounded(self.sr), remains
|
||||
else:
|
||||
reader = read_socket(self.sr, remains)
|
||||
return read_socket(self.sr, remains), remains
|
||||
|
||||
def dump_to_file(self):
|
||||
reader, remains = self.get_body_reader()
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
fdir = os.path.join(vfs.realpath, rem)
|
||||
|
||||
addr = self.conn.addr[0].replace(":", ".")
|
||||
addr = self.ip.replace(":", ".")
|
||||
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
|
||||
path = os.path.join(fdir, fn)
|
||||
|
||||
with open(path, "wb", 512 * 1024) as f:
|
||||
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
|
||||
|
||||
self.log("wrote {}/{} bytes to {}".format(post_sz, remains, path))
|
||||
self.conn.hsrv.broker.put(
|
||||
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fn
|
||||
)
|
||||
|
||||
return post_sz, sha_b64, remains, path
|
||||
|
||||
def handle_stash(self):
|
||||
post_sz, sha_b64, remains, path = self.dump_to_file()
|
||||
spd = self._spd(post_sz)
|
||||
self.log("{} wrote {}/{} bytes to {}".format(spd, post_sz, remains, path))
|
||||
self.reply("{}\n{}\n".format(post_sz, sha_b64).encode("utf-8"))
|
||||
return True
|
||||
|
||||
def _spd(self, nbytes, add=True):
|
||||
if add:
|
||||
self.conn.nbyte += nbytes
|
||||
|
||||
spd1 = get_spd(nbytes, self.t0)
|
||||
spd2 = get_spd(self.conn.nbyte, self.conn.t0)
|
||||
return spd1 + " " + spd2
|
||||
|
||||
def handle_post_multipart(self):
|
||||
self.parser = MultipartParser(self.log, self.sr, self.headers)
|
||||
self.parser.parse()
|
||||
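With --urlform set to stash (or save), the new dump_to_file path means any url-encoded POST body is written straight to disk and answered with its size and truncated-sha512; a rough example, assuming an anonymous-write volume at /inc:

    # server: python copyparty-sfx.py --urlform stash -v ./inc:inc:w
    # client: curl sends application/x-www-form-urlencoded by default
    curl -d 'hello=world' http://127.0.0.1:3923/inc/
    # file lands as inc/put-<unixtime>-<client-ip>.bin,
    # reply is "<nbytes>\n<sha512-b64>\n"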
@@ -356,6 +414,9 @@ class HttpCli(object):
|
||||
except:
|
||||
raise Pebkac(422, "you POSTed invalid json")
|
||||
|
||||
if "srch" in self.uparam or "srch" in body:
|
||||
return self.handle_search(body)
|
||||
|
||||
# prefer this over undot; no reason to allow traversion
|
||||
if "/" in body["name"]:
|
||||
raise Pebkac(400, "folders verboten")
|
||||
@@ -367,9 +428,11 @@ class HttpCli(object):
|
||||
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
|
||||
body["vdir"] = self.vpath
|
||||
body["rdir"] = os.path.join(vfs.realpath, rem)
|
||||
body["addr"] = self.addr[0]
|
||||
body["vtop"] = vfs.vpath
|
||||
body["ptop"] = vfs.realpath
|
||||
body["prel"] = rem
|
||||
body["addr"] = self.ip
|
||||
body["vcfg"] = vfs.flags
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
|
||||
response = x.get()
|
||||
@@ -379,6 +442,52 @@ class HttpCli(object):
|
||||
self.reply(response.encode("utf-8"), mime="application/json")
|
||||
return True
|
||||
|
||||
def handle_search(self, body):
|
||||
vols = []
|
||||
for vtop in self.rvol:
|
||||
vfs, _ = self.conn.auth.vfs.get(vtop, self.uname, True, False)
|
||||
vols.append([vfs.vpath, vfs.realpath, vfs.flags])
|
||||
|
||||
idx = self.conn.get_u2idx()
|
||||
t0 = time.time()
|
||||
if idx.p_end:
|
||||
penalty = 0.7
|
||||
t_idle = t0 - idx.p_end
|
||||
if idx.p_dur > 0.7 and t_idle < penalty:
|
||||
m = "rate-limit ({:.1f} sec), cost {:.2f}, idle {:.2f}"
|
||||
raise Pebkac(429, m.format(penalty, idx.p_dur, t_idle))
|
||||
|
||||
if "srch" in body:
|
||||
# search by up2k hashlist
|
||||
vbody = copy.deepcopy(body)
|
||||
vbody["hash"] = len(vbody["hash"])
|
||||
self.log("qj: " + repr(vbody))
|
||||
hits = idx.fsearch(vols, body)
|
||||
msg = repr(hits)
|
||||
taglist = []
|
||||
else:
|
||||
# search by query params
|
||||
self.log("qj: " + repr(body))
|
||||
hits, taglist = idx.search(vols, body)
|
||||
msg = len(hits)
|
||||
|
||||
idx.p_end = time.time()
|
||||
idx.p_dur = idx.p_end - t0
|
||||
self.log("q#: {} ({:.2f}s)".format(msg, idx.p_dur))
|
||||
|
||||
order = []
|
||||
cfg = self.args.mte.split(",")
|
||||
for t in cfg:
|
||||
if t in taglist:
|
||||
order.append(t)
|
||||
for t in taglist:
|
||||
if t not in order:
|
||||
order.append(t)
|
||||
|
||||
r = json.dumps({"hits": hits, "tag_order": order}).encode("utf-8")
|
||||
self.reply(r, mime="application/json")
|
||||
return True
|
||||
|
||||
def handle_post_binary(self):
|
||||
try:
|
||||
remains = int(self.headers["content-length"])
|
||||
@@ -391,7 +500,10 @@ class HttpCli(object):
|
||||
except KeyError:
|
||||
raise Pebkac(400, "need hash and wark headers for binary POST")
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", wark, chash)
|
||||
vfs, _ = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
ptop = vfs.realpath
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", ptop, wark, chash)
|
||||
response = x.get()
|
||||
chunksize, cstart, path, lastmod = response
|
||||
|
||||
@@ -420,7 +532,7 @@ class HttpCli(object):
|
||||
if len(cstart) > 1 and path != os.devnull:
|
||||
self.log(
|
||||
"clone {} to {}".format(
|
||||
cstart[0], " & ".join(str(x) for x in cstart[1:])
|
||||
cstart[0], " & ".join(unicode(x) for x in cstart[1:])
|
||||
)
|
||||
)
|
||||
ofs = 0
|
||||
@@ -436,8 +548,13 @@ class HttpCli(object):
|
||||
|
||||
self.log("clone {} done".format(cstart[0]))
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", wark, chash)
|
||||
num_left = x.get()
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
|
||||
x = x.get()
|
||||
try:
|
||||
num_left, path = x
|
||||
except:
|
||||
self.loud_reply(x, status=500)
|
||||
return False
|
||||
|
||||
if not WINDOWS and num_left == 0:
|
||||
times = (int(time.time()), int(lastmod))
|
||||
@@ -447,7 +564,9 @@ class HttpCli(object):
|
||||
except:
|
||||
self.log("failed to utime ({}, {})".format(path, times))
|
||||
|
||||
self.loud_reply("thank")
|
||||
spd = self._spd(post_sz)
|
||||
self.log("{} thank".format(spd))
|
||||
self.reply(b"thank")
|
||||
return True
|
||||
|
||||
def handle_login(self):
|
||||
@@ -460,7 +579,7 @@ class HttpCli(object):
|
||||
msg = "naw dude"
|
||||
pwd = "x" # nosec
|
||||
|
||||
h = {"Set-Cookie": "cppwd={}; Path=/".format(pwd)}
|
||||
h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)}
|
||||
html = self.conn.tpl_msg.render(h1=msg, h2='<a href="/">ack</a>', redir="/")
|
||||
self.reply(html.encode("utf-8"), headers=h)
|
||||
return True
|
||||
@@ -491,10 +610,9 @@ class HttpCli(object):
|
||||
raise Pebkac(500, "mkdir failed, check the logs")
|
||||
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
esc_paths = [quotep(vpath), html_escape(vpath)]
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}">go to /{}</a>'.format(
|
||||
quotep(vpath), html_escape(vpath, quote=False)
|
||||
),
|
||||
h2='<a href="/{}">go to /{}</a>'.format(*esc_paths),
|
||||
pre="aight",
|
||||
click=True,
|
||||
)
|
||||
@@ -527,7 +645,7 @@ class HttpCli(object):
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}?edit">go to /{}?edit</a>'.format(
|
||||
quotep(vpath), html_escape(vpath, quote=False)
|
||||
quotep(vpath), html_escape(vpath)
|
||||
),
|
||||
pre="aight",
|
||||
click=True,
|
||||
@@ -549,38 +667,48 @@ class HttpCli(object):
|
||||
self.log("discarding incoming file without filename")
|
||||
# fallthrough
|
||||
|
||||
fn = os.devnull
|
||||
if p_file and not nullwrite:
|
||||
fdir = os.path.join(vfs.realpath, rem)
|
||||
fn = os.path.join(fdir, sanitize_fn(p_file))
|
||||
fname = sanitize_fn(p_file)
|
||||
|
||||
if not os.path.isdir(fsenc(fdir)):
|
||||
raise Pebkac(404, "that folder does not exist")
|
||||
|
||||
# TODO a broker which avoids this race and
|
||||
# provides a new filename if taken (same as up2k)
|
||||
if os.path.exists(fsenc(fn)):
|
||||
fn += ".{:.6f}-{}".format(time.time(), self.addr[0])
|
||||
# using current-time instead of t0 because clients
|
||||
# may reuse a name for multiple files in one post
|
||||
suffix = ".{:.6f}-{}".format(time.time(), self.ip)
|
||||
open_args = {"fdir": fdir, "suffix": suffix}
|
||||
else:
|
||||
open_args = {}
|
||||
fname = os.devnull
|
||||
fdir = ""
|
||||
|
||||
try:
|
||||
with open(fsenc(fn), "wb") as f:
|
||||
self.log("writing to {0}".format(fn))
|
||||
with ren_open(fname, "wb", 512 * 1024, **open_args) as f:
|
||||
f, fname = f["orz"]
|
||||
self.log("writing to {}/{}".format(fdir, fname))
|
||||
sz, sha512_hex, _ = hashcopy(self.conn, p_data, f)
|
||||
if sz == 0:
|
||||
raise Pebkac(400, "empty files in post")
|
||||
|
||||
files.append([sz, sha512_hex])
|
||||
self.conn.hsrv.broker.put(
|
||||
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fname
|
||||
)
|
||||
self.conn.nbyte += sz
|
||||
|
||||
except Pebkac:
|
||||
if fn != os.devnull:
|
||||
os.rename(fsenc(fn), fsenc(fn + ".PARTIAL"))
|
||||
if fname != os.devnull:
|
||||
fp = os.path.join(fdir, fname)
|
||||
suffix = ".PARTIAL"
|
||||
try:
|
||||
os.rename(fsenc(fp), fsenc(fp + suffix))
|
||||
except:
|
||||
fp = fp[: -len(suffix)]
|
||||
os.rename(fsenc(fp), fsenc(fp + suffix))
|
||||
|
||||
raise
|
||||
|
||||
except Pebkac as ex:
|
||||
errmsg = str(ex)
|
||||
errmsg = unicode(ex)
|
||||
|
||||
td = max(0.1, time.time() - t0)
|
||||
sz_total = sum(x[0] for x in files)
|
||||
@@ -599,7 +727,9 @@ class HttpCli(object):
|
||||
# truncated SHA-512 prevents length extension attacks;
|
||||
# using SHA-512/224, optionally SHA-512/256 = :64
|
||||
|
||||
self.log(msg)
|
||||
vspd = self._spd(sz_total, False)
|
||||
self.log("{} {}".format(vspd, msg))
|
||||
|
||||
if not nullwrite:
|
||||
# TODO this is bad
|
||||
log_fn = "up.{:.6f}.txt".format(t0)
|
||||
@@ -609,7 +739,7 @@ class HttpCli(object):
|
||||
"\n".join(
|
||||
unicode(x)
|
||||
for x in [
|
||||
":".join(unicode(x) for x in self.addr),
|
||||
":".join(unicode(x) for x in [self.ip, self.addr[1]]),
|
||||
msg.rstrip(),
|
||||
]
|
||||
)
|
||||
@@ -621,7 +751,7 @@ class HttpCli(object):
|
||||
|
||||
html = self.conn.tpl_msg.render(
|
||||
h2='<a href="/{}">return to /{}</a>'.format(
|
||||
quotep(self.vpath), html_escape(self.vpath, quote=False)
|
||||
quotep(self.vpath), html_escape(self.vpath)
|
||||
),
|
||||
pre=msg,
|
||||
)
|
||||
@@ -658,7 +788,7 @@ class HttpCli(object):
|
||||
return True
|
||||
|
||||
fp = os.path.join(vfs.realpath, rem)
|
||||
srv_lastmod = -1
|
||||
srv_lastmod = srv_lastmod3 = -1
|
||||
try:
|
||||
st = os.stat(fsenc(fp))
|
||||
srv_lastmod = st.st_mtime
|
||||
@@ -709,7 +839,7 @@ class HttpCli(object):
|
||||
if p_field != "body":
|
||||
raise Pebkac(400, "expected body, got {}".format(p_field))
|
||||
|
||||
with open(fp, "wb") as f:
|
||||
with open(fp, "wb", 512 * 1024) as f:
|
||||
sz, sha512, _ = hashcopy(self.conn, p_data, f)
|
||||
|
||||
new_lastmod = os.stat(fsenc(fp)).st_mtime
|
||||
@@ -734,9 +864,12 @@ class HttpCli(object):
|
||||
cli_dt = time.strptime(cli_lastmod, "%a, %d %b %Y %H:%M:%S GMT")
|
||||
cli_ts = calendar.timegm(cli_dt)
|
||||
return file_lastmod, int(file_ts) > int(cli_ts)
|
||||
except:
|
||||
self.log("bad lastmod format: {}".format(cli_lastmod))
|
||||
self.log(" expected format: {}".format(file_lastmod))
|
||||
except Exception as ex:
|
||||
self.log(
|
||||
"lastmod {}\nremote: [{}]\n local: [{}]".format(
|
||||
repr(ex), cli_lastmod, file_lastmod
|
||||
)
|
||||
)
|
||||
return file_lastmod, file_lastmod != cli_lastmod
|
||||
|
||||
return file_lastmod, True
|
||||
@@ -759,6 +892,8 @@ class HttpCli(object):
|
||||
editions[ext or "plain"] = [fs_path, st.st_size]
|
||||
except:
|
||||
pass
|
||||
if not self.vpath.startswith(".cpr/"):
|
||||
break
|
||||
|
||||
if not editions:
|
||||
raise Pebkac(404)
|
||||
@@ -853,6 +988,7 @@ class HttpCli(object):
|
||||
|
||||
logtail += " [\033[36m{}-{}\033[0m]".format(lower, upper)
|
||||
|
||||
use_sendfile = False
|
||||
if decompress:
|
||||
open_func = gzip.open
|
||||
open_args = [fsenc(fs_path), "rb"]
|
||||
@@ -862,6 +998,11 @@ class HttpCli(object):
|
||||
open_func = open
|
||||
# 512 kB is optimal for huge files, use 64k
|
||||
open_args = [fsenc(fs_path), "rb", 64 * 1024]
|
||||
use_sendfile = (
|
||||
not self.tls #
|
||||
and not self.args.no_sendfile
|
||||
and hasattr(os, "sendfile")
|
||||
)
|
||||
|
||||
#
|
||||
# send reply
|
||||
@@ -876,35 +1017,25 @@ class HttpCli(object):
|
||||
mime=guess_mime(req_path)[0] or "application/octet-stream",
|
||||
)
|
||||
|
||||
logmsg += str(status) + logtail
|
||||
logmsg += unicode(status) + logtail
|
||||
|
||||
if self.mode == "HEAD" or not do_send:
|
||||
self.log(logmsg)
|
||||
return True
|
||||
|
||||
ret = True
|
||||
with open_func(*open_args) as f:
|
||||
remains = upper - lower
|
||||
f.seek(lower)
|
||||
while remains > 0:
|
||||
# time.sleep(0.01)
|
||||
buf = f.read(4096)
|
||||
if not buf:
|
||||
break
|
||||
if use_sendfile:
|
||||
remains = sendfile_kern(lower, upper, f, self.s)
|
||||
else:
|
||||
remains = sendfile_py(lower, upper, f, self.s)
|
||||
|
||||
if remains < len(buf):
|
||||
buf = buf[:remains]
|
||||
if remains > 0:
|
||||
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
|
||||
|
||||
remains -= len(buf)
|
||||
|
||||
try:
|
||||
self.s.sendall(buf)
|
||||
except:
|
||||
logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
|
||||
self.log(logmsg)
|
||||
return False
|
||||
|
||||
self.log(logmsg)
|
||||
return True
|
||||
spd = self._spd((upper - lower) - remains)
|
||||
self.log("{}, {}".format(logmsg, spd))
|
||||
return ret
|
||||
|
||||
def tx_md(self, fs_path):
|
||||
logmsg = "{:4} {} ".format("", self.req)
|
||||
@@ -938,14 +1069,16 @@ class HttpCli(object):
|
||||
|
||||
targs = {
|
||||
"edit": "edit" in self.uparam,
|
||||
"title": html_escape(self.vpath, quote=False),
|
||||
"title": html_escape(self.vpath),
|
||||
"lastmod": int(ts_md * 1000),
|
||||
"md_plug": "true" if self.args.emp else "false",
|
||||
"md_chk_rate": self.args.mcr,
|
||||
"md": "",
|
||||
}
|
||||
sz_html = len(template.render(**targs).encode("utf-8"))
|
||||
self.send_headers(sz_html + sz_md, status)
|
||||
|
||||
logmsg += str(status)
|
||||
logmsg += unicode(status)
|
||||
if self.mode == "HEAD" or not do_send:
|
||||
self.log(logmsg)
|
||||
return True
|
||||
@@ -959,7 +1092,7 @@ class HttpCli(object):
|
||||
self.log(logmsg + " \033[31md/c\033[0m")
|
||||
return False
|
||||
|
||||
self.log(logmsg + " " + str(len(html)))
|
||||
self.log(logmsg + " " + unicode(len(html)))
|
||||
return True
|
||||
|
||||
def tx_mounts(self):
|
||||
@@ -969,6 +1102,61 @@ class HttpCli(object):
|
||||
self.reply(html.encode("utf-8"))
|
||||
return True
|
||||
|
||||
def tx_tree(self):
|
||||
top = self.uparam["tree"] or ""
|
||||
dst = self.vpath
|
||||
if top in [".", ".."]:
|
||||
top = undot(self.vpath + "/" + top)
|
||||
|
||||
if top == dst:
|
||||
dst = ""
|
||||
elif top:
|
||||
if not dst.startswith(top + "/"):
|
||||
raise Pebkac(400, "arg funk")
|
||||
|
||||
dst = dst[len(top) + 1 :]
|
||||
|
||||
ret = self.gen_tree(top, dst)
|
||||
ret = json.dumps(ret)
|
||||
self.reply(ret.encode("utf-8"), mime="application/json")
|
||||
return True
|
||||
|
||||
def gen_tree(self, top, target):
|
||||
ret = {}
|
||||
excl = None
|
||||
if target:
|
||||
excl, target = (target.split("/", 1) + [""])[:2]
|
||||
sub = self.gen_tree("/".join([top, excl]).strip("/"), target)
|
||||
ret["k" + quotep(excl)] = sub
|
||||
|
||||
try:
|
||||
vn, rem = self.auth.vfs.get(top, self.uname, True, False)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
|
||||
except:
|
||||
vfs_ls = []
|
||||
vfs_virt = {}
|
||||
for v in self.rvol:
|
||||
d1, d2 = v.rsplit("/", 1) if "/" in v else ["", v]
|
||||
if d1 == top:
|
||||
vfs_virt[d2] = 0
|
||||
|
||||
dirs = []
|
||||
|
||||
vfs_ls = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
||||
|
||||
if not self.args.ed or "dots" not in self.uparam:
|
||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||
|
||||
for fn in [x for x in vfs_ls if x != excl]:
|
||||
dirs.append(quotep(fn))
|
||||
|
||||
for x in vfs_virt.keys():
|
||||
if x != excl:
|
||||
dirs.append(x)
|
||||
|
||||
ret["a"] = dirs
|
||||
return ret
|
||||
|
||||
def tx_browser(self):
|
||||
vpath = ""
|
||||
vpnodes = [["", "/"]]
|
||||
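gen_tree above returns one level of folders plus the child that lies on the requested path, expanded recursively, so a ?tree request produces json roughly like this (folder names invented):

    curl 'http://127.0.0.1:3923/music/albums/?tree='
    # roughly:
    #   {"a": ["docs", "pics"],            <- other top-level folders
    #    "kmusic": {"a": ["singles"],      <- siblings of "albums" under /music
    #               "kalbums": {"a": ["2019", "2020"]}}}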
@@ -979,7 +1167,7 @@ class HttpCli(object):
|
||||
else:
|
||||
vpath += "/" + node
|
||||
|
||||
vpnodes.append([quotep(vpath) + "/", html_escape(node, quote=False)])
|
||||
vpnodes.append([quotep(vpath) + "/", html_escape(node)])
|
||||
|
||||
vn, rem = self.auth.vfs.get(
|
||||
self.vpath, self.uname, self.readable, self.writable
|
||||
@@ -994,9 +1182,14 @@ class HttpCli(object):
|
||||
if abspath.endswith(".md") and "raw" not in self.uparam:
|
||||
return self.tx_md(abspath)
|
||||
|
||||
if rem.startswith(".hist/up2k."):
|
||||
raise Pebkac(403)
|
||||
|
||||
return self.tx_file(abspath)
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
|
||||
stats = {k: v for k, v in vfs_ls}
|
||||
vfs_ls = [x[0] for x in vfs_ls]
|
||||
vfs_ls.extend(vfs_virt.keys())
|
||||
|
||||
# check for old versions of files,
|
||||
@@ -1019,22 +1212,35 @@ class HttpCli(object):
|
||||
if not self.args.ed or "dots" not in self.uparam:
|
||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||
|
||||
hidden = []
|
||||
if rem == ".hist":
|
||||
hidden = ["up2k."]
|
||||
|
||||
is_ls = "ls" in self.uparam
|
||||
|
||||
icur = None
|
||||
if "e2t" in vn.flags:
|
||||
idx = self.conn.get_u2idx()
|
||||
icur = idx.get_cur(vn.realpath)
|
||||
|
||||
dirs = []
|
||||
files = []
|
||||
for fn in vfs_ls:
|
||||
base = ""
|
||||
href = fn
|
||||
if self.absolute_urls and vpath:
|
||||
if not is_ls and self.absolute_urls and vpath:
|
||||
base = "/" + vpath + "/"
|
||||
href = base + fn
|
||||
|
||||
if fn in vfs_virt:
|
||||
fspath = vfs_virt[fn].realpath
|
||||
elif hidden and any(fn.startswith(x) for x in hidden):
|
||||
continue
|
||||
else:
|
||||
fspath = fsroot + "/" + fn
|
||||
|
||||
try:
|
||||
inf = os.stat(fsenc(fspath))
|
||||
inf = stats.get(fn) or os.stat(fsenc(fspath))
|
||||
except:
|
||||
self.log("broken symlink: {}".format(repr(fspath)))
|
||||
continue
|
||||
@@ -1054,35 +1260,61 @@ class HttpCli(object):
|
||||
dt = datetime.utcfromtimestamp(inf.st_mtime)
|
||||
dt = dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
item = [margin, quotep(href), html_escape(fn, quote=False), sz, dt]
|
||||
try:
|
||||
ext = "---" if is_dir else fn.rsplit(".", 1)[1]
|
||||
except:
|
||||
ext = "%"
|
||||
|
||||
item = {
|
||||
"lead": margin,
|
||||
"href": quotep(href),
|
||||
"name": fn,
|
||||
"sz": sz,
|
||||
"ext": ext,
|
||||
"dt": dt,
|
||||
"ts": int(inf.st_mtime),
|
||||
}
|
||||
if is_dir:
|
||||
dirs.append(item)
|
||||
else:
|
||||
files.append(item)
|
||||
item["rd"] = rem
|
||||
|
||||
logues = [None, None]
|
||||
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
|
||||
fn = os.path.join(abspath, fn)
|
||||
if os.path.exists(fsenc(fn)):
|
||||
with open(fsenc(fn), "rb") as f:
|
||||
logues[n] = f.read().decode("utf-8")
|
||||
taglist = {}
|
||||
for f in files:
|
||||
fn = f["name"]
|
||||
rd = f["rd"]
|
||||
del f["rd"]
|
||||
if icur:
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
try:
|
||||
r = icur.execute(q, (rd, fn)).fetchone()
|
||||
except:
|
||||
args = s3enc(idx.mem_cur, rd, fn)
|
||||
r = icur.execute(q, args).fetchone()
|
||||
|
||||
if False:
|
||||
# this is a mistake
|
||||
md = None
|
||||
for fn in [x[2] for x in files]:
|
||||
if fn.lower() == "readme.md":
|
||||
fn = os.path.join(abspath, fn)
|
||||
with open(fn, "rb") as f:
|
||||
md = f.read().decode("utf-8")
|
||||
tags = {}
|
||||
f["tags"] = tags
|
||||
|
||||
break
|
||||
if not r:
|
||||
continue
|
||||
|
||||
w = r[0][:16]
|
||||
q = "select k, v from mt where w = ? and k != 'x'"
|
||||
for k, v in icur.execute(q, (w,)):
|
||||
taglist[k] = True
|
||||
tags[k] = v
|
||||
|
||||
if icur:
|
||||
taglist = [k for k in vn.flags.get("mte", "").split(",") if k in taglist]
|
||||
for f in dirs:
|
||||
f["tags"] = {}
|
||||
|
||||
srv_info = []
|
||||
|
||||
try:
|
||||
if not self.args.nih:
|
||||
srv_info.append(str(socket.gethostname()).split(".")[0])
|
||||
srv_info.append(unicode(socket.gethostname()).split(".")[0])
|
||||
except:
|
||||
self.log("#wow #whoa")
|
||||
pass
|
||||
@@ -1106,21 +1338,55 @@ class HttpCli(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
srv_info = "</span> /// <span>".join(srv_info)
|
||||
|
||||
perms = []
|
||||
if self.readable:
|
||||
perms.append("read")
|
||||
if self.writable:
|
||||
perms.append("write")
|
||||
|
||||
logues = ["", ""]
|
||||
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
|
||||
fn = os.path.join(abspath, fn)
|
||||
if os.path.exists(fsenc(fn)):
|
||||
with open(fsenc(fn), "rb") as f:
|
||||
logues[n] = f.read().decode("utf-8")
|
||||
|
||||
if is_ls:
|
||||
[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
|
||||
ret = {
|
||||
"dirs": dirs,
|
||||
"files": files,
|
||||
"srvinf": srv_info,
|
||||
"perms": perms,
|
||||
"logues": logues,
|
||||
"taglist": taglist,
|
||||
}
|
||||
ret = json.dumps(ret)
|
||||
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
|
||||
return True
|
||||
|
||||
ts = ""
|
||||
# ts = "?{}".format(time.time())
|
||||
|
||||
dirs.extend(files)
|
||||
|
||||
html = self.conn.tpl_browser.render(
|
||||
vdir=quotep(self.vpath),
|
||||
vpnodes=vpnodes,
|
||||
files=dirs,
|
||||
can_upload=self.writable,
|
||||
can_read=self.readable,
|
||||
ts=ts,
|
||||
prologue=logues[0],
|
||||
epilogue=logues[1],
|
||||
title=html_escape(self.vpath, quote=False),
|
||||
srv_info="</span> /// <span>".join(srv_info),
|
||||
perms=json.dumps(perms),
|
||||
taglist=taglist,
|
||||
tag_order=json.dumps(
|
||||
vn.flags["mte"].split(",") if "mte" in vn.flags else []
|
||||
),
|
||||
have_up2k_idx=("e2d" in vn.flags),
|
||||
have_tags_idx=("e2t" in vn.flags),
|
||||
logues=logues,
|
||||
title=html_escape(self.vpath),
|
||||
srv_info=srv_info,
|
||||
)
|
||||
self.reply(html.encode("utf-8", "replace"))
|
||||
return True
|
||||
|
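For reference, the "?ls" branch above returns the directory listing as JSON instead of rendered HTML; a minimal client sketch (the host, port and volume name are assumptions, not part of this diff):

import json
from urllib.request import urlopen

# assumes copyparty is listening on its default port and /music is readable
with urlopen("http://127.0.0.1:3923/music/?ls") as r:
    listing = json.loads(r.read().decode("utf-8"))

print(listing["srvinf"], listing["perms"])
for f in listing["files"]:
    # each entry carries href/sz/ext/ts (name and dt are stripped in ls mode)
    print(f["href"], f["sz"], f.get("tags", {}))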
@@ -3,9 +3,15 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
import ssl
|
||||
import time
|
||||
import socket
|
||||
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
import ssl
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
try:
|
||||
import jinja2
|
||||
except ImportError:
|
||||
@@ -14,16 +20,19 @@ except ImportError:
|
||||
you do not have jinja2 installed,\033[33m
|
||||
choose one of these:\033[0m
|
||||
* apt install python-jinja2
|
||||
* python3 -m pip install --user jinja2
|
||||
* {} -m pip install --user jinja2
|
||||
* (try another python version, if you have one)
|
||||
* (try copyparty.sfx instead)
|
||||
"""
|
||||
""".format(
|
||||
os.path.basename(sys.executable)
|
||||
)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
from .__init__ import E
|
||||
from .util import Unrecv
|
||||
from .httpcli import HttpCli
|
||||
from .u2idx import U2idx
|
||||
|
||||
|
||||
class HttpConn(object):
|
||||
@@ -41,9 +50,12 @@ class HttpConn(object):
|
||||
self.auth = hsrv.auth
|
||||
self.cert_path = hsrv.cert_path
|
||||
|
||||
self.t0 = time.time()
|
||||
self.nbyte = 0
|
||||
self.workload = 0
|
||||
self.u2idx = None
|
||||
self.log_func = hsrv.log
|
||||
self.log_src = "{} \033[36m{}".format(addr[0], addr[1]).ljust(26)
|
||||
self.set_rproxy()
|
||||
|
||||
env = jinja2.Environment()
|
||||
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
|
||||
@@ -53,15 +65,33 @@ class HttpConn(object):
|
||||
self.tpl_md = env.get_template("md.html")
|
||||
self.tpl_mde = env.get_template("mde.html")
|
||||
|
||||
def set_rproxy(self, ip=None):
|
||||
if ip is None:
|
||||
color = 36
|
||||
ip = self.addr[0]
|
||||
self.rproxy = None
|
||||
else:
|
||||
color = 34
|
||||
self.rproxy = ip
|
||||
|
||||
self.ip = ip
|
||||
self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26)
|
||||
return self.log_src
|
||||
|
||||
def respath(self, res_name):
|
||||
return os.path.join(E.mod, "web", res_name)
|
||||
|
||||
def log(self, msg):
|
||||
self.log_func(self.log_src, msg)
|
||||
def log(self, msg, c=0):
|
||||
self.log_func(self.log_src, msg, c)
|
||||
|
||||
def run(self):
|
||||
def get_u2idx(self):
|
||||
if not self.u2idx:
|
||||
self.u2idx = U2idx(self.args, self.log_func)
|
||||
|
||||
return self.u2idx
|
||||
|
||||
def _detect_https(self):
|
||||
method = None
|
||||
self.sr = None
|
||||
if self.cert_path:
|
||||
try:
|
||||
method = self.s.recv(4, socket.MSG_PEEK)
|
||||
@@ -86,16 +116,58 @@ class HttpConn(object):
|
||||
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
||||
return
|
||||
|
||||
if method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]:
|
||||
return method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"]
|
||||
|
||||
def run(self):
|
||||
self.sr = None
|
||||
if self.args.https_only:
|
||||
is_https = True
|
||||
elif self.args.http_only or not HAVE_SSL:
|
||||
is_https = False
|
||||
else:
|
||||
is_https = self._detect_https()
|
||||
|
||||
if is_https:
|
||||
if self.sr:
|
||||
self.log("\033[1;31mTODO: cannot do https in jython\033[0m")
|
||||
self.log("TODO: cannot do https in jython", c="1;31")
|
||||
return
|
||||
|
||||
self.log_src = self.log_src.replace("[36m", "[35m")
|
||||
try:
|
||||
self.s = ssl.wrap_socket(
|
||||
self.s, server_side=True, certfile=self.cert_path
|
||||
)
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
ctx.load_cert_chain(self.cert_path)
|
||||
if self.args.ssl_ver:
|
||||
ctx.options &= ~self.args.ssl_flags_en
|
||||
ctx.options |= self.args.ssl_flags_de
|
||||
# print(repr(ctx.options))
|
||||
|
||||
if self.args.ssl_log:
|
||||
try:
|
||||
ctx.keylog_filename = self.args.ssl_log
|
||||
except:
|
||||
self.log("keylog failed; openssl or python too old")
|
||||
|
||||
if self.args.ciphers:
|
||||
ctx.set_ciphers(self.args.ciphers)
|
||||
|
||||
self.s = ctx.wrap_socket(self.s, server_side=True)
|
||||
msg = [
|
||||
"\033[1;3{:d}m{}".format(c, s)
|
||||
for c, s in zip([0, 5, 0], self.s.cipher())
|
||||
]
|
||||
self.log(" ".join(msg) + "\033[0m")
|
||||
|
||||
if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
|
||||
overlap = [y[::-1] for y in self.s.shared_ciphers()]
|
||||
lines = [str(x) for x in (["TLS cipher overlap:"] + overlap)]
|
||||
self.log("\n".join(lines))
|
||||
for k, v in [
|
||||
["compression", self.s.compression()],
|
||||
["ALPN proto", self.s.selected_alpn_protocol()],
|
||||
["NPN proto", self.s.selected_npn_protocol()],
|
||||
]:
|
||||
self.log("TLS {}: {}".format(k, v or "nah"))
|
||||
|
||||
except Exception as ex:
|
||||
em = str(ex)
|
||||
|
||||
@@ -108,7 +180,7 @@ class HttpConn(object):
|
||||
pass
|
||||
|
||||
else:
|
||||
self.log("\033[35mhandshake\033[0m " + em)
|
||||
self.log("handshake\033[0m " + em, c=5)
|
||||
|
||||
return
|
||||
|
||||
|
@@ -38,7 +38,9 @@ class HttpSrv(object):
|
||||
|
||||
def accept(self, sck, addr):
|
||||
"""takes an incoming tcp connection and creates a thread to handle it"""
|
||||
self.log("%s %s" % addr, "-" * 5 + "C-cthr")
|
||||
if self.args.log_conn:
|
||||
self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30")
|
||||
|
||||
thr = threading.Thread(target=self.thr_client, args=(sck, addr))
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
@@ -66,11 +68,15 @@ class HttpSrv(object):
|
||||
thr.start()
|
||||
|
||||
try:
|
||||
self.log("%s %s" % addr, "-" * 6 + "C-crun")
|
||||
if self.args.log_conn:
|
||||
self.log("%s %s" % addr, "|%sC-crun" % ("-" * 6,), c="1;30")
|
||||
|
||||
cli.run()
|
||||
|
||||
finally:
|
||||
self.log("%s %s" % addr, "-" * 7 + "C-done")
|
||||
if self.args.log_conn:
|
||||
self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30")
|
||||
|
||||
try:
|
||||
sck.shutdown(socket.SHUT_RDWR)
|
||||
sck.close()
|
||||
@@ -78,7 +84,8 @@ class HttpSrv(object):
|
||||
if not MACOS:
|
||||
self.log(
|
||||
"%s %s" % addr,
|
||||
"shut_rdwr err:\n {}\n {}".format(repr(sck), ex),
|
||||
"shut({}): {}".format(sck.fileno(), ex),
|
||||
c="1;30",
|
||||
)
|
||||
if ex.errno not in [10038, 10054, 107, 57, 9]:
|
||||
# 10038 No longer considered a socket
|
||||
|
347
copyparty/mtag.py
Normal file
@@ -0,0 +1,347 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
|
||||
from .__init__ import PY2, WINDOWS
|
||||
from .util import fsenc, fsdec, REKOBO_LKEY
|
||||
|
||||
if not PY2:
|
||||
unicode = str
|
||||
|
||||
|
||||
class MTag(object):
|
||||
def __init__(self, log_func, args):
|
||||
self.log_func = log_func
|
||||
self.usable = True
|
||||
self.prefer_mt = False
|
||||
mappings = args.mtm
|
||||
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
|
||||
or_ffprobe = " or ffprobe"
|
||||
|
||||
if self.backend == "mutagen":
|
||||
self.get = self.get_mutagen
|
||||
try:
|
||||
import mutagen
|
||||
except:
|
||||
self.log("could not load mutagen, trying ffprobe instead", c=3)
|
||||
self.backend = "ffprobe"
|
||||
|
||||
if self.backend == "ffprobe":
|
||||
self.get = self.get_ffprobe
|
||||
self.prefer_mt = True
|
||||
# about 20x slower
|
||||
if PY2:
|
||||
cmd = [b"ffprobe", b"-version"]
|
||||
try:
|
||||
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
except:
|
||||
self.usable = False
|
||||
else:
|
||||
if not shutil.which("ffprobe"):
|
||||
self.usable = False
|
||||
|
||||
if self.usable and WINDOWS and sys.version_info < (3, 8):
|
||||
self.usable = False
|
||||
or_ffprobe = " or python >= 3.8"
|
||||
msg = "found ffprobe but your python is too old; need 3.8 or newer"
|
||||
self.log(msg, c=1)
|
||||
|
||||
if not self.usable:
|
||||
msg = "need mutagen{} to read media tags so please run this:\n {} -m pip install --user mutagen"
|
||||
self.log(msg.format(or_ffprobe, os.path.basename(sys.executable)), c=1)
|
||||
return
|
||||
|
||||
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||
tagmap = {
|
||||
"album": ["album", "talb", "\u00a9alb", "original-album", "toal"],
|
||||
"artist": [
|
||||
"artist",
|
||||
"tpe1",
|
||||
"\u00a9art",
|
||||
"composer",
|
||||
"performer",
|
||||
"arranger",
|
||||
"\u00a9wrt",
|
||||
"tcom",
|
||||
"tpe3",
|
||||
"original-artist",
|
||||
"tope",
|
||||
],
|
||||
"title": ["title", "tit2", "\u00a9nam"],
|
||||
"circle": [
|
||||
"album-artist",
|
||||
"tpe2",
|
||||
"aart",
|
||||
"conductor",
|
||||
"organization",
|
||||
"band",
|
||||
],
|
||||
".tn": ["tracknumber", "trck", "trkn", "track"],
|
||||
"genre": ["genre", "tcon", "\u00a9gen"],
|
||||
"date": [
|
||||
"original-release-date",
|
||||
"release-date",
|
||||
"date",
|
||||
"tdrc",
|
||||
"\u00a9day",
|
||||
"original-date",
|
||||
"original-year",
|
||||
"tyer",
|
||||
"tdor",
|
||||
"tory",
|
||||
"year",
|
||||
"creation-time",
|
||||
],
|
||||
".bpm": ["bpm", "tbpm", "tmpo", "tbp"],
|
||||
"key": ["initial-key", "tkey", "key"],
|
||||
"comment": ["comment", "comm", "\u00a9cmt", "comments", "description"],
|
||||
}
|
||||
|
||||
if mappings:
|
||||
for k, v in [x.split("=") for x in mappings]:
|
||||
tagmap[k] = v.split(",")
|
||||
|
||||
self.tagmap = {}
|
||||
for k, vs in tagmap.items():
|
||||
vs2 = []
|
||||
for v in vs:
|
||||
if "-" not in v:
|
||||
vs2.append(v)
|
||||
continue
|
||||
|
||||
vs2.append(v.replace("-", " "))
|
||||
vs2.append(v.replace("-", "_"))
|
||||
vs2.append(v.replace("-", ""))
|
||||
|
||||
self.tagmap[k] = vs2
|
||||
|
||||
self.rmap = {
|
||||
v: [n, k] for k, vs in self.tagmap.items() for n, v in enumerate(vs)
|
||||
}
|
||||
# self.get = self.compare
|
||||
|
||||
def log(self, msg, c=0):
|
||||
self.log_func("mtag", msg, c)
|
||||
|
||||
def normalize_tags(self, ret, md):
|
||||
for k, v in dict(md).items():
|
||||
if not v:
|
||||
continue
|
||||
|
||||
k = k.lower().split("::")[0].strip()
|
||||
mk = self.rmap.get(k)
|
||||
if not mk:
|
||||
continue
|
||||
|
||||
pref, mk = mk
|
||||
if mk not in ret or ret[mk][0] > pref:
|
||||
ret[mk] = [pref, v[0]]
|
||||
|
||||
# take first value
|
||||
ret = {k: unicode(v[1]).strip() for k, v in ret.items()}
|
||||
|
||||
# track 3/7 => track 3
|
||||
for k, v in ret.items():
|
||||
if k[0] == ".":
|
||||
v = v.split("/")[0].strip().lstrip("0")
|
||||
ret[k] = v or 0
|
||||
|
||||
# normalize key notation to rkeobo
|
||||
okey = ret.get("key")
|
||||
if okey:
|
||||
key = okey.replace(" ", "").replace("maj", "").replace("min", "m")
|
||||
ret["key"] = REKOBO_LKEY.get(key.lower(), okey)
|
||||
|
||||
return ret
|
||||
|
||||
def compare(self, abspath):
|
||||
if abspath.endswith(".au"):
|
||||
return {}
|
||||
|
||||
print("\n" + abspath)
|
||||
r1 = self.get_mutagen(abspath)
|
||||
r2 = self.get_ffprobe(abspath)
|
||||
|
||||
keys = {}
|
||||
for d in [r1, r2]:
|
||||
for k in d.keys():
|
||||
keys[k] = True
|
||||
|
||||
diffs = []
|
||||
l1 = []
|
||||
l2 = []
|
||||
for k in sorted(keys.keys()):
|
||||
if k in [".q", ".dur"]:
|
||||
continue # lenient
|
||||
|
||||
v1 = r1.get(k)
|
||||
v2 = r2.get(k)
|
||||
if v1 == v2:
|
||||
print(" ", k, v1)
|
||||
elif v1 != "0000": # ffprobe date=0
|
||||
diffs.append(k)
|
||||
print(" 1", k, v1)
|
||||
print(" 2", k, v2)
|
||||
if v1:
|
||||
l1.append(k)
|
||||
if v2:
|
||||
l2.append(k)
|
||||
|
||||
if diffs:
|
||||
raise Exception()
|
||||
|
||||
return r1
|
||||
|
||||
def get_mutagen(self, abspath):
|
||||
import mutagen
|
||||
|
||||
try:
|
||||
md = mutagen.File(abspath, easy=True)
|
||||
x = md.info.length
|
||||
except Exception as ex:
|
||||
return {}
|
||||
|
||||
ret = {}
|
||||
try:
|
||||
dur = int(md.info.length)
|
||||
try:
|
||||
q = int(md.info.bitrate / 1024)
|
||||
except:
|
||||
q = int((os.path.getsize(abspath) / dur) / 128)
|
||||
|
||||
ret[".dur"] = [0, dur]
|
||||
ret[".q"] = [0, q]
|
||||
except:
|
||||
pass
|
||||
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_ffprobe(self, abspath):
|
||||
cmd = [b"ffprobe", b"-hide_banner", b"--", fsenc(abspath)]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
r = p.communicate()
|
||||
txt = r[1].decode("utf-8", "replace")
|
||||
txt = [x.rstrip("\r") for x in txt.split("\n")]
|
||||
|
||||
"""
|
||||
note:
|
||||
tags which contain newline will be truncated on first \n,
|
||||
ffprobe emits \n and spacepads the : to align visually
|
||||
note:
|
||||
the Stream ln always mentions Audio: if audio
|
||||
the Stream ln usually has kb/s, is more accurate
|
||||
the Duration ln always has kb/s
|
||||
the Metadata: after Chapter may contain BPM info,
|
||||
title : Tempo: 126.0
|
||||
|
||||
Input #0, wav,
|
||||
Metadata:
|
||||
date : <OK>
|
||||
Duration:
|
||||
Chapter #
|
||||
Metadata:
|
||||
title : <NG>
|
||||
|
||||
Input #0, mp3,
|
||||
Metadata:
|
||||
album : <OK>
|
||||
Duration:
|
||||
Stream #0:0: Audio:
|
||||
Stream #0:1: Video:
|
||||
Metadata:
|
||||
comment : <NG>
|
||||
"""
|
||||
|
||||
ptn_md_beg = re.compile("^( +)Metadata:$")
|
||||
ptn_md_kv = re.compile("^( +)([^:]+) *: (.*)")
|
||||
ptn_dur = re.compile("^ *Duration: ([^ ]+)(, |$)")
|
||||
ptn_br1 = re.compile("^ *Duration: .*, bitrate: ([0-9]+) kb/s(, |$)")
|
||||
ptn_br2 = re.compile("^ *Stream.*: Audio:.* ([0-9]+) kb/s(, |$)")
|
||||
ptn_audio = re.compile("^ *Stream .*: Audio: ")
|
||||
ptn_au_parent = re.compile("^ *(Input #|Stream .*: Audio: )")
|
||||
|
||||
ret = {}
|
||||
md = {}
|
||||
in_md = False
|
||||
is_audio = False
|
||||
au_parent = False
|
||||
for ln in txt:
|
||||
m = ptn_md_kv.match(ln)
|
||||
if m and in_md and len(m.group(1)) == in_md:
|
||||
_, k, v = [x.strip() for x in m.groups()]
|
||||
if k != "" and v != "":
|
||||
md[k] = [v]
|
||||
continue
|
||||
else:
|
||||
in_md = False
|
||||
|
||||
m = ptn_md_beg.match(ln)
|
||||
if m and au_parent:
|
||||
in_md = len(m.group(1)) + 2
|
||||
continue
|
||||
|
||||
au_parent = bool(ptn_au_parent.search(ln))
|
||||
|
||||
if ptn_audio.search(ln):
|
||||
is_audio = True
|
||||
|
||||
m = ptn_dur.search(ln)
|
||||
if m:
|
||||
sec = 0
|
||||
tstr = m.group(1)
|
||||
if tstr.lower() != "n/a":
|
||||
try:
|
||||
tf = tstr.split(",")[0].split(".")[0].split(":")
|
||||
for f in tf:
|
||||
sec *= 60
|
||||
sec += int(f)
|
||||
except:
|
||||
self.log("invalid timestr from ffprobe: [{}]".format(tstr), c=3)
|
||||
|
||||
ret[".dur"] = sec
|
||||
m = ptn_br1.search(ln)
|
||||
if m:
|
||||
ret[".q"] = m.group(1)
|
||||
|
||||
m = ptn_br2.search(ln)
|
||||
if m:
|
||||
ret[".q"] = m.group(1)
|
||||
|
||||
if not is_audio:
|
||||
return {}
|
||||
|
||||
ret = {k: [0, v] for k, v in ret.items()}
|
||||
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_bin(self, parsers, abspath):
|
||||
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||
pypath = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||
pypath = str(os.pathsep.join(pypath))
|
||||
env = os.environ.copy()
|
||||
env["PYTHONPATH"] = pypath
|
||||
|
||||
ret = {}
|
||||
for tagname, (binpath, timeout) in parsers.items():
|
||||
try:
|
||||
cmd = [sys.executable, binpath, abspath]
|
||||
args = {"env": env, "timeout": timeout}
|
||||
|
||||
if WINDOWS:
|
||||
args["creationflags"] = 0x4000
|
||||
else:
|
||||
cmd = ["nice"] + cmd
|
||||
|
||||
cmd = [fsenc(x) for x in cmd]
|
||||
v = sp.check_output(cmd, **args).strip()
|
||||
if v:
|
||||
ret[tagname] = v.decode("utf-8")
|
||||
except:
|
||||
pass
|
||||
|
||||
return ret
|
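get_bin() above runs each configured parser as "python <binpath> <abspath>" and takes whatever it prints on stdout as the tag value; a hypothetical parser script (not part of copyparty) could look like this:

#!/usr/bin/env python3
# hypothetical tag-parser invoked by get_bin(); argv[1] is the media file
import os
import sys

def main():
    path = sys.argv[1]
    # print exactly one value; get_bin() strips it and stores it as the tag
    print(os.path.getsize(path))

if __name__ == "__main__":
    main()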
@@ -65,16 +65,16 @@ class SvcHub(object):
|
||||
self.broker.shutdown()
|
||||
print("nailed it")
|
||||
|
||||
def _log_disabled(self, src, msg):
|
||||
def _log_disabled(self, src, msg, c=0):
|
||||
pass
|
||||
|
||||
def _log_enabled(self, src, msg):
|
||||
def _log_enabled(self, src, msg, c=0):
|
||||
"""handles logging from all components"""
|
||||
with self.log_mutex:
|
||||
now = time.time()
|
||||
if now >= self.next_day:
|
||||
dt = datetime.utcfromtimestamp(now)
|
||||
print("\033[36m{}\033[0m".format(dt.strftime("%Y-%m-%d")))
|
||||
print("\033[36m{}\033[0m\n".format(dt.strftime("%Y-%m-%d")), end="")
|
||||
|
||||
# unix timestamp of next 00:00:00 (leap-seconds safe)
|
||||
day_now = dt.day
|
||||
@@ -84,23 +84,30 @@ class SvcHub(object):
|
||||
dt = dt.replace(hour=0, minute=0, second=0)
|
||||
self.next_day = calendar.timegm(dt.utctimetuple())
|
||||
|
||||
fmt = "\033[36m{} \033[33m{:21} \033[0m{}"
|
||||
fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n"
|
||||
if not VT100:
|
||||
fmt = "{} {:21} {}"
|
||||
fmt = "{} {:21} {}\n"
|
||||
if "\033" in msg:
|
||||
msg = self.ansi_re.sub("", msg)
|
||||
if "\033" in src:
|
||||
src = self.ansi_re.sub("", src)
|
||||
elif c:
|
||||
if isinstance(c, int):
|
||||
msg = "\033[3{}m{}".format(c, msg)
|
||||
elif "\033" not in c:
|
||||
msg = "\033[{}m{}\033[0m".format(c, msg)
|
||||
else:
|
||||
msg = "{}{}\033[0m".format(c, msg)
|
||||
|
||||
ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3]
|
||||
msg = fmt.format(ts, src, msg)
|
||||
try:
|
||||
print(msg)
|
||||
print(msg, end="")
|
||||
except UnicodeEncodeError:
|
||||
try:
|
||||
print(msg.encode("utf-8", "replace").decode())
|
||||
print(msg.encode("utf-8", "replace").decode(), end="")
|
||||
except:
|
||||
print(msg.encode("ascii", "replace").decode())
|
||||
print(msg.encode("ascii", "replace").decode(), end="")
|
||||
|
||||
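A standalone sketch of the color handling introduced in _log_enabled() above: an integer c selects a basic ANSI foreground color, while a string c is used as the raw SGR code (the helper name is illustrative only):

def colorize(msg, c=0):
    # mirrors the elif-c branch added above, outside of SvcHub for easy testing
    if not c:
        return msg
    if isinstance(c, int):
        return "\033[3{}m{}".format(c, msg)
    if "\033" not in c:
        return "\033[{}m{}\033[0m".format(c, msg)
    return "{}{}\033[0m".format(c, msg)

print(colorize("warning", 3))      # yellow
print(colorize("C-cthr", "1;30"))  # bright black, as used by the log_conn lines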
def check_mp_support(self):
|
||||
vmin = sys.version_info[1]
|
||||
|
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import re
|
||||
import time
|
||||
import socket
|
||||
import select
|
||||
|
||||
from .util import chkcmd, Counter
|
||||
|
||||
@@ -23,55 +24,80 @@ class TcpSrv(object):
|
||||
|
||||
ip = "127.0.0.1"
|
||||
eps = {ip: "local only"}
|
||||
if self.args.i != ip:
|
||||
eps = self.detect_interfaces(self.args.i) or {self.args.i: "external"}
|
||||
nonlocals = [x for x in self.args.i if x != ip]
|
||||
if nonlocals:
|
||||
eps = self.detect_interfaces(self.args.i)
|
||||
if not eps:
|
||||
for x in nonlocals:
|
||||
eps[x] = "external"
|
||||
|
||||
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
|
||||
self.log(
|
||||
"tcpsrv",
|
||||
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
|
||||
ip, self.args.p, desc
|
||||
),
|
||||
)
|
||||
for port in sorted(self.args.p):
|
||||
self.log(
|
||||
"tcpsrv",
|
||||
"available @ http://{}:{}/ (\033[33m{}\033[0m)".format(
|
||||
ip, port, desc
|
||||
),
|
||||
)
|
||||
|
||||
self.srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
self.srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
self.srv = []
|
||||
for ip in self.args.i:
|
||||
for port in self.args.p:
|
||||
self.srv.append(self._listen(ip, port))
|
||||
|
||||
def _listen(self, ip, port):
|
||||
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
try:
|
||||
self.srv.bind((self.args.i, self.args.p))
|
||||
srv.bind((ip, port))
|
||||
return srv
|
||||
except (OSError, socket.error) as ex:
|
||||
if ex.errno == 98:
|
||||
raise Exception(
|
||||
"\033[1;31mport {} is busy on interface {}\033[0m".format(
|
||||
self.args.p, self.args.i
|
||||
)
|
||||
)
|
||||
|
||||
if ex.errno == 99:
|
||||
raise Exception(
|
||||
"\033[1;31minterface {} does not exist\033[0m".format(self.args.i)
|
||||
)
|
||||
if ex.errno in [98, 48]:
|
||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||
elif ex.errno in [99, 49]:
|
||||
e = "\033[1;31minterface {} does not exist\033[0m".format(ip)
|
||||
else:
|
||||
raise
|
||||
raise Exception(e)
|
||||
|
||||
def run(self):
|
||||
self.srv.listen(self.args.nc)
|
||||
|
||||
self.log("tcpsrv", "listening @ {0}:{1}".format(self.args.i, self.args.p))
|
||||
for srv in self.srv:
|
||||
srv.listen(self.args.nc)
|
||||
ip, port = srv.getsockname()
|
||||
self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port))
|
||||
|
||||
while True:
|
||||
self.log("tcpsrv", "-" * 1 + "C-ncli")
|
||||
if self.args.log_conn:
|
||||
self.log("tcpsrv", "|%sC-ncli" % ("-" * 1,), c="1;30")
|
||||
|
||||
if self.num_clients.v >= self.args.nc:
|
||||
time.sleep(0.1)
|
||||
continue
|
||||
|
||||
self.log("tcpsrv", "-" * 2 + "C-acc1")
|
||||
sck, addr = self.srv.accept()
|
||||
self.log("%s %s" % addr, "-" * 3 + "C-acc2")
|
||||
self.num_clients.add()
|
||||
self.hub.broker.put(False, "httpconn", sck, addr)
|
||||
if self.args.log_conn:
|
||||
self.log("tcpsrv", "|%sC-acc1" % ("-" * 2,), c="1;30")
|
||||
|
||||
ready, _, _ = select.select(self.srv, [], [])
|
||||
for srv in ready:
|
||||
sck, addr = srv.accept()
|
||||
sip, sport = srv.getsockname()
|
||||
if self.args.log_conn:
|
||||
self.log(
|
||||
"%s %s" % addr,
|
||||
"|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
|
||||
"-" * 3, sip, sport % 8, sport
|
||||
),
|
||||
c="1;30",
|
||||
)
|
||||
|
||||
self.num_clients.add()
|
||||
self.hub.broker.put(False, "httpconn", sck, addr)
|
||||
|
||||
def shutdown(self):
|
||||
self.log("tcpsrv", "ok bye")
|
||||
|
||||
def detect_interfaces(self, listen_ip):
|
||||
def detect_interfaces(self, listen_ips):
|
||||
eps = {}
|
||||
|
||||
# get all ips and their interfaces
|
||||
@@ -85,8 +111,9 @@ class TcpSrv(object):
|
||||
for ln in ip_addr.split("\n"):
|
||||
try:
|
||||
ip, dev = r.match(ln.rstrip()).groups()
|
||||
if listen_ip in ["0.0.0.0", ip]:
|
||||
eps[ip] = dev
|
||||
for lip in listen_ips:
|
||||
if lip in ["0.0.0.0", ip]:
|
||||
eps[ip] = dev
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -113,11 +140,12 @@ class TcpSrv(object):
|
||||
|
||||
s.close()
|
||||
|
||||
if default_route and listen_ip in ["0.0.0.0", default_route]:
|
||||
desc = "\033[32mexternal"
|
||||
try:
|
||||
eps[default_route] += ", " + desc
|
||||
except:
|
||||
eps[default_route] = desc
|
||||
for lip in listen_ips:
|
||||
if default_route and lip in ["0.0.0.0", default_route]:
|
||||
desc = "\033[32mexternal"
|
||||
try:
|
||||
eps[default_route] += ", " + desc
|
||||
except:
|
||||
eps[default_route] = desc
|
||||
|
||||
return eps
|
||||
|
281
copyparty/u2idx.py
Normal file
@@ -0,0 +1,281 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import os
|
||||
import time
|
||||
import threading
|
||||
from datetime import datetime
|
||||
|
||||
from .util import u8safe, s3dec, html_escape, Pebkac
|
||||
from .up2k import up2k_wark_from_hashlist
|
||||
|
||||
|
||||
try:
|
||||
HAVE_SQLITE3 = True
|
||||
import sqlite3
|
||||
except:
|
||||
HAVE_SQLITE3 = False
|
||||
|
||||
|
||||
class U2idx(object):
|
||||
def __init__(self, args, log_func):
|
||||
self.args = args
|
||||
self.log_func = log_func
|
||||
self.timeout = args.srch_time
|
||||
|
||||
if not HAVE_SQLITE3:
|
||||
self.log("could not load sqlite3; searchign wqill be disabled")
|
||||
return
|
||||
|
||||
self.cur = {}
|
||||
self.mem_cur = sqlite3.connect(":memory:")
|
||||
self.mem_cur.execute(r"create table a (b text)")
|
||||
|
||||
self.p_end = None
|
||||
self.p_dur = 0
|
||||
|
||||
def log(self, msg, c=0):
|
||||
self.log_func("u2idx", msg, c)
|
||||
|
||||
def fsearch(self, vols, body):
|
||||
"""search by up2k hashlist"""
|
||||
if not HAVE_SQLITE3:
|
||||
return []
|
||||
|
||||
fsize = body["size"]
|
||||
fhash = body["hash"]
|
||||
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
|
||||
|
||||
uq = "substr(w,1,16) = ? and w = ?"
|
||||
uv = [wark[:16], wark]
|
||||
|
||||
try:
|
||||
return self.run_query(vols, uq, uv, {})[0]
|
||||
except Exception as ex:
|
||||
raise Pebkac(500, repr(ex))
|
||||
|
||||
def get_cur(self, ptop):
|
||||
cur = self.cur.get(ptop)
|
||||
if cur:
|
||||
return cur
|
||||
|
||||
cur = _open(ptop)
|
||||
if not cur:
|
||||
return None
|
||||
|
||||
self.cur[ptop] = cur
|
||||
return cur
|
||||
|
||||
def search(self, vols, body):
|
||||
"""search by query params"""
|
||||
if not HAVE_SQLITE3:
|
||||
return []
|
||||
|
||||
qobj = {}
|
||||
_conv_sz(qobj, body, "sz_min", "up.sz >= ?")
|
||||
_conv_sz(qobj, body, "sz_max", "up.sz <= ?")
|
||||
_conv_dt(qobj, body, "dt_min", "up.mt >= ?")
|
||||
_conv_dt(qobj, body, "dt_max", "up.mt <= ?")
|
||||
for seg, dk in [["path", "up.rd"], ["name", "up.fn"]]:
|
||||
if seg in body:
|
||||
_conv_txt(qobj, body, seg, dk)
|
||||
|
||||
uq, uv = _sqlize(qobj)
|
||||
|
||||
qobj = {}
|
||||
if "tags" in body:
|
||||
_conv_txt(qobj, body, "tags", "mt.v")
|
||||
|
||||
if "adv" in body:
|
||||
_conv_adv(qobj, body, "adv")
|
||||
|
||||
try:
|
||||
return self.run_query(vols, uq, uv, qobj)
|
||||
except Exception as ex:
|
||||
raise Pebkac(500, repr(ex))
|
||||
|
||||
def run_query(self, vols, uq, uv, targs):
|
||||
self.log("qs: {} {} , {}".format(uq, repr(uv), repr(targs)))
|
||||
|
||||
done_flag = []
|
||||
self.active_id = "{:.6f}_{}".format(
|
||||
time.time(), threading.current_thread().ident
|
||||
)
|
||||
thr = threading.Thread(
|
||||
target=self.terminator,
|
||||
args=(
|
||||
self.active_id,
|
||||
done_flag,
|
||||
),
|
||||
)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
if not targs:
|
||||
if not uq:
|
||||
q = "select * from up"
|
||||
v = ()
|
||||
else:
|
||||
q = "select * from up where " + uq
|
||||
v = tuple(uv)
|
||||
else:
|
||||
q = "select up.* from up"
|
||||
keycmp = "substr(up.w,1,16)"
|
||||
where = []
|
||||
v = []
|
||||
ctr = 0
|
||||
for tq, tv in sorted(targs.items()):
|
||||
ctr += 1
|
||||
tq = tq.split("\n")[0]
|
||||
keycmp2 = "mt{}.w".format(ctr)
|
||||
q += " inner join mt mt{} on {} = {}".format(ctr, keycmp, keycmp2)
|
||||
keycmp = keycmp2
|
||||
where.append(tq.replace("mt.", keycmp[:-1]))
|
||||
v.append(tv)
|
||||
|
||||
if uq:
|
||||
where.append(uq)
|
||||
v.extend(uv)
|
||||
|
||||
q += " where " + (" and ".join(where))
|
||||
|
||||
# self.log("q2: {} {}".format(q, repr(v)))
|
||||
|
||||
ret = []
|
||||
lim = 1000
|
||||
taglist = {}
|
||||
for (vtop, ptop, flags) in vols:
|
||||
cur = self.get_cur(ptop)
|
||||
if not cur:
|
||||
continue
|
||||
|
||||
self.active_cur = cur
|
||||
|
||||
sret = []
|
||||
c = cur.execute(q, v)
|
||||
for hit in c:
|
||||
w, ts, sz, rd, fn = hit
|
||||
lim -= 1
|
||||
if lim <= 0:
|
||||
break
|
||||
|
||||
if rd.startswith("//") or fn.startswith("//"):
|
||||
rd, fn = s3dec(rd, fn)
|
||||
|
||||
rp = os.path.join(vtop, rd, fn).replace("\\", "/")
|
||||
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
|
||||
|
||||
for hit in sret:
|
||||
w = hit["w"]
|
||||
del hit["w"]
|
||||
tags = {}
|
||||
q2 = "select k, v from mt where w = ? and k != 'x'"
|
||||
for k, v2 in cur.execute(q2, (w,)):
|
||||
taglist[k] = True
|
||||
tags[k] = v2
|
||||
|
||||
hit["tags"] = tags
|
||||
|
||||
ret.extend(sret)
|
||||
|
||||
done_flag.append(True)
|
||||
self.active_id = None
|
||||
|
||||
# undupe hits from multiple metadata keys
|
||||
if len(ret) > 1:
|
||||
ret = [ret[0]] + [
|
||||
y for x, y in zip(ret[:-1], ret[1:]) if x["rp"] != y["rp"]
|
||||
]
|
||||
|
||||
return ret, list(taglist.keys())
|
||||
|
||||
def terminator(self, identifier, done_flag):
|
||||
for _ in range(self.timeout):
|
||||
time.sleep(1)
|
||||
if done_flag:
|
||||
return
|
||||
|
||||
if identifier == self.active_id:
|
||||
self.active_cur.connection.interrupt()
|
||||
|
||||
|
||||
def _open(ptop):
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
if os.path.exists(db_path):
|
||||
return sqlite3.connect(db_path).cursor()
|
||||
|
||||
|
||||
def _conv_sz(q, body, k, sql):
|
||||
if k in body:
|
||||
q[sql] = int(float(body[k]) * 1024 * 1024)
|
||||
|
||||
|
||||
def _conv_dt(q, body, k, sql):
|
||||
if k not in body:
|
||||
return
|
||||
|
||||
v = body[k].upper().rstrip("Z").replace(",", " ").replace("T", " ")
|
||||
while " " in v:
|
||||
v = v.replace(" ", " ")
|
||||
|
||||
for fmt in ["%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M", "%Y-%m-%d %H", "%Y-%m-%d"]:
|
||||
try:
|
||||
ts = datetime.strptime(v, fmt).timestamp()
|
||||
break
|
||||
except:
|
||||
ts = None
|
||||
|
||||
if ts:
|
||||
q[sql] = ts
|
||||
|
||||
|
||||
def _conv_txt(q, body, k, sql):
|
||||
for v in body[k].split(" "):
|
||||
inv = ""
|
||||
if v.startswith("-"):
|
||||
inv = "not"
|
||||
v = v[1:]
|
||||
|
||||
if not v:
|
||||
continue
|
||||
|
||||
head = "'%'||"
|
||||
if v.startswith("^"):
|
||||
head = ""
|
||||
v = v[1:]
|
||||
|
||||
tail = "||'%'"
|
||||
if v.endswith("$"):
|
||||
tail = ""
|
||||
v = v[:-1]
|
||||
|
||||
qk = "{} {} like {}?{}".format(sql, inv, head, tail)
|
||||
q[qk + "\n" + v] = u8safe(v)
|
||||
|
||||
|
||||
def _conv_adv(q, body, k):
|
||||
ptn = re.compile(r"^(\.?[a-z]+) *(==?|!=|<=?|>=?) *(.*)$")
|
||||
|
||||
parts = body[k].split(" ")
|
||||
parts = [x.strip() for x in parts if x.strip()]
|
||||
|
||||
for part in parts:
|
||||
m = ptn.match(part)
|
||||
if not m:
|
||||
p = html_escape(part)
|
||||
raise Pebkac(400, "invalid argument [" + p + "]")
|
||||
|
||||
k, op, v = m.groups()
|
||||
qk = "mt.k = '{}' and mt.v {} ?".format(k, op)
|
||||
q[qk + "\n" + v] = u8safe(v)
|
||||
|
||||
|
||||
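A small usage sketch of the advanced-search conversion above; each space-separated term must match <tag> <op> <value> (the tag names here are just examples):

q = {}
_conv_adv(q, {"adv": ".bpm>=120 key!=9A"}, "adv")
for k, v in sorted(q.items()):
    print(k.split("\n")[0], "<-", v)
# mt.k = '.bpm' and mt.v >= ? <- 120
# mt.k = 'key' and mt.v != ? <- 9A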
def _sqlize(qobj):
|
||||
keys = []
|
||||
values = []
|
||||
for k, v in sorted(qobj.items()):
|
||||
keys.append(k.split("\n")[0])
|
||||
values.append(v)
|
||||
|
||||
return " and ".join(keys), values
|
1287
copyparty/up2k.py
File diff suppressed because it is too large
@@ -2,13 +2,18 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import base64
|
||||
import select
|
||||
import struct
|
||||
import hashlib
|
||||
import platform
|
||||
import traceback
|
||||
import threading
|
||||
import mimetypes
|
||||
import contextlib
|
||||
import subprocess as sp # nosec
|
||||
|
||||
from .__init__ import PY2, WINDOWS
|
||||
@@ -52,11 +57,58 @@ HTTPCODE = {
|
||||
413: "Payload Too Large",
|
||||
416: "Requested Range Not Satisfiable",
|
||||
422: "Unprocessable Entity",
|
||||
429: "Too Many Requests",
|
||||
500: "Internal Server Error",
|
||||
501: "Not Implemented",
|
||||
}
|
||||
|
||||
|
||||
IMPLICATIONS = [
|
||||
["e2dsa", "e2ds"],
|
||||
["e2ds", "e2d"],
|
||||
["e2tsr", "e2ts"],
|
||||
["e2ts", "e2t"],
|
||||
["e2t", "e2d"],
|
||||
]
|
||||
|
||||
|
||||
REKOBO_KEY = {
|
||||
v: ln.split(" ", 1)[0]
|
||||
for ln in """
|
||||
1B 6d B
|
||||
2B 7d Gb F#
|
||||
3B 8d Db C#
|
||||
4B 9d Ab G#
|
||||
5B 10d Eb D#
|
||||
6B 11d Bb A#
|
||||
7B 12d F
|
||||
8B 1d C
|
||||
9B 2d G
|
||||
10B 3d D
|
||||
11B 4d A
|
||||
12B 5d E
|
||||
1A 6m Abm G#m
|
||||
2A 7m Ebm D#m
|
||||
3A 8m Bbm A#m
|
||||
4A 9m Fm
|
||||
5A 10m Cm
|
||||
6A 11m Gm
|
||||
7A 12m Dm
|
||||
8A 1m Am
|
||||
9A 2m Em
|
||||
10A 3m Bm
|
||||
11A 4m Gbm F#m
|
||||
12A 5m Dbm C#m
|
||||
""".strip().split(
|
||||
"\n"
|
||||
)
|
||||
for v in ln.strip().split(" ")[1:]
|
||||
if v
|
||||
}
|
||||
|
||||
REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}
|
||||
|
||||
|
||||
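The REKOBO tables above translate traditional key names into Camelot notation; a quick sketch of the lookup that normalize_tags() in mtag.py performs:

okey = "G# min"  # example tag value as it might appear in a file
key = okey.replace(" ", "").replace("maj", "").replace("min", "m")
print(REKOBO_LKEY.get(key.lower(), okey))  # -> 1A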
class Counter(object):
|
||||
def __init__(self, v=0):
|
||||
self.v = v
|
||||
@@ -95,6 +147,145 @@ class Unrecv(object):
|
||||
self.buf = buf + self.buf
|
||||
|
||||
|
||||
class ProgressPrinter(threading.Thread):
|
||||
"""
|
||||
periodically print progress info without linefeeds
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
threading.Thread.__init__(self)
|
||||
self.daemon = True
|
||||
self.msg = None
|
||||
self.end = False
|
||||
self.start()
|
||||
|
||||
def run(self):
|
||||
msg = None
|
||||
while not self.end:
|
||||
time.sleep(0.1)
|
||||
if msg == self.msg or self.end:
|
||||
continue
|
||||
|
||||
msg = self.msg
|
||||
uprint(" {}\033[K\r".format(msg))
|
||||
|
||||
print("\033[K", end="")
|
||||
sys.stdout.flush() # necessary on win10 even w/ stderr btw
|
||||
|
||||
|
||||
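Usage sketch for ProgressPrinter above: a worker keeps updating .msg while the thread repaints the same console line, and setting .end stops it:

import time

pp = ProgressPrinter()
for n in range(5):
    pp.msg = "hashing chunk {}/5".format(n + 1)
    time.sleep(0.3)
pp.end = True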
def uprint(msg):
|
||||
try:
|
||||
print(msg, end="")
|
||||
except UnicodeEncodeError:
|
||||
try:
|
||||
print(msg.encode("utf-8", "replace").decode(), end="")
|
||||
except:
|
||||
print(msg.encode("ascii", "replace").decode(), end="")
|
||||
|
||||
|
||||
def nuprint(msg):
|
||||
uprint("{}\n".format(msg))
|
||||
|
||||
|
||||
def rice_tid():
|
||||
tid = threading.current_thread().ident
|
||||
c = struct.unpack(b"B" * 5, struct.pack(b">Q", tid)[-5:])
|
||||
return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m"
|
||||
|
||||
|
||||
def trace(*args, **kwargs):
|
||||
t = time.time()
|
||||
stack = "".join(
|
||||
"\033[36m{}\033[33m{}".format(x[0].split(os.sep)[-1][:-3], x[1])
|
||||
for x in traceback.extract_stack()[3:-1]
|
||||
)
|
||||
parts = ["{:.6f}".format(t), rice_tid(), stack]
|
||||
|
||||
if args:
|
||||
parts.append(repr(args))
|
||||
|
||||
if kwargs:
|
||||
parts.append(repr(kwargs))
|
||||
|
||||
msg = "\033[0m ".join(parts)
|
||||
# _tracebuf.append(msg)
|
||||
nuprint(msg)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def ren_open(fname, *args, **kwargs):
|
||||
fdir = kwargs.pop("fdir", None)
|
||||
suffix = kwargs.pop("suffix", None)
|
||||
|
||||
if fname == os.devnull:
|
||||
with open(fname, *args, **kwargs) as f:
|
||||
yield {"orz": [f, fname]}
|
||||
return
|
||||
|
||||
orig_name = fname
|
||||
bname = fname
|
||||
ext = ""
|
||||
while True:
|
||||
ofs = bname.rfind(".")
|
||||
if ofs < 0 or ofs < len(bname) - 7:
|
||||
# doesn't look like an extension anymore
|
||||
break
|
||||
|
||||
ext = bname[ofs:] + ext
|
||||
bname = bname[:ofs]
|
||||
|
||||
b64 = ""
|
||||
while True:
|
||||
try:
|
||||
if fdir:
|
||||
fpath = os.path.join(fdir, fname)
|
||||
else:
|
||||
fpath = fname
|
||||
|
||||
if suffix and os.path.exists(fpath):
|
||||
fpath += suffix
|
||||
fname += suffix
|
||||
ext += suffix
|
||||
|
||||
with open(fsenc(fpath), *args, **kwargs) as f:
|
||||
if b64:
|
||||
fp2 = "fn-trunc.{}.txt".format(b64)
|
||||
fp2 = os.path.join(fdir, fp2)
|
||||
with open(fsenc(fp2), "wb") as f2:
|
||||
f2.write(orig_name.encode("utf-8"))
|
||||
|
||||
yield {"orz": [f, fname]}
|
||||
return
|
||||
|
||||
except OSError as ex_:
|
||||
ex = ex_
|
||||
if ex.errno not in [36, 63] and (not WINDOWS or ex.errno != 22):
|
||||
raise
|
||||
|
||||
if not b64:
|
||||
b64 = (bname + ext).encode("utf-8", "replace")
|
||||
b64 = hashlib.sha512(b64).digest()[:12]
|
||||
b64 = base64.urlsafe_b64encode(b64).decode("utf-8").rstrip("=")
|
||||
|
||||
badlen = len(fname)
|
||||
while len(fname) >= badlen:
|
||||
if len(bname) < 8:
|
||||
raise ex
|
||||
|
||||
if len(bname) > len(ext):
|
||||
# drop the last letter of the filename
|
||||
bname = bname[:-1]
|
||||
else:
|
||||
try:
|
||||
# drop the leftmost sub-extension
|
||||
_, ext = ext.split(".", 1)
|
||||
except:
|
||||
# okay do the first letter then
|
||||
ext = "." + ext[2:]
|
||||
|
||||
fname = "{}~{}{}".format(bname, b64, ext)
|
||||
|
||||
|
||||
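Usage sketch for the ren_open() context manager above: it opens fdir/fname, and if the filesystem rejects the name as too long it retries with a progressively shortened, hash-suffixed name; the yielded dict tells you which name was actually used (the path below is an assumption):

with ren_open("some very long upload name.flac", "wb", fdir="/tmp") as zo:
    f, actual_name = zo["orz"]
    f.write(b"hello")
    # actual_name may differ from the requested name if it had to be truncated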
class MultipartParser(object):
|
||||
def __init__(self, log_func, sr, http_headers):
|
||||
self.sr = sr
|
||||
@@ -335,18 +526,38 @@ def read_header(sr):
|
||||
|
||||
|
||||
def humansize(sz, terse=False):
|
||||
for unit in ['B', 'KiB', 'MiB', 'GiB', 'TiB']:
|
||||
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
|
||||
if sz < 1024:
|
||||
break
|
||||
|
||||
sz /= 1024
|
||||
|
||||
ret = ' '.join([str(sz)[:4].rstrip('.'), unit])
|
||||
|
||||
|
||||
sz /= 1024.0
|
||||
|
||||
ret = " ".join([str(sz)[:4].rstrip("."), unit])
|
||||
|
||||
if not terse:
|
||||
return ret
|
||||
|
||||
return ret.replace('iB', '').replace(' ', '')
|
||||
|
||||
return ret.replace("iB", "").replace(" ", "")
|
||||
|
||||
|
||||
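Quick check of humansize() above:

print(humansize(1536))           # "1.5 KiB"
print(humansize(1536, True))     # "1.5K"
print(humansize(3 * 1024 ** 3))  # "3.0 GiB"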
def get_spd(nbyte, t0, t=None):
|
||||
if t is None:
|
||||
t = time.time()
|
||||
|
||||
bps = nbyte / ((t - t0) + 0.001)
|
||||
s1 = humansize(nbyte).replace(" ", "\033[33m").replace("iB", "")
|
||||
s2 = humansize(bps).replace(" ", "\033[35m").replace("iB", "")
|
||||
return "{} \033[0m{}/s\033[0m".format(s1, s2)
|
||||
|
||||
|
||||
def s2hms(s, optional_h=False):
|
||||
s = int(s)
|
||||
h, s = divmod(s, 3600)
|
||||
m, s = divmod(s, 60)
|
||||
if not h and optional_h:
|
||||
return "{}:{:02}".format(m, s)
|
||||
|
||||
return "{}:{:02}:{:02}".format(h, m, s)
|
||||
|
||||
|
||||
def undot(path):
|
||||
@@ -392,10 +603,30 @@ def sanitize_fn(fn):
|
||||
return fn.strip()
|
||||
|
||||
|
||||
def u8safe(txt):
|
||||
try:
|
||||
return txt.encode("utf-8", "xmlcharrefreplace").decode("utf-8", "replace")
|
||||
except:
|
||||
return txt.encode("utf-8", "replace").decode("utf-8", "replace")
|
||||
|
||||
|
||||
def exclude_dotfiles(filepaths):
|
||||
for fpath in filepaths:
|
||||
if not fpath.split("/")[-1].startswith("."):
|
||||
yield fpath
|
||||
return [x for x in filepaths if not x.split("/")[-1].startswith(".")]
|
||||
|
||||
|
||||
def html_escape(s, quote=False):
|
||||
"""html.escape but also newlines"""
|
||||
s = (
|
||||
s.replace("&", "&")
|
||||
.replace("<", "<")
|
||||
.replace(">", ">")
|
||||
.replace("\r", " ")
|
||||
.replace("\n", " ")
|
||||
)
|
||||
if quote:
|
||||
s = s.replace('"', """).replace("'", "'")
|
||||
|
||||
return s
|
||||
|
||||
|
||||
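Quick check of html_escape() above; newlines collapse to spaces and quotes are only escaped when quote=True:

print(html_escape('<a href="#">\nhi</a>', quote=True))
# &lt;a href=&quot;#&quot;&gt; hi&lt;/a&gt;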
def quotep(txt):
|
||||
@@ -412,8 +643,8 @@ def quotep(txt):
|
||||
def unquotep(txt):
|
||||
"""url unquoter which deals with bytes correctly"""
|
||||
btxt = w8enc(txt)
|
||||
unq1 = btxt.replace(b"+", b" ")
|
||||
unq2 = unquote(unq1)
|
||||
# btxt = btxt.replace(b"+", b" ")
|
||||
unq2 = unquote(btxt)
|
||||
return w8dec(unq2)
|
||||
|
||||
|
||||
@@ -433,6 +664,16 @@ def w8enc(txt):
|
||||
return txt.encode(FS_ENCODING, "surrogateescape")
|
||||
|
||||
|
||||
def w8b64dec(txt):
|
||||
"""decodes base64(filesystem-bytes) to wtf8"""
|
||||
return w8dec(base64.urlsafe_b64decode(txt.encode("ascii")))
|
||||
|
||||
|
||||
def w8b64enc(txt):
|
||||
"""encodes wtf8 to base64(filesystem-bytes)"""
|
||||
return base64.urlsafe_b64encode(w8enc(txt)).decode("ascii")
|
||||
|
||||
|
||||
if PY2 and WINDOWS:
|
||||
# moonrunes become \x3f with bytestrings,
|
||||
# losing mojibake support is worth
|
||||
@@ -446,6 +687,41 @@ else:
|
||||
fsdec = w8dec
|
||||
|
||||
|
||||
def s3enc(mem_cur, rd, fn):
|
||||
ret = []
|
||||
for v in [rd, fn]:
|
||||
try:
|
||||
mem_cur.execute("select * from a where b = ?", (v,))
|
||||
ret.append(v)
|
||||
except:
|
||||
ret.append("//" + w8b64enc(v))
|
||||
# self.log("mojien/{} [{}] {}".format(k, v, ret[-1][2:]))
|
||||
|
||||
return tuple(ret)
|
||||
|
||||
|
||||
def s3dec(rd, fn):
|
||||
ret = []
|
||||
for k, v in [["d", rd], ["f", fn]]:
|
||||
if v.startswith("//"):
|
||||
ret.append(w8b64dec(v[2:]))
|
||||
# self.log("mojide/{} [{}] {}".format(k, ret[-1], v[2:]))
|
||||
else:
|
||||
ret.append(v)
|
||||
|
||||
return tuple(ret)
|
||||
|
||||
|
||||
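Round-trip sketch for the helpers above: s3enc() tries to store the path in sqlite as-is and falls back to a "//"+base64 form for values sqlite rejects, which s3dec() reverses:

import sqlite3

mem = sqlite3.connect(":memory:").cursor()
mem.execute("create table a (b text)")  # probe table, same layout as u2idx
rd, fn = s3enc(mem, "music/jp", "song.mp3")  # plain ascii passes through untouched
assert s3dec(rd, fn) == ("music/jp", "song.mp3")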
def atomic_move(src, dst):
|
||||
if not PY2:
|
||||
os.replace(src, dst)
|
||||
else:
|
||||
if os.path.exists(dst):
|
||||
os.unlink(dst)
|
||||
|
||||
os.rename(src, dst)
|
||||
|
||||
|
||||
def read_socket(sr, total_size):
|
||||
remains = total_size
|
||||
while remains > 0:
|
||||
@@ -470,6 +746,40 @@ def read_socket_unbounded(sr):
|
||||
yield buf
|
||||
|
||||
|
||||
def read_socket_chunked(sr, log=None):
|
||||
err = "expected chunk length, got [{}] |{}| instead"
|
||||
while True:
|
||||
buf = b""
|
||||
while b"\r" not in buf:
|
||||
rbuf = sr.recv(2)
|
||||
if not rbuf or len(buf) > 16:
|
||||
err = err.format(buf.decode("utf-8", "replace"), len(buf))
|
||||
raise Pebkac(400, err)
|
||||
|
||||
buf += rbuf
|
||||
|
||||
if not buf.endswith(b"\n"):
|
||||
sr.recv(1)
|
||||
|
||||
try:
|
||||
chunklen = int(buf.rstrip(b"\r\n"), 16)
|
||||
except:
|
||||
err = err.format(buf.decode("utf-8", "replace"), len(buf))
|
||||
raise Pebkac(400, err)
|
||||
|
||||
if chunklen == 0:
|
||||
sr.recv(2) # \r\n after final chunk
|
||||
return
|
||||
|
||||
if log:
|
||||
log("receiving {} byte chunk".format(chunklen))
|
||||
|
||||
for chunk in read_socket(sr, chunklen):
|
||||
yield chunk
|
||||
|
||||
sr.recv(2) # \r\n after each chunk too
|
||||
|
||||
|
||||
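read_socket_chunked() above consumes a standard HTTP/1.1 chunked body; a hypothetical helper to produce one, for illustration:

def encode_chunked(parts):
    # hypothetical, not part of copyparty: each chunk is "<hex len>\r\n<data>\r\n",
    # terminated by a zero-length chunk, which is exactly what the reader expects
    out = b""
    for p in parts:
        out += b"%x\r\n" % len(p) + p + b"\r\n"
    return out + b"0\r\n\r\n"

# encode_chunked([b"copy", b"party"]) == b"4\r\ncopy\r\n5\r\nparty\r\n0\r\n\r\n"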
def hashcopy(actor, fin, fout):
|
||||
u32_lim = int((2 ** 31) * 0.9)
|
||||
hashobj = hashlib.sha512()
|
||||
@@ -489,6 +799,73 @@ def hashcopy(actor, fin, fout):
|
||||
return tlen, hashobj.hexdigest(), digest_b64
|
||||
|
||||
|
||||
def sendfile_py(lower, upper, f, s):
|
||||
remains = upper - lower
|
||||
f.seek(lower)
|
||||
while remains > 0:
|
||||
# time.sleep(0.01)
|
||||
buf = f.read(min(4096, remains))
|
||||
if not buf:
|
||||
return remains
|
||||
|
||||
try:
|
||||
s.sendall(buf)
|
||||
remains -= len(buf)
|
||||
except:
|
||||
return remains
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def sendfile_kern(lower, upper, f, s):
|
||||
out_fd = s.fileno()
|
||||
in_fd = f.fileno()
|
||||
ofs = lower
|
||||
while ofs < upper:
|
||||
try:
|
||||
req = min(2 ** 30, upper - ofs)
|
||||
select.select([], [out_fd], [], 10)
|
||||
n = os.sendfile(out_fd, in_fd, ofs, req)
|
||||
except Exception as ex:
|
||||
# print("sendfile: " + repr(ex))
|
||||
n = 0
|
||||
|
||||
if n <= 0:
|
||||
return upper - ofs
|
||||
|
||||
ofs += n
|
||||
# print("sendfile: ok, sent {} now, {} total, {} remains".format(n, ofs - lower, upper - ofs))
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def statdir(logger, scandir, lstat, top):
|
||||
try:
|
||||
btop = fsenc(top)
|
||||
if scandir and hasattr(os, "scandir"):
|
||||
src = "scandir"
|
||||
with os.scandir(btop) as dh:
|
||||
for fh in dh:
|
||||
try:
|
||||
yield [fsdec(fh.name), fh.stat(follow_symlinks=not lstat)]
|
||||
except Exception as ex:
|
||||
msg = "scan-stat: \033[36m{} @ {}"
|
||||
logger(msg.format(repr(ex), fsdec(fh.path)))
|
||||
else:
|
||||
src = "listdir"
|
||||
fun = os.lstat if lstat else os.stat
|
||||
for name in os.listdir(btop):
|
||||
abspath = os.path.join(btop, name)
|
||||
try:
|
||||
yield [fsdec(name), fun(abspath)]
|
||||
except Exception as ex:
|
||||
msg = "list-stat: \033[36m{} @ {}"
|
||||
logger(msg.format(repr(ex), fsdec(abspath)))
|
||||
|
||||
except Exception as ex:
|
||||
logger("{}: \033[31m{} @ {}".format(src, repr(ex), top))
|
||||
|
||||
|
||||
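Usage sketch for statdir() above (the directory path is an assumption): it yields [name, stat] pairs, preferring os.scandir when available, and reports unreadable entries through the supplied logger instead of raising:

for name, st in statdir(print, True, False, "/tmp"):
    print(name, st.st_size, int(st.st_mtime))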
def unescape_cookie(orig):
|
||||
# mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn
|
||||
ret = ""
|
||||
@@ -543,7 +920,11 @@ def chkcmd(*argv):
|
||||
def gzip_orig_sz(fn):
|
||||
with open(fsenc(fn), "rb") as f:
|
||||
f.seek(-4, 2)
|
||||
return struct.unpack(b"I", f.read(4))[0]
|
||||
rv = f.read(4)
|
||||
try:
|
||||
return struct.unpack(b"I", rv)[0]
|
||||
except:
|
||||
return struct.unpack("I", rv)[0]
|
||||
|
||||
|
||||
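Sanity-check sketch for gzip_orig_sz() above: the last four bytes of a gzip file hold the uncompressed size (mod 2**32), which is what gets unpacked here (the temp path is an assumption):

import gzip

with gzip.open("/tmp/x.gz", "wb") as f:
    f.write(b"a" * 1000)

print(gzip_orig_sz("/tmp/x.gz"))  # 1000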
def py_desc():
|
||||
@@ -553,7 +934,11 @@ def py_desc():
|
||||
if ofs > 0:
|
||||
py_ver = py_ver[:ofs]
|
||||
|
||||
bitness = struct.calcsize(b"P") * 8
|
||||
try:
|
||||
bitness = struct.calcsize(b"P") * 8
|
||||
except:
|
||||
bitness = struct.calcsize("P") * 8
|
||||
|
||||
host_os = platform.system()
|
||||
compiler = platform.python_compiler()
|
||||
|
||||
@@ -565,7 +950,26 @@ def py_desc():
|
||||
)
|
||||
|
||||
|
||||
def align_tab(lines):
|
||||
rows = []
|
||||
ncols = 0
|
||||
for ln in lines:
|
||||
row = [x for x in ln.split(" ") if x]
|
||||
ncols = max(ncols, len(row))
|
||||
rows.append(row)
|
||||
|
||||
lens = [0] * ncols
|
||||
for row in rows:
|
||||
for n, col in enumerate(row):
|
||||
lens[n] = max(lens[n], len(col))
|
||||
|
||||
return ["".join(x.ljust(y + 2) for x, y in zip(row, lens)) for row in rows]
|
||||
|
||||
|
||||
class Pebkac(Exception):
|
||||
def __init__(self, code, msg=None):
|
||||
super(Pebkac, self).__init__(msg or HTTPCODE[code])
|
||||
self.code = code
|
||||
|
||||
def __repr__(self):
|
||||
return "Pebkac({}, {})".format(self.code, repr(self.args))
|
||||
|
12
copyparty/web/Makefile
Normal file
@@ -0,0 +1,12 @@
|
||||
# run me to zopfli all the static files
|
||||
# which should help on really slow connections
|
||||
# but then why are you using copyparty in the first place
|
||||
|
||||
pk: $(addsuffix .gz, $(wildcard *.js *.css))
|
||||
un: $(addsuffix .un, $(wildcard *.gz))
|
||||
|
||||
%.gz: %
|
||||
pigz -11 -J 34 -I 5730 $<
|
||||
|
||||
%.un: %
|
||||
pigz -d $<
|
@@ -39,15 +39,22 @@ body {
|
||||
margin: 1.3em 0 0 0;
|
||||
font-size: 1.4em;
|
||||
}
|
||||
#path #entree {
|
||||
margin-left: -.7em;
|
||||
}
|
||||
#files {
|
||||
border-collapse: collapse;
|
||||
margin-top: 2em;
|
||||
border-spacing: 0;
|
||||
z-index: 1;
|
||||
position: relative;
|
||||
}
|
||||
#files tbody a {
|
||||
display: block;
|
||||
padding: .3em 0;
|
||||
}
|
||||
a {
|
||||
#files tbody div a {
|
||||
color: #f5a;
|
||||
}
|
||||
a, #files tbody div a:last-child {
|
||||
color: #fc5;
|
||||
padding: .2em;
|
||||
text-decoration: none;
|
||||
@@ -55,16 +62,18 @@ a {
|
||||
#files a:hover {
|
||||
color: #fff;
|
||||
background: #161616;
|
||||
text-decoration: underline;
|
||||
}
|
||||
#files thead a {
|
||||
color: #999;
|
||||
font-weight: normal;
|
||||
}
|
||||
#files tr:hover {
|
||||
#files tr+tr:hover {
|
||||
background: #1c1c1c;
|
||||
}
|
||||
#files thead th {
|
||||
padding: .5em 1.3em .3em 1.3em;
|
||||
cursor: pointer;
|
||||
}
|
||||
#files thead th:last-child {
|
||||
background: #444;
|
||||
@@ -82,6 +91,16 @@ a {
|
||||
margin: 0;
|
||||
padding: 0 .5em;
|
||||
}
|
||||
#files td {
|
||||
border-bottom: 1px solid #111;
|
||||
}
|
||||
#files td+td+td {
|
||||
max-width: 30em;
|
||||
overflow: hidden;
|
||||
}
|
||||
#files tr+tr td {
|
||||
border-top: 1px solid #383838;
|
||||
}
|
||||
#files tbody td:nth-child(3) {
|
||||
font-family: monospace;
|
||||
font-size: 1.3em;
|
||||
@@ -100,6 +119,9 @@ a {
|
||||
padding-bottom: 1.3em;
|
||||
border-bottom: .5em solid #444;
|
||||
}
|
||||
#files tbody tr td:last-child {
|
||||
white-space: nowrap;
|
||||
}
|
||||
#files thead th[style] {
|
||||
width: auto !important;
|
||||
}
|
||||
@@ -131,6 +153,15 @@ a {
|
||||
.logue {
|
||||
padding: .2em 1.5em;
|
||||
}
|
||||
.logue:empty {
|
||||
display: none;
|
||||
}
|
||||
#pro.logue {
|
||||
margin-bottom: .8em;
|
||||
}
|
||||
#epi.logue {
|
||||
margin: .8em 0;
|
||||
}
|
||||
#srv_info {
|
||||
opacity: .5;
|
||||
font-size: .8em;
|
||||
@@ -142,11 +173,14 @@ a {
|
||||
#srv_info span {
|
||||
color: #fff;
|
||||
}
|
||||
a.play {
|
||||
#files tbody a.play {
|
||||
color: #e70;
|
||||
padding: .2em;
|
||||
margin: -.2em;
|
||||
}
|
||||
a.play.act {
|
||||
color: #af0;
|
||||
#files tbody a.play.act {
|
||||
color: #840;
|
||||
text-shadow: 0 0 .3em #b80;
|
||||
}
|
||||
#blocked {
|
||||
position: fixed;
|
||||
@@ -156,7 +190,7 @@ a.play.act {
|
||||
height: 100%;
|
||||
background: #333;
|
||||
font-size: 2.5em;
|
||||
z-index:99;
|
||||
z-index: 99;
|
||||
}
|
||||
#blk_play,
|
||||
#blk_abrt {
|
||||
@@ -190,6 +224,7 @@ a.play.act {
|
||||
bottom: -6em;
|
||||
height: 6em;
|
||||
width: 100%;
|
||||
z-index: 3;
|
||||
transition: bottom 0.15s;
|
||||
}
|
||||
#widget.open {
|
||||
@@ -214,6 +249,9 @@ a.play.act {
|
||||
75% {cursor: url(/.cpr/dd/5.png), pointer}
|
||||
85% {cursor: url(/.cpr/dd/1.png), pointer}
|
||||
}
|
||||
@keyframes spin {
|
||||
100% {transform: rotate(360deg)}
|
||||
}
|
||||
#wtoggle {
|
||||
position: absolute;
|
||||
top: -1.2em;
|
||||
@@ -273,3 +311,344 @@ a.play.act {
|
||||
width: calc(100% - 10.5em);
|
||||
background: rgba(0,0,0,0.2);
|
||||
}
|
||||
@media (min-width: 90em) {
|
||||
#barpos,
|
||||
#barbuf {
|
||||
width: calc(100% - 24em);
|
||||
left: 9.8em;
|
||||
top: .7em;
|
||||
height: 1.6em;
|
||||
bottom: auto;
|
||||
}
|
||||
#widget {
|
||||
bottom: -3.2em;
|
||||
height: 3.2em;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
.opview {
|
||||
display: none;
|
||||
}
|
||||
.opview.act {
|
||||
display: block;
|
||||
}
|
||||
#ops a {
|
||||
color: #fc5;
|
||||
font-size: 1.5em;
|
||||
padding: .25em .3em;
|
||||
margin: 0;
|
||||
outline: none;
|
||||
}
|
||||
#ops a.act {
|
||||
background: #281838;
|
||||
border-radius: 0 0 .2em .2em;
|
||||
border-bottom: .3em solid #d90;
|
||||
box-shadow: 0 -.15em .2em #000 inset;
|
||||
padding-bottom: .3em;
|
||||
}
|
||||
#ops i {
|
||||
font-size: 1.5em;
|
||||
}
|
||||
#ops i:before {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #01a7e1;
|
||||
position: relative;
|
||||
}
|
||||
#ops i:after {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #ff3f1a;
|
||||
margin-left: -.35em;
|
||||
font-size: 1.05em;
|
||||
}
|
||||
#ops,
|
||||
.opbox {
|
||||
border: 1px solid #3a3a3a;
|
||||
box-shadow: 0 0 1em #222 inset;
|
||||
}
|
||||
#ops {
|
||||
background: #333;
|
||||
margin: 1.7em 1.5em 0 1.5em;
|
||||
padding: .3em .6em;
|
||||
border-radius: .3em;
|
||||
border-width: .15em 0;
|
||||
}
|
||||
.opbox {
|
||||
background: #2d2d2d;
|
||||
margin: 1.5em 0 0 0;
|
||||
padding: .5em;
|
||||
border-radius: 0 1em 1em 0;
|
||||
border-width: .15em .3em .3em 0;
|
||||
max-width: 40em;
|
||||
}
|
||||
.opbox input {
|
||||
margin: .5em;
|
||||
}
|
||||
.opview input[type=text] {
|
||||
color: #fff;
|
||||
background: #383838;
|
||||
border: none;
|
||||
box-shadow: 0 0 .3em #222;
|
||||
border-bottom: 1px solid #fc5;
|
||||
border-radius: .2em;
|
||||
padding: .2em .3em;
|
||||
}
|
||||
input[type="checkbox"]+label {
|
||||
color: #f5a;
|
||||
}
|
||||
input[type="checkbox"]:checked+label {
|
||||
color: #fc5;
|
||||
}
|
||||
|
||||
|
||||
|
||||
#srch_form {
|
||||
border: 1px solid #3a3a3a;
|
||||
box-shadow: 0 0 1em #222 inset;
|
||||
background: #2d2d2d;
|
||||
border-radius: .4em;
|
||||
margin: 1.4em;
|
||||
margin-bottom: 0;
|
||||
padding: 0 .5em .5em 0;
|
||||
}
|
||||
#srch_form table {
|
||||
display: inline-block;
|
||||
}
|
||||
#srch_form td {
|
||||
padding: .6em .6em;
|
||||
}
|
||||
#srch_form td:first-child {
|
||||
width: 3em;
|
||||
padding-right: .2em;
|
||||
text-align: right;
|
||||
}
|
||||
#op_search input {
|
||||
margin: 0;
|
||||
}
|
||||
#srch_q {
|
||||
white-space: pre;
|
||||
color: #f80;
|
||||
height: 1em;
|
||||
margin: .2em 0 -1em 1.6em;
|
||||
}
|
||||
#files td div span {
|
||||
color: #fff;
|
||||
padding: 0 .4em;
|
||||
font-weight: bold;
|
||||
font-style: italic;
|
||||
}
|
||||
#files td div a:hover {
|
||||
background: #444;
|
||||
color: #fff;
|
||||
}
|
||||
#files td div a {
|
||||
display: inline-block;
|
||||
white-space: nowrap;
|
||||
}
|
||||
#files td div a:last-child {
|
||||
width: 100%;
|
||||
}
|
||||
#files td div {
|
||||
border-collapse: collapse;
|
||||
width: 100%;
|
||||
}
|
||||
#files td div a:last-child {
|
||||
width: 100%;
|
||||
}
|
||||
#wrap {
|
||||
margin-top: 2em;
|
||||
}
|
||||
#tree {
|
||||
display: none;
|
||||
position: fixed;
|
||||
left: 0;
|
||||
bottom: 0;
|
||||
top: 7em;
|
||||
padding-top: .2em;
|
||||
overflow-y: auto;
|
||||
-ms-scroll-chaining: none;
|
||||
overscroll-behavior-y: none;
|
||||
scrollbar-color: #eb0 #333;
|
||||
}
|
||||
#thx_ff {
|
||||
padding: 5em 0;
|
||||
}
|
||||
#tree::-webkit-scrollbar-track {
|
||||
background: #333;
|
||||
}
|
||||
#tree::-webkit-scrollbar {
|
||||
background: #333;
|
||||
}
|
||||
#tree::-webkit-scrollbar-thumb {
|
||||
background: #eb0;
|
||||
}
|
||||
#tree:hover {
|
||||
z-index: 2;
|
||||
}
|
||||
#treeul {
|
||||
position: relative;
|
||||
left: -1.7em;
|
||||
width: calc(100% + 1.3em);
|
||||
}
|
||||
.tglbtn,
|
||||
#tree>a+a {
|
||||
padding: .2em .4em;
|
||||
font-size: 1.2em;
|
||||
background: #2a2a2a;
|
||||
box-shadow: 0 .1em .2em #222 inset;
|
||||
border-radius: .3em;
|
||||
margin: .2em;
|
||||
position: relative;
|
||||
top: -.2em;
|
||||
}
|
||||
.tglbtn:hover,
|
||||
#tree>a+a:hover {
|
||||
background: #805;
|
||||
}
|
||||
.tglbtn.on,
|
||||
#tree>a+a.on {
|
||||
background: #fc4;
|
||||
color: #400;
|
||||
text-shadow: none;
|
||||
}
|
||||
#detree {
|
||||
padding: .3em .5em;
|
||||
font-size: 1.5em;
|
||||
}
|
||||
#tree ul,
|
||||
#tree li {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
#tree ul {
|
||||
border-left: .2em solid #555;
|
||||
}
|
||||
#tree li {
|
||||
margin-left: 1em;
|
||||
list-style: none;
|
||||
border-top: 1px solid #4c4c4c;
|
||||
border-bottom: 1px solid #222;
|
||||
}
|
||||
#tree li:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
#treeul a.hl {
|
||||
color: #400;
|
||||
background: #fc4;
|
||||
border-radius: .3em;
|
||||
text-shadow: none;
|
||||
}
|
||||
#treeul a {
|
||||
display: inline-block;
|
||||
}
|
||||
#treeul a+a {
|
||||
width: calc(100% - 2em);
|
||||
background: #333;
|
||||
line-height: 1em;
|
||||
}
|
||||
#treeul a+a:hover {
|
||||
background: #222;
|
||||
color: #fff;
|
||||
}
|
||||
#treeul a:first-child {
|
||||
font-family: monospace, monospace;
|
||||
}
|
||||
.dumb_loader_thing {
|
||||
display: inline-block;
|
||||
margin: 1em .3em 1em 1em;
|
||||
padding: 0 1.2em 0 0;
|
||||
font-size: 4em;
|
||||
animation: spin 1s linear infinite;
|
||||
position: absolute;
|
||||
z-index: 9;
|
||||
}
|
||||
#files .cfg {
|
||||
display: none;
|
||||
font-size: 2em;
|
||||
white-space: nowrap;
|
||||
}
|
||||
#files th:hover .cfg,
|
||||
#files th.min .cfg {
|
||||
display: block;
|
||||
width: 1em;
|
||||
border-radius: .2em;
|
||||
margin: -1.3em auto 0 auto;
|
||||
background: #444;
|
||||
}
|
||||
#files th.min .cfg {
|
||||
margin: -.6em;
|
||||
}
|
||||
#files>thead>tr>th.min span {
|
||||
position: absolute;
|
||||
transform: rotate(270deg);
|
||||
background: linear-gradient(90deg, rgba(68,68,68,0), rgba(68,68,68,0.5) 70%, #444);
|
||||
margin-left: -4.6em;
|
||||
padding: .4em;
|
||||
top: 5.4em;
|
||||
width: 8em;
|
||||
text-align: right;
|
||||
letter-spacing: .04em;
|
||||
}
|
||||
#files td:nth-child(2n) {
|
||||
color: #f5a;
|
||||
}
|
||||
#files td.min a {
|
||||
display: none;
|
||||
}
|
||||
#files tr.play td {
|
||||
background: #fc4;
|
||||
border-color: transparent;
|
||||
color: #400;
|
||||
text-shadow: none;
|
||||
}
|
||||
#files tr.play a {
|
||||
color: inherit;
|
||||
}
|
||||
#files tr.play a:hover {
|
||||
color: #300;
|
||||
background: #fea;
|
||||
}
|
||||
#op_cfg {
|
||||
max-width: none;
|
||||
margin-right: 1.5em;
|
||||
}
|
||||
#op_cfg>div>a {
|
||||
line-height: 2em;
|
||||
}
|
||||
#op_cfg>div>span {
|
||||
display: inline-block;
|
||||
padding: .2em .4em;
|
||||
}
|
||||
#op_cfg h3 {
|
||||
margin: .8em 0 0 .6em;
|
||||
padding: 0;
|
||||
border-bottom: 1px solid #555;
|
||||
}
|
||||
#opdesc {
|
||||
display: none;
|
||||
}
|
||||
#ops:hover #opdesc {
|
||||
display: block;
|
||||
background: linear-gradient(0deg,#555, #4c4c4c 80%, #444);
|
||||
box-shadow: 0 .3em 1em #222;
|
||||
padding: 1em;
|
||||
border-radius: .3em;
|
||||
position: absolute;
|
||||
z-index: 3;
|
||||
top: 6em;
|
||||
right: 1.5em;
|
||||
}
|
||||
#ops:hover #opdesc.off {
|
||||
display: none;
|
||||
}
|
||||
#opdesc code {
|
||||
background: #3c3c3c;
|
||||
padding: .2em .3em;
|
||||
border-top: 1px solid #777;
|
||||
border-radius: .3em;
|
||||
font-family: monospace, monospace;
|
||||
line-height: 2em;
|
||||
}
|
||||
|
@@ -7,52 +7,104 @@
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css{{ ts }}">
|
||||
{%- if can_upload %}
|
||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css{{ ts }}">
|
||||
{%- endif %}
|
||||
</head>
|
||||
|
||||
<body>
|
||||
{%- if can_upload %}
|
||||
<div id="ops">
|
||||
<a href="#" data-dest="" data-desc="close submenu">---</a>
|
||||
<a href="#" data-perm="read" data-dest="search" data-desc="search for files by attributes, path/name, music tags, or any combination of those.<br /><br /><code>foo bar</code> = must contain both foo and bar,<br /><code>foo -bar</code> = must contain foo but not bar,<br /><code>^yana .opus$</code> = must start with yana and have the opus extension">🔎</a>
|
||||
{%- if have_up2k_idx %}
|
||||
<a href="#" data-dest="up2k" data-desc="up2k: upload files (if you have write-access) or toggle into the search-mode and drag files onto the search button to see if they exist somewhere on the server">🚀</a>
|
||||
{%- else %}
|
||||
<a href="#" data-perm="write" data-dest="up2k" data-desc="up2k: upload files with resume support (close your browser and drop the same files in later)">🚀</a>
|
||||
{%- endif %}
|
||||
<a href="#" data-perm="write" data-dest="bup" data-desc="bup: basic uploader, even supports netscape 4.0">🎈</a>
|
||||
<a href="#" data-perm="write" data-dest="mkdir" data-desc="mkdir: create a new directory">📂</a>
|
||||
<a href="#" data-perm="write" data-dest="new_md" data-desc="new-md: create a new markdown document">📝</a>
|
||||
<a href="#" data-perm="write" data-dest="msg" data-desc="msg: send a message to the server log">📟</a>
|
||||
<a href="#" data-dest="cfg" data-desc="configuration options">⚙️</a>
|
||||
<div id="opdesc"></div>
|
||||
</div>
|
||||
|
||||
<div id="op_search" class="opview">
|
||||
{%- if have_tags_idx %}
|
||||
<div id="srch_form" class="tags"></div>
|
||||
{%- else %}
|
||||
<div id="srch_form"></div>
|
||||
{%- endif %}
|
||||
<div id="srch_q"></div>
|
||||
</div>
|
||||
|
||||
{%- include 'upload.html' %}
|
||||
{%- endif %}
|
||||
|
||||
<div id="op_cfg" class="opview opbox">
|
||||
<h3>key notation</h3>
|
||||
<div id="key_notation"></div>
|
||||
<h3>tooltips</h3>
|
||||
<div>
|
||||
<a id="tooltips" class="tglbtn" href="#">enable</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<h1 id="path">
|
||||
<a href="#" id="entree">🌲</a>
|
||||
{%- for n in vpnodes %}
|
||||
<a href="/{{ n[0] }}">{{ n[1] }}</a>
|
||||
{%- endfor %}
|
||||
</h1>
|
||||
|
||||
{%- if can_read %}
|
||||
{%- if prologue %}
|
||||
<div id="pro" class="logue">{{ prologue }}</div>
|
||||
{%- endif %}
|
||||
<div id="tree">
|
||||
<a href="#" id="detree">🍞...</a>
|
||||
<a href="#" step="2" id="twobytwo">+</a>
|
||||
<a href="#" step="-2" id="twig">–</a>
|
||||
<a href="#" class="tglbtn" id="dyntree">a</a>
|
||||
<ul id="treeul"></ul>
|
||||
<div id="thx_ff"> </div>
|
||||
</div>
|
||||
|
||||
<div id="wrap">
|
||||
|
||||
<div id="pro" class="logue">{{ logues[0] }}</div>
|
||||
|
||||
<table id="files">
|
||||
<thead>
|
||||
<tr>
|
||||
<th></th>
|
||||
<th>File Name</th>
|
||||
<th sort="int">File Size</th>
|
||||
<th>Date</th>
|
||||
<th name="href"><span>File Name</span></th>
|
||||
<th name="sz" sort="int"><span>Size</span></th>
|
||||
{%- for k in taglist %}
|
||||
{%- if k.startswith('.') %}
|
||||
<th name="tags/{{ k }}" sort="int"><span>{{ k[1:] }}</span></th>
|
||||
{%- else %}
|
||||
<th name="tags/{{ k }}"><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
<th name="ext"><span>T</span></th>
|
||||
<th name="ts"><span>Date</span></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
|
||||
{%- for f in files %}
|
||||
<tr><td>{{ f[0] }}</td><td><a href="{{ f[1] }}">{{ f[2] }}</a></td><td>{{ f[3] }}</td><td>{{ f[4] }}</td></tr>
|
||||
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
|
||||
{%- if f.tags is defined %}
|
||||
{%- for k in taglist %}
|
||||
<td>{{ f.tags[k] }}</td>
|
||||
{%- endfor %}
|
||||
{%- endif %}
|
||||
<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
|
||||
{%- endfor %}
|
||||
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
{%- if epilogue %}
|
||||
<div id="epi" class="logue">{{ epilogue }}</div>
|
||||
{%- endif %}
|
||||
{%- endif %}
|
||||
<div id="epi" class="logue">{{ logues[1] }}</div>
|
||||
|
||||
<h2><a href="?h">control-panel</a></h2>
|
||||
|
||||
</div>
|
||||
|
||||
{%- if srv_info %}
|
||||
<div id="srv_info"><span>{{ srv_info }}</span></div>
|
||||
{%- endif %}
|
||||
@@ -66,14 +118,16 @@
|
||||
<canvas id="barbuf"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{%- if can_read %}
|
||||
|
||||
<script>
|
||||
var tag_order_cfg = {{ tag_order }};
|
||||
</script>
|
||||
<script src="/.cpr/util.js{{ ts }}"></script>
|
||||
<script src="/.cpr/browser.js{{ ts }}"></script>
|
||||
{%- endif %}
|
||||
|
||||
{%- if can_upload %}
|
||||
<script src="/.cpr/up2k.js{{ ts }}"></script>
|
||||
{%- endif %}
|
||||
<script>
|
||||
apply_perms({{ perms }});
|
||||
</script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
File diff suppressed because it is too large
@@ -109,8 +109,12 @@ h2 a, h4 a, h6 a {
|
||||
#mp ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em {
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
@@ -289,6 +293,32 @@ blink {
|
||||
text-decoration: underline;
|
||||
border: none;
|
||||
}
|
||||
#mh a:hover {
|
||||
color: #000;
|
||||
background: #ddd;
|
||||
}
|
||||
#toolsbox {
|
||||
overflow: hidden;
|
||||
display: inline-block;
|
||||
background: #eee;
|
||||
height: 1.5em;
|
||||
padding: 0 .2em;
|
||||
margin: 0 .2em;
|
||||
position: absolute;
|
||||
}
|
||||
#toolsbox.open {
|
||||
height: auto;
|
||||
overflow: visible;
|
||||
background: #eee;
|
||||
box-shadow: 0 .2em .2em #ccc;
|
||||
padding-bottom: .2em;
|
||||
}
|
||||
#toolsbox a {
|
||||
display: block;
|
||||
}
|
||||
#toolsbox a+a {
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -332,8 +362,12 @@ blink {
|
||||
html.dark #m>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em {
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
@@ -371,6 +405,17 @@ blink {
|
||||
color: #ccc;
|
||||
background: none;
|
||||
}
|
||||
html.dark #mh a:hover {
|
||||
background: #333;
|
||||
color: #fff;
|
||||
}
|
||||
html.dark #toolsbox {
|
||||
background: #222;
|
||||
}
|
||||
html.dark #toolsbox.open {
|
||||
box-shadow: 0 .2em .2em #069;
|
||||
border-radius: 0 0 .4em .4em;
|
||||
}
|
||||
}
|
||||
|
||||
@media screen and (min-width: 66em) {
|
||||
@@ -541,7 +586,8 @@ blink {
|
||||
color: #240;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em {
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #940;
|
||||
}
|
||||
}
|
||||
|
@@ -17,7 +17,14 @@
|
||||
<a id="save" href="?edit">save</a>
|
||||
<a id="sbs" href="#">sbs</a>
|
||||
<a id="nsbs" href="#">editor</a>
|
||||
<a id="help" href="#">help</a>
|
||||
<div id="toolsbox">
|
||||
<a id="tools" href="#">tools</a>
|
||||
<a id="fmt_table" href="#">prettify table (ctrl-k)</a>
|
||||
<a id="iter_uni" href="#">non-ascii: iterate (ctrl-u)</a>
|
||||
<a id="mark_uni" href="#">non-ascii: markup</a>
|
||||
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
|
||||
<a id="help" href="#">help</a>
|
||||
</div>
|
||||
{%- else %}
|
||||
<a href="?edit">edit (basic)</a>
|
||||
<a href="?edit2">edit (fancy)</a>
|
||||
@@ -46,6 +53,9 @@ write markdown (most html is 🙆 too)
|
||||
|
||||
## hotkey list
|
||||
* `Ctrl-S` to save
|
||||
* `Ctrl-E` to toggle mode
|
||||
* `Ctrl-K` to prettyprint a table
|
||||
* `Ctrl-U` to iterate non-ascii chars
|
||||
* `Ctrl-H` / `Ctrl-Shift-H` to create a header
|
||||
* `TAB` / `Shift-TAB` to indent/dedent a selection
|
||||
|
||||
@@ -113,8 +123,12 @@ write markdown (most html is 🙆 too)
|
||||
|
||||
<script>
|
||||
|
||||
var link_md_as_html = false; // TODO (does nothing)
|
||||
var last_modified = {{ lastmod }};
|
||||
var md_opt = {
|
||||
link_md_as_html: false,
|
||||
allow_plugins: {{ md_plug }},
|
||||
modpoll_freq: {{ md_chk_rate }}
|
||||
};
|
||||
|
||||
(function () {
|
||||
var btn = document.getElementById("lightswitch");
|
||||
@@ -131,17 +145,11 @@ var last_modified = {{ lastmod }};
|
||||
toggle();
|
||||
})();
|
||||
|
||||
if (!String.startsWith) {
|
||||
String.prototype.startsWith = function(s, i) {
|
||||
i = i>0 ? i|0 : 0;
|
||||
return this.substring(i, i + s.length) === s;
|
||||
};
|
||||
}
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js"></script>
|
||||
<script src="/.cpr/deps/marked.full.js"></script>
|
||||
<script src="/.cpr/md.js"></script>
|
||||
{%- if edit %}
|
||||
<script src="/.cpr/md2.js"></script>
|
||||
<script src="/.cpr/md2.js"></script>
|
||||
{%- endif %}
|
||||
</body></html>
|
||||
|
@@ -1,10 +1,12 @@
|
||||
var dom_toc = document.getElementById('toc');
|
||||
var dom_wrap = document.getElementById('mw');
|
||||
var dom_hbar = document.getElementById('mh');
|
||||
var dom_nav = document.getElementById('mn');
|
||||
var dom_pre = document.getElementById('mp');
|
||||
var dom_src = document.getElementById('mt');
|
||||
var dom_navtgl = document.getElementById('navtoggle');
|
||||
"use strict";
|
||||
|
||||
var dom_toc = ebi('toc');
|
||||
var dom_wrap = ebi('mw');
|
||||
var dom_hbar = ebi('mh');
|
||||
var dom_nav = ebi('mn');
|
||||
var dom_pre = ebi('mp');
|
||||
var dom_src = ebi('mt');
|
||||
var dom_navtgl = ebi('navtoggle');
|
||||
|
||||
|
||||
// chrome 49 needs this
|
||||
@@ -18,6 +20,10 @@ var dbg = function () { };
|
||||
// dbg = console.log
|
||||
|
||||
|
||||
// plugins
|
||||
var md_plug = {};
|
||||
|
||||
|
||||
function hesc(txt) {
|
||||
return txt.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
|
||||
}
|
||||
@@ -30,7 +36,7 @@ function cls(dom, name, add) {
|
||||
}
|
||||
|
||||
|
||||
function static(obj) {
|
||||
function statify(obj) {
|
||||
return JSON.parse(JSON.stringify(obj));
|
||||
}
|
||||
|
||||
@@ -59,7 +65,7 @@ function static(obj) {
|
||||
if (a > 0)
|
||||
loc.push(n[a]);
|
||||
|
||||
var dec = hesc(decodeURIComponent(n[a]));
|
||||
var dec = hesc(uricom_dec(n[a])[0]);
|
||||
|
||||
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
||||
}
|
||||
@@ -154,13 +160,110 @@ function copydom(src, dst, lv) {
|
||||
}
|
||||
|
||||
|
||||
function md_plug_err(ex, js) {
|
||||
var errbox = ebi('md_errbox');
|
||||
if (errbox)
|
||||
errbox.parentNode.removeChild(errbox);
|
||||
|
||||
if (!ex)
|
||||
return;
|
||||
|
||||
var msg = (ex + '').split('\n')[0];
|
||||
var ln = ex.lineNumber;
|
||||
var o = null;
|
||||
if (ln) {
|
||||
msg = "Line " + ln + ", " + msg;
|
||||
var lns = js.split('\n');
|
||||
if (ln < lns.length) {
|
||||
o = document.createElement('span');
|
||||
o.style.cssText = 'color:#ac2;font-size:.9em;font-family:scp;display:block';
|
||||
o.textContent = lns[ln - 1];
|
||||
}
|
||||
}
|
||||
errbox = document.createElement('div');
|
||||
errbox.setAttribute('id', 'md_errbox');
|
||||
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
|
||||
errbox.textContent = msg;
|
||||
errbox.onclick = function () {
|
||||
alert('' + ex.stack);
|
||||
};
|
||||
if (o) {
|
||||
errbox.appendChild(o);
|
||||
errbox.style.padding = '.25em .5em';
|
||||
}
|
||||
dom_nav.appendChild(errbox);
|
||||
|
||||
try {
|
||||
console.trace();
|
||||
}
|
||||
catch (ex2) { }
|
||||
}
|
||||
|
||||
|
||||
function load_plug(md_text, plug_type) {
|
||||
if (!md_opt.allow_plugins)
|
||||
return md_text;
|
||||
|
||||
var find = '\n```copyparty_' + plug_type + '\n';
|
||||
var ofs = md_text.indexOf(find);
|
||||
if (ofs === -1)
|
||||
return md_text;
|
||||
|
||||
var ofs2 = md_text.indexOf('\n```', ofs + 1);
|
||||
if (ofs2 == -1)
|
||||
return md_text;
|
||||
|
||||
var js = md_text.slice(ofs + find.length, ofs2 + 1);
|
||||
var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
|
||||
|
||||
var old_plug = md_plug[plug_type];
|
||||
if (!old_plug || old_plug[1] != js) {
|
||||
js = 'const x = { ' + js + ' }; x;';
|
||||
try {
|
||||
var x = eval(js);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug[plug_type] = null;
|
||||
md_plug_err(ex, js);
|
||||
return md;
|
||||
}
|
||||
if (x['ctor']) {
|
||||
x['ctor']();
|
||||
delete x['ctor'];
|
||||
}
|
||||
md_plug[plug_type] = [x, js];
|
||||
}
|
||||
|
||||
return md;
|
||||
}
|
||||
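For context on what load_plug consumes: judging from the parser above, a plugin lives in a fenced block named copyparty_pre or copyparty_post inside the markdown document, and the block body is wrapped in `const x = { ... }; x;` before being eval'd, so it must be a comma-separated list of object members. A hedged sketch of a post-plugin block follows; the member names ctor/render are the ones the surrounding code checks for, while the function bodies are invented purely for illustration:

```copyparty_post
// ctor runs once when the block is first seen, then gets deleted from the object
ctor: function () {
    console.log('markdown plugin loaded');
},
// render is handed the parsed DOM before it is copied into the page
// (a render2 member would get the live DOM afterwards)
render: function (md_dom) {
    var els = md_dom.getElementsByTagName('h1');
    for (var a = 0; a < els.length; a++)
        els[a].style.borderBottom = '.1em solid #fc5';  // purely illustrative
}
```

A copyparty_pre block works the same way, except its members are merged into the marked options instead of being called as render hooks.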
|
||||
|
||||
function convert_markdown(md_text, dest_dom) {
|
||||
marked.setOptions({
|
||||
md_text = md_text.replace(/\r/g, '');
|
||||
|
||||
md_plug_err(null);
|
||||
md_text = load_plug(md_text, 'pre');
|
||||
md_text = load_plug(md_text, 'post');
|
||||
|
||||
var marked_opts = {
|
||||
//headerPrefix: 'h-',
|
||||
breaks: true,
|
||||
gfm: true
|
||||
});
|
||||
var md_html = marked(md_text);
|
||||
};
|
||||
|
||||
var ext = md_plug['pre'];
|
||||
if (ext)
|
||||
Object.assign(marked_opts, ext[0]);
|
||||
|
||||
try {
|
||||
var md_html = marked(md_text, marked_opts);
|
||||
}
|
||||
catch (ex) {
|
||||
if (ext)
|
||||
md_plug_err(ex, ext[1]);
|
||||
|
||||
throw ex;
|
||||
}
|
||||
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
||||
|
||||
var nodes = md_dom.getElementsByTagName('a');
|
||||
@@ -196,7 +299,7 @@ function convert_markdown(md_text, dest_dom) {
|
||||
}
|
||||
|
||||
// separate <code> for each line in <pre>
|
||||
var nodes = md_dom.getElementsByTagName('pre');
|
||||
nodes = md_dom.getElementsByTagName('pre');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
var el = nodes[a];
|
||||
|
||||
@@ -209,7 +312,7 @@ function convert_markdown(md_text, dest_dom) {
|
||||
continue;
|
||||
|
||||
var nline = parseInt(el.getAttribute('data-ln')) + 1;
|
||||
var lines = el.innerHTML.replace(/\r?\n<\/code>$/i, '</code>').split(/\r?\n/g);
|
||||
var lines = el.innerHTML.replace(/\n<\/code>$/i, '</code>').split(/\n/g);
|
||||
for (var b = 0; b < lines.length - 1; b++)
|
||||
lines[b] += '</code>\n<code data-ln="' + (nline + b) + '">';
|
||||
|
||||
@@ -242,12 +345,29 @@ function convert_markdown(md_text, dest_dom) {
|
||||
el.innerHTML = '<a href="#' + id + '">' + el.innerHTML + '</a>';
|
||||
}
|
||||
|
||||
ext = md_plug['post'];
|
||||
if (ext && ext[0].render)
|
||||
try {
|
||||
ext[0].render(md_dom);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug_err(ex, ext[1]);
|
||||
}
|
||||
|
||||
copydom(md_dom, dest_dom, 0);
|
||||
|
||||
if (ext && ext[0].render2)
|
||||
try {
|
||||
ext[0].render2(dest_dom);
|
||||
}
|
||||
catch (ex) {
|
||||
md_plug_err(ex, ext[1]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function init_toc() {
|
||||
var loader = document.getElementById('ml');
|
||||
var loader = ebi('ml');
|
||||
loader.parentNode.removeChild(loader);
|
||||
|
||||
var anchors = []; // list of toc entries, complex objects
|
||||
@@ -281,7 +401,12 @@ function init_toc() {
|
||||
|
||||
elm.childNodes[0].setAttribute('ctr', ctr.slice(0, lv).join('.'));
|
||||
|
||||
html.push('<li>' + elm.innerHTML + '</li>');
|
||||
var elm2 = elm.cloneNode(true);
|
||||
elm2.childNodes[0].textContent = elm.textContent;
|
||||
while (elm2.childNodes.length > 1)
|
||||
elm2.removeChild(elm2.childNodes[1]);
|
||||
|
||||
html.push('<li>' + elm2.innerHTML + '</li>');
|
||||
|
||||
if (anchor != null)
|
||||
anchors.push(anchor);
|
||||
@@ -399,11 +524,9 @@ dom_navtgl.onclick = function () {
|
||||
dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav';
|
||||
dom_nav.style.display = hidden ? 'none' : 'block';
|
||||
|
||||
if (window.localStorage)
|
||||
localStorage.setItem('hidenav', hidden ? 1 : 0);
|
||||
|
||||
swrite('hidenav', hidden ? 1 : 0);
|
||||
redraw();
|
||||
};
|
||||
|
||||
if (window.localStorage && localStorage.getItem('hidenav') == 1)
|
||||
if (sread('hidenav') == 1)
|
||||
dom_navtgl.onclick();
|
||||
|
@@ -77,32 +77,50 @@ html.dark #mt {
|
||||
background: #f97;
|
||||
border-radius: .15em;
|
||||
}
|
||||
html.dark #save.force-save {
|
||||
color: #fca;
|
||||
background: #720;
|
||||
}
|
||||
#save.disabled {
|
||||
opacity: .4;
|
||||
}
|
||||
#helpbox,
|
||||
#toast {
|
||||
background: #f7f7f7;
|
||||
border-radius: .4em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#helpbox {
|
||||
display: none;
|
||||
position: fixed;
|
||||
background: #f7f7f7;
|
||||
box-shadow: 0 .5em 2em #777;
|
||||
border-radius: .4em;
|
||||
padding: 2em;
|
||||
top: 4em;
|
||||
overflow-y: auto;
|
||||
box-shadow: 0 .5em 2em #777;
|
||||
height: calc(100% - 12em);
|
||||
left: calc(50% - 15em);
|
||||
right: 0;
|
||||
width: 30em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#helpclose {
|
||||
display: block;
|
||||
}
|
||||
html.dark #helpbox {
|
||||
background: #222;
|
||||
box-shadow: 0 .5em 2em #444;
|
||||
}
|
||||
html.dark #helpbox,
|
||||
html.dark #toast {
|
||||
background: #222;
|
||||
border: 1px solid #079;
|
||||
border-width: 1px 0;
|
||||
}
|
||||
|
||||
# mt {opacity: .5;top:1px}
|
||||
#toast {
|
||||
font-weight: bold;
|
||||
text-align: center;
|
||||
padding: .6em 0;
|
||||
position: fixed;
|
||||
z-index: 9001;
|
||||
top: 30%;
|
||||
transition: opacity 0.2s ease-in-out;
|
||||
opacity: 1;
|
||||
}
|
||||
|
@@ -1,16 +1,25 @@
|
||||
"use strict";
|
||||
|
||||
|
||||
// server state
|
||||
var server_md = dom_src.value;
|
||||
|
||||
|
||||
// the non-ascii whitelist
|
||||
var esc_uni_whitelist = '\\n\\t\\x20-\\x7eÆØÅæøå';
|
||||
var js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
|
||||
|
||||
|
||||
// dom nodes
|
||||
var dom_swrap = document.getElementById('mtw');
|
||||
var dom_sbs = document.getElementById('sbs');
|
||||
var dom_nsbs = document.getElementById('nsbs');
|
||||
var dom_swrap = ebi('mtw');
|
||||
var dom_sbs = ebi('sbs');
|
||||
var dom_nsbs = ebi('nsbs');
|
||||
var dom_tbox = ebi('toolsbox');
|
||||
var dom_ref = (function () {
|
||||
var d = document.createElement('div');
|
||||
d.setAttribute('id', 'mtr');
|
||||
dom_swrap.appendChild(d);
|
||||
d = document.getElementById('mtr');
|
||||
d = ebi('mtr');
|
||||
// hide behind the textarea (offsetTop is not computed if display:none)
|
||||
dom_src.style.zIndex = '4';
|
||||
d.style.zIndex = '3';
|
||||
@@ -99,7 +108,7 @@ var draw_md = (function () {
|
||||
map_src = genmap(dom_ref, map_src);
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
|
||||
cls(document.getElementById('save'), 'disabled', src == server_md);
|
||||
cls(ebi('save'), 'disabled', src == server_md);
|
||||
|
||||
var t1 = new Date().getTime();
|
||||
delay = t1 - t0 > 100 ? 25 : 1;
|
||||
@@ -135,7 +144,7 @@ redraw = (function () {
|
||||
onresize();
|
||||
}
|
||||
function modetoggle() {
|
||||
mode = dom_nsbs.innerHTML;
|
||||
var mode = dom_nsbs.innerHTML;
|
||||
dom_nsbs.innerHTML = mode == 'editor' ? 'preview' : 'editor';
|
||||
mode += ' single';
|
||||
dom_wrap.setAttribute('class', mode);
|
||||
@@ -164,14 +173,14 @@ redraw = (function () {
|
||||
dst.scrollTop = 0;
|
||||
return;
|
||||
}
|
||||
if (y + 8 + src.clientHeight > src.scrollHeight) {
|
||||
if (y + 48 + src.clientHeight > src.scrollHeight) {
|
||||
dst.scrollTop = dst.scrollHeight - dst.clientHeight;
|
||||
return;
|
||||
}
|
||||
y += src.clientHeight / 2;
|
||||
var sy1 = -1, sy2 = -1, dy1 = -1, dy2 = -1;
|
||||
for (var a = 1; a < nlines + 1; a++) {
|
||||
if (srcmap[a] === null || dstmap[a] === null)
|
||||
if (srcmap[a] == null || dstmap[a] == null)
|
||||
continue;
|
||||
|
||||
if (srcmap[a] > y) {
|
||||
@@ -214,14 +223,108 @@ redraw = (function () {
|
||||
})();
|
||||
|
||||
|
||||
// modification checker
|
||||
function Modpoll() {
|
||||
this.skip_one = true;
|
||||
this.disabled = false;
|
||||
|
||||
this.periodic = function () {
|
||||
var that = this;
|
||||
setTimeout(function () {
|
||||
that.periodic();
|
||||
}, 1000 * md_opt.modpoll_freq);
|
||||
|
||||
var skip = null;
|
||||
|
||||
if (ebi('toast'))
|
||||
skip = 'toast';
|
||||
|
||||
else if (this.skip_one)
|
||||
skip = 'saved';
|
||||
|
||||
else if (this.disabled)
|
||||
skip = 'disabled';
|
||||
|
||||
if (skip) {
|
||||
console.log('modpoll skip, ' + skip);
|
||||
this.skip_one = false;
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('modpoll...');
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + new Date().getTime();
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.modpoll = this;
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = this.cb;
|
||||
xhr.send();
|
||||
}
|
||||
|
||||
this.cb = function () {
|
||||
if (this.modpoll.disabled || this.modpoll.skip_one) {
|
||||
console.log('modpoll abort');
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200) {
|
||||
console.log('modpoll err ' + this.status + ": " + this.responseText);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!this.responseText)
|
||||
return;
|
||||
|
||||
var server_ref = server_md.replace(/\r/g, '');
|
||||
var server_now = this.responseText.replace(/\r/g, '');
|
||||
|
||||
if (server_ref != server_now) {
|
||||
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
|
||||
this.modpoll.disabled = true;
|
||||
var msg = [
|
||||
"The document has changed on the server.<br />" +
|
||||
"The changes will NOT be loaded into your editor automatically.",
|
||||
|
||||
"Press F5 or CTRL-R to refresh the page,<br />" +
|
||||
"replacing your document with the server copy.",
|
||||
|
||||
"You can click this message to ignore and contnue."
|
||||
];
|
||||
return toast(false, "box-shadow:0 1em 2em rgba(64,64,64,0.8);font-weight:normal",
|
||||
36, "<p>" + msg.join('</p>\n<p>') + '</p>');
|
||||
}
|
||||
|
||||
console.log('modpoll eq');
|
||||
}
|
||||
|
||||
if (md_opt.modpoll_freq > 0)
|
||||
this.periodic();
|
||||
|
||||
return this;
|
||||
}
|
||||
var modpoll = new Modpoll();
|
||||
|
||||
|
||||
window.onbeforeunload = function (e) {
|
||||
if ((ebi("save").getAttribute('class') + '').indexOf('disabled') >= 0)
|
||||
return; //nice (todo)
|
||||
|
||||
e.preventDefault(); //ff
|
||||
e.returnValue = ''; //chrome
|
||||
};
|
||||
|
||||
|
||||
// save handler
|
||||
function save(e) {
|
||||
if (e) e.preventDefault();
|
||||
var save_btn = document.getElementById("save"),
|
||||
var save_btn = ebi("save"),
|
||||
save_cls = save_btn.getAttribute('class') + '';
|
||||
|
||||
if (save_cls.indexOf('disabled') >= 0) {
|
||||
toast('font-size:2em;color:#fc6;width:9em;', 'no changes');
|
||||
toast(true, ";font-size:2em;color:#c90", 9, "no changes");
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -245,6 +348,8 @@ function save(e) {
|
||||
xhr.onreadystatechange = save_cb;
|
||||
xhr.btn = save_btn;
|
||||
xhr.txt = txt;
|
||||
|
||||
modpoll.skip_one = true; // skip one iteration while we save
|
||||
xhr.send(fd);
|
||||
}
|
||||
|
||||
@@ -338,23 +443,44 @@ function savechk_cb() {
|
||||
last_modified = this.lastmod;
|
||||
server_md = this.txt;
|
||||
draw_md();
|
||||
toast('font-size:6em;font-family:serif;color:#cf6;width:4em;',
|
||||
'OK✔️<span style="font-size:.2em;color:#999">' + this.ntry + '</span>');
|
||||
toast(true, ";font-size:6em;font-family:serif;color:#9b4", 4,
|
||||
'OK✔️<span style="font-size:.2em;color:#999;position:absolute">' + this.ntry + '</span>');
|
||||
|
||||
modpoll.disabled = false;
|
||||
}
|
||||
|
||||
function toast(style, msg) {
|
||||
var ok = document.createElement('div');
|
||||
style += 'font-weight:bold;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1';
|
||||
function toast(autoclose, style, width, msg) {
|
||||
var ok = ebi("toast");
|
||||
if (ok)
|
||||
ok.parentNode.removeChild(ok);
|
||||
|
||||
style = "width:" + width + "em;left:calc(50% - " + (width / 2) + "em);" + style;
|
||||
ok = document.createElement('div');
|
||||
ok.setAttribute('id', 'toast');
|
||||
ok.setAttribute('style', style);
|
||||
ok.innerHTML = msg;
|
||||
var parent = document.getElementById('m');
|
||||
var parent = ebi('m');
|
||||
document.documentElement.appendChild(ok);
|
||||
setTimeout(function () {
|
||||
ok.style.opacity = 0;
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
ok.parentNode.removeChild(ok);
|
||||
}, 750);
|
||||
|
||||
var hide = function (delay) {
|
||||
delay = delay || 0;
|
||||
|
||||
setTimeout(function () {
|
||||
ok.style.opacity = 0;
|
||||
}, delay);
|
||||
|
||||
setTimeout(function () {
|
||||
if (ok.parentNode)
|
||||
ok.parentNode.removeChild(ok);
|
||||
}, delay + 250);
|
||||
}
|
||||
|
||||
ok.onclick = function () {
|
||||
hide(0);
|
||||
};
|
||||
|
||||
if (autoclose)
|
||||
hide(500);
|
||||
}
|
||||
|
||||
|
||||
@@ -427,6 +553,9 @@ function setsel(s) {
|
||||
dom_src.value = [s.pre, s.sel, s.post].join('');
|
||||
dom_src.setSelectionRange(s.car, s.cdr, dom_src.selectionDirection);
|
||||
dom_src.oninput();
|
||||
// support chrome:
|
||||
dom_src.blur();
|
||||
dom_src.focus();
|
||||
}
|
||||
|
||||
|
||||
@@ -500,7 +629,8 @@ function md_newline() {
|
||||
var s = linebounds(true),
|
||||
ln = s.md.substring(s.n1, s.n2),
|
||||
m1 = /^( *)([0-9]+)(\. +)/.exec(ln),
|
||||
m2 = /^[ \t>+-]*(\* )?/.exec(ln);
|
||||
m2 = /^[ \t>+-]*(\* )?/.exec(ln),
|
||||
drop = dom_src.selectionEnd - dom_src.selectionStart;
|
||||
|
||||
var pre = m2[0];
|
||||
if (m1 !== null)
|
||||
@@ -512,7 +642,7 @@ function md_newline() {
|
||||
|
||||
s.pre = s.md.substring(0, s.car) + '\n' + pre;
|
||||
s.sel = '';
|
||||
s.post = s.md.substring(s.car);
|
||||
s.post = s.md.substring(s.car + drop);
|
||||
s.car = s.cdr = s.pre.length;
|
||||
setsel(s);
|
||||
return false;
|
||||
@@ -522,11 +652,21 @@ function md_newline() {
|
||||
// backspace
|
||||
function md_backspace() {
|
||||
var s = linebounds(true),
|
||||
ln = s.md.substring(s.n1, s.n2),
|
||||
m = /^[ \t>+-]*(\* )?([0-9]+\. +)?/.exec(ln);
|
||||
o0 = dom_src.selectionStart,
|
||||
left = s.md.slice(s.n1, o0),
|
||||
m = /^[ \t>+-]*(\* )?([0-9]+\. +)?/.exec(left);
|
||||
|
||||
// if car is in whitespace area, do nothing
|
||||
if (/^\s*$/.test(left))
|
||||
return true;
|
||||
|
||||
// same if selection
|
||||
if (o0 != dom_src.selectionEnd)
|
||||
return true;
|
||||
|
||||
// same if line is all-whitespace or non-markup
|
||||
var v = m[0].replace(/[^ ]/g, " ");
|
||||
if (v === m[0] || v.length !== ln.length)
|
||||
if (v === m[0] || v.length !== left.length)
|
||||
return true;
|
||||
|
||||
s.pre = s.md.substring(0, s.n1) + v;
|
||||
@@ -540,8 +680,8 @@ function md_backspace() {
|
||||
|
||||
// paragraph jump
|
||||
function md_p_jump(down) {
|
||||
var ofs = dom_src.selectionStart;
|
||||
var txt = dom_src.value;
|
||||
var txt = dom_src.value,
|
||||
ofs = dom_src.selectionStart;
|
||||
|
||||
if (down) {
|
||||
while (txt[ofs] == '\n' && --ofs > 0);
|
||||
@@ -562,6 +702,224 @@ function md_p_jump(down) {
|
||||
}
|
||||
|
||||
|
||||
function reLastIndexOf(txt, ptn, end) {
|
||||
var ofs = (typeof end !== 'undefined') ? end : txt.length;
|
||||
end = ofs;
|
||||
while (ofs >= 0) {
|
||||
var sub = txt.slice(ofs, end);
|
||||
if (ptn.test(sub))
|
||||
return ofs;
|
||||
|
||||
ofs--;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
// table formatter
|
||||
function fmt_table(e) {
|
||||
if (e) e.preventDefault();
|
||||
//dom_tbox.setAttribute('class', '');
|
||||
|
||||
var txt = dom_src.value,
|
||||
ofs = dom_src.selectionStart,
|
||||
//o0 = txt.lastIndexOf('\n\n', ofs),
|
||||
//o1 = txt.indexOf('\n\n', ofs);
|
||||
o0 = reLastIndexOf(txt, /\n\s*\n/m, ofs),
|
||||
o1 = txt.slice(ofs).search(/\n\s*\n|\n\s*$/m);
|
||||
// note \s contains \n but its fine
|
||||
|
||||
if (o0 < 0)
|
||||
o0 = 0;
|
||||
else {
|
||||
// seek past the hit
|
||||
var m = /\n\s*\n/m.exec(txt.slice(o0));
|
||||
o0 += m[0].length;
|
||||
}
|
||||
|
||||
o1 = o1 < 0 ? txt.length : o1 + ofs;
|
||||
|
||||
var err = 'cannot format table due to ',
|
||||
tab = txt.slice(o0, o1).split(/\s*\n/),
|
||||
re_ind = /^\s*/,
|
||||
ind = tab[1].match(re_ind)[0],
|
||||
r0_ind = tab[0].slice(0, ind.length),
|
||||
lpipe = tab[1].indexOf('|') < tab[1].indexOf('-'),
|
||||
rpipe = tab[1].lastIndexOf('|') > tab[1].lastIndexOf('-'),
|
||||
re_lpipe = lpipe ? /^\s*\|\s*/ : /^\s*/,
|
||||
re_rpipe = rpipe ? /\s*\|\s*$/ : /\s*$/,
|
||||
ncols;
|
||||
|
||||
// the second row defines the table,
|
||||
// need to process that first
|
||||
var tmp = tab[0];
|
||||
tab[0] = tab[1];
|
||||
tab[1] = tmp;
|
||||
|
||||
for (var a = 0; a < tab.length; a++) {
|
||||
var row_name = (a == 1) ? 'header' : 'row#' + (a + 1);
|
||||
|
||||
var ind2 = tab[a].match(re_ind)[0];
|
||||
if (ind != ind2 && a != 1) // the table can be a list entry or something, ignore [0]
|
||||
return alert(err + 'indentation mismatch on row#2 and ' + row_name + ',\n' + tab[a]);
|
||||
|
||||
var t = tab[a].slice(ind.length);
|
||||
t = t.replace(re_lpipe, "");
|
||||
t = t.replace(re_rpipe, "");
|
||||
tab[a] = t.split(/\s*\|\s*/g);
|
||||
|
||||
if (a == 0)
|
||||
ncols = tab[a].length;
|
||||
else if (ncols < tab[a].length)
|
||||
return alert(err + 'num.columns(' + row_name + ') exceeding row#2; ' + ncols + ' < ' + tab[a].length);
|
||||
|
||||
// if row has less columns than row2, fill them in
|
||||
while (tab[a].length < ncols)
|
||||
tab[a].push('');
|
||||
}
|
||||
|
||||
// aight now swap em back
|
||||
tmp = tab[0];
|
||||
tab[0] = tab[1];
|
||||
tab[1] = tmp;
|
||||
|
||||
var re_align = /^ *(:?)-+(:?) *$/;
|
||||
var align = [];
|
||||
for (var col = 0; col < tab[1].length; col++) {
|
||||
var m = tab[1][col].match(re_align);
|
||||
if (!m)
|
||||
return alert(err + 'invalid column specification, row#2, col ' + (col + 1) + ', [' + tab[1][col] + ']');
|
||||
|
||||
if (m[2]) {
|
||||
if (m[1])
|
||||
align.push('c');
|
||||
else
|
||||
align.push('r');
|
||||
}
|
||||
else
|
||||
align.push('l');
|
||||
}
|
||||
|
||||
var pad = [];
|
||||
var tmax = 0;
|
||||
for (var col = 0; col < ncols; col++) {
|
||||
var max = 0;
|
||||
for (var row = 0; row < tab.length; row++)
|
||||
if (row != 1)
|
||||
max = Math.max(max, tab[row][col].length);
|
||||
|
||||
var s = '';
|
||||
for (var n = 0; n < max; n++)
|
||||
s += ' ';
|
||||
|
||||
pad.push(s);
|
||||
tmax = Math.max(max, tmax);
|
||||
}
|
||||
|
||||
var dashes = '';
|
||||
for (var a = 0; a < tmax; a++)
|
||||
dashes += '-';
|
||||
|
||||
var ret = [];
|
||||
for (var row = 0; row < tab.length; row++) {
|
||||
var ln = [];
|
||||
for (var col = 0; col < tab[row].length; col++) {
|
||||
var p = pad[col];
|
||||
var s = tab[row][col];
|
||||
|
||||
if (align[col] == 'l') {
|
||||
s = (s + p).slice(0, p.length);
|
||||
}
|
||||
else if (align[col] == 'r') {
|
||||
s = (p + s).slice(-p.length);
|
||||
}
|
||||
else {
|
||||
var pt = p.length - s.length;
|
||||
var pl = p.slice(0, Math.floor(pt / 2));
|
||||
var pr = p.slice(0, pt - pl.length);
|
||||
s = pl + s + pr;
|
||||
}
|
||||
|
||||
if (row == 1) {
|
||||
if (align[col] == 'l')
|
||||
s = dashes.slice(0, p.length);
|
||||
else if (align[col] == 'r')
|
||||
s = dashes.slice(0, p.length - 1) + ':';
|
||||
else
|
||||
s = ':' + dashes.slice(0, p.length - 2) + ':';
|
||||
}
|
||||
ln.push(s);
|
||||
}
|
||||
ret.push(ind + '| ' + ln.join(' | ') + ' |');
|
||||
}
|
||||
|
||||
// restore any markup in the row0 gutter
|
||||
ret[0] = r0_ind + ret[0].slice(ind.length);
|
||||
|
||||
ret = {
|
||||
"pre": txt.slice(0, o0),
|
||||
"sel": ret.join('\n'),
|
||||
"post": txt.slice(o1),
|
||||
"car": o0,
|
||||
"cdr": o0
|
||||
};
|
||||
setsel(ret);
|
||||
}
|
||||
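A rough illustration of what the table formatter above does when triggered (ctrl-k); the input table is made up, and the exact padding is best-effort from reading the code: each cell is padded to the widest value in its column, the separator row is rebuilt from the alignment markers, and right/center columns are padded from the left / from both sides.

before:
| name | size | type |
|---|---:|:---:|
| a.txt | 12 | text |
| banana.png | 51234 | image |

after:
| name       |  size | type  |
| ---------- | ----: | :---: |
| a.txt      |    12 | text  |
| banana.png | 51234 | image |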
|
||||
|
||||
// show unicode
|
||||
function mark_uni(e) {
|
||||
if (e) e.preventDefault();
|
||||
dom_tbox.setAttribute('class', '');
|
||||
|
||||
var txt = dom_src.value,
|
||||
ptn = new RegExp('([^' + js_uni_whitelist + ']+)', 'g'),
|
||||
mod = txt.replace(/\r/g, "").replace(ptn, "\u2588\u2770$1\u2771");
|
||||
|
||||
if (txt == mod) {
|
||||
alert('no results; no modifications were made');
|
||||
return;
|
||||
}
|
||||
dom_src.value = mod;
|
||||
}
|
||||
|
||||
|
||||
// iterate unicode
|
||||
function iter_uni(e) {
|
||||
if (e) e.preventDefault();
|
||||
|
||||
var txt = dom_src.value,
|
||||
ofs = dom_src.selectionDirection == "forward" ? dom_src.selectionEnd : dom_src.selectionStart,
|
||||
re = new RegExp('([^' + js_uni_whitelist + ']+)'),
|
||||
m = re.exec(txt.slice(ofs));
|
||||
|
||||
if (!m) {
|
||||
alert('no more hits from cursor onwards');
|
||||
return;
|
||||
}
|
||||
ofs += m.index;
|
||||
|
||||
dom_src.setSelectionRange(ofs, ofs + m[0].length, "forward");
|
||||
dom_src.oninput();
|
||||
// support chrome:
|
||||
dom_src.blur();
|
||||
dom_src.focus();
|
||||
}
|
||||
|
||||
|
||||
// configure whitelist
|
||||
function cfg_uni(e) {
|
||||
if (e) e.preventDefault();
|
||||
|
||||
var reply = prompt("unicode whitelist", esc_uni_whitelist);
|
||||
if (reply === null)
|
||||
return;
|
||||
|
||||
esc_uni_whitelist = reply;
|
||||
js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
|
||||
}
|
||||
|
||||
|
||||
// hotkeys / toolbar
|
||||
(function () {
|
||||
function keydown(ev) {
|
||||
@@ -574,7 +932,7 @@ function md_p_jump(down) {
|
||||
return false;
|
||||
}
|
||||
if (ev.code == "Escape" || kc == 27) {
|
||||
var d = document.getElementById('helpclose');
|
||||
var d = ebi('helpclose');
|
||||
if (d)
|
||||
d.click();
|
||||
}
|
||||
@@ -609,6 +967,19 @@ function md_p_jump(down) {
|
||||
if (!ctrl && !ev.shiftKey && kc == 8) {
|
||||
return md_backspace();
|
||||
}
|
||||
if (ctrl && (ev.code == "KeyK")) {
|
||||
fmt_table();
|
||||
return false;
|
||||
}
|
||||
if (ctrl && (ev.code == "KeyU")) {
|
||||
iter_uni();
|
||||
return false;
|
||||
}
|
||||
if (ctrl && (ev.code == "KeyE")) {
|
||||
dom_nsbs.click();
|
||||
//fmt_table();
|
||||
return false;
|
||||
}
|
||||
var up = ev.code == "ArrowUp" || kc == 38;
|
||||
var dn = ev.code == "ArrowDown" || kc == 40;
|
||||
if (ctrl && (up || dn)) {
|
||||
@@ -618,13 +989,22 @@ function md_p_jump(down) {
|
||||
}
|
||||
}
|
||||
document.onkeydown = keydown;
|
||||
document.getElementById('save').onclick = save;
|
||||
ebi('save').onclick = save;
|
||||
})();
|
||||
|
||||
|
||||
document.getElementById('help').onclick = function (e) {
|
||||
ebi('tools').onclick = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
var dom = document.getElementById('helpbox');
|
||||
var is_open = dom_tbox.getAttribute('class') != 'open';
|
||||
dom_tbox.setAttribute('class', is_open ? 'open' : '');
|
||||
};
|
||||
|
||||
|
||||
ebi('help').onclick = function (e) {
|
||||
if (e) e.preventDefault();
|
||||
dom_tbox.setAttribute('class', '');
|
||||
|
||||
var dom = ebi('helpbox');
|
||||
var dtxt = dom.getElementsByTagName('textarea');
|
||||
if (dtxt.length > 0) {
|
||||
convert_markdown(dtxt[0].value, dom);
|
||||
@@ -632,12 +1012,18 @@ document.getElementById('help').onclick = function (e) {
|
||||
}
|
||||
|
||||
dom.style.display = 'block';
|
||||
document.getElementById('helpclose').onclick = function () {
|
||||
ebi('helpclose').onclick = function () {
|
||||
dom.style.display = 'none';
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
ebi('fmt_table').onclick = fmt_table;
|
||||
ebi('mark_uni').onclick = mark_uni;
|
||||
ebi('iter_uni').onclick = iter_uni;
|
||||
ebi('cfg_uni').onclick = cfg_uni;
|
||||
|
||||
|
||||
// blame steen
|
||||
action_stack = (function () {
|
||||
var hist = {
|
||||
@@ -743,13 +1129,12 @@ action_stack = (function () {
|
||||
ref = newtxt;
|
||||
dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length);
|
||||
if (hist.un.length > 0)
|
||||
dbg(static(hist.un.slice(-1)[0]));
|
||||
dbg(statify(hist.un.slice(-1)[0]));
|
||||
if (hist.re.length > 0)
|
||||
dbg(static(hist.re.slice(-1)[0]));
|
||||
dbg(statify(hist.re.slice(-1)[0]));
|
||||
}
|
||||
|
||||
return {
|
||||
push: push,
|
||||
undo: undo,
|
||||
redo: redo,
|
||||
push: schedule_push,
|
||||
@@ -759,7 +1144,7 @@ action_stack = (function () {
|
||||
})();
|
||||
|
||||
/*
|
||||
document.getElementById('help').onclick = function () {
|
||||
ebi('help').onclick = function () {
|
||||
var c1 = getComputedStyle(dom_src).cssText.split(';');
|
||||
var c2 = getComputedStyle(dom_ref).cssText.split(';');
|
||||
var max = Math.min(c1.length, c2.length);
|
||||
|
@@ -160,8 +160,12 @@ h2 {
|
||||
.mdo ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em {
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
@@ -253,8 +257,12 @@ html.dark .mdo>ul,
|
||||
html.dark .mdo>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em {
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
|
@@ -22,8 +22,12 @@
|
||||
</div>
|
||||
<script>
|
||||
|
||||
var link_md_as_html = false; // TODO (does nothing)
|
||||
var last_modified = {{ lastmod }};
|
||||
var md_opt = {
|
||||
link_md_as_html: false,
|
||||
allow_plugins: {{ md_plug }},
|
||||
modpoll_freq: {{ md_chk_rate }}
|
||||
};
|
||||
|
||||
var lightswitch = (function () {
|
||||
var fun = function () {
|
||||
@@ -39,6 +43,7 @@ var lightswitch = (function () {
|
||||
})();
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js"></script>
|
||||
<script src="/.cpr/deps/easymde.js"></script>
|
||||
<script src="/.cpr/mde.js"></script>
|
||||
</body></html>
|
||||
|
@@ -1,7 +1,9 @@
|
||||
var dom_wrap = document.getElementById('mw');
|
||||
var dom_nav = document.getElementById('mn');
|
||||
var dom_doc = document.getElementById('m');
|
||||
var dom_md = document.getElementById('mt');
|
||||
"use strict";
|
||||
|
||||
var dom_wrap = ebi('mw');
|
||||
var dom_nav = ebi('mn');
|
||||
var dom_doc = ebi('m');
|
||||
var dom_md = ebi('mt');
|
||||
|
||||
(function () {
|
||||
var n = document.location + '';
|
||||
@@ -13,7 +15,7 @@ var dom_md = document.getElementById('mt');
|
||||
if (a > 0)
|
||||
loc.push(n[a]);
|
||||
|
||||
var dec = decodeURIComponent(n[a]).replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
|
||||
var dec = uricom_dec(n[a])[0].replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
|
||||
|
||||
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
||||
}
|
||||
@@ -63,7 +65,7 @@ var mde = (function () {
|
||||
mde.codemirror.on("change", function () {
|
||||
md_changed(mde);
|
||||
});
|
||||
var loader = document.getElementById('ml');
|
||||
var loader = ebi('ml');
|
||||
loader.parentNode.removeChild(loader);
|
||||
return mde;
|
||||
})();
|
||||
@@ -121,7 +123,7 @@ function save(mde) {
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
|
||||
var url = (document.location + '').split('?')[0] + '?raw';
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
@@ -213,7 +215,7 @@ function save_chk() {
|
||||
var ok = document.createElement('div');
|
||||
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
|
||||
ok.innerHTML = 'OK✔️';
|
||||
var parent = document.getElementById('m');
|
||||
var parent = ebi('m');
|
||||
document.documentElement.appendChild(ok);
|
||||
setTimeout(function () {
|
||||
ok.style.opacity = 0;
|
||||
|
File diff suppressed because it is too large
@@ -1,92 +1,4 @@
|
||||
.opview {
|
||||
display: none;
|
||||
}
|
||||
.opview.act {
|
||||
display: block;
|
||||
}
|
||||
#ops a {
|
||||
color: #fc5;
|
||||
font-size: 1.5em;
|
||||
padding: 0 .3em;
|
||||
margin: 0;
|
||||
outline: none;
|
||||
}
|
||||
#ops a.act {
|
||||
text-decoration: underline;
|
||||
}
|
||||
/*
|
||||
#ops a+a:after,
|
||||
#ops a:first-child:after {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #01a7e1;
|
||||
margin-left: .3em;
|
||||
position: relative;
|
||||
}
|
||||
#ops a+a:before {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #ff3f1a;
|
||||
margin-right: .3em;
|
||||
margin-left: -.3em;
|
||||
}
|
||||
#ops a:last-child:after {
|
||||
content: '';
|
||||
}
|
||||
#ops a.act:before,
|
||||
#ops a.act:after {
|
||||
text-decoration: none !important;
|
||||
}
|
||||
*/
|
||||
#ops i {
|
||||
font-size: 1.5em;
|
||||
}
|
||||
#ops i:before {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #01a7e1;
|
||||
position: relative;
|
||||
}
|
||||
#ops i:after {
|
||||
content: 'x';
|
||||
color: #282828;
|
||||
text-shadow: 0 0 .08em #ff3f1a;
|
||||
margin-left: -.35em;
|
||||
font-size: 1.05em;
|
||||
}
|
||||
#ops,
|
||||
.opbox {
|
||||
border: 1px solid #3a3a3a;
|
||||
box-shadow: 0 0 1em #222 inset;
|
||||
}
|
||||
#ops {
|
||||
display: none;
|
||||
background: #333;
|
||||
margin: 1.7em 1.5em 0 1.5em;
|
||||
padding: .3em .6em;
|
||||
border-radius: .3em;
|
||||
border-width: .15em 0;
|
||||
}
|
||||
.opbox {
|
||||
background: #2d2d2d;
|
||||
margin: 1.5em 0 0 0;
|
||||
padding: .5em;
|
||||
border-radius: 0 1em 1em 0;
|
||||
border-width: .15em .3em .3em 0;
|
||||
max-width: 40em;
|
||||
}
|
||||
.opbox input {
|
||||
margin: .5em;
|
||||
}
|
||||
.opbox input[type=text] {
|
||||
color: #fff;
|
||||
background: #383838;
|
||||
border: none;
|
||||
box-shadow: 0 0 .3em #222;
|
||||
border-bottom: 1px solid #fc5;
|
||||
border-radius: .2em;
|
||||
padding: .2em .3em;
|
||||
}
|
||||
|
||||
#op_up2k {
|
||||
padding: 0 1em 1em 1em;
|
||||
}
|
||||
@@ -94,6 +6,9 @@
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 2px;
|
||||
height: 2px;
|
||||
overflow: hidden;
|
||||
}
|
||||
#u2form input {
|
||||
background: #444;
|
||||
@@ -104,11 +19,6 @@
|
||||
color: #f87;
|
||||
padding: .5em;
|
||||
}
|
||||
#u2form {
|
||||
width: 2px;
|
||||
height: 2px;
|
||||
overflow: hidden;
|
||||
}
|
||||
#u2btn {
|
||||
color: #eee;
|
||||
background: #555;
|
||||
@@ -117,17 +27,27 @@
|
||||
background: linear-gradient(to bottom, #367 0%, #489 50%, #38788a 51%, #367 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#489', endColorstr='#38788a', GradientType=0);
|
||||
text-decoration: none;
|
||||
line-height: 1.5em;
|
||||
line-height: 1.3em;
|
||||
border: 1px solid #222;
|
||||
border-radius: .4em;
|
||||
text-align: center;
|
||||
font-size: 2em;
|
||||
margin: 1em auto;
|
||||
padding: 1em 0;
|
||||
width: 12em;
|
||||
font-size: 1.5em;
|
||||
margin: .5em auto;
|
||||
padding: .8em 0;
|
||||
width: 16em;
|
||||
cursor: pointer;
|
||||
box-shadow: .4em .4em 0 #111;
|
||||
}
|
||||
#op_up2k.srch #u2btn {
|
||||
background: linear-gradient(to bottom, #ca3 0%, #fd8 50%, #fc6 51%, #b92 100%);
|
||||
text-shadow: 1px 1px 1px #fc6;
|
||||
color: #333;
|
||||
}
|
||||
#u2conf #u2btn {
|
||||
margin: -1.5em 0;
|
||||
padding: .8em 0;
|
||||
width: 100%;
|
||||
}
|
||||
#u2notbtn {
|
||||
display: none;
|
||||
text-align: center;
|
||||
@@ -142,6 +62,9 @@
|
||||
width: calc(100% - 2em);
|
||||
max-width: 100em;
|
||||
}
|
||||
#op_up2k.srch #u2tab {
|
||||
max-width: none;
|
||||
}
|
||||
#u2tab td {
|
||||
border: 1px solid #ccc;
|
||||
border-width: 0 0px 1px 0;
|
||||
@@ -153,12 +76,19 @@
|
||||
#u2tab td:nth-child(3) {
|
||||
width: 40%;
|
||||
}
|
||||
#op_up2k.srch #u2tab td:nth-child(3) {
|
||||
font-family: sans-serif;
|
||||
width: auto;
|
||||
}
|
||||
#u2tab tr+tr:hover td {
|
||||
background: #222;
|
||||
}
|
||||
#u2conf {
|
||||
margin: 1em auto;
|
||||
width: 26em;
|
||||
width: 30em;
|
||||
}
|
||||
#u2conf.has_btn {
|
||||
width: 46em;
|
||||
}
|
||||
#u2conf * {
|
||||
text-align: center;
|
||||
@@ -194,10 +124,72 @@
|
||||
#u2conf input+a {
|
||||
background: #d80;
|
||||
}
|
||||
#u2conf label {
|
||||
font-size: 1.6em;
|
||||
width: 2em;
|
||||
height: 1em;
|
||||
padding: .4em 0;
|
||||
display: block;
|
||||
user-select: none;
|
||||
border-radius: .25em;
|
||||
}
|
||||
#u2conf input[type="checkbox"] {
|
||||
position: relative;
|
||||
opacity: .02;
|
||||
top: 2em;
|
||||
}
|
||||
#u2conf input[type="checkbox"]+label {
|
||||
position: relative;
|
||||
background: #603;
|
||||
border-bottom: .2em solid #a16;
|
||||
box-shadow: 0 .1em .3em #a00 inset;
|
||||
}
|
||||
#u2conf input[type="checkbox"]:checked+label {
|
||||
background: #6a1;
|
||||
border-bottom: .2em solid #efa;
|
||||
box-shadow: 0 .1em .5em #0c0;
|
||||
}
|
||||
#u2conf input[type="checkbox"]+label:hover {
|
||||
box-shadow: 0 .1em .3em #fb0;
|
||||
border-color: #fb0;
|
||||
}
|
||||
#op_up2k.srch #u2conf td:nth-child(1)>*,
|
||||
#op_up2k.srch #u2conf td:nth-child(2)>*,
|
||||
#op_up2k.srch #u2conf td:nth-child(3)>* {
|
||||
background: #777;
|
||||
border-color: #ccc;
|
||||
box-shadow: none;
|
||||
opacity: .2;
|
||||
}
|
||||
#u2cdesc {
|
||||
position: absolute;
|
||||
width: 34em;
|
||||
left: calc(50% - 15em);
|
||||
background: #222;
|
||||
border: 0 solid #555;
|
||||
text-align: center;
|
||||
overflow: hidden;
|
||||
margin: 0 -2em;
|
||||
height: 0;
|
||||
padding: 0 1em;
|
||||
opacity: .1;
|
||||
transition: all 0.14s ease-in-out;
|
||||
border-radius: .4em;
|
||||
box-shadow: 0 .2em .5em #222;
|
||||
}
|
||||
#u2cdesc.show {
|
||||
padding: 1em;
|
||||
height: auto;
|
||||
border-width: .2em 0;
|
||||
opacity: 1;
|
||||
}
|
||||
#u2foot {
|
||||
color: #fff;
|
||||
font-style: italic;
|
||||
}
|
||||
#u2footfoot {
|
||||
margin-bottom: -1em;
|
||||
}
|
||||
.prog {
|
||||
font-family: monospace;
|
||||
}
|
||||
@@ -219,3 +211,13 @@
|
||||
bottom: 0;
|
||||
background: #0a0;
|
||||
}
|
||||
#u2tab a>span {
|
||||
font-weight: bold;
|
||||
font-style: italic;
|
||||
color: #fff;
|
||||
padding-left: .2em;
|
||||
}
|
||||
#u2cleanup {
|
||||
float: right;
|
||||
margin-bottom: -.3em;
|
||||
}
|
||||
|
@@ -1,13 +1,7 @@
|
||||
<div id="ops"><a
|
||||
href="#" data-dest="">---</a><i></i><a
|
||||
href="#" data-dest="up2k">up2k</a><i></i><a
|
||||
href="#" data-dest="bup">bup</a><i></i><a
|
||||
href="#" data-dest="mkdir">mkdir</a><i></i><a
|
||||
href="#" data-dest="new_md">new.md</a></div>
|
||||
|
||||
<div id="op_bup" class="opview opbox act">
|
||||
<div id="u2err"></div>
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8">
|
||||
<input type="hidden" name="act" value="bput" />
|
||||
<input type="file" name="f" multiple><br />
|
||||
<input type="submit" value="start upload">
|
||||
@@ -15,7 +9,7 @@
|
||||
</div>
|
||||
|
||||
<div id="op_mkdir" class="opview opbox act">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8">
|
||||
<input type="hidden" name="act" value="mkdir" />
|
||||
<input type="text" name="name" size="30">
|
||||
<input type="submit" value="mkdir">
|
||||
@@ -23,19 +17,45 @@
|
||||
</div>
|
||||
|
||||
<div id="op_new_md" class="opview opbox">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="/{{ vdir }}">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8">
|
||||
<input type="hidden" name="act" value="new_md" />
|
||||
<input type="text" name="name" size="30">
|
||||
<input type="submit" value="create doc">
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="op_msg" class="opview opbox">
|
||||
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8">
|
||||
<input type="text" name="msg" size="30">
|
||||
<input type="submit" value="send msg">
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="op_up2k" class="opview">
|
||||
<form id="u2form" method="post" enctype="multipart/form-data" onsubmit="return false;"></form>
|
||||
|
||||
<table id="u2conf">
|
||||
<tr>
|
||||
<td>parallel uploads</td>
|
||||
<td rowspan="2">
|
||||
<input type="checkbox" id="multitask" />
|
||||
<label for="multitask" alt="continue hashing other files while uploading">🏃</label>
|
||||
</td>
|
||||
<td rowspan="2">
|
||||
<input type="checkbox" id="ask_up" />
|
||||
<label for="ask_up" alt="ask for confirmation befofre upload starts">💭</label>
|
||||
</td>
|
||||
<td rowspan="2">
|
||||
<input type="checkbox" id="flag_en" />
|
||||
<label for="flag_en" alt="ensure only one tab is uploading at a time $N (other tabs must have this enabled too)">💤</label>
|
||||
</td>
|
||||
{%- if have_up2k_idx %}
|
||||
<td data-perm="read" rowspan="2">
|
||||
<input type="checkbox" id="fsearch" />
|
||||
<label for="fsearch" alt="don't actually upload, instead check if the files already $N exist on the server (will scan all folders you can read)">🔎</label>
|
||||
</td>
|
||||
{%- endif %}
|
||||
<td data-perm="read" rowspan="2" id="u2btn_cw"></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
@@ -43,28 +63,29 @@
|
||||
<input class="txtbox" id="nthread" value="2" />
|
||||
<a href="#" id="nthread_add">+</a>
|
||||
</td>
|
||||
<td rowspan="2">
|
||||
<input type="checkbox" id="multitask" />
|
||||
<label for="multitask">hash while<br />uploading</label>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<div id="u2cdesc"></div>
|
||||
|
||||
<div id="u2notbtn"></div>
|
||||
|
||||
<div id="u2btn">
|
||||
drop files here<br />
|
||||
(or click me)
|
||||
<div id="u2btn_ct">
|
||||
<div id="u2btn">
|
||||
<span id="u2bm"></span><br />
|
||||
drop files here<br />
|
||||
(or click me)
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<table id="u2tab">
|
||||
<tr>
|
||||
<td>filename</td>
|
||||
<td>status</td>
|
||||
<td>progress</td>
|
||||
<td>progress<a href="#" id="u2cleanup">cleanup</a></td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<p id="u2foot"></p>
|
||||
<p>( if you don't need lastmod timestamps, resumable uploads or progress bars just use the <a href="#" id="u2nope">basic uploader</a>)</p>
|
||||
<p id="u2footfoot">( if you don't need lastmod timestamps, resumable uploads or progress bars just use the <a href="#" id="u2nope">basic uploader</a>)</p>
|
||||
</div>
|
||||
|
copyparty/web/util.js (new file, +422 lines)
@@ -0,0 +1,422 @@
|
||||
"use strict";
|
||||
|
||||
// error handler for mobile devices
|
||||
function hcroak(msg) {
|
||||
document.body.innerHTML = msg;
|
||||
window.onerror = undefined;
|
||||
throw 'fatal_err';
|
||||
}
|
||||
function croak(msg) {
|
||||
document.body.textContent = msg;
|
||||
window.onerror = undefined;
|
||||
throw msg;
|
||||
}
|
||||
function esc(txt) {
|
||||
return txt.replace(/[&"<>]/g, function (c) {
|
||||
return {
|
||||
'&': '&amp;',
'"': '&quot;',
'<': '&lt;',
'>': '&gt;'
|
||||
}[c];
|
||||
});
|
||||
}
|
||||
function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
window.onerror = undefined;
|
||||
window['vis_exh'] = null;
|
||||
var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
|
||||
esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
|
||||
|
||||
if (error) {
|
||||
var find = ['desc', 'stack', 'trace'];
|
||||
for (var a = 0; a < find.length; a++)
|
||||
if (String(error[find[a]]) !== 'undefined')
|
||||
html.push('<h2>' + find[a] + '</h2>' +
|
||||
esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
|
||||
}
|
||||
document.body.style.fontSize = '0.8em';
|
||||
document.body.style.padding = '0 1em 1em 1em';
|
||||
hcroak(html.join('\n'));
|
||||
}
|
||||
|
||||
|
||||
function ebi(id) {
|
||||
return document.getElementById(id);
|
||||
}
|
||||
|
||||
function ev(e) {
|
||||
e = e || window.event;
|
||||
if (!e)
|
||||
return;
|
||||
|
||||
if (e.preventDefault)
|
||||
e.preventDefault()
|
||||
|
||||
if (e.stopPropagation)
|
||||
e.stopPropagation();
|
||||
|
||||
e.returnValue = false;
|
||||
return e;
|
||||
}
|
||||
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
||||
if (!String.prototype.endsWith) {
|
||||
String.prototype.endsWith = function (search, this_len) {
|
||||
if (this_len === undefined || this_len > this.length) {
|
||||
this_len = this.length;
|
||||
}
|
||||
return this.substring(this_len - search.length, this_len) === search;
|
||||
};
|
||||
}
|
||||
if (!String.startsWith) {
|
||||
String.prototype.startsWith = function (s, i) {
|
||||
i = i > 0 ? i | 0 : 0;
|
||||
return this.substring(i, i + s.length) === s;
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var script = document.createElement('script');
|
||||
script.type = 'text/javascript';
|
||||
script.src = url;
|
||||
|
||||
script.onreadystatechange = cb;
|
||||
script.onload = cb;
|
||||
|
||||
head.appendChild(script);
|
||||
}
|
||||
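A small usage sketch for import_js; note the callback is wired to both onload and onreadystatechange. The dependency path is one that appears elsewhere in this diff, and the callback body is just an example:

import_js('/.cpr/deps/marked.full.js', function () {
    // the script has been appended and has finished loading here
    console.log('marked loaded:', typeof marked);
});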
|
||||
|
||||
var crctab = (function () {
|
||||
var c, tab = [];
|
||||
for (var n = 0; n < 256; n++) {
|
||||
c = n;
|
||||
for (var k = 0; k < 8; k++) {
|
||||
c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1));
|
||||
}
|
||||
tab[n] = c;
|
||||
}
|
||||
return tab;
|
||||
})();
|
||||
|
||||
|
||||
function crc32(str) {
|
||||
var crc = 0 ^ (-1);
|
||||
for (var i = 0; i < str.length; i++) {
|
||||
crc = (crc >>> 8) ^ crctab[(crc ^ str.charCodeAt(i)) & 0xFF];
|
||||
}
|
||||
return ((crc ^ (-1)) >>> 0).toString(16);
|
||||
};
|
||||
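As a quick sanity check of the crc32 helper above (a standard reflected CRC-32; the call itself is just an example):

// "123456789" is the usual CRC-32 test vector; its checksum is 0xCBF43926
console.log(crc32('123456789'));  // "cbf43926" (lowercase hex, no zero-padding)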
|
||||
|
||||
function sortTable(table, col, cb) {
|
||||
var tb = table.tBodies[0],
|
||||
th = table.tHead.rows[0].cells,
|
||||
tr = Array.prototype.slice.call(tb.rows, 0),
|
||||
i, reverse = th[col].className.indexOf('sort1') !== -1 ? -1 : 1;
|
||||
for (var a = 0, thl = th.length; a < thl; a++)
|
||||
th[a].className = th[a].className.replace(/ *sort-?1 */, " ");
|
||||
th[col].className += ' sort' + reverse;
|
||||
var stype = th[col].getAttribute('sort');
|
||||
try {
|
||||
var nrules = [], rules = jread("fsort", []);
|
||||
rules.unshift([th[col].getAttribute('name'), reverse, stype || '']);
|
||||
for (var a = 0; a < rules.length; a++) {
|
||||
var add = true;
|
||||
for (var b = 0; b < a; b++)
|
||||
if (rules[a][0] == rules[b][0])
|
||||
add = false;
|
||||
|
||||
if (add)
|
||||
nrules.push(rules[a]);
|
||||
|
||||
if (nrules.length >= 10)
|
||||
break;
|
||||
}
|
||||
jwrite("fsort", nrules);
|
||||
}
|
||||
catch (ex) {
|
||||
console.log("failed to persist sort rules, resetting: " + ex);
|
||||
jwrite("fsort", null);
|
||||
}
|
||||
var vl = [];
|
||||
for (var a = 0; a < tr.length; a++) {
|
||||
var cell = tr[a].cells[col];
|
||||
if (!cell) {
|
||||
vl.push([null, a]);
|
||||
continue;
|
||||
}
|
||||
var v = cell.getAttribute('sortv') || cell.textContent.trim();
|
||||
if (stype == 'int') {
|
||||
v = parseInt(v.replace(/[, ]/g, '')) || 0;
|
||||
}
|
||||
vl.push([v, a]);
|
||||
}
|
||||
vl.sort(function (a, b) {
|
||||
a = a[0];
|
||||
b = b[0];
|
||||
if (a === null)
|
||||
return -1;
|
||||
if (b === null)
|
||||
return 1;
|
||||
|
||||
if (stype == 'int') {
|
||||
return reverse * (a - b);
|
||||
}
|
||||
return reverse * (a.localeCompare(b));
|
||||
});
|
||||
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[vl[i][1]]);
|
||||
if (cb) cb();
|
||||
}
|
||||
function makeSortable(table, cb) {
|
||||
var th = table.tHead, i;
|
||||
th && (th = th.rows[0]) && (th = th.cells);
|
||||
if (th) i = th.length;
|
||||
else return; // if no `<thead>` then do nothing
|
||||
while (--i >= 0) (function (i) {
|
||||
th[i].onclick = function (e) {
|
||||
ev(e);
|
||||
sortTable(table, i, cb);
|
||||
};
|
||||
}(i));
|
||||
}
|
||||
|
||||
|
||||
|
||||
(function () {
|
||||
var ops = document.querySelectorAll('#ops>a');
|
||||
for (var a = 0; a < ops.length; a++) {
|
||||
ops[a].onclick = opclick;
|
||||
}
|
||||
})();
|
||||
|
||||
|
||||
function opclick(e) {
|
||||
ev(e);
|
||||
|
||||
var dest = this.getAttribute('data-dest');
|
||||
goto(dest);
|
||||
|
||||
swrite('opmode', dest || null);
|
||||
|
||||
var input = document.querySelector('.opview.act input:not([type="hidden"])')
|
||||
if (input)
|
||||
input.focus();
|
||||
}
|
||||
|
||||
|
||||
function goto(dest) {
|
||||
var obj = document.querySelectorAll('.opview.act');
|
||||
for (var a = obj.length - 1; a >= 0; a--)
|
||||
obj[a].classList.remove('act');
|
||||
|
||||
obj = document.querySelectorAll('#ops>a');
|
||||
for (var a = obj.length - 1; a >= 0; a--)
|
||||
obj[a].classList.remove('act');
|
||||
|
||||
if (dest) {
|
||||
var ui = ebi('op_' + dest);
|
||||
ui.classList.add('act');
|
||||
document.querySelector('#ops>a[data-dest=' + dest + ']').classList.add('act');
|
||||
|
||||
var fn = window['goto_' + dest];
|
||||
if (fn)
|
||||
fn();
|
||||
}
|
||||
|
||||
if (window['treectl'])
|
||||
treectl.onscroll();
|
||||
}
|
||||
|
||||
|
||||
(function () {
|
||||
goto();
|
||||
var op = sread('opmode');
|
||||
if (op !== null && op !== '.')
|
||||
goto(op);
|
||||
})();
|
||||
|
||||
|
||||
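// split a filesystem path into breadcrumb links, one <a> per segment;
// directory segments get their trailing slash wrapped in a <span>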
function linksplit(rp) {
|
||||
var ret = [];
|
||||
var apath = '/';
|
||||
if (rp && rp.charAt(0) == '/')
|
||||
rp = rp.slice(1);
|
||||
|
||||
while (rp) {
|
||||
var link = rp;
|
||||
var ofs = rp.indexOf('/');
|
||||
if (ofs === -1) {
|
||||
rp = null;
|
||||
}
|
||||
else {
|
||||
link = rp.slice(0, ofs + 1);
|
||||
rp = rp.slice(ofs + 1);
|
||||
}
|
||||
var vlink = link;
|
||||
if (link.indexOf('/') !== -1)
|
||||
vlink = link.slice(0, -1) + '<span>/</span>';
|
||||
|
||||
ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
|
||||
apath += link;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
function uricom_enc(txt, do_fb_enc) {
|
||||
try {
|
||||
return encodeURIComponent(txt);
|
||||
}
|
||||
catch (ex) {
|
||||
console.log("uce-err [" + txt + "]");
|
||||
if (do_fb_enc)
|
||||
return esc(txt);
|
||||
|
||||
return txt;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function uricom_dec(txt) {
|
||||
try {
|
||||
return [decodeURIComponent(txt), true];
|
||||
}
|
||||
catch (ex) {
|
||||
console.log("ucd-err [" + txt + "]");
|
||||
return [txt, false];
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function get_evpath() {
|
||||
var ret = document.location.pathname;
|
||||
|
||||
if (ret.indexOf('/') !== 0)
|
||||
ret = '/' + ret;
|
||||
|
||||
if (ret.lastIndexOf('/') !== ret.length - 1)
|
||||
ret += '/';
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
function get_vpath() {
|
||||
return uricom_dec(get_evpath())[0];
|
||||
}
|
||||
|
||||
|
||||
function unix2iso(ts) {
|
||||
return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
|
||||
}
|
||||
|
||||
|
||||
function s2ms(s) {
|
||||
s = Math.floor(s);
|
||||
var m = Math.floor(s / 60);
|
||||
return m + ":" + ("0" + (s - m * 60)).slice(-2);
|
||||
}
|
||||
|
||||
|
||||
function has(haystack, needle) {
|
||||
for (var a = 0; a < haystack.length; a++)
|
||||
if (haystack[a] == needle)
|
||||
return true;
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
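// localStorage helpers: sread/swrite store plain strings, jread/jwrite store JSON,
// icfg_get/bcfg_get read int/bool settings (falling back to the DOM element / default)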
function sread(key) {
|
||||
if (window.localStorage)
|
||||
return localStorage.getItem(key);
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function swrite(key, val) {
|
||||
if (window.localStorage) {
|
||||
if (val === undefined || val === null)
|
||||
localStorage.removeItem(key);
|
||||
else
|
||||
localStorage.setItem(key, val);
|
||||
}
|
||||
}
|
||||
|
||||
function jread(key, fb) {
|
||||
var str = sread(key);
|
||||
if (!str)
|
||||
return fb;
|
||||
|
||||
return JSON.parse(str);
|
||||
}
|
||||
|
||||
function jwrite(key, val) {
|
||||
if (!val)
|
||||
swrite(key);
|
||||
else
|
||||
swrite(key, JSON.stringify(val));
|
||||
}
|
||||
|
||||
function icfg_get(name, defval) {
|
||||
var o = ebi(name);
|
||||
|
||||
var val = parseInt(sread(name));
|
||||
if (isNaN(val))
|
||||
return parseInt(o ? o.value : defval);
|
||||
|
||||
if (o)
|
||||
o.value = val;
|
||||
|
||||
return val;
|
||||
}
|
||||
|
||||
function bcfg_get(name, defval) {
|
||||
var o = ebi(name);
|
||||
if (!o)
|
||||
return defval;
|
||||
|
||||
var val = sread(name);
|
||||
if (val === null)
|
||||
val = defval;
|
||||
else
|
||||
val = (val == '1');
|
||||
|
||||
bcfg_upd_ui(name, val);
|
||||
return val;
|
||||
}
|
||||
|
||||
function bcfg_set(name, val) {
|
||||
swrite(name, val ? '1' : '0');
|
||||
bcfg_upd_ui(name, val);
|
||||
return val;
|
||||
}
|
||||
|
||||
function bcfg_upd_ui(name, val) {
|
||||
var o = ebi(name);
|
||||
if (!o)
|
||||
return;
|
||||
|
||||
if (o.getAttribute('type') == 'checkbox')
|
||||
o.checked = val;
|
||||
else if (o) {
|
||||
var fun = val ? 'add' : 'remove';
|
||||
o.classList[fun]('on');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function hist_push(url) {
|
||||
console.log("h-push " + url);
|
||||
history.pushState(url, url, url);
|
||||
}
|
||||
|
||||
function hist_replace(url) {
|
||||
console.log("h-repl " + url);
|
||||
history.replaceState(url, url, url);
|
||||
}
|
242
docs/music-analysis.sh
Normal file
@@ -0,0 +1,242 @@
|
||||
#!/bin/bash
|
||||
echo please dont actually run this as a script
|
||||
exit 1
|
||||
|
||||
|
||||
# dependency-heavy, not particularly good fit
|
||||
pacman -S llvm10
|
||||
python3 -m pip install --user librosa
|
||||
git clone https://github.com/librosa/librosa.git
|
||||
|
||||
|
||||
# correct bpm for tracks with bad tags
|
||||
br='
|
||||
/Trip Trip Trip\(Hardcore Edit\).mp3/ {v=176}
|
||||
/World!!.BIG_SOS/ {v=175}
|
||||
/\/08\..*\(BIG_SOS Bootleg\)\.mp3/ {v=175}
|
||||
/もってけ!セーラ服.Asterisk DnB/ {v=175}
|
||||
/Rondo\(Asterisk DnB Re.mp3/ {v=175}
|
||||
/Ray Nautica 175 Edit/ {v=175;x="thunk"}
|
||||
/TOKIMEKI Language.Jauz/ {v=174}
|
||||
/YUPPUN Hardcore Remix\).mp3/ {v=174;x="keeps drifting"}
|
||||
/(èâAâï.î╧ûδ|バーチャリアル.狐耶)J-Core Remix\).mp3/ {v=172;x="hard"}
|
||||
/lucky train..Freezer/ {v=170}
|
||||
/Alf zero Bootleg ReMix/ {v=170}
|
||||
/Prisoner of Love.Kacky/ {v=170}
|
||||
/火炎 .Qota/ {v=170}
|
||||
/\(hu-zin Bootleg\)\.mp3/ {v=170}
|
||||
/15. STRAIGHT BET\(Milynn Bootleg\)\.mp3/ {v=170}
|
||||
/\/13.*\(Milynn Bootleg\)\.mp3/ {v=167;x="way hard"}
|
||||
/COLOR PLANET .10SAI . nijikon Remix\)\.mp3/ {v=165}
|
||||
/11\. (朝はご飯派|Æ⌐é═é▓ö╤öh)\.mp3/ {v=162}
|
||||
/09\. Where.s the core/ {v=160}
|
||||
/PLANET\(Koushif Jersey Club Bootleg\)remaster.mp3/ {v=160;x="starts ez turns bs"}
|
||||
/kened Soul - Madeon x Angel Beats!.mp3/ {v=160}
|
||||
/Dear Moments\(Mother Harlot Bootleg\)\.mp3/ {v=150}
|
||||
/POWER.Ringos UKG/ {v=140}
|
||||
/ブルー・フィールド\(Ringos UKG Remix\).mp3/ {v=135}
|
||||
/プラチナジェット.Ringo Remix..mp3/ {v=131.2}
|
||||
/Mirrorball Love \(TKM Bootleg Mix\).mp3/ {v=130}
|
||||
/Photon Melodies \(TKM Bootleg Mix\).mp3/ {v=128}
|
||||
/Trap of Love \(TKM Bootleg Mix\).mp3/ {v=128}
|
||||
/One Step \(TKM Bootleg Mix\)\.mp3/ {v=126}
|
||||
/04 (トリカムイ岩|âgâèâJâÇâCèΓ).mp3/ {v=125}
|
||||
/Get your Wish \(NAWN REMIX\)\.mp3/ {v=95}
|
||||
/Flicker .Nitro Fun/ {v=92}
|
||||
/\/14\..*suicat Remix/ {v=85.5;x="tricky"}
|
||||
/Yanagi Nagi - Harumodoki \(EO Remix\)\.mp3/ {v=150}
|
||||
/Azure - Nicology\.mp3/ {v=128;x="off by 5 how"}
|
||||
'
|
||||
|
||||
|
||||
# afun host, collects/grades the results
|
||||
runfun() { cores=8; touch run; rm -f /dev/shm/mres.*; t00=$(date +%s); tbc() { bc | sed -r 's/(\.[0-9]{2}).*/\1/'; }; for ((core=0; core<$cores; core++)); do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db 'select dur.w, dur.v, bpm.v from mt bpm join mt dur on bpm.w = dur.w where bpm.k = ".bpm" and dur.k = ".dur" order by dur.w' | uniq -w16 | while IFS=\| read w dur bpm; do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db "select rd, fn from up where substr(w,1,16) = '$w'" | sed -r "s/^/$bpm /"; done | grep mir/cr | tr \| / | awk '{v=$1;sub(/[^ ]+ /,"")} '"$br"' {printf "%s %s\n",v,$0}' | while read bpm fn; do [ -e run ] || break; n=$((n+1)); ncore=$((n%cores)); [ $ncore -eq $core ] || continue; t0=$(date +%s.%N); (afun || exit 1; t=$(date +%s.%N); td=$(echo "scale=3; $t - $t0" | tbc); bd=$(echo "scale=3; $bpm / $py" | tbc); printf '%4s sec, %4s orig, %6s py, %4s div, %s\n' $td $bpm $py $bd "$fn") | tee -a /dev/shm/mres.$ncore; rv=${PIPESTATUS[0]}; [ $rv -eq 0 ] || { echo "FAULT($rv): $fn"; }; done & done; wait 2>/dev/null; cat /dev/shm/mres.* | awk 'function prt(c) {printf "\033[3%sm%s\033[0m\n",c,$0} $8!="div,"{next} $5!~/^[0-9\.]+/{next} {meta=$3;det=$5;div=meta/det} div<0.7{det/=2} div>1.3{det*=2} {idet=sprintf("%.0f",det)} {idiff=idet-meta} meta>idet{idiff=meta-idet} idiff==0{n0++;prt(6);next} idiff==1{n1++;prt(3);next} idiff>10{nx++;prt(1);next} {n10++;prt(5)} END {printf "ok: %d 1off: %2s (%3s) 10off: %2s (%3s) fail: %2s\n",n0,n1,n0+n1,n10,n0+n1+n10,nx}'; te=$(date +%s); echo $((te-t00)) sec spent; }
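# the grading at the end of runfun is dense awk; a readable python sketch of the
# same idea (illustration only; same thresholds, octave errors folded before comparing):
def grade(meta_bpm, det_bpm):
    div = meta_bpm / det_bpm
    if div < 0.7: det_bpm /= 2  # detector reported double-tempo
    if div > 1.3: det_bpm *= 2  # detector reported half-tempo
    diff = abs(round(det_bpm) - meta_bpm)
    if diff == 0: return "ok"
    if diff == 1: return "1off"
    if diff > 10: return "fail"
    return "10off"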
|
||||
|
||||
|
||||
# ok: 8 1off: 62 ( 70) 10off: 86 (156) fail: 25 # 105 sec, librosa @ 8c archvm on 3700x w10
|
||||
# ok: 4 1off: 59 ( 63) 10off: 65 (128) fail: 53 # using original tags (bad)
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -t 60 /dev/shm/$core.wav || return 1; py="$(/home/ed/src/librosa/examples/beat_tracker.py /dev/shm/$core.wav x 2>&1 | awk 'BEGIN {v=1} /^Estimated tempo: /{v=$3} END {print v}')"; } runfun
|
||||
|
||||
|
||||
# ok: 119 1off: 5 (124) 10off: 8 (132) fail: 49 # 51 sec, vamp-example-fixedtempo
|
||||
# ok: 109 1off: 4 (113) 10off: 9 (122) fail: 59 # bad-tags
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 22050 -f f32le /dev/shm/$core.pcm || return 1; py="$(python3 -c 'import vamp; import numpy as np; f = open("/dev/shm/'$core'.pcm", "rb"); d = np.fromfile(f, dtype=np.float32); c = vamp.collect(d, 22050, "vamp-example-plugins:fixedtempo", parameters={"maxdflen":40}); print(c["list"][0]["label"].split(" ")[0])')"; }; runfun
|
||||
|
||||
|
||||
# ok: 102 1off: 61 (163) 10off: 12 (175) fail: 6 # 61 sec, vamp-qm-tempotracker
|
||||
# ok: 80 1off: 48 (128) 10off: 11 (139) fail: 42 # bad-tags
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 22050 -f f32le /dev/shm/$core.pcm || return 1; py="$(python3 -c 'import vamp; import numpy as np; f = open("/dev/shm/'$core'.pcm", "rb"); d = np.fromfile(f, dtype=np.float32); c = vamp.collect(d, 22050, "qm-vamp-plugins:qm-tempotracker", parameters={"inputtempo":150}); v = [float(x["label"].split(" ")[0]) for x in c["list"] if x["label"]]; v = list(sorted(v))[len(v)//4:-len(v)//4]; print(round(sum(v) / len(v), 1))')"; }; runfun
|
||||
|
||||
|
||||
# ok: 133 1off: 32 (165) 10off: 12 (177) fail: 3 # 51 sec, vamp-beatroot
|
||||
# ok: 101 1off: 22 (123) 10off: 16 (139) fail: 39 # bad-tags
|
||||
# note: some tracks fully fail to analyze (unlike the others which always provide a guess)
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 22050 -f f32le /dev/shm/$core.pcm || return 1; py="$(python3 -c 'import vamp; import numpy as np; f = open("/dev/shm/'$core'.pcm", "rb"); d = np.fromfile(f, dtype=np.float32); c = vamp.collect(d, 22050, "beatroot-vamp:beatroot"); cl=c["list"]; print(round(60*((len(cl)-1)/(float(cl[-1]["timestamp"]-cl[1]["timestamp"]))), 2))')"; }; runfun
|
||||
|
||||
|
||||
# ok: 124 1off: 9 (133) 10off: 40 (173) fail: 8 # 231 sec, essentia/full
|
||||
# ok: 109 1off: 8 (117) 10off: 22 (139) fail: 42 # bad-tags
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 /dev/shm/$core.wav || return 1; py="$(python3 -c 'import essentia; import essentia.standard as es; fe, fef = es.MusicExtractor(lowlevelStats=["mean", "stdev"], rhythmStats=["mean", "stdev"], tonalStats=["mean", "stdev"])("/dev/shm/'$core'.wav"); print("{:.2f}".format(fe["rhythm.bpm"]))')"; }; runfun
|
||||
|
||||
|
||||
# ok: 113 1off: 18 (131) 10off: 46 (177) fail: 4 # 134 sec, essentia/re2013
|
||||
# ok: 101 1off: 15 (116) 10off: 26 (142) fail: 39 # bad-tags
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 /dev/shm/$core.wav || return 1; py="$(python3 -c 'from essentia.standard import *; a=MonoLoader(filename="/dev/shm/'$core'.wav")(); bpm,beats,confidence,_,intervals=RhythmExtractor2013(method="multifeature")(a); print("{:.2f}".format(bpm))')"; }; runfun
|
||||
|
||||
|
||||
|
||||
########################################################################
|
||||
##
|
||||
## key detection
|
||||
##
|
||||
########################################################################
|
||||
|
||||
|
||||
|
||||
# console scriptlet reusing keytabs from browser.js
|
||||
var m=''; for (var a=0; a<24; a++) m += 's/\\|(' + maps["traktor_sharps"][a].trim() + "|" + maps["rekobo_classic"][a].trim() + "|" + maps["traktor_musical"][a].trim() + "|" + maps["traktor_open"][a].trim() + ')$/|' + maps["rekobo_alnum"][a].trim() + '/;'; console.log(m);
|
||||
|
||||
|
||||
# translate to camelot
|
||||
re='s/\|(B|B|B|6d)$/|1B/;s/\|(F#|F#|Gb|7d)$/|2B/;s/\|(C#|Db|Db|8d)$/|3B/;s/\|(G#|Ab|Ab|9d)$/|4B/;s/\|(D#|Eb|Eb|10d)$/|5B/;s/\|(A#|Bb|Bb|11d)$/|6B/;s/\|(F|F|F|12d)$/|7B/;s/\|(C|C|C|1d)$/|8B/;s/\|(G|G|G|2d)$/|9B/;s/\|(D|D|D|3d)$/|10B/;s/\|(A|A|A|4d)$/|11B/;s/\|(E|E|E|5d)$/|12B/;s/\|(G#m|Abm|Abm|6m)$/|1A/;s/\|(D#m|Ebm|Ebm|7m)$/|2A/;s/\|(A#m|Bbm|Bbm|8m)$/|3A/;s/\|(Fm|Fm|Fm|9m)$/|4A/;s/\|(Cm|Cm|Cm|10m)$/|5A/;s/\|(Gm|Gm|Gm|11m)$/|6A/;s/\|(Dm|Dm|Dm|12m)$/|7A/;s/\|(Am|Am|Am|1m)$/|8A/;s/\|(Em|Em|Em|2m)$/|9A/;s/\|(Bm|Bm|Bm|3m)$/|10A/;s/\|(F#m|F#m|Gbm|4m)$/|11A/;s/\|(C#m|Dbm|Dbm|5m)$/|12A/;'
|
||||
|
||||
|
||||
# runner/wrapper
|
||||
runfun() { cores=8; touch run; tbc() { bc | sed -r 's/(\.[0-9]{2}).*/\1/'; }; for ((core=0; core<$cores; core++)); do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db 'select dur.w, dur.v, key.v from mt key join mt dur on key.w = dur.w where key.k = "key" and dur.k = ".dur" order by dur.w' | uniq -w16 | grep -vE '(Off-Key|None)$' | sed -r "s/ //g;$re" | uniq -w16 | while IFS=\| read w dur bpm; do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db "select rd, fn from up where substr(w,1,16) = '$w'" | sed -r "s/^/$bpm /"; done| grep mir/cr | tr \| / | while read key fn; do [ -e run ] || break; n=$((n+1)); ncore=$((n%cores)); [ $ncore -eq $core ] || continue; t0=$(date +%s.%N); (afun || exit 1; t=$(date +%s.%N); td=$(echo "scale=3; $t - $t0" | tbc); [ "$key" = "$py" ] && c=2 || c=5; printf '%4s sec, %4s orig, \033[3%dm%4s py,\033[0m %s\n' $td "$key" $c "$py" "$fn") || break; done & done; time wait 2>/dev/null; }
|
||||
|
||||
|
||||
# ok: 26 1off: 10 2off: 1 fail: 3 # 15 sec, keyfinder
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 -t 60 /dev/shm/$core.wav || break; py="$(python3 -c 'import sys; import keyfinder; print(keyfinder.key(sys.argv[1]).camelot())' "/dev/shm/$core.wav")"; }; runfun
|
||||
|
||||
|
||||
# https://github.com/MTG/essentia/raw/master/src/examples/tutorial/example_key_by_steps_streaming.py
|
||||
# https://essentia.upf.edu/reference/std_Key.html # edma edmm braw bgate
|
||||
sed -ri 's/^(key = Key\().*/\1profileType="bgate")/' example_key_by_steps_streaming.py
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 -t 60 /dev/shm/$core.wav || break; py="$(python3 example_key_by_steps_streaming.py /dev/shm/$core.{wav,yml} 2>/dev/null | sed -r "s/ major//;s/ minor/m/;s/^/|/;$re;s/.//")"; }; runfun
|
||||
|
||||
|
||||
|
||||
########################################################################
|
||||
##
|
||||
## misc
|
||||
##
|
||||
########################################################################
|
||||
|
||||
|
||||
|
||||
python3 -m pip install --user vamp
|
||||
|
||||
import librosa
|
||||
d, r = librosa.load('/dev/shm/0.wav')
|
||||
d.dtype
|
||||
# dtype('float32')
|
||||
d.shape
|
||||
# (1323000,)
|
||||
d
|
||||
# array([-1.9614939e-08, 1.8037968e-08, -1.4106059e-08, ...,
|
||||
# 1.2024145e-01, 2.7462116e-01, 1.6202132e-01], dtype=float32)
|
||||
|
||||
|
||||
|
||||
import vamp
|
||||
c = vamp.collect(d, r, "vamp-example-plugins:fixedtempo")
|
||||
c
|
||||
# {'list': [{'timestamp': 0.005804988, 'duration': 9.999092971, 'label': '110.0 bpm', 'values': array([109.98116], dtype=float32)}]}
|
||||
|
||||
|
||||
|
||||
ffmpeg -ss 48 -i /mnt/Users/ed/Music/mir/cr-a/'I Beg You(ths Bootleg).wav' -ac 1 -ar 22050 -f f32le -t 60 /dev/shm/f32.pcm
|
||||
|
||||
import numpy as np
|
||||
f = open('/dev/shm/f32.pcm', 'rb')
|
||||
d = np.fromfile(f, dtype=np.float32)
|
||||
d
|
||||
array([-0.17803933, -0.27206388, -0.41586545, ..., -0.04940119,
|
||||
-0.0267825 , -0.03564296], dtype=float32)
|
||||
|
||||
d = np.reshape(d, [1, -1])
|
||||
d
|
||||
array([[-0.17803933, -0.27206388, -0.41586545, ..., -0.04940119,
|
||||
-0.0267825 , -0.03564296]], dtype=float32)
|
||||
|
||||
|
||||
|
||||
import vampyhost
|
||||
print("\n".join(vampyhost.list_plugins()))
|
||||
|
||||
mvamp:marsyas_bextract_centroid
|
||||
mvamp:marsyas_bextract_lpcc
|
||||
mvamp:marsyas_bextract_lsp
|
||||
mvamp:marsyas_bextract_mfcc
|
||||
mvamp:marsyas_bextract_rolloff
|
||||
mvamp:marsyas_bextract_scf
|
||||
mvamp:marsyas_bextract_sfm
|
||||
mvamp:marsyas_bextract_zero_crossings
|
||||
mvamp:marsyas_ibt
|
||||
mvamp:zerocrossing
|
||||
qm-vamp-plugins:qm-adaptivespectrogram
|
||||
qm-vamp-plugins:qm-barbeattracker
|
||||
qm-vamp-plugins:qm-chromagram
|
||||
qm-vamp-plugins:qm-constantq
|
||||
qm-vamp-plugins:qm-dwt
|
||||
qm-vamp-plugins:qm-keydetector
|
||||
qm-vamp-plugins:qm-mfcc
|
||||
qm-vamp-plugins:qm-onsetdetector
|
||||
qm-vamp-plugins:qm-segmenter
|
||||
qm-vamp-plugins:qm-similarity
|
||||
qm-vamp-plugins:qm-tempotracker
|
||||
qm-vamp-plugins:qm-tonalchange
|
||||
qm-vamp-plugins:qm-transcription
|
||||
vamp-aubio:aubiomelenergy
|
||||
vamp-aubio:aubiomfcc
|
||||
vamp-aubio:aubionotes
|
||||
vamp-aubio:aubioonset
|
||||
vamp-aubio:aubiopitch
|
||||
vamp-aubio:aubiosilence
|
||||
vamp-aubio:aubiospecdesc
|
||||
vamp-aubio:aubiotempo
|
||||
vamp-example-plugins:amplitudefollower
|
||||
vamp-example-plugins:fixedtempo
|
||||
vamp-example-plugins:percussiononsets
|
||||
vamp-example-plugins:powerspectrum
|
||||
vamp-example-plugins:spectralcentroid
|
||||
vamp-example-plugins:zerocrossing
|
||||
vamp-rubberband:rubberband
|
||||
|
||||
|
||||
|
||||
plug = vampyhost.load_plugin("vamp-example-plugins:fixedtempo", 22050, 0)
|
||||
plug.info
|
||||
{'apiVersion': 2, 'pluginVersion': 1, 'identifier': 'fixedtempo', 'name': 'Simple Fixed Tempo Estimator', 'description': 'Study a short section of audio and estimate its tempo, assuming the tempo is constant', 'maker': 'Vamp SDK Example Plugins', 'copyright': 'Code copyright 2008 Queen Mary, University of London. Freely redistributable (BSD license)'}
|
||||
plug = vampyhost.load_plugin("qm-vamp-plugins:qm-tempotracker", 22050, 0)
|
||||
from pprint import pprint; pprint(plug.parameters)
|
||||
|
||||
|
||||
|
||||
for c in plug.parameters: print("{} \033[36m{} [\033[33m{}\033[36m] = {}\033[0m".format(c["identifier"], c["name"], "\033[36m, \033[33m".join(c["valueNames"]), c["valueNames"][int(c["defaultValue"])])) if "valueNames" in c else print("{} \033[36m{} [\033[33m{}..{}\033[36m] = {}\033[0m".format(c["identifier"], c["name"], c["minValue"], c["maxValue"], c["defaultValue"]))
|
||||
|
||||
|
||||
|
||||
beatroot-vamp:beatroot
|
||||
cl=c["list"]; 60*((len(cl)-1)/(float(cl[-1]["timestamp"]-cl[1]["timestamp"])))
|
||||
|
||||
|
||||
|
||||
ffmpeg -ss 48 -i /mnt/Users/ed/Music/mir/cr-a/'I Beg You(ths Bootleg).wav' -ac 1 -ar 22050 -f f32le -t 60 /dev/shm/f32.pcm
|
||||
# 128 bpm, key 5A Cm
|
||||
|
||||
import vamp
|
||||
import numpy as np
|
||||
f = open('/dev/shm/f32.pcm', 'rb')
|
||||
d = np.fromfile(f, dtype=np.float32)
|
||||
c = vamp.collect(d, 22050, "vamp-example-plugins:fixedtempo", parameters={"maxdflen":40})
|
||||
c["list"][0]["label"]
|
||||
# 127.6 bpm
|
||||
|
||||
c = vamp.collect(d, 22050, "qm-vamp-plugins:qm-tempotracker", parameters={"inputtempo":150})
|
||||
print("\n".join([v["label"] for v in c["list"] if v["label"]]))
|
||||
v = [float(x["label"].split(' ')[0]) for x in c["list"] if x["label"]]
|
||||
v = list(sorted(v))[len(v)//4:-len(v)//4]
|
||||
v = sum(v) / len(v)
|
||||
# 128.1 bpm
|
||||
|
@@ -3,6 +3,21 @@ echo not a script
|
||||
exit 1
|
||||
|
||||
|
||||
##
|
||||
## delete all partial uploads
|
||||
## (supports linux/macos, probably windows+msys2)
|
||||
|
||||
gzip -d < .hist/up2k.snap | jq -r '.[].tnam' | while IFS= read -r f; do rm -f -- "$f"; done
|
||||
gzip -d < .hist/up2k.snap | jq -r '.[].name' | while IFS= read -r f; do wc -c -- "$f" | grep -qiE '^[^0-9a-z]*0' && rm -f -- "$f"; done
|
||||
|
||||
|
||||
##
|
||||
## detect partial uploads based on file contents
|
||||
## (in case of context loss or old copyparties)
|
||||
|
||||
echo; find -type f | while IFS= read -r x; do printf '\033[A\033[36m%s\033[K\033[0m\n' "$x"; tail -c$((1024*1024)) <"$x" | xxd -a | awk 'NR==1&&/^[0: ]+.{16}$/{next} NR==2&&/^\*$/{next} NR==3&&/^[0f]+: [0 ]+65 +.{16}$/{next} {e=1} END {exit e}' || continue; printf '\033[A\033[31msus:\033[33m %s \033[0m\n\n' "$x"; done
|
||||
|
||||
|
||||
##
|
||||
## create a test payload
|
||||
|
||||
@@ -52,6 +67,33 @@ wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:392
|
||||
shab64() { sp=$1; f="$2"; v=0; sz=$(stat -c%s "$f"); while true; do w=$((v+sp*1024*1024)); printf $(tail -c +$((v+1)) "$f" | head -c $((w-v)) | sha512sum | cut -c-64 | sed -r 's/ .*//;s/(..)/\\x\1/g') | base64 -w0 | cut -c-43 | tr '+/' '-_'; v=$w; [ $v -lt $sz ] || break; done; }
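# same thing in python, as a sketch: one base64url hash per chunk, being the first
# 32 bytes of the chunk's sha512, b64-encoded and truncated to 43 chars
import base64, hashlib
def shab64(chunk_mib, path):
    ret, bufsz = [], chunk_mib * 1024 * 1024
    with open(path, "rb") as f:
        while True:
            buf = f.read(bufsz)
            if not buf:
                break
            digest = hashlib.sha512(buf).digest()[:32]
            ret.append(base64.urlsafe_b64encode(digest).decode("ascii")[:43])
    return ret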
|
||||
|
||||
|
||||
##
|
||||
## poll url for performance issues
|
||||
|
||||
command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (time wget http://127.0.0.1:3923/?ls -qO- | jq -C '.files[]|{sz:.sz,ta:.tags.artist,tb:.tags.".bpm"}|del(.[]|select(.==null))' | awk -F\" '/"/{t[$2]++} END {for (k in t){v=t[k];p=sprintf("%" (v+1) "s",v);gsub(/ /,"#",p);printf "\033[36m%s\033[33m%s ",k,p}}') 2>&1 | awk -v ts=$t 'NR==1{t1=$0} NR==2{sub(/.*0m/,"");sub(/s$/,"");t2=$0;c=2; if(t2>0.3){c=3} if(t2>0.8){c=1} } END{sub(/[0-9]{6}$/,"",ts);printf "%s \033[3%dm%s %s\033[0m\n",ts,c,t2,t1}'; sleep 0.1 || break; done
|
||||
|
||||
|
||||
##
|
||||
## sqlite3 stuff
|
||||
|
||||
# find dupe metadata keys
|
||||
sqlite3 up2k.db 'select mt1.w, mt1.k, mt1.v, mt2.v from mt mt1 inner join mt mt2 on mt1.w = mt2.w where mt1.k = mt2.k and mt1.rowid != mt2.rowid'
|
||||
|
||||
# partial reindex by deleting all tags for a list of files
|
||||
time sqlite3 up2k.db 'select mt1.w from mt mt1 inner join mt mt2 on mt1.w = mt2.w where mt1.k = +mt2.k and mt1.rowid != mt2.rowid' > warks
|
||||
cat warks | while IFS= read -r x; do sqlite3 up2k.db "delete from mt where w = '$x'"; done
|
||||
|
||||
|
||||
##
|
||||
## media
|
||||
|
||||
# split track into test files
|
||||
e=6; s=10; d=~/dev/copyparty/srv/aus; n=1; p=0; e=$((e*60)); rm -rf $d; mkdir $d; while true; do ffmpeg -hide_banner -ss $p -i 'nervous_testpilot - office.mp3' -c copy -t $s $d/$(printf %04d $n).mp3; n=$((n+1)); p=$((p+s)); [ $p -gt $e ] && break; done
|
||||
|
||||
-v srv/aus:aus:r:ce2dsa:ce2ts:cmtp=fgsfds=bin/mtag/sleep.py
|
||||
sqlite3 .hist/up2k.db 'select * from mt where k="fgsfds" or k="t:mtp"' | tee /dev/stderr | wc -l
|
||||
|
||||
|
||||
##
|
||||
## vscode
|
||||
|
||||
@@ -81,6 +123,9 @@ for d in /usr /var; do find $d -type f -size +30M 2>/dev/null; done | while IFS=
|
||||
brew install python@2
|
||||
pip install virtualenv
|
||||
|
||||
# readme toc
|
||||
cat README.md | awk '!/^#/{next} {lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab)} {printf "%" ((lv-1)*4+1) "s [%s](#%s)\n", "*",$0,bab}'
|
||||
|
||||
|
||||
##
|
||||
## http 206
|
||||
|
62
docs/rclone.md
Normal file
@@ -0,0 +1,62 @@
|
||||
# using rclone to mount a remote copyparty server as a local filesystem
|
||||
|
||||
speed estimates with server and client on the same win10 machine:
|
||||
* `1070 MiB/s` with rclone as both server and client
|
||||
* `570 MiB/s` with rclone-client and `copyparty -ed -j16` as server
|
||||
* `220 MiB/s` with rclone-client and `copyparty -ed` as server
|
||||
* `100 MiB/s` with [../bin/copyparty-fuse.py](../bin/copyparty-fuse.py) as client
|
||||
|
||||
when the server is on another machine (1gbit LAN):
|
||||
* `75 MiB/s` with [../bin/copyparty-fuse.py](../bin/copyparty-fuse.py) as client
|
||||
* `92 MiB/s` with rclone-client and `copyparty -ed` as server
|
||||
* `103 MiB/s` (connection max) with `copyparty -ed -j16` and all the others
|
||||
|
||||
|
||||
# creating the config file
|
||||
|
||||
if you want to use password auth, add `headers = Cookie,cppwd=fgsfds` below
|
||||
|
||||
|
||||
### on windows clients:
|
||||
```
|
||||
(
|
||||
echo [cpp]
|
||||
echo type = http
|
||||
echo url = http://127.0.0.1:3923/
|
||||
) > %userprofile%\.config\rclone\rclone.conf
|
||||
```
|
||||
|
||||
also install the windows dependencies: [winfsp](https://github.com/billziss-gh/winfsp/releases/latest)
|
||||
|
||||
|
||||
### on unix clients:
|
||||
```
|
||||
cat > ~/.config/rclone/rclone.conf <<'EOF'
|
||||
[cpp]
|
||||
type = http
|
||||
url = http://127.0.0.1:3923/
|
||||
EOF
|
||||
```
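
with password auth (the `headers` line mentioned above), the config would look something like this:

```
[cpp]
type = http
url = http://127.0.0.1:3923/
headers = Cookie,cppwd=fgsfds
```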
|
||||
|
||||
|
||||
# mounting the copyparty server locally
|
||||
```
|
||||
rclone.exe mount --vfs-cache-max-age 5s --attr-timeout 5s --dir-cache-time 5s cpp: Z:
|
||||
```
|
||||
|
||||
|
||||
# use rclone as server too, replacing copyparty
|
||||
|
||||
feels out of place but is too good not to mention
|
||||
|
||||
```
|
||||
rclone.exe serve http --read-only .
|
||||
```
|
||||
|
||||
* `webdav` gives write-access but `http` is twice as fast (see the sketch below)
|
||||
* `ftp` is buggy, avoid
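
if you need write-access through rclone, serving webdav instead should work (untested sketch):

```
rclone.exe serve webdav .
```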
|
||||
|
||||
|
||||
# bugs
|
||||
|
||||
* rclone-client throws an exception if you try to read an empty file (should return zero bytes)
|
130
scripts/copyparty-repack.sh
Executable file
@@ -0,0 +1,130 @@
|
||||
#!/bin/bash
|
||||
repacker=1
|
||||
set -e
|
||||
|
||||
# -- download latest copyparty (source.tgz and sfx),
|
||||
# -- build minimal sfx versions,
|
||||
# -- create a .tar.gz bundle
|
||||
#
|
||||
# convenient for deploying updates to inconvenient locations
|
||||
# (and those are usually linux so bash is good inaff)
|
||||
# (but that said this even has macos support)
|
||||
#
|
||||
# bundle will look like:
|
||||
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty
|
||||
# -rw-r--r-- 0 ed ed 491318 Nov 19 00:40 copyparty-extras/copyparty-0.5.4.tar.gz
|
||||
# -rwxr-xr-x 0 ed ed 30254 Nov 17 23:58 copyparty-extras/copyparty-fuse.py
|
||||
# -rwxr-xr-x 0 ed ed 481403 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.sh
|
||||
# -rwxr-xr-x 0 ed ed 506043 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.py
|
||||
# -rwxr-xr-x 0 ed ed 167699 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.sh
|
||||
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py
|
||||
|
||||
|
||||
command -v gnutar && tar() { gnutar "$@"; }
|
||||
command -v gtar && tar() { gtar "$@"; }
|
||||
command -v gsed && sed() { gsed "$@"; }
|
||||
td="$(mktemp -d)"
|
||||
od="$(pwd)"
|
||||
cd "$td"
|
||||
pwd
|
||||
|
||||
|
||||
dl_text() {
|
||||
command -v curl >/dev/null && exec curl "$@"
|
||||
exec wget -O- "$@"
|
||||
}
|
||||
dl_files() {
|
||||
command -v curl >/dev/null && exec curl -L --remote-name-all "$@"
|
||||
exec wget "$@"
|
||||
}
|
||||
export -f dl_files
|
||||
|
||||
|
||||
# if cache exists, use that instead of bothering github
|
||||
cache="$od/.copyparty-repack.cache"
|
||||
[ -e "$cache" ] &&
|
||||
tar -xf "$cache" ||
|
||||
{
|
||||
# get download links from github
|
||||
dl_text https://api.github.com/repos/9001/copyparty/releases/latest |
|
||||
(
|
||||
# prefer jq if available
|
||||
jq -r '.assets[]|select(.name|test("-sfx|tar.gz")).browser_download_url' ||
|
||||
|
||||
# fallback to awk (sorry)
|
||||
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
|
||||
tar -czf "$cache" *
|
||||
}
|
||||
|
||||
|
||||
# move src into copyparty-extras/,
|
||||
# move sfx into copyparty-extras/sfx-full/
|
||||
mkdir -p copyparty-extras/sfx-{full,lite}
|
||||
mv copyparty-sfx.* copyparty-extras/sfx-full/
|
||||
mv copyparty-*.tar.gz copyparty-extras/
|
||||
|
||||
|
||||
# unpack the source code
|
||||
( cd copyparty-extras/
|
||||
tar -xf *.tar.gz
|
||||
)
|
||||
|
||||
|
||||
# use repacker from release if that is newer
|
||||
p_other=copyparty-extras/copyparty-*/scripts/copyparty-repack.sh
|
||||
other=$(awk -F= 'BEGIN{v=-1} NR<10&&/^repacker=/{v=$NF} END{print v}' <$p_other)
|
||||
[ $repacker -lt $other ] &&
|
||||
cat $p_other >"$od/$0" && cd "$od" && rm -rf "$td" && exec "$0" "$@"
|
||||
|
||||
|
||||
# now drop the cache
|
||||
rm -f "$cache"
|
||||
|
||||
|
||||
# fix permissions
|
||||
chmod 755 \
|
||||
copyparty-extras/sfx-full/* \
|
||||
copyparty-extras/copyparty-*/{scripts,bin}/*
|
||||
|
||||
|
||||
# extract and repack the sfx with less features enabled
|
||||
( cd copyparty-extras/sfx-full/
|
||||
./copyparty-sfx.py -h
|
||||
cd ../copyparty-*/
|
||||
./scripts/make-sfx.sh re no-ogv no-cm
|
||||
)
|
||||
|
||||
|
||||
# put new sfx into copyparty-extras/sfx-lite/,
|
||||
# fuse client into copyparty-extras/,
|
||||
# copy lite-sfx.py to ./copyparty,
|
||||
# delete extracted source code
|
||||
( cd copyparty-extras/
|
||||
mv copyparty-*/dist/* sfx-lite/
|
||||
mv copyparty-*/bin/copyparty-fuse.py .
|
||||
cp -pv sfx-lite/copyparty-sfx.py ../copyparty
|
||||
rm -rf copyparty-{0..9}*.*.*{0..9}
|
||||
)
|
||||
|
||||
|
||||
# and include the repacker itself too
|
||||
cp -av "$od/$0" copyparty-extras/ ||
|
||||
cp -av "$0" copyparty-extras/ ||
|
||||
true
|
||||
|
||||
|
||||
# create the bundle
|
||||
fn=copyparty-$(date +%Y-%m%d-%H%M%S).tgz
|
||||
tar -czvf "$od/$fn" *
|
||||
cd "$od"
|
||||
rm -rf "$td"
|
||||
|
||||
|
||||
echo
|
||||
echo "done, here's your bundle:"
|
||||
ls -al "$fn"
|
@@ -1,12 +1,10 @@
|
||||
FROM alpine:3.11
|
||||
FROM alpine:3.13
|
||||
WORKDIR /z
|
||||
ENV ver_asmcrypto=2821dd1dedd1196c378f5854037dda5c869313f3 \
|
||||
ver_markdownit=10.0.0 \
|
||||
ver_showdown=1.9.1 \
|
||||
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
|
||||
ver_marked=1.1.0 \
|
||||
ver_ogvjs=1.6.1 \
|
||||
ver_mde=2.10.1 \
|
||||
ver_codemirror=5.53.2 \
|
||||
ver_ogvjs=1.8.0 \
|
||||
ver_mde=2.14.0 \
|
||||
ver_codemirror=5.59.3 \
|
||||
ver_fontawesome=5.13.0 \
|
||||
ver_zopfli=1.0.3
|
||||
|
||||
@@ -17,7 +15,7 @@ RUN mkdir -p /z/dist/no-pk \
|
||||
&& wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \
|
||||
&& apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \
|
||||
&& wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \
|
||||
&& wget https://github.com/asmcrypto/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
|
||||
&& wget https://github.com/openpgpjs/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
|
||||
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
|
||||
&& wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \
|
||||
&& wget https://github.com/codemirror/CodeMirror/archive/$ver_codemirror.tar.gz -O codemirror.tgz \
|
||||
@@ -52,6 +50,7 @@ RUN tar -xf zopfli.tgz \
|
||||
-S . \
|
||||
&& make -C build \
|
||||
&& make -C build install \
|
||||
&& python3 -m ensurepip \
|
||||
&& python3 -m pip install fonttools zopfli
|
||||
|
||||
|
||||
|
@@ -1,6 +1,6 @@
|
||||
diff -NarU2 CodeMirror-orig/mode/gfm/gfm.js CodeMirror-edit/mode/gfm/gfm.js
|
||||
--- CodeMirror-orig/mode/gfm/gfm.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/mode/gfm/gfm.js 2020-05-02 02:13:32.142131800 +0200
|
||||
diff -NarU2 codemirror-5.59.3-orig/mode/gfm/gfm.js codemirror-5.59.3/mode/gfm/gfm.js
|
||||
--- codemirror-5.59.3-orig/mode/gfm/gfm.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/mode/gfm/gfm.js 2021-02-21 20:42:02.166174775 +0000
|
||||
@@ -97,5 +97,5 @@
|
||||
}
|
||||
}
|
||||
@@ -15,9 +15,9 @@ diff -NarU2 CodeMirror-orig/mode/gfm/gfm.js CodeMirror-edit/mode/gfm/gfm.js
|
||||
+ }*/
|
||||
stream.next();
|
||||
return null;
|
||||
diff -NarU2 CodeMirror-orig/mode/meta.js CodeMirror-edit/mode/meta.js
|
||||
--- CodeMirror-orig/mode/meta.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/mode/meta.js 2020-05-02 03:56:58.852408400 +0200
|
||||
diff -NarU2 codemirror-5.59.3-orig/mode/meta.js codemirror-5.59.3/mode/meta.js
|
||||
--- codemirror-5.59.3-orig/mode/meta.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/mode/meta.js 2021-02-21 20:42:54.798742821 +0000
|
||||
@@ -13,4 +13,5 @@
|
||||
|
||||
CodeMirror.modeInfo = [
|
||||
@@ -28,7 +28,7 @@ diff -NarU2 CodeMirror-orig/mode/meta.js CodeMirror-edit/mode/meta.js
|
||||
{name: "Gas", mime: "text/x-gas", mode: "gas", ext: ["s"]},
|
||||
{name: "Gherkin", mime: "text/x-feature", mode: "gherkin", ext: ["feature"]},
|
||||
+ */
|
||||
{name: "GitHub Flavored Markdown", mime: "text/x-gfm", mode: "gfm", file: /^(readme|contributing|history).md$/i},
|
||||
{name: "GitHub Flavored Markdown", mime: "text/x-gfm", mode: "gfm", file: /^(readme|contributing|history)\.md$/i},
|
||||
+ /*
|
||||
{name: "Go", mime: "text/x-go", mode: "go", ext: ["go"]},
|
||||
{name: "Groovy", mime: "text/x-groovy", mode: "groovy", ext: ["groovy", "gradle"], file: /^Jenkinsfile$/},
|
||||
@@ -56,16 +56,16 @@ diff -NarU2 CodeMirror-orig/mode/meta.js CodeMirror-edit/mode/meta.js
|
||||
+ /*
|
||||
{name: "XQuery", mime: "application/xquery", mode: "xquery", ext: ["xy", "xquery"]},
|
||||
{name: "Yacas", mime: "text/x-yacas", mode: "yacas", ext: ["ys"]},
|
||||
@@ -171,4 +180,5 @@
|
||||
{name: "xu", mime: "text/x-xu", mode: "mscgen", ext: ["xu"]},
|
||||
{name: "msgenny", mime: "text/x-msgenny", mode: "mscgen", ext: ["msgenny"]}
|
||||
@@ -172,4 +181,5 @@
|
||||
{name: "msgenny", mime: "text/x-msgenny", mode: "mscgen", ext: ["msgenny"]},
|
||||
{name: "WebAssembly", mime: "text/webassembly", mode: "wast", ext: ["wat", "wast"]},
|
||||
+ */
|
||||
];
|
||||
// Ensure all modes have a mime property for backwards compatibility
|
||||
diff -NarU2 CodeMirror-orig/src/display/selection.js CodeMirror-edit/src/display/selection.js
|
||||
--- CodeMirror-orig/src/display/selection.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/display/selection.js 2020-05-02 03:27:30.144662800 +0200
|
||||
@@ -83,29 +83,21 @@
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/display/selection.js codemirror-5.59.3/src/display/selection.js
|
||||
--- codemirror-5.59.3-orig/src/display/selection.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/display/selection.js 2021-02-21 20:44:14.860894328 +0000
|
||||
@@ -84,29 +84,21 @@
|
||||
let order = getOrder(lineObj, doc.direction)
|
||||
iterateBidiSections(order, fromArg || 0, toArg == null ? lineLen : toArg, (from, to, dir, i) => {
|
||||
- let ltr = dir == "ltr"
|
||||
@@ -105,24 +105,24 @@ diff -NarU2 CodeMirror-orig/src/display/selection.js CodeMirror-edit/src/display
|
||||
+ botRight = openEnd && last ? rightSide : toPos.right
|
||||
add(topLeft, fromPos.top, topRight - topLeft, fromPos.bottom)
|
||||
if (fromPos.bottom < toPos.top) add(leftSide, fromPos.bottom, null, toPos.top)
|
||||
diff -NarU2 CodeMirror-orig/src/input/ContentEditableInput.js CodeMirror-edit/src/input/ContentEditableInput.js
|
||||
--- CodeMirror-orig/src/input/ContentEditableInput.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/input/ContentEditableInput.js 2020-05-02 03:33:05.707995500 +0200
|
||||
@@ -391,4 +391,5 @@
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/input/ContentEditableInput.js codemirror-5.59.3/src/input/ContentEditableInput.js
|
||||
--- codemirror-5.59.3-orig/src/input/ContentEditableInput.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/input/ContentEditableInput.js 2021-02-21 20:44:33.273953867 +0000
|
||||
@@ -399,4 +399,5 @@
|
||||
let info = mapFromLineView(view, line, pos.line)
|
||||
|
||||
+ /*
|
||||
let order = getOrder(line, cm.doc.direction), side = "left"
|
||||
if (order) {
|
||||
@@ -396,4 +397,5 @@
|
||||
@@ -404,4 +405,5 @@
|
||||
side = partPos % 2 ? "right" : "left"
|
||||
}
|
||||
+ */
|
||||
let result = nodeAndOffsetInLineMap(info.map, pos.ch, side)
|
||||
result.offset = result.collapse == "right" ? result.end : result.start
|
||||
diff -NarU2 CodeMirror-orig/src/input/movement.js CodeMirror-edit/src/input/movement.js
|
||||
--- CodeMirror-orig/src/input/movement.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/input/movement.js 2020-05-02 03:31:19.710773500 +0200
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/input/movement.js codemirror-5.59.3/src/input/movement.js
|
||||
--- codemirror-5.59.3-orig/src/input/movement.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/input/movement.js 2021-02-21 20:45:12.763093671 +0000
|
||||
@@ -15,4 +15,5 @@
|
||||
|
||||
export function endOfLine(visually, cm, lineObj, lineNo, dir) {
|
||||
@@ -146,9 +146,9 @@ diff -NarU2 CodeMirror-orig/src/input/movement.js CodeMirror-edit/src/input/move
|
||||
return null
|
||||
+ */
|
||||
}
|
||||
diff -NarU2 CodeMirror-orig/src/line/line_data.js CodeMirror-edit/src/line/line_data.js
|
||||
--- CodeMirror-orig/src/line/line_data.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/line/line_data.js 2020-05-02 03:17:02.785065000 +0200
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/line/line_data.js codemirror-5.59.3/src/line/line_data.js
|
||||
--- codemirror-5.59.3-orig/src/line/line_data.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/line/line_data.js 2021-02-21 20:45:36.472549599 +0000
|
||||
@@ -79,6 +79,6 @@
|
||||
// Optionally wire in some hacks into the token-rendering
|
||||
// algorithm, to deal with browser quirks.
|
||||
@@ -158,9 +158,9 @@ diff -NarU2 CodeMirror-orig/src/line/line_data.js CodeMirror-edit/src/line/line_
|
||||
+ // builder.addToken = buildTokenBadBidi(builder.addToken, order)
|
||||
builder.map = []
|
||||
let allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line)
|
||||
diff -NarU2 CodeMirror-orig/src/measurement/position_measurement.js CodeMirror-edit/src/measurement/position_measurement.js
|
||||
--- CodeMirror-orig/src/measurement/position_measurement.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/measurement/position_measurement.js 2020-05-02 03:35:20.674159600 +0200
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/measurement/position_measurement.js codemirror-5.59.3/src/measurement/position_measurement.js
|
||||
--- codemirror-5.59.3-orig/src/measurement/position_measurement.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/measurement/position_measurement.js 2021-02-21 20:50:52.372945293 +0000
|
||||
@@ -380,5 +380,6 @@
|
||||
sticky = "after"
|
||||
}
|
||||
@@ -199,9 +199,9 @@ diff -NarU2 CodeMirror-orig/src/measurement/position_measurement.js CodeMirror-e
|
||||
+*/
|
||||
|
||||
let measureText
|
||||
diff -NarU2 CodeMirror-orig/src/util/bidi.js CodeMirror-edit/src/util/bidi.js
|
||||
--- CodeMirror-orig/src/util/bidi.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/util/bidi.js 2020-05-02 03:12:44.418649800 +0200
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/util/bidi.js codemirror-5.59.3/src/util/bidi.js
|
||||
--- codemirror-5.59.3-orig/src/util/bidi.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/util/bidi.js 2021-02-21 20:52:18.168092225 +0000
|
||||
@@ -4,5 +4,5 @@
|
||||
|
||||
export function iterateBidiSections(order, from, to, f) {
|
||||
@@ -239,20 +239,19 @@ diff -NarU2 CodeMirror-orig/src/util/bidi.js CodeMirror-edit/src/util/bidi.js
|
||||
+ var fun = function(str, direction) {
|
||||
let outerType = direction == "ltr" ? "L" : "R"
|
||||
|
||||
@@ -204,12 +210,16 @@
|
||||
@@ -204,5 +210,11 @@
|
||||
return direction == "rtl" ? order.reverse() : order
|
||||
}
|
||||
-})()
|
||||
|
||||
+ return function(str, direction) {
|
||||
+ var ret = fun(str, direction);
|
||||
+ console.log("bidiOrdering inner ([%s], %s) => [%s]", str, direction, ret);
|
||||
+ return ret;
|
||||
+ }
|
||||
+})()
|
||||
})()
|
||||
+*/
|
||||
|
||||
// Get the bidi ordering for the given line (and cache it). Returns
|
||||
// false for lines that are fully left-to-right, and an array of
|
||||
@@ -210,6 +222,4 @@
|
||||
// BidiSpan objects otherwise.
|
||||
export function getOrder(line, direction) {
|
||||
- let order = line.order
|
||||
@@ -260,9 +259,9 @@ diff -NarU2 CodeMirror-orig/src/util/bidi.js CodeMirror-edit/src/util/bidi.js
|
||||
- return order
|
||||
+ return false;
|
||||
}
|
||||
diff -NarU2 CodeMirror-orig/src/util/feature_detection.js CodeMirror-edit/src/util/feature_detection.js
|
||||
--- CodeMirror-orig/src/util/feature_detection.js 2020-04-21 12:47:20.000000000 +0200
|
||||
+++ CodeMirror-edit/src/util/feature_detection.js 2020-05-02 03:16:21.085621400 +0200
|
||||
diff -NarU2 codemirror-5.59.3-orig/src/util/feature_detection.js codemirror-5.59.3/src/util/feature_detection.js
|
||||
--- codemirror-5.59.3-orig/src/util/feature_detection.js 2021-02-20 21:24:57.000000000 +0000
|
||||
+++ codemirror-5.59.3/src/util/feature_detection.js 2021-02-21 20:49:22.191269270 +0000
|
||||
@@ -25,4 +25,5 @@
|
||||
}
|
||||
|
||||
|
@@ -1,33 +1,57 @@
|
||||
diff -NarU2 easymde-orig/gulpfile.js easymde-mod1/gulpfile.js
|
||||
--- easymde-orig/gulpfile.js 2020-04-06 14:09:36.000000000 +0200
|
||||
+++ easymde-mod1/gulpfile.js 2020-05-01 14:33:52.260175200 +0200
|
||||
diff -NarU2 easy-markdown-editor-2.14.0-orig/gulpfile.js easy-markdown-editor-2.14.0/gulpfile.js
|
||||
--- easy-markdown-editor-2.14.0-orig/gulpfile.js 2021-02-14 12:11:48.000000000 +0000
|
||||
+++ easy-markdown-editor-2.14.0/gulpfile.js 2021-02-21 20:55:37.134701007 +0000
|
||||
@@ -25,5 +25,4 @@
|
||||
'./node_modules/codemirror/lib/codemirror.css',
|
||||
'./src/css/*.css',
|
||||
- './node_modules/codemirror-spell-checker/src/css/spell-checker.css',
|
||||
];
|
||||
|
||||
diff -NarU2 easymde-orig/package.json easymde-mod1/package.json
|
||||
--- easymde-orig/package.json 2020-04-06 14:09:36.000000000 +0200
|
||||
+++ easymde-mod1/package.json 2020-05-01 14:33:57.189975800 +0200
|
||||
diff -NarU2 easy-markdown-editor-2.14.0-orig/package.json easy-markdown-editor-2.14.0/package.json
|
||||
--- easy-markdown-editor-2.14.0-orig/package.json 2021-02-14 12:11:48.000000000 +0000
|
||||
+++ easy-markdown-editor-2.14.0/package.json 2021-02-21 20:55:47.761190082 +0000
|
||||
@@ -21,5 +21,4 @@
|
||||
"dependencies": {
|
||||
"codemirror": "^5.52.2",
|
||||
"codemirror": "^5.59.2",
|
||||
- "codemirror-spell-checker": "1.1.2",
|
||||
"marked": "^0.8.2"
|
||||
"marked": "^2.0.0"
|
||||
},
|
||||
diff -NarU2 easymde-orig/src/js/easymde.js easymde-mod1/src/js/easymde.js
|
||||
--- easymde-orig/src/js/easymde.js 2020-04-06 14:09:36.000000000 +0200
|
||||
+++ easymde-mod1/src/js/easymde.js 2020-05-01 14:34:19.878774400 +0200
|
||||
@@ -11,5 +11,4 @@
|
||||
diff -NarU2 easy-markdown-editor-2.14.0-orig/src/js/easymde.js easy-markdown-editor-2.14.0/src/js/easymde.js
|
||||
--- easy-markdown-editor-2.14.0-orig/src/js/easymde.js 2021-02-14 12:11:48.000000000 +0000
|
||||
+++ easy-markdown-editor-2.14.0/src/js/easymde.js 2021-02-21 20:57:09.143171536 +0000
|
||||
@@ -12,5 +12,4 @@
|
||||
require('codemirror/mode/gfm/gfm.js');
|
||||
require('codemirror/mode/xml/xml.js');
|
||||
-var CodeMirrorSpellChecker = require('codemirror-spell-checker');
|
||||
var marked = require('marked/lib/marked');
|
||||
|
||||
@@ -1889,18 +1888,7 @@
|
||||
@@ -1762,9 +1761,4 @@
|
||||
options.autosave.uniqueId = options.autosave.unique_id;
|
||||
|
||||
- // If overlay mode is specified and combine is not provided, default it to true
|
||||
- if (options.overlayMode && options.overlayMode.combine === undefined) {
|
||||
- options.overlayMode.combine = true;
|
||||
- }
|
||||
-
|
||||
// Update this options
|
||||
this.options = options;
|
||||
@@ -2003,28 +1997,7 @@
|
||||
var mode, backdrop;
|
||||
|
||||
- // CodeMirror overlay mode
|
||||
- if (options.overlayMode) {
|
||||
- CodeMirror.defineMode('overlay-mode', function(config) {
|
||||
- return CodeMirror.overlayMode(CodeMirror.getMode(config, options.spellChecker !== false ? 'spell-checker' : 'gfm'), options.overlayMode.mode, options.overlayMode.combine);
|
||||
- });
|
||||
-
|
||||
- mode = 'overlay-mode';
|
||||
- backdrop = options.parsingConfig;
|
||||
- backdrop.gitHubSpice = false;
|
||||
- } else {
|
||||
mode = options.parsingConfig;
|
||||
mode.name = 'gfm';
|
||||
mode.gitHubSpice = false;
|
||||
- }
|
||||
- if (options.spellChecker !== false) {
|
||||
- mode = 'spell-checker';
|
||||
- backdrop = options.parsingConfig;
|
||||
@@ -37,16 +61,28 @@ diff -NarU2 easymde-orig/src/js/easymde.js easymde-mod1/src/js/easymde.js
|
||||
- CodeMirrorSpellChecker({
|
||||
- codeMirrorInstance: CodeMirror,
|
||||
- });
|
||||
- } else {
|
||||
mode = options.parsingConfig;
|
||||
mode.name = 'gfm';
|
||||
mode.gitHubSpice = false;
|
||||
- }
|
||||
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
@@ -1927,5 +1915,4 @@
|
||||
configureMouse: configureMouse,
|
||||
inputStyle: (options.inputStyle != undefined) ? options.inputStyle : isMobile() ? 'contenteditable' : 'textarea',
|
||||
- spellcheck: (options.nativeSpellcheck != undefined) ? options.nativeSpellcheck : true,
|
||||
});
|
||||
diff -NarU2 easy-markdown-editor-2.14.0-orig/types/easymde.d.ts easy-markdown-editor-2.14.0/types/easymde.d.ts
|
||||
--- easy-markdown-editor-2.14.0-orig/types/easymde.d.ts 2021-02-14 12:11:48.000000000 +0000
|
||||
+++ easy-markdown-editor-2.14.0/types/easymde.d.ts 2021-02-21 20:57:42.492620979 +0000
|
||||
@@ -160,9 +160,4 @@
|
||||
}
|
||||
|
||||
- interface OverlayModeOptions {
|
||||
- mode: CodeMirror.Mode<any>
|
||||
- combine?: boolean
|
||||
- }
|
||||
-
|
||||
interface Options {
|
||||
autoDownloadFontAwesome?: boolean;
|
||||
@@ -214,7 +209,5 @@
|
||||
|
||||
promptTexts?: PromptTexts;
|
||||
- syncSideBySidePreviewScroll?: boolean;
|
||||
-
|
||||
- overlayMode?: OverlayModeOptions
|
||||
+ syncSideBySidePreviewScroll?: boolean
|
||||
}
|
||||
}
|
||||
|
@@ -4,10 +4,10 @@ import os
|
||||
import time
|
||||
|
||||
"""
|
||||
mkdir -p /dev/shm/fusefuzz/{r,v}
|
||||
PYTHONPATH=.. python3 -m copyparty -v /dev/shm/fusefuzz/r::r -i 127.0.0.1
|
||||
../bin/copyparty-fuse.py /dev/shm/fusefuzz/v http://127.0.0.1:3923/ 2 0
|
||||
(d="$PWD"; cd /dev/shm/fusefuzz && "$d"/fusefuzz.py)
|
||||
td=/dev/shm/; [ -e $td ] || td=$HOME; mkdir -p $td/fusefuzz/{r,v}
|
||||
PYTHONPATH=.. python3 -m copyparty -v $td/fusefuzz/r::r -i 127.0.0.1
|
||||
../bin/copyparty-fuse.py http://127.0.0.1:3923/ $td/fusefuzz/v -cf 2 -cd 0.5
|
||||
(d="$PWD"; cd $td/fusefuzz && "$d"/fusefuzz.py)
|
||||
"""
|
||||
|
||||
|
||||
|
@@ -3,12 +3,15 @@ set -e
|
||||
echo
|
||||
|
||||
# osx support
|
||||
command -v gtar >/dev/null &&
|
||||
command -v gfind >/dev/null && {
|
||||
tar() { gtar "$@"; }
|
||||
# port install gnutar findutils gsed coreutils
|
||||
gtar=$(command -v gtar || command -v gnutar) || true
|
||||
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
|
||||
tar() { $gtar "$@"; }
|
||||
sed() { gsed "$@"; }
|
||||
find() { gfind "$@"; }
|
||||
sort() { gsort "$@"; }
|
||||
command -v grealpath >/dev/null &&
|
||||
realpath() { grealpath "$@"; }
|
||||
}
|
||||
|
||||
which md5sum 2>/dev/null >/dev/null &&
|
||||
@@ -83,6 +86,8 @@ function have() {
|
||||
python -c "import $1; $1; $1.__version__"
|
||||
}
|
||||
|
||||
mv copyparty/web/deps/marked.full.js.gz srv/ || true
|
||||
|
||||
. buildenv/bin/activate
|
||||
have setuptools
|
||||
have wheel
|
||||
|
@@ -18,13 +18,23 @@ echo
|
||||
# (the fancy markdown editor)
|
||||
|
||||
|
||||
command -v gtar >/dev/null &&
|
||||
command -v gfind >/dev/null && {
|
||||
tar() { gtar "$@"; }
|
||||
# port install gnutar findutils gsed coreutils
|
||||
gtar=$(command -v gtar || command -v gnutar) || true
|
||||
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
|
||||
tar() { $gtar "$@"; }
|
||||
sed() { gsed "$@"; }
|
||||
find() { gfind "$@"; }
|
||||
sort() { gsort "$@"; }
|
||||
unexpand() { gunexpand "$@"; }
|
||||
command -v grealpath >/dev/null &&
|
||||
realpath() { grealpath "$@"; }
|
||||
|
||||
[ -e /opt/local/bin/bzip2 ] &&
|
||||
bzip2() { /opt/local/bin/bzip2 "$@"; }
|
||||
}
|
||||
pybin=$(command -v python3 || command -v python) || {
|
||||
echo need python
|
||||
exit 1
|
||||
}
|
||||
|
||||
[ -e copyparty/__main__.py ] || cd ..
|
||||
@@ -35,11 +45,15 @@ command -v gfind >/dev/null && {
|
||||
exit 1
|
||||
}
|
||||
|
||||
do_sh=1
|
||||
do_py=1
|
||||
while [ ! -z "$1" ]; do
|
||||
[ "$1" = clean ] && clean=1 && shift && continue
|
||||
[ "$1" = re ] && repack=1 && shift && continue
|
||||
[ "$1" = no-ogv ] && no_ogv=1 && shift && continue
|
||||
[ "$1" = no-cm ] && no_cm=1 && shift && continue
|
||||
[ "$1" = no-sh ] && do_sh= && shift && continue
|
||||
[ "$1" = no-py ] && do_py= && shift && continue
|
||||
break
|
||||
done
|
||||
|
||||
@@ -59,28 +73,32 @@ cd sfx
|
||||
)/pe-copyparty"
|
||||
|
||||
echo "repack of files in $old"
|
||||
cp -pR "$old/"*{jinja2,copyparty} .
|
||||
mv {x.,}jinja2 2>/dev/null || true
|
||||
cp -pR "$old/"*{dep-j2,copyparty} .
|
||||
}
|
||||
|
||||
[ $repack ] || {
|
||||
echo collecting jinja2
|
||||
f="../build/Jinja2-2.6.tar.gz"
|
||||
f="../build/Jinja2-2.11.3.tar.gz"
|
||||
[ -e "$f" ] ||
|
||||
(url=https://files.pythonhosted.org/packages/25/c8/212b1c2fd6df9eaf536384b6c6619c4e70a3afd2dffdd00e5296ffbae940/Jinja2-2.6.tar.gz;
|
||||
(url=https://files.pythonhosted.org/packages/4f/e7/65300e6b32e69768ded990494809106f87da1d436418d5f1367ed3966fd7/Jinja2-2.11.3.tar.gz;
|
||||
wget -O$f "$url" || curl -L "$url" >$f)
|
||||
|
||||
tar -zxf $f
|
||||
mv Jinja2-*/jinja2 .
|
||||
rm -rf Jinja2-* jinja2/testsuite jinja2/_markupsafe/tests.py jinja2/_stringdefs.py
|
||||
mv Jinja2-*/src/jinja2 .
|
||||
rm -rf Jinja2-*
|
||||
|
||||
f=jinja2/lexer.py
|
||||
sed -r '/.*föö.*/ raise SyntaxError/' <$f >t
|
||||
tmv $f
|
||||
|
||||
f=jinja2/_markupsafe/_constants.py
|
||||
awk '!/: [0-9]+,?$/ || /(amp|gt|lt|quot|apos|nbsp).:/' <$f >t
|
||||
tmv $f
|
||||
echo collecting markupsafe
|
||||
f="../build/MarkupSafe-1.1.1.tar.gz"
|
||||
[ -e "$f" ] ||
|
||||
(url=https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz;
|
||||
wget -O$f "$url" || curl -L "$url" >$f)
|
||||
|
||||
tar -zxf $f
|
||||
mv MarkupSafe-*/src/markupsafe .
|
||||
rm -rf MarkupSafe-* markupsafe/_speedups.c
|
||||
|
||||
mkdir dep-j2/
|
||||
mv {markupsafe,jinja2} dep-j2/
|
||||
|
||||
# msys2 tar is bad, make the best of it
|
||||
echo collecting source
|
||||
@@ -94,8 +112,39 @@ cd sfx
|
||||
rm -f ../tar
|
||||
}
|
||||
|
||||
ver="$(awk '/^VERSION *= \(/ {
|
||||
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < ../copyparty/__version__.py)"
|
||||
ver=
|
||||
git describe --tags >/dev/null 2>/dev/null && {
|
||||
git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
|
||||
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//; s/-g?/./g')";
|
||||
t_ver=
|
||||
|
||||
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+$' && {
|
||||
# short format (exact version number)
|
||||
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g')";
|
||||
}
|
||||
|
||||
printf '%s\n' "$git_ver" | grep -qE '^v[0-9\.]+-[0-9]+-g[0-9a-f]+$' && {
|
||||
# long format (unreleased commit)
|
||||
t_ver="$(printf '%s\n' "$ver" | sed -r 's/\./, /g; s/(.*) (.*)/\1 "\2"/')"
|
||||
}
|
||||
|
||||
[ -z "$t_ver" ] && {
|
||||
printf 'unexpected git version format: [%s]\n' "$git_ver"
|
||||
exit 1
|
||||
}
|
||||
|
||||
dt="$(git log -1 --format=%cd --date=short | sed -E 's/-0?/, /g')"
|
||||
printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt"
|
||||
sed -ri '
|
||||
s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/;
|
||||
s/^(S_VERSION =)(.*)/#\1\2\n\1 "'"$ver"'"/;
|
||||
s/^(BUILD_DT =)(.*)/#\1\2\n\1 ('"$dt"')/;
|
||||
' copyparty/__version__.py
|
||||
}
|
||||
|
||||
[ -z "$ver" ] &&
|
||||
ver="$(awk '/^VERSION *= \(/ {
|
||||
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"
|
||||
|
||||
ts=$(date -u +%s)
|
||||
hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)
|
||||
@@ -131,9 +180,19 @@ done
|
||||
sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f"
|
||||
}
|
||||
|
||||
[ $repack ] ||
|
||||
find | grep -E '\.py$' |
|
||||
grep -vE '__version__' |
|
||||
tr '\n' '\0' |
|
||||
xargs -0 $pybin ../scripts/uncomment.py
|
||||
|
||||
f=dep-j2/jinja2/constants.py
|
||||
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
|
||||
tmv "$f"
|
||||
|
||||
# up2k goes from 28k to 22k laff
|
||||
echo entabbening
|
||||
find | grep -E '\.(js|css|html|py)$' | while IFS= read -r f; do
|
||||
find | grep -E '\.(js|css|html)$' | while IFS= read -r f; do
|
||||
unexpand -t 4 --first-only <"$f" >t
|
||||
tmv "$f"
|
||||
done
|
||||
@@ -143,29 +202,40 @@ args=(--owner=1000 --group=1000)
|
||||
[ "$OSTYPE" = msys ] &&
|
||||
args=()
|
||||
|
||||
tar -cf tar "${args[@]}" --numeric-owner copyparty jinja2
|
||||
tar -cf tar "${args[@]}" --numeric-owner copyparty dep-j2
|
||||
|
||||
echo compressing tar
|
||||
# detect best level; bzip2 -7 is usually better than -9
|
||||
for n in {2..9}; do cp tar t.$n; bzip2 -$n t.$n & done; wait; mv -v $(ls -1S t.*.bz2 | tail -n 1) tar.bz2
|
||||
for n in {2..9}; do cp tar t.$n; xz -ze$n t.$n & done; wait; mv -v $(ls -1S t.*.xz | tail -n 1) tar.xz
|
||||
rm t.*
|
||||
[ $do_py ] && { for n in {2..9}; do cp tar t.$n; bzip2 -$n t.$n & done; wait; mv -v $(ls -1S t.*.bz2 | tail -n 1) tar.bz2; }
|
||||
[ $do_sh ] && { for n in {2..9}; do cp tar t.$n; xz -ze$n t.$n & done; wait; mv -v $(ls -1S t.*.xz | tail -n 1) tar.xz; }
|
||||
rm t.* || true
|
||||
exts=()
|
||||
|
||||
|
||||
[ $do_sh ] && {
|
||||
exts+=(sh)
|
||||
echo creating unix sfx
|
||||
(
|
||||
sed "s/PACK_TS/$ts/; s/PACK_HTS/$hts/; s/CPP_VER/$ver/" <../scripts/sfx.sh |
|
||||
grep -E '^sfx_eof$' -B 9001;
|
||||
cat tar.xz
|
||||
) >$sfx_out.sh
|
||||
}
|
||||
|
||||
|
||||
[ $do_py ] && {
|
||||
exts+=(py)
|
||||
echo creating generic sfx
|
||||
python ../scripts/sfx.py --sfx-make tar.bz2 $ver $ts
|
||||
$pybin ../scripts/sfx.py --sfx-make tar.bz2 $ver $ts
|
||||
mv sfx.out $sfx_out.py
|
||||
chmod 755 $sfx_out.*
|
||||
}
|
||||
|
||||
|
||||
printf "done:\n"
|
||||
printf " %s\n" "$(realpath $sfx_out)."{sh,py}
|
||||
# rm -rf *
|
||||
for ext in ${exts[@]}; do
|
||||
printf " %s\n" "$(realpath $sfx_out)."$ext
|
||||
done
|
||||
|
||||
# tar -tvf ../sfx/tar | sed -r 's/(.* ....-..-.. ..:.. )(.*)/\2 `` \1/' | sort | sed -r 's/(.*) `` (.*)/\2 \1/'| less
|
||||
# for n in {1..9}; do tar -tf tar | grep -vE '/$' | sed -r 's/(.*)\.(.*)/\2.\1/' | sort | sed -r 's/([^\.]+)\.(.*)/\2.\1/' | tar -cT- | bzip2 -c$n | wc -c; done
|
||||
# apk add bash python3 tar xz bzip2
|
||||
# while true; do ./make-sfx.sh; for f in ..//dist/copyparty-sfx.{sh,py}; do mv $f $f.$(wc -c <$f | awk '{print$1}'); done; done
|
||||
|
@@ -2,12 +2,16 @@
|
||||
set -e
|
||||
echo
|
||||
|
||||
command -v gtar >/dev/null &&
|
||||
command -v gfind >/dev/null && {
|
||||
tar() { gtar "$@"; }
|
||||
# osx support
|
||||
# port install gnutar findutils gsed coreutils
|
||||
gtar=$(command -v gtar || command -v gnutar) || true
|
||||
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
|
||||
tar() { $gtar "$@"; }
|
||||
sed() { gsed "$@"; }
|
||||
find() { gfind "$@"; }
|
||||
sort() { gsort "$@"; }
|
||||
command -v grealpath >/dev/null &&
|
||||
realpath() { grealpath "$@"; }
|
||||
}
|
||||
|
||||
which md5sum 2>/dev/null >/dev/null &&
|
||||
@@ -16,27 +20,29 @@ which md5sum 2>/dev/null >/dev/null &&
|
||||
|
||||
ver="$1"
|
||||
|
||||
[[ "x$ver" == x ]] &&
|
||||
[ "x$ver" = x ] &&
|
||||
{
|
||||
echo "need argument 1: version"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
[[ -e copyparty/__main__.py ]] || cd ..
|
||||
[[ -e copyparty/__main__.py ]] ||
|
||||
[ -e copyparty/__main__.py ] || cd ..
|
||||
[ -e copyparty/__main__.py ] ||
|
||||
{
|
||||
echo "run me from within the project root folder"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
mv copyparty/web/deps/marked.full.js.gz srv/ || true
|
||||
|
||||
mkdir -p dist
|
||||
zip_path="$(pwd)/dist/copyparty-$ver.zip"
|
||||
tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz"
|
||||
|
||||
[[ -e "$zip_path" ]] ||
|
||||
[[ -e "$tgz_path" ]] &&
|
||||
[ -e "$zip_path" ] ||
|
||||
[ -e "$tgz_path" ] &&
|
||||
{
|
||||
echo "found existing archives for this version"
|
||||
echo " $zip_path"
|
||||
205
scripts/sfx.py
@@ -1,9 +1,8 @@
|
||||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
# coding: latin-1
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re, os, sys, stat, time, shutil, tarfile, hashlib, platform, tempfile
|
||||
import subprocess as sp
|
||||
import os, sys, time, shutil, runpy, tarfile, hashlib, platform, tempfile, traceback
|
||||
|
||||
"""
|
||||
run me with any version of python, i will unpack and run copyparty
|
||||
@@ -29,6 +28,7 @@ STAMP = None
|
||||
PY2 = sys.version_info[0] == 2
|
||||
sys.dont_write_bytecode = True
|
||||
me = os.path.abspath(os.path.realpath(__file__))
|
||||
cpp = None
|
||||
|
||||
|
||||
def eprint(*args, **kwargs):
|
||||
@@ -191,91 +191,14 @@ def makesfx(tar_src, ver, ts):
|
||||
# skip 0
|
||||
|
||||
|
||||
def get_py_win(ret):
|
||||
tops = []
|
||||
p = str(os.getenv("LocalAppdata"))
|
||||
if p:
|
||||
tops.append(os.path.join(p, "Programs", "Python"))
|
||||
|
||||
progfiles = {}
|
||||
for p in ["ProgramFiles", "ProgramFiles(x86)"]:
|
||||
p = str(os.getenv(p))
|
||||
if p:
|
||||
progfiles[p] = 1
|
||||
# 32bit apps get x86 for both
|
||||
if p.endswith(" (x86)"):
|
||||
progfiles[p[:-6]] = 1
|
||||
|
||||
tops += list(progfiles.keys())
|
||||
|
||||
for sysroot in [me, sys.executable]:
|
||||
sysroot = sysroot[:3].upper()
|
||||
if sysroot[1] == ":" and sysroot not in tops:
|
||||
tops.append(sysroot)
|
||||
|
||||
# $WIRESHARK_SLOGAN
|
||||
for top in tops:
|
||||
try:
|
||||
for name1 in sorted(os.listdir(top), reverse=True):
|
||||
if name1.lower().startswith("python"):
|
||||
path1 = os.path.join(top, name1)
|
||||
try:
|
||||
for name2 in os.listdir(path1):
|
||||
if name2.lower() == "python.exe":
|
||||
path2 = os.path.join(path1, name2)
|
||||
ret[path2.lower()] = path2
|
||||
except:
|
||||
pass
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def get_py_nix(ret):
|
||||
ptn = re.compile(r"^(python|pypy)[0-9\.-]*$")
|
||||
for bindir in os.getenv("PATH").split(":"):
|
||||
if not bindir:
|
||||
next
|
||||
|
||||
try:
|
||||
for fn in os.listdir(bindir):
|
||||
if ptn.match(fn):
|
||||
fn = os.path.join(bindir, fn)
|
||||
ret[fn.lower()] = fn
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def read_py(binp):
|
||||
cmd = [
|
||||
binp,
|
||||
"-c",
|
||||
"import sys; sys.stdout.write(' '.join(str(x) for x in sys.version_info)); import jinja2",
|
||||
]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
ver, _ = p.communicate()
|
||||
ver = ver.decode("utf-8").split(" ")[:3]
|
||||
ver = [int(x) if x.isdigit() else 0 for x in ver]
|
||||
return ver, p.returncode == 0
|
||||
|
||||
|
||||
def get_pys():
|
||||
ver, chk = read_py(sys.executable)
|
||||
if chk or PY2:
|
||||
return [[chk, ver, sys.executable]]
|
||||
|
||||
hits = {sys.executable.lower(): sys.executable}
|
||||
if platform.system() == "Windows":
|
||||
get_py_win(hits)
|
||||
else:
|
||||
get_py_nix(hits)
|
||||
|
||||
ret = []
|
||||
for binp in hits.values():
|
||||
ver, chk = read_py(binp)
|
||||
ret.append([chk, ver, binp])
|
||||
msg("\t".join(str(x) for x in ret[-1]))
|
||||
|
||||
return ret
|
||||
def u8(gen):
|
||||
try:
|
||||
for s in gen:
|
||||
yield s.decode("utf-8", "ignore")
|
||||
except:
|
||||
yield s
|
||||
for s in gen:
|
||||
yield s
|
||||
|
||||
|
||||
def yieldfile(fn):
|
||||
@@ -295,17 +218,19 @@ def hashfile(fn):
|
||||
def unpack():
|
||||
"""unpacks the tar yielded by `data`"""
|
||||
name = "pe-copyparty"
|
||||
tag = "v" + str(STAMP)
|
||||
withpid = "{}.{}".format(name, os.getpid())
|
||||
top = tempfile.gettempdir()
|
||||
final = os.path.join(top, name)
|
||||
mine = os.path.join(top, withpid)
|
||||
tar = os.path.join(mine, "tar")
|
||||
tag_mine = os.path.join(mine, "v" + str(STAMP))
|
||||
tag_final = os.path.join(final, "v" + str(STAMP))
|
||||
|
||||
if os.path.exists(tag_final):
|
||||
msg("found early")
|
||||
return final
|
||||
try:
|
||||
if tag in os.listdir(final):
|
||||
msg("found early")
|
||||
return final
|
||||
except:
|
||||
pass
|
||||
|
||||
nwrite = 0
|
||||
os.mkdir(mine)
|
||||
@@ -328,12 +253,15 @@ def unpack():
|
||||
|
||||
os.remove(tar)
|
||||
|
||||
with open(tag_mine, "wb") as f:
|
||||
with open(os.path.join(mine, tag), "wb") as f:
|
||||
f.write(b"h\n")
|
||||
|
||||
if os.path.exists(tag_final):
|
||||
msg("found late")
|
||||
return final
|
||||
try:
|
||||
if tag in os.listdir(final):
|
||||
msg("found late")
|
||||
return final
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if os.path.islink(final):
|
||||
@@ -352,7 +280,7 @@ def unpack():
|
||||
msg("reloc fail,", mine)
|
||||
return mine
|
||||
|
||||
for fn in os.listdir(top):
|
||||
for fn in u8(os.listdir(top)):
|
||||
if fn.startswith(name) and fn not in [name, withpid]:
|
||||
try:
|
||||
old = os.path.join(top, fn)
|
||||
@@ -415,16 +343,24 @@ def get_payload():
|
||||
break
|
||||
|
||||
|
||||
def confirm():
|
||||
def confirm(rv):
|
||||
msg()
|
||||
msg(traceback.format_exc())
|
||||
msg("*** hit enter to exit ***")
|
||||
raw_input() if PY2 else input()
|
||||
try:
|
||||
raw_input() if PY2 else input()
|
||||
except:
|
||||
pass
|
||||
|
||||
sys.exit(rv)
|
||||
|
||||
|
||||
def run(tmp, py):
|
||||
msg("OK")
|
||||
msg("will use:", py)
|
||||
msg("bound to:", tmp)
|
||||
def run(tmp, j2ver):
|
||||
global cpp
|
||||
|
||||
msg("jinja2:", j2ver or "bundled")
|
||||
msg("sfxdir:", tmp)
|
||||
msg()
|
||||
|
||||
# "systemd-tmpfiles-clean.timer"?? HOW do you even come up with this shit
|
||||
try:
|
||||
@@ -436,36 +372,25 @@ def run(tmp, py):
|
||||
except:
|
||||
pass
|
||||
|
||||
fp_py = os.path.join(tmp, "py")
|
||||
with open(fp_py, "wb") as f:
|
||||
f.write(py.encode("utf-8") + b"\n")
|
||||
ld = [tmp, os.path.join(tmp, "dep-j2")]
|
||||
if j2ver:
|
||||
del ld[-1]
|
||||
|
||||
# avoid loading ./copyparty.py
|
||||
cmd = [
|
||||
py,
|
||||
"-c",
|
||||
'import sys, runpy; sys.path.insert(0, r"'
|
||||
+ tmp
|
||||
+ '"); runpy.run_module("copyparty", run_name="__main__")',
|
||||
] + list(sys.argv[1:])
|
||||
for x in ld:
|
||||
sys.path.insert(0, x)
|
||||
|
||||
msg("\n", cmd, "\n")
|
||||
p = sp.Popen(str(x) for x in cmd)
|
||||
try:
|
||||
p.wait()
|
||||
runpy.run_module(str("copyparty"), run_name=str("__main__"))
|
||||
except SystemExit as ex:
|
||||
if ex.code:
|
||||
confirm(ex.code)
|
||||
except:
|
||||
p.wait()
|
||||
|
||||
if p.returncode != 0:
|
||||
confirm()
|
||||
|
||||
sys.exit(p.returncode)
|
||||
confirm(1)
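In other words, the rewritten run() no longer spawns a second interpreter; it pushes the unpacked folder (and, when no system jinja2 was found, the bundled dep-j2 copy) onto sys.path and starts copyparty in-process. A minimal sketch of that technique, with placeholder names:

```python
import sys, runpy

def launch(unpacked_dir, bundled_j2=None):
    # make "import copyparty" resolve inside the sfx tempdir;
    # bundled_j2 is only added when no system jinja2 exists
    for p in [unpacked_dir, bundled_j2]:
        if p:
            sys.path.insert(0, p)

    # equivalent to "python -m copyparty", but in this interpreter
    runpy.run_module("copyparty", run_name="__main__")
```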
|
||||
|
||||
|
||||
def main():
|
||||
sysver = str(sys.version).replace("\n", "\n" + " " * 18)
|
||||
pktime = time.strftime("%Y-%m-%d, %H:%M:%S", time.gmtime(STAMP))
|
||||
os.system("")
|
||||
msg()
|
||||
msg(" this is: copyparty", VER)
|
||||
msg(" packed at:", pktime, "UTC,", STAMP)
|
||||
@@ -495,33 +420,13 @@ def main():
|
||||
# skip 0
|
||||
|
||||
tmp = unpack()
|
||||
fp_py = os.path.join(tmp, "py")
|
||||
if os.path.exists(fp_py):
|
||||
with open(fp_py, "rb") as f:
|
||||
py = f.read().decode("utf-8").rstrip()
|
||||
|
||||
return run(tmp, py)
|
||||
try:
|
||||
from jinja2 import __version__ as j2ver
|
||||
except:
|
||||
j2ver = None
|
||||
|
||||
pys = get_pys()
|
||||
pys.sort(reverse=True)
|
||||
j2, ver, py = pys[0]
|
||||
if j2:
|
||||
try:
|
||||
os.rename(os.path.join(tmp, "jinja2"), os.path.join(tmp, "x.jinja2"))
|
||||
except:
|
||||
pass
|
||||
|
||||
return run(tmp, py)
|
||||
|
||||
msg("\n could not find jinja2; will use py2 + the bundled version\n")
|
||||
for _, ver, py in pys:
|
||||
if ver > [2, 7] and ver < [3, 0]:
|
||||
return run(tmp, py)
|
||||
|
||||
m = "\033[1;31m\n\n\ncould not find a python with jinja2 installed; please do one of these:\n\n pip install --user jinja2\n\n install python2\n\n\033[0m"
|
||||
msg(m)
|
||||
confirm()
|
||||
sys.exit(1)
|
||||
run(tmp, j2ver)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
@@ -32,8 +32,12 @@ dir="$(
|
||||
|
||||
# detect available pythons
|
||||
(IFS=:; for d in $PATH; do
|
||||
printf '%s\n' "$d"/python* "$d"/pypy* | tac;
|
||||
done) | grep -E '(python|pypy)[0-9\.-]*$' > $dir/pys || true
|
||||
printf '%s\n' "$d"/python* "$d"/pypy*;
|
||||
done) |
|
||||
(sed -E 's/(.*\/[^/0-9]+)([0-9]?[^/]*)$/\2 \1/' || cat) |
|
||||
(sort -nr || cat) |
|
||||
(sed -E 's/([^ ]*) (.*)/\2\1/' || cat) |
|
||||
grep -E '/(python|pypy)[0-9\.-]*$' >$dir/pys || true
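The sed|sort|sed pipeline is a decorate-sort-undecorate: move the version suffix to the front, numeric-sort descending, then restore the path. A rough Python equivalent, purely for illustration:

```python
import os, re, glob

def list_pythons():
    # collect python*/pypy* from PATH, newest-looking version first
    hits = []
    for d in os.environ.get("PATH", "").split(":"):
        if d:
            hits += glob.glob(os.path.join(d, "python*"))
            hits += glob.glob(os.path.join(d, "pypy*"))

    hits = [x for x in hits if re.search(r"/(python|pypy)[0-9.-]*$", x)]

    def ver_key(path):
        m = re.search(r"(\d+(?:\.\d+)*)$", os.path.basename(path))
        return [int(x) for x in m.group(1).split(".")] if m else []

    return sorted(set(hits), key=ver_key, reverse=True)
```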
|
||||
|
||||
# see if we made a choice before
|
||||
[ -z "$pybin" ] && pybin="$(cat $dir/py 2>/dev/null || true)"
|
||||
164
scripts/speedtest-fs.py
Normal file

@@ -0,0 +1,164 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
import time
|
||||
import signal
|
||||
import traceback
|
||||
import threading
|
||||
from queue import Queue
|
||||
|
||||
|
||||
"""speedtest-fs: filesystem performance estimate"""
|
||||
__author__ = "ed <copyparty@ocv.me>"
|
||||
__copyright__ = 2020
|
||||
__license__ = "MIT"
|
||||
__url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
|
||||
def get_spd(nbyte, nsec):
|
||||
if not nsec:
|
||||
return "0.000 MB 0.000 sec 0.000 MB/s"
|
||||
|
||||
mb = nbyte / (1024 * 1024.0)
|
||||
spd = mb / nsec
|
||||
|
||||
return f"{mb:.3f} MB {nsec:.3f} sec {spd:.3f} MB/s"
|
||||
|
||||
|
||||
class Inf(object):
|
||||
def __init__(self, t0):
|
||||
self.msgs = []
|
||||
self.errors = []
|
||||
self.reports = []
|
||||
self.mtx_msgs = threading.Lock()
|
||||
self.mtx_reports = threading.Lock()
|
||||
|
||||
self.n_byte = 0
|
||||
self.n_sec = 0
|
||||
self.n_done = 0
|
||||
self.t0 = t0
|
||||
|
||||
thr = threading.Thread(target=self.print_msgs)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def msg(self, fn, n_read):
|
||||
with self.mtx_msgs:
|
||||
self.msgs.append(f"{fn} {n_read}")
|
||||
|
||||
def err(self, fn):
|
||||
with self.mtx_reports:
|
||||
self.errors.append(f"{fn}\n{traceback.format_exc()}")
|
||||
|
||||
def print_msgs(self):
|
||||
while True:
|
||||
time.sleep(0.02)
|
||||
with self.mtx_msgs:
|
||||
msgs = self.msgs
|
||||
self.msgs = []
|
||||
|
||||
if not msgs:
|
||||
continue
|
||||
|
||||
msgs = msgs[-64:]
|
||||
msgs = [f"{get_spd(self.n_byte, self.n_sec)} {x}" for x in msgs]
|
||||
print("\n".join(msgs))
|
||||
|
||||
def report(self, fn, n_byte, n_sec):
|
||||
with self.mtx_reports:
|
||||
self.reports.append([n_byte, n_sec, fn])
|
||||
self.n_byte += n_byte
|
||||
self.n_sec += n_sec
|
||||
|
||||
def done(self):
|
||||
with self.mtx_reports:
|
||||
self.n_done += 1
|
||||
|
||||
|
||||
def get_files(dir_path):
|
||||
for fn in os.listdir(dir_path):
|
||||
fn = os.path.join(dir_path, fn)
|
||||
st = os.stat(fn).st_mode
|
||||
|
||||
if stat.S_ISDIR(st):
|
||||
yield from get_files(fn)
|
||||
|
||||
if stat.S_ISREG(st):
|
||||
yield fn
|
||||
|
||||
|
||||
def worker(q, inf, read_sz):
|
||||
while True:
|
||||
fn = q.get()
|
||||
if not fn:
|
||||
break
|
||||
|
||||
n_read = 0
|
||||
try:
|
||||
t0 = time.time()
|
||||
with open(fn, "rb") as f:
|
||||
while True:
|
||||
buf = f.read(read_sz)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
n_read += len(buf)
|
||||
inf.msg(fn, n_read)
|
||||
|
||||
inf.report(fn, n_read, time.time() - t0)
|
||||
except:
|
||||
inf.err(fn)
|
||||
|
||||
inf.done()
|
||||
|
||||
|
||||
def sighandler(signo, frame):
|
||||
os._exit(0)
|
||||
|
||||
|
||||
def main():
|
||||
signal.signal(signal.SIGINT, sighandler)
|
||||
|
||||
root = "."
|
||||
if len(sys.argv) > 1:
|
||||
root = sys.argv[1]
|
||||
|
||||
t0 = time.time()
|
||||
q = Queue(256)
|
||||
inf = Inf(t0)
|
||||
|
||||
num_threads = 8
|
||||
read_sz = 32 * 1024
|
||||
for _ in range(num_threads):
|
||||
thr = threading.Thread(target=worker, args=(q, inf, read_sz,))
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
for fn in get_files(root):
|
||||
q.put(fn)
|
||||
|
||||
for _ in range(num_threads):
|
||||
q.put(None)
|
||||
|
||||
while inf.n_done < num_threads:
|
||||
time.sleep(0.1)
|
||||
|
||||
t2 = time.time()
|
||||
print("\n")
|
||||
|
||||
log = inf.reports
|
||||
log.sort()
|
||||
for nbyte, nsec, fn in log[-64:]:
|
||||
print(f"{get_spd(nbyte, nsec)} {fn}")
|
||||
|
||||
print()
|
||||
print("\n".join(inf.errors))
|
||||
|
||||
print(get_spd(inf.n_byte, t2 - t0))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
77
scripts/uncomment.py
Normal file
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import io
|
||||
import sys
|
||||
import tokenize
|
||||
|
||||
|
||||
def uncomment(fpath):
|
||||
""" modified https://stackoverflow.com/a/62074206 """
|
||||
|
||||
with open(fpath, "rb") as f:
|
||||
orig = f.read().decode("utf-8")
|
||||
|
||||
out = ""
|
||||
for ln in orig.split("\n"):
|
||||
if not ln.startswith("#"):
|
||||
break
|
||||
|
||||
out += ln + "\n"
|
||||
|
||||
io_obj = io.StringIO(orig)
|
||||
prev_toktype = tokenize.INDENT
|
||||
last_lineno = -1
|
||||
last_col = 0
|
||||
for tok in tokenize.generate_tokens(io_obj.readline):
|
||||
# print(repr(tok))
|
||||
token_type = tok[0]
|
||||
token_string = tok[1]
|
||||
start_line, start_col = tok[2]
|
||||
end_line, end_col = tok[3]
|
||||
|
||||
if start_line > last_lineno:
|
||||
last_col = 0
|
||||
|
||||
if start_col > last_col:
|
||||
out += " " * (start_col - last_col)
|
||||
|
||||
is_legalese = (
|
||||
"copyright" in token_string.lower() or "license" in token_string.lower()
|
||||
)
|
||||
|
||||
if token_type == tokenize.STRING:
|
||||
if (
|
||||
prev_toktype != tokenize.INDENT
|
||||
and prev_toktype != tokenize.NEWLINE
|
||||
and start_col > 0
|
||||
or is_legalese
|
||||
):
|
||||
out += token_string
|
||||
else:
|
||||
out += '"a"'
|
||||
elif token_type != tokenize.COMMENT or is_legalese:
|
||||
out += token_string
|
||||
|
||||
prev_toktype = token_type
|
||||
last_lineno = end_line
|
||||
last_col = end_col
|
||||
|
||||
# out = "\n".join(x for x in out.splitlines() if x.strip())
|
||||
|
||||
with open(fpath, "wb") as f:
|
||||
f.write(out.encode("utf-8"))
|
||||
|
||||
|
||||
def main():
|
||||
print("uncommenting", end="")
|
||||
for f in sys.argv[1:]:
|
||||
print(".", end="")
|
||||
uncomment(f)
|
||||
|
||||
print("k")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
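For context on how this stripper gets invoked: make-sfx.sh feeds it every .py file except __version__.py (see the find|grep|xargs pipeline earlier in this diff). A hedged sketch of an equivalent call, with the relative paths assumed:

```python
import subprocess, sys
from pathlib import Path

# every .py file in the unpacked tree except __version__.py,
# passed as arguments to uncomment.py in one go
files = [str(p) for p in Path(".").rglob("*.py") if p.name != "__version__.py"]
subprocess.check_call([sys.executable, "../scripts/uncomment.py"] + files)
```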
6
setup.py
@@ -2,10 +2,8 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function
|
||||
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
from glob import glob
|
||||
from shutil import rmtree
|
||||
|
||||
setuptools_available = True
|
||||
@@ -49,7 +47,7 @@ with open(here + "/README.md", "rb") as f:
|
||||
about = {}
|
||||
if not VERSION:
|
||||
with open(os.path.join(here, NAME, "__version__.py"), "rb") as f:
|
||||
exec(f.read().decode("utf-8").split("\n\n", 1)[1], about)
|
||||
exec (f.read().decode("utf-8").split("\n\n", 1)[1], about)
|
||||
else:
|
||||
about["__version__"] = VERSION
|
||||
|
||||
@@ -110,13 +108,13 @@ args = {
|
||||
"Programming Language :: Python :: 2",
|
||||
"Programming Language :: Python :: 2.7",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.2",
|
||||
"Programming Language :: Python :: 3.3",
|
||||
"Programming Language :: Python :: 3.4",
|
||||
"Programming Language :: Python :: 3.5",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Programming Language :: Python :: 3.7",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: Implementation :: CPython",
|
||||
"Programming Language :: Python :: Implementation :: PyPy",
|
||||
"Environment :: Console",
|
||||
141
srv/extend.md
Normal file
@@ -0,0 +1,141 @@
|
||||
# hi
|
||||
this showcases my worst idea yet; *extending markdown with inline javascript*
|
||||
|
||||
due to obvious reasons it's disabled by default, and can be enabled with `-emp`
|
||||
|
||||
the examples are by no means correct, they're as much of a joke as this feature itself
|
||||
|
||||
|
||||
### sub-header
|
||||
nothing special about this one
|
||||
|
||||
|
||||
## except/
|
||||
this one becomes a hyperlink to ./except/ thanks to
|
||||
* the `copyparty_pre` plugin at the end of this file
|
||||
* which is invoked as a markdown filter every time the document is modified
|
||||
* which looks for headers ending with a `/` and rewrites all headers below that
|
||||
|
||||
it is a passthrough to the markdown extension api, see https://marked.js.org/using_pro
|
||||
|
||||
in addition to the markdown extension functions, `ctor` will be called on document init
|
||||
|
||||
|
||||
### these/
|
||||
and this one becomes ./except/these/
|
||||
|
||||
|
||||
#### ones.md
|
||||
finally ./except/these/ones.md
|
||||
|
||||
|
||||
### also-this.md
|
||||
which should be ./except/also-this.md
|
||||
|
||||
|
||||
|
||||
|
||||
# ok
|
||||
now for another extension type, `copyparty_post` which is called to manipulate the generated dom instead
|
||||
|
||||
`copyparty_post` can have the following functions, all optional
|
||||
* `ctor` is called on document init
|
||||
* `render` is called when the dom is done but still in-memory
|
||||
* `render2` is called with the live browser dom as-displayed
|
||||
|
||||
## post example
|
||||
|
||||
the values in the `ex:` columns are linkified to `example.com/$value`
|
||||
|
||||
| ex:foo | bar | ex:baz |
|
||||
| ------------ | -------- | ------ |
|
||||
| asdf | nice | fgsfds |
|
||||
| more one row | hi hello | aaa |
|
||||
|
||||
and the table can be sorted by clicking the headers
|
||||
|
||||
the difference is that with `copyparty_pre` you'll probably break various copyparty features but if you use `copyparty_post` then future copyparty versions will probably break you
|
||||
|
||||
|
||||
|
||||
|
||||
# here's the plugins
|
||||
if there is anything below this line in the preview then the plugin feature is disabled (good)
|
||||
|
||||
|
||||
|
||||
|
||||
```copyparty_pre
|
||||
ctor() {
|
||||
md_plug['h'] = {
|
||||
on: false,
|
||||
lv: -1,
|
||||
path: []
|
||||
}
|
||||
},
|
||||
walkTokens(token) {
|
||||
if (token.type == 'heading') {
|
||||
var h = md_plug['h'],
|
||||
is_dir = token.text.endsWith('/');
|
||||
|
||||
if (h.lv >= token.depth) {
|
||||
h.on = false;
|
||||
}
|
||||
if (!h.on && is_dir) {
|
||||
h.on = true;
|
||||
h.lv = token.depth;
|
||||
h.path = [token.text];
|
||||
}
|
||||
else if (h.on && h.lv < token.depth) {
|
||||
h.path = h.path.slice(0, token.depth - h.lv);
|
||||
h.path.push(token.text);
|
||||
}
|
||||
if (!h.on)
|
||||
return false;
|
||||
|
||||
var path = h.path.join('');
|
||||
var emoji = is_dir ? '📂' : '📜';
|
||||
token.tokens[0].text = '<a href="' + path + '">' + emoji + ' ' + path + '</a>';
|
||||
}
|
||||
if (token.type == 'paragraph') {
|
||||
//console.log(JSON.parse(JSON.stringify(token.tokens)));
|
||||
for (var a = 0; a < token.tokens.length; a++) {
|
||||
var t = token.tokens[a];
|
||||
if (t.type == 'text' || t.type == 'strong' || t.type == 'em') {
|
||||
var ret = '', text = t.text;
|
||||
for (var b = 0; b < text.length; b++)
|
||||
ret += (Math.random() > 0.5) ? text[b] : text[b].toUpperCase();
|
||||
|
||||
t.text = ret;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
|
||||
```copyparty_post
|
||||
render(dom) {
|
||||
var ths = dom.querySelectorAll('th');
|
||||
for (var a = 0; a < ths.length; a++) {
|
||||
var th = ths[a];
|
||||
if (th.textContent.indexOf('ex:') === 0) {
|
||||
th.textContent = th.textContent.slice(3);
|
||||
var nrow = 0;
|
||||
while ((th = th.previousSibling) != null)
|
||||
nrow++;
|
||||
|
||||
var trs = ths[a].parentNode.parentNode.parentNode.querySelectorAll('tr');
|
||||
for (var b = 1; b < trs.length; b++) {
|
||||
var td = trs[b].childNodes[nrow];
|
||||
td.innerHTML = '<a href="//example.com/' + td.innerHTML + '">' + td.innerHTML + '</a>';
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
render2(dom) {
|
||||
window.makeSortable(dom.getElementsByTagName('table')[0]);
|
||||
}
|
||||
```
26
srv/test.md
@@ -1,5 +1,16 @@
|
||||
### hello world
|
||||
|
||||
* qwe
|
||||
* asd
|
||||
* zxc
|
||||
* 573
|
||||
* one
|
||||
* two
|
||||
|
||||
* |||
|
||||
|--|--|
|
||||
|listed|table|
|
||||
|
||||
```
|
||||
[72....................................................................]
|
||||
[80............................................................................]
|
||||
@@ -21,6 +32,8 @@
|
||||
l[i]=1I;(){}o0O</> var foo = "$(`bar`)"; a's'd
|
||||
```
|
||||
|
||||
🔍🌽.📕.🍙🔎
|
||||
|
||||
[](#s1)
|
||||
[s1](#s1)
|
||||
[#s1](#s1)
|
||||
@@ -121,6 +134,15 @@ a newline toplevel
|
||||
| a table | on the right |
|
||||
| second row | foo bar |
|
||||
|
||||
||
|
||||
--|:-:|-:
|
||||
a table | big text in this | aaakbfddd
|
||||
second row | centred | bbb
|
||||
|
||||
||
|
||||
--|--|--
|
||||
foo
|
||||
|
||||
* list entry
|
||||
* [x] yes
|
||||
* [ ] no
|
||||
@@ -209,3 +231,7 @@ unrelated neat stuff:
|
||||
awk '/./ {printf "%s %d\n", $0, NR; next} 1' <test.md >ln.md
|
||||
gawk '{print gensub(/([a-zA-Z\.])/,NR" \\1","1")}' <test.md >ln.md
|
||||
```
|
||||
|
||||
a|b|c
|
||||
--|--|--
|
||||
foo
|
||||
|
@@ -3,8 +3,10 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import time
|
||||
import json
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
import subprocess as sp # nosec
|
||||
|
||||
@@ -14,6 +16,12 @@ from copyparty.authsrv import AuthSrv
|
||||
from copyparty import util
|
||||
|
||||
|
||||
class Cfg(Namespace):
|
||||
def __init__(self, a=[], v=[], c=None):
|
||||
ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr mte".split()}
|
||||
super(Cfg, self).__init__(a=a, v=v, c=c, **ex)
|
||||
|
||||
|
||||
class TestVFS(unittest.TestCase):
|
||||
def dump(self, vfs):
|
||||
print(json.dumps(vfs, indent=4, sort_keys=True, default=lambda o: o.__dict__))
|
||||
@@ -30,13 +38,16 @@ class TestVFS(unittest.TestCase):
|
||||
response = self.unfoo(response)
|
||||
self.assertEqual(util.undot(query), response)
|
||||
|
||||
def absify(self, root, names):
|
||||
return ["{}/{}".format(root, x).replace("//", "/") for x in names]
|
||||
|
||||
def ls(self, vfs, vpath, uname):
|
||||
"""helper for resolving and listing a folder"""
|
||||
vn, rem = vfs.get(vpath, uname, True, False)
|
||||
return vn.ls(rem, uname)
|
||||
r1 = vn.ls(rem, uname, False)
|
||||
r2 = vn.ls(rem, uname, False)
|
||||
self.assertEqual(r1, r2)
|
||||
|
||||
fsdir, real, virt = r1
|
||||
real = [x[0] for x in real]
|
||||
return fsdir, real, virt
|
||||
|
||||
def runcmd(self, *argv):
|
||||
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
@@ -59,16 +70,31 @@ class TestVFS(unittest.TestCase):
|
||||
|
||||
if os.path.exists("/Volumes"):
|
||||
devname, _ = self.chkcmd("hdiutil", "attach", "-nomount", "ram://8192")
|
||||
_, _ = self.chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
|
||||
return "/Volumes/cptd"
|
||||
devname = devname.strip()
|
||||
print("devname: [{}]".format(devname))
|
||||
for _ in range(10):
|
||||
try:
|
||||
_, _ = self.chkcmd(
|
||||
"diskutil", "eraseVolume", "HFS+", "cptd", devname
|
||||
)
|
||||
return "/Volumes/cptd"
|
||||
except Exception as ex:
|
||||
print(repr(ex))
|
||||
time.sleep(0.25)
|
||||
|
||||
raise Exception("TODO support windows")
|
||||
raise Exception("ramdisk creation failed")
|
||||
|
||||
def log(self, src, msg):
|
||||
ret = os.path.join(tempfile.gettempdir(), "copyparty-test")
|
||||
try:
|
||||
os.mkdir(ret)
|
||||
finally:
|
||||
return ret
|
||||
|
||||
def log(self, src, msg, c=0):
|
||||
pass
|
||||
|
||||
def test(self):
|
||||
td = self.get_ramdisk() + "/vfs"
|
||||
td = os.path.join(self.get_ramdisk(), "vfs")
|
||||
try:
|
||||
shutil.rmtree(td)
|
||||
except OSError:
|
||||
@@ -88,7 +114,7 @@ class TestVFS(unittest.TestCase):
|
||||
f.write(fn)
|
||||
|
||||
# defaults
|
||||
vfs = AuthSrv(Namespace(c=None, a=[], v=[]), self.log).vfs
|
||||
vfs = AuthSrv(Cfg(), self.log).vfs
|
||||
self.assertEqual(vfs.nodes, {})
|
||||
self.assertEqual(vfs.vpath, "")
|
||||
self.assertEqual(vfs.realpath, td)
|
||||
@@ -96,26 +122,24 @@ class TestVFS(unittest.TestCase):
|
||||
self.assertEqual(vfs.uwrite, ["*"])
|
||||
|
||||
# single read-only rootfs (relative path)
|
||||
vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), self.log).vfs
|
||||
vfs = AuthSrv(Cfg(v=["a/ab/::r"]), self.log).vfs
|
||||
self.assertEqual(vfs.nodes, {})
|
||||
self.assertEqual(vfs.vpath, "")
|
||||
self.assertEqual(vfs.realpath, td + "/a/ab")
|
||||
self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab"))
|
||||
self.assertEqual(vfs.uread, ["*"])
|
||||
self.assertEqual(vfs.uwrite, [])
|
||||
|
||||
# single read-only rootfs (absolute path)
|
||||
vfs = AuthSrv(
|
||||
Namespace(c=None, a=[], v=[td + "//a/ac/../aa//::r"]), self.log
|
||||
).vfs
|
||||
vfs = AuthSrv(Cfg(v=[td + "//a/ac/../aa//::r"]), self.log).vfs
|
||||
self.assertEqual(vfs.nodes, {})
|
||||
self.assertEqual(vfs.vpath, "")
|
||||
self.assertEqual(vfs.realpath, td + "/a/aa")
|
||||
self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa"))
|
||||
self.assertEqual(vfs.uread, ["*"])
|
||||
self.assertEqual(vfs.uwrite, [])
|
||||
|
||||
# read-only rootfs with write-only subdirectory (read-write for k)
|
||||
vfs = AuthSrv(
|
||||
Namespace(c=None, a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]),
|
||||
Cfg(a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]),
|
||||
self.log,
|
||||
).vfs
|
||||
self.assertEqual(len(vfs.nodes), 1)
|
||||
@@ -138,47 +162,69 @@ class TestVFS(unittest.TestCase):
|
||||
n = n.nodes["acb"]
|
||||
self.assertEqual(n.nodes, {})
|
||||
self.assertEqual(n.vpath, "a/ac/acb")
|
||||
self.assertEqual(n.realpath, td + "/a/ac/acb")
|
||||
self.assertEqual(n.realpath, os.path.join(td, "a", "ac", "acb"))
|
||||
self.assertEqual(n.uread, ["k"])
|
||||
self.assertEqual(n.uwrite, ["*", "k"])
|
||||
|
||||
# something funky about the windows path normalization,
|
||||
# doesn't really matter but makes the test messy, TODO?
|
||||
|
||||
fsdir, real, virt = self.ls(vfs, "/", "*")
|
||||
self.assertEqual(fsdir, td)
|
||||
self.assertEqual(real, ["b", "c"])
|
||||
self.assertEqual(list(virt), ["a"])
|
||||
|
||||
fsdir, real, virt = self.ls(vfs, "a", "*")
|
||||
self.assertEqual(fsdir, td + "/a")
|
||||
self.assertEqual(fsdir, os.path.join(td, "a"))
|
||||
self.assertEqual(real, ["aa", "ab"])
|
||||
self.assertEqual(list(virt), ["ac"])
|
||||
|
||||
fsdir, real, virt = self.ls(vfs, "a/ab", "*")
|
||||
self.assertEqual(fsdir, td + "/a/ab")
|
||||
self.assertEqual(fsdir, os.path.join(td, "a", "ab"))
|
||||
self.assertEqual(real, ["aba", "abb", "abc"])
|
||||
self.assertEqual(list(virt), [])
|
||||
|
||||
fsdir, real, virt = self.ls(vfs, "a/ac", "*")
|
||||
self.assertEqual(fsdir, td + "/a/ac")
|
||||
self.assertEqual(fsdir, os.path.join(td, "a", "ac"))
|
||||
self.assertEqual(real, ["aca", "acc"])
|
||||
self.assertEqual(list(virt), [])
|
||||
|
||||
fsdir, real, virt = self.ls(vfs, "a/ac", "k")
|
||||
self.assertEqual(fsdir, td + "/a/ac")
|
||||
self.assertEqual(fsdir, os.path.join(td, "a", "ac"))
|
||||
self.assertEqual(real, ["aca", "acc"])
|
||||
self.assertEqual(list(virt), ["acb"])
|
||||
|
||||
self.assertRaises(util.Pebkac, vfs.get, "a/ac/acb", "*", True, False)
|
||||
|
||||
fsdir, real, virt = self.ls(vfs, "a/ac/acb", "k")
|
||||
self.assertEqual(fsdir, td + "/a/ac/acb")
|
||||
self.assertEqual(fsdir, os.path.join(td, "a", "ac", "acb"))
|
||||
self.assertEqual(real, ["acba", "acbb", "acbc"])
|
||||
self.assertEqual(list(virt), [])
|
||||
|
||||
# admin-only rootfs with all-read-only subfolder
|
||||
vfs = AuthSrv(
|
||||
Cfg(a=["k:k"], v=[".::ak", "a:a:r"]),
|
||||
self.log,
|
||||
).vfs
|
||||
self.assertEqual(len(vfs.nodes), 1)
|
||||
self.assertEqual(vfs.vpath, "")
|
||||
self.assertEqual(vfs.realpath, td)
|
||||
self.assertEqual(vfs.uread, ["k"])
|
||||
self.assertEqual(vfs.uwrite, ["k"])
|
||||
n = vfs.nodes["a"]
|
||||
self.assertEqual(len(vfs.nodes), 1)
|
||||
self.assertEqual(n.vpath, "a")
|
||||
self.assertEqual(n.realpath, os.path.join(td, "a"))
|
||||
self.assertEqual(n.uread, ["*"])
|
||||
self.assertEqual(n.uwrite, [])
|
||||
self.assertEqual(vfs.can_access("/", "*"), [False, False])
|
||||
self.assertEqual(vfs.can_access("/", "k"), [True, True])
|
||||
self.assertEqual(vfs.can_access("/a", "*"), [True, False])
|
||||
self.assertEqual(vfs.can_access("/a", "k"), [True, False])
|
||||
|
||||
# breadth-first construction
|
||||
vfs = AuthSrv(
|
||||
Namespace(
|
||||
c=None,
|
||||
a=[],
|
||||
Cfg(
|
||||
v=[
|
||||
"a/ac/acb:a/ac/acb:w",
|
||||
"a:a:w",
|
||||
@@ -199,7 +245,7 @@ class TestVFS(unittest.TestCase):
|
||||
self.undot(vfs, "./.././foo/..", "")
|
||||
|
||||
# shadowing
|
||||
vfs = AuthSrv(Namespace(c=None, a=[], v=[".::r", "b:a/ac:r"]), self.log).vfs
|
||||
vfs = AuthSrv(Cfg(v=[".::r", "b:a/ac:r"]), self.log).vfs
|
||||
|
||||
fsp, r1, v1 = self.ls(vfs, "", "*")
|
||||
self.assertEqual(fsp, td)
|
||||
@@ -207,20 +253,20 @@ class TestVFS(unittest.TestCase):
|
||||
self.assertEqual(list(v1), ["a"])
|
||||
|
||||
fsp, r1, v1 = self.ls(vfs, "a", "*")
|
||||
self.assertEqual(fsp, td + "/a")
|
||||
self.assertEqual(fsp, os.path.join(td, "a"))
|
||||
self.assertEqual(r1, ["aa", "ab"])
|
||||
self.assertEqual(list(v1), ["ac"])
|
||||
|
||||
fsp1, r1, v1 = self.ls(vfs, "a/ac", "*")
|
||||
fsp2, r2, v2 = self.ls(vfs, "b", "*")
|
||||
self.assertEqual(fsp1, td + "/b")
|
||||
self.assertEqual(fsp2, td + "/b")
|
||||
self.assertEqual(fsp1, os.path.join(td, "b"))
|
||||
self.assertEqual(fsp2, os.path.join(td, "b"))
|
||||
self.assertEqual(r1, ["ba", "bb", "bc"])
|
||||
self.assertEqual(r1, r2)
|
||||
self.assertEqual(list(v1), list(v2))
|
||||
|
||||
# config file parser
|
||||
cfg_path = self.get_ramdisk() + "/test.cfg"
|
||||
cfg_path = os.path.join(self.get_ramdisk(), "test.cfg")
|
||||
with open(cfg_path, "wb") as f:
|
||||
f.write(
|
||||
dedent(
|
||||
@@ -236,7 +282,7 @@ class TestVFS(unittest.TestCase):
|
||||
).encode("utf-8")
|
||||
)
|
||||
|
||||
au = AuthSrv(Namespace(c=[cfg_path], a=[], v=[]), self.log)
|
||||
au = AuthSrv(Cfg(c=[cfg_path]), self.log)
|
||||
self.assertEqual(au.user["a"], "123")
|
||||
self.assertEqual(au.user["asd"], "fgh:jkl")
|
||||
n = au.vfs
|
||||
@@ -248,10 +294,11 @@ class TestVFS(unittest.TestCase):
|
||||
self.assertEqual(len(n.nodes), 1)
|
||||
n = n.nodes["dst"]
|
||||
self.assertEqual(n.vpath, "dst")
|
||||
self.assertEqual(n.realpath, td + "/src")
|
||||
self.assertEqual(n.realpath, os.path.join(td, "src"))
|
||||
self.assertEqual(n.uread, ["a", "asd"])
|
||||
self.assertEqual(n.uwrite, ["asd"])
|
||||
self.assertEqual(len(n.nodes), 0)
|
||||
|
||||
os.chdir(tempfile.gettempdir())
|
||||
shutil.rmtree(td)
|
||||
os.unlink(cfg_path)