Mirror of https://github.com/9001/copyparty.git (synced 2025-10-29 02:53:47 +00:00)

Compare commits — 143 commits
Commits in this compare (abbreviated SHA1s):

83fec3cca7, 3cefc99b7d, 3a38dcbc05, 7ff08bce57, fd490af434, 1195b8f17e, 28dce13776, 431f20177a, 87aff54d9d, f50462de82, 9bda8c7eb6, e83c63d239, b38533b0cc, 5ccca3fbd5, 9e850fc3ab, ffbfcd7e00, 5ea7590748, 290c3bc2bb, b12131e91c, 3b354447b0, d09ec6feaa, 21405c3fda, 13e5c96cab, 426687b75e, c8f59fb978, 871dde79a9, e14d81bc6f, 514d046d1f, 4ed9528d36, 625560e642, 73ebd917d1, cd3e0afad2, d8d1f94a86, 00dfd8cfd1, 273de6db31, c6c0eeb0ff, e70c74a3b5, f7d939eeab, e815c091b9, 963529b7cf, 638a52374d, d9d42b7aa2, ec7e5f36a2, 56110883ea, 7f8d7d6006, 49e4fb7e12, 8dbbea473f, 3d375d5114, f3eae67d97, 40c1b19235, ccaf0ab159, d07f147423, f5cb9f92b9, f991f74983, 6b3295059e, b18a07ae6b, 8ab03dabda, 5e760e35dc, afbfa04514, 7aace470c5, b4acb24f6a, bcee8a4934, 36b0718542, 9a92bca45d, b07445a363, a62ec0c27e, 57e3a2d382, b61022b374, a3e2b2ec87, a83d3f8801, 90c5f2b9d2, 4885653c07, 21e1cd87ca, 81f82e8e9f, c0e31851da, 6599c3eced, 5d6c61a861, 1a5c66edd3, deae9fe95a, abd65c6334, 8137a99904, 6f6f9c1f74, 7b575f716f, 6ba6ea3572, 9a22ad5ea3, beaab9778e, f327bdb6b4, ae180e0f5f, e3f1d19756, 93c2bd6ef6, 4d0e5ff6db, 0893f06919, 46b6abde3f, 0696610dee, edf0d3684c, 7af159f5f6, 7f2cb6764a, 96495a9bf1, b2fafec5fc, 0850b8ae2b, 8a68a96c57, d3aae8ed6a, c62ebadda8, ffcee6d390, de32838346, b9a4e47ea2, 57d994422d, 6ecd745323, bd769f5bdb, 2381692aba, 24fdada0a0, bb5169710a, 9cde2352f3, 482dd7a938, bddcc69438, 19d4540630, 4f5f6c81f5, 7e4c1238ba, f7196ac773, 7a7c832000, 2b4ccdbebb, 0d16b49489, 768405b691, da01413b7b, 914e22c53e, 43a23bf733, 92bb00c6d2, b0b97a2648, 2c452fe323, ad73d0c77d, 7f9bf1c78c, 61a6bc3a65, 46e10b0e9f, 8441206e26, 9fdc5ee748, 00ff133387, 96164cb934, 82fb21ae69, 89d4a2b4c4, fc0c7ff374, 5148c4f2e9, c3b59f7bcf, 61e148202b
.vscode/launch.json (vendored, 5 changes)

@@ -13,10 +13,13 @@
 "-ed",
 "-emp",
 "-e2dsa",
+"-e2ts",
 "-a",
 "ed:wark",
 "-v",
-"srv::r:aed:cnodupe"
+"srv::r:aed:cnodupe",
+"-v",
+"dist:dist:r"
 ]
 },
 {

.vscode/tasks.json (vendored, 6 changes)

@@ -8,8 +8,10 @@
 },
 {
 "label": "no_dbg",
-"command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -a ed:wark -v srv::r:aed:cnodupe ;exit 1",
-"type": "shell"
+"type": "shell",
+"command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -e2ts -a ed:wark -v srv::r:aed:cnodupe -v dist:dist:r ;exit 1"
+// -v ~/Music/mt:mt:r:cmtp=.bpm=~/dev/copyparty/bin/mtag/audio-bpm.py:cmtp=key=~/dev/copyparty/bin/mtag/audio-key.py:ce2tsr
+// -v ~/Music/mt:mt:r:cmtp=.bpm=~/dev/copyparty/bin/mtag/audio-bpm.py:ce2tsr
 }
 ]
 }

README.md (148 changes)

@@ -13,6 +13,31 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * code standard: `black`

+## readme toc
+
+* top
+* [quickstart](#quickstart)
+* [notes](#notes)
+* [status](#status)
+* [bugs](#bugs)
+* [usage](#usage)
+* [zip downloads](#zip-downloads)
+* [searching](#searching)
+* [search configuration](#search-configuration)
+* [metadata from audio files](#metadata-from-audio-files)
+* [file parser plugins](#file-parser-plugins)
+* [complete examples](#complete-examples)
+* [client examples](#client-examples)
+* [dependencies](#dependencies)
+* [optional gpl stuff](#optional-gpl-stuff)
+* [sfx](#sfx)
+* [sfx repack](#sfx-repack)
+* [install on android](#install-on-android)
+* [dev env setup](#dev-env-setup)
+* [how to release](#how-to-release)
+* [todo](#todo)
+
 ## quickstart

 download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!

@@ -48,7 +73,7 @@ you may also want these, especially on servers:
 * ☑ symlink/discard existing files (content-matching)
 * download
 * ☑ single files in browser
-* ✖ folders as zip files
+* ☑ folders as zip / tar files
 * ☑ FUSE client (read-only)
 * browser
 * ☑ tree-view

@@ -59,7 +84,7 @@ you may also want these, especially on servers:
 * server indexing
 * ☑ locate files by contents
 * ☑ search by name/path/date/size
-* ✖ search by ID3-tags etc.
+* ☑ search by ID3-tags etc.
 * markdown
 * ☑ viewer
 * ☑ editor (sure why not)

@@ -69,7 +94,38 @@ summary: it works! you can use it! (but technically not even close to beta)

 # bugs

-* probably, pls let me know
+* Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
+* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
+* Windows: python 2.7 cannot handle filenames with mojibake
+* hiding the contents at url `/d1/d2/d3` using `-v :d1/d2/d3:cd2d` has the side-effect of creating databases (for files/tags) inside folders d1 and d2, and those databases take precedence over the main db at the top of the vfs - this means all files in d2 and below will be reindexed unless you already had a vfs entry at or below d2
+* probably more, pls let me know
+
+
+# usage
+
+the browser has the following hotkeys
+* `0..9` jump to 10%..90%
+* `U/O` skip 10sec back/forward
+* `J/L` prev/next song
+* `I/K` prev/next folder
+* `P` parent folder
+
+
+## zip downloads
+
+the `zip` link next to folders can produce various types of zip/tar files using these alternatives in the browser settings tab:
+
+| name | url-suffix | description |
+|--|--|--|
+| `tar` | `?tar` | plain gnutar, works great with `curl \| tar -xv` |
+| `zip` | `?zip=utf8` | works everywhere, glitchy filenames on win7 and older |
+| `zip_dos` | `?zip` | traditional cp437 (no unicode) to fix glitchy filenames |
+| `zip_crc` | `?zip=crc` | cp437 with crc32 computed early for truly ancient software |
+
+* hidden files (dotfiles) are excluded unless `-ed`
+* the up2k.db is always excluded
+* `zip_crc` will take longer to download since the server has to read each file twice
+* please let me know if you find a program old enough to actually need this
+
 # searching

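
The `?tar` suffix composes with ordinary HTTP clients; as a quick sketch (assuming the default port 3923 and a readable folder mounted at `/music`), `curl http://127.0.0.1:3923/music/?tar | tar -xv` should stream that folder straight into a local extraction, exactly like the `curl | tar -xv` hint in the table above.
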
@@ -82,7 +138,62 @@ path/name queries are space-separated, AND'ed together, and words are negated wi
 * path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
 * name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9)

-other metadata (like song tags etc) are not yet indexed for searching
+add `-e2ts` to also scan/index tags from music files:
+
+
+## search configuration
+
+searching relies on two databases, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`). Configuration can be done through arguments, volume flags, or a mix of both.
+
+through arguments:
+* `-e2d` enables file indexing on upload
+* `-e2ds` scans writable folders on startup
+* `-e2dsa` scans all mounted volumes (including readonly ones)
+* `-e2t` enables metadata indexing on upload
+* `-e2ts` scans for tags in all files that don't have tags yet
+* `-e2tsr` deletes all existing tags, so a full reindex
+
+the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
+* `-v ~/music::r:ce2dsa:ce2tsr` does a full reindex of everything on startup
+* `-v ~/music::r:cd2d` disables **all** indexing, even if any `-e2*` are on
+* `-v ~/music::r:cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`
+
+`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those
+
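
Putting those flags together, a minimal sketch of a search-enabled invocation (paths and volume names here are only examples) would be `python copyparty-sfx.py -e2dsa -e2ts -v ~/music:music:r`: `-e2dsa` builds the file index for every volume on startup and `-e2ts` adds the tag index that metadata search relies on.
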
+## metadata from audio files
+
+`-mte` decides which tags to index and display in the browser (and also the display order), this can be changed per-volume:
+* `-v ~/music::r:cmte=title,artist` indexes and displays *title* followed by *artist*
+
+if you add/remove a tag from `mte` you will need to run with `-e2tsr` once to rebuild the database, otherwise only new files will be affected
+
+`-mtm` can be used to add or redefine a metadata mapping, say you have media files with `foo` and `bar` tags and you want them to display as `qux` in the browser (preferring `foo` if both are present), then do `-mtm qux=foo,bar` and now you can `-mte artist,title,qux`
+
+tags that start with a `.` such as `.bpm` and `.dur`(ation) indicate numeric value
+
+see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)
+
+`--no-mutagen` disables mutagen and uses ffprobe instead, which...
+* is about 20x slower than mutagen
+* catches a few tags that mutagen doesn't
+* avoids pulling any GPL code into copyparty
+* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve
+
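
As a consolidated sketch of that mapping workflow (reusing the hypothetical `foo`/`bar`/`qux` tag names from the paragraph above), the first launch after changing the tag list could look like `python copyparty-sfx.py -e2dsa -e2tsr -mtm qux=foo,bar -mte artist,title,qux -v ~/music:music:r`, where the one-time `-e2tsr` forces the rebuild that a changed `mte` requires.
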
+## file parser plugins
+
+copyparty can invoke external programs to collect additional metadata for files using `mtp` (as argument or volume flag), there is a default timeout of 30sec
+
+* `-mtp .bpm=~/bin/audio-bpm.py` will execute `~/bin/audio-bpm.py` with the audio file as argument 1 to provide the `.bpm` tag, if that does not exist in the audio metadata
+* `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
+* `-v ~/music::r:cmtp=.bpm=~/bin/audio-bpm.py:cmtp=key=f,t5,~/bin/audio-key.py` both as a per-volume config wow this is getting ugly
+
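
Judging from the plugins added later in this commit range (audio-key.py, sleep.py), an `mtp` parser is simply a program that receives the media file's path as argument 1 and prints the tag value to stdout; a minimal hypothetical sketch (the `.filesize` tag name and script path are made up for illustration, not part of copyparty):

```
#!/usr/bin/env python
# hypothetical mtp parser; prints the file's size in bytes so copyparty
# can store it as a numeric tag (a leading "." in the tag name = numeric)
import os
import sys

print(os.path.getsize(sys.argv[1]))
```

wired up along the lines of `-mtp .filesize=f,~/bin/filesize.py`, following the syntax described above.
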
+## complete examples
+
+* read-only music server with bpm and key scanning
+  `python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts -mtp .bpm=f,audio-bpm.py -mtp key=f,audio-key.py`
+
 # client examples

@@ -91,19 +202,43 @@ other metadata (like song tags etc) are not yet indexed for searching
 * `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
 * `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`

+* curl/wget: upload some files (post=file, chunk=stdin)
+* `post(){ curl -b cppwd=wark http://127.0.0.1:3923/ -F act=bput -F f=@"$1";}`
+  `post movie.mkv`
+* `post(){ wget --header='Cookie: cppwd=wark' http://127.0.0.1:3923/?raw --post-file="$1" -O-;}`
+  `post movie.mkv`
+* `chunk(){ curl -b cppwd=wark http://127.0.0.1:3923/ -T-;}`
+  `chunk <movie.mkv`
+
 * FUSE: mount a copyparty server as a local filesystem
 * cross-platform python client available in [./bin/](bin/)
 * [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)

+copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads:
+
+    b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
+    b512 <movie.mkv
+
|
# dependencies
|
||||||
|
|
||||||
* `jinja2`
|
* `jinja2` (is built into the SFX)
|
||||||
|
|
||||||
optional, will eventually enable thumbnails:
|
**optional,** enables music tags:
|
||||||
|
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
|
||||||
|
* or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
|
||||||
|
|
||||||
|
**optional,** will eventually enable thumbnails:
|
||||||
* `Pillow` (requires py2.7 or py3.5+)
|
* `Pillow` (requires py2.7 or py3.5+)
|
||||||
|
|
||||||
|
|
||||||
|
## optional gpl stuff
|
||||||
|
|
||||||
|
some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag)
|
||||||
|
|
||||||
|
these are standalone and will never be imported / evaluated by copyparty
|
||||||
|
|
||||||
|
|
||||||
# sfx
|
# sfx
|
||||||
|
|
||||||
currently there are two self-contained binaries:
|
currently there are two self-contained binaries:
|
||||||
@@ -158,6 +293,7 @@ pip install black bandit pylint flake8 # vscode tooling
 in the `scripts` folder:

 * run `make -C deps-docker` to build all dependencies
+* `git tag v1.2.3 && git push origin --tags`
 * create github release with `make-tgz-release.sh`
 * upload to pypi with `make-pypi-release.(sh|bat)`
 * create sfx with `make-sfx.sh`

@@ -1,4 +1,4 @@
-# copyparty-fuse.py
+# [`copyparty-fuse.py`](copyparty-fuse.py)
 * mount a copyparty server as a local filesystem (read-only)
 * **supports Windows!** -- expect `194 MiB/s` sequential read
 * **supports Linux** -- expect `117 MiB/s` sequential read

@@ -29,7 +29,7 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas

-# copyparty-fuse🅱️.py
+# [`copyparty-fuse🅱️.py`](copyparty-fuseb.py)
 * mount a copyparty server as a local filesystem (read-only)
 * does the same thing except more correct, `samba` approves
 * **supports Linux** -- expect `18 MiB/s` (wait what)

@@ -37,5 +37,11 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas

-# copyparty-fuse-streaming.py
+# [`copyparty-fuse-streaming.py`](copyparty-fuse-streaming.py)
 * pretend this doesn't exist
+
+
+# [`mtag/`](mtag/)
+* standalone programs which perform misc. file analysis
+* copyparty can Popen programs like these during file indexing to collect additional metadata

@@ -1008,6 +1008,12 @@ def main():
 log = null_log
 dbg = null_log

+if ar.a and ar.a.startswith("$"):
+    fn = ar.a[1:]
+    log("reading password from file [{}]".format(fn))
+    with open(fn, "rb") as f:
+        ar.a = f.read().decode("utf-8").strip()
+
 if WINDOWS:
 os.system("rem")

bin/mtag/README.md (new file, 34 lines)

standalone programs which take an audio file as argument

some of these rely on libraries which are not MIT-compatible

* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3


# dependencies

run [`install-deps.sh`](install-deps.sh) to build/install most dependencies required by these programs (supports windows/linux/macos)

*alternatively* (or preferably) use packages from your distro instead, then you'll need at least these:

* from distro: `numpy vamp-plugin-sdk beatroot-vamp mixxx-keyfinder ffmpeg`
* from pypi: `keyfinder vamp`


# usage from copyparty

`copyparty -e2dsa -e2ts -mtp key=f,audio-key.py -mtp .bpm=f,audio-bpm.py`

* `f,` makes the detected value replace any existing values
* the `.` in `.bpm` indicates numeric value
* assumes the python files are in the folder you're launching copyparty from, replace the filename with a relative/absolute path if that's not the case
* `mtp` modules will not run if a file has existing tags in the db, so clear out the tags with `-e2tsr` the first time you launch with new `mtp` options


## usage with volume-flags

instead of affecting all volumes, you can set the options for just one volume like so:

```
copyparty -v /mnt/nas/music:/music:r:cmtp=key=f,audio-key.py:cmtp=.bpm=f,audio-bpm.py:ce2dsa:ce2ts
```

bin/mtag/audio-bpm.py (new executable file, 69 lines)

#!/usr/bin/env python

import os
import sys
import vamp
import tempfile
import numpy as np
import subprocess as sp

from copyparty.util import fsenc

"""
dep: vamp
dep: beatroot-vamp
dep: ffmpeg
"""


def det(tf):
    # fmt: off
    sp.check_call([
        "ffmpeg",
        "-nostdin",
        "-hide_banner",
        "-v", "fatal",
        "-ss", "13",
        "-y", "-i", fsenc(sys.argv[1]),
        "-ac", "1",
        "-ar", "22050",
        "-t", "300",
        "-f", "f32le",
        tf
    ])
    # fmt: on

    with open(tf, "rb") as f:
        d = np.fromfile(f, dtype=np.float32)

    try:
        # 98% accuracy on jcore
        c = vamp.collect(d, 22050, "beatroot-vamp:beatroot")
        cl = c["list"]
    except:
        # fallback; 73% accuracy
        plug = "vamp-example-plugins:fixedtempo"
        c = vamp.collect(d, 22050, plug, parameters={"maxdflen": 40})
        print(c["list"][0]["label"].split(" ")[0])
        return

    # throws if detection failed:
    bpm = float(cl[-1]["timestamp"] - cl[1]["timestamp"])
    bpm = round(60 * ((len(cl) - 1) / bpm), 2)
    print(f"{bpm:.2f}")


def main():
    with tempfile.NamedTemporaryFile(suffix=".pcm", delete=False) as f:
        f.write(b"h")
        tf = f.name

    try:
        det(tf)
    except:
        pass
    finally:
        os.unlink(tf)


if __name__ == "__main__":
    main()

bin/mtag/audio-key.py (new executable file, 18 lines)

#!/usr/bin/env python

import sys
import keyfinder

"""
dep: github/mixxxdj/libkeyfinder
dep: pypi/keyfinder
dep: ffmpeg

note: cannot fsenc
"""


try:
    print(keyfinder.key(sys.argv[1]).camelot())
except:
    pass

bin/mtag/install-deps.sh (new executable file, 265 lines)

#!/bin/bash
set -e


# install dependencies for audio-*.py
#
# linux: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf
# win64: requires msys2-mingw64 environment
# macos: requires macports
#
# has the following manual dependencies, especially on mac:
#   https://www.vamp-plugins.org/pack.html
#
# installs stuff to the following locations:
#   ~/pe/
#   whatever your python uses for --user packages
#
# does the following terrible things:
#   modifies the keyfinder python lib to load the .so in ~/pe


linux=1

win=
[ ! -z "$MSYSTEM" ] || [ -e /msys2.exe ] && {
    [ "$MSYSTEM" = MINGW64 ] || {
        echo windows detected, msys2-mingw64 required
        exit 1
    }
    pacman -S --needed mingw-w64-x86_64-{ffmpeg,python,python-pip,vamp-plugin-sdk}
    win=1
    linux=
}

mac=
[ $(uname -s) = Darwin ] && {
    #pybin="$(printf '%s\n' /opt/local/bin/python* | (sed -E 's/(.*\/[^/0-9]+)([0-9]?[^/]*)$/\2 \1/' || cat) | (sort -nr || cat) | (sed -E 's/([^ ]*) (.*)/\2\1/' || cat) | grep -E '/(python|pypy)[0-9\.-]*$' | head -n 1)"
    pybin=/opt/local/bin/python3.9
    [ -e "$pybin" ] || {
        echo mac detected, python3 from macports required
        exit 1
    }
    pkgs='ffmpeg python39 py39-wheel'
    ninst=$(port installed | awk '/^ /{print$1}' | sort | uniq | grep -E '^('"$(echo "$pkgs" | tr ' ' '|')"')$' | wc -l)
    [ $ninst -eq 3 ] || {
        sudo port install $pkgs
    }
    mac=1
    linux=
}

hash -r

[ $mac ] || {
    command -v python3 && pybin=python3 || pybin=python
}

$pybin -m pip install --user numpy


command -v gnutar && tar() { gnutar "$@"; }
command -v gtar && tar() { gtar "$@"; }
command -v gsed && sed() { gsed "$@"; }


need() {
    command -v $1 >/dev/null || {
        echo need $1
        exit 1
    }
}
need cmake
need ffmpeg
need $pybin
#need patchelf


td="$(mktemp -d)"
cln() {
    rm -rf "$td"
}
trap cln EXIT
cd "$td"
pwd


dl_text() {
    command -v curl >/dev/null && exec curl "$@"
    exec wget -O- "$@"
}
dl_files() {
    local yolo= ex=
    [ $1 = "yolo" ] && yolo=1 && ex=k && shift
    command -v curl >/dev/null && exec curl -${ex}JOL "$@"

    [ $yolo ] && ex=--no-check-certificate
    exec wget --trust-server-names $ex "$@"
}
export -f dl_files


github_tarball() {
    dl_text "$1" |
    tee json |
    (
        # prefer jq if available
        jq -r '.tarball_url' ||

        # fallback to awk (sorry)
        awk -F\" '/"tarball_url": "/ {print$4}'
    ) |
    tee /dev/stderr |
    tr -d '\r' | tr '\n' '\0' |
    xargs -0 bash -c 'dl_files "$@"' _
}


gitlab_tarball() {
    dl_text "$1" |
    tee json |
    (
        # prefer jq if available
        jq -r '.[0].assets.sources[]|select(.format|test("tar.gz")).url' ||

        # fallback to abomination
        tr \" '\n' | grep -E '\.tar\.gz$' | head -n 1
    ) |
    tee /dev/stderr |
    tr -d '\r' | tr '\n' '\0' |
    tee links |
    xargs -0 bash -c 'dl_files "$@"' _
}


install_keyfinder() {
    # windows support:
    #   use msys2 in mingw-w64 mode
    #   pacman -S --needed mingw-w64-x86_64-{ffmpeg,python}

    github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest

    tar -xf mixxxdj-libkeyfinder-*
    rm -- *.tar.gz
    cd mixxxdj-libkeyfinder*

    h="$HOME"
    so="lib/libkeyfinder.so"
    memes=()

    [ $win ] &&
        so="bin/libkeyfinder.dll" &&
        h="$(printf '%s\n' "$USERPROFILE" | tr '\\' '/')" &&
        memes+=(-G "MinGW Makefiles" -DBUILD_TESTING=OFF)

    [ $mac ] &&
        so="lib/libkeyfinder.dylib"

    cmake -DCMAKE_INSTALL_PREFIX="$h/pe/keyfinder" "${memes[@]}" -S . -B build
    cmake --build build --parallel $(nproc || echo 4)
    cmake --install build

    libpath="$h/pe/keyfinder/$so"
    [ $linux ] && [ ! -e "$libpath" ] &&
        so=lib64/libkeyfinder.so

    libpath="$h/pe/keyfinder/$so"
    [ -e "$libpath" ] || {
        echo "so not found at $sop"
        exit 1
    }

    # rm -rf /Users/ed/Library/Python/3.9/lib/python/site-packages/*keyfinder*
    CFLAGS="-I$h/pe/keyfinder/include -I/opt/local/include" \
    LDFLAGS="-L$h/pe/keyfinder/lib -L$h/pe/keyfinder/lib64 -L/opt/local/lib" \
    PKG_CONFIG_PATH=/c/msys64/mingw64/lib/pkgconfig \
    $pybin -m pip install --user keyfinder

    pypath="$($pybin -c 'import keyfinder; print(keyfinder.__file__)')"
    for pyso in "${pypath%/*}"/*.so; do
        [ -e "$pyso" ] || break
        patchelf --set-rpath "${libpath%/*}" "$pyso" ||
            echo "WARNING: patchelf failed (only fatal on musl-based distros)"
    done

    mv "$pypath"{,.bak}
    (
        printf 'import ctypes\nctypes.cdll.LoadLibrary("%s")\n' "$libpath"
        cat "$pypath.bak"
    ) >"$pypath"

    echo
    echo libkeyfinder successfully installed to the following locations:
    echo "  $libpath"
    echo "  $pypath"
}


have_beatroot() {
    $pybin -c 'import vampyhost, sys; plugs = vampyhost.list_plugins(); sys.exit(0 if "beatroot-vamp:beatroot" in plugs else 1)'
}


install_vamp() {
    # windows support:
    #   use msys2 in mingw-w64 mode
    #   pacman -S --needed mingw-w64-x86_64-{ffmpeg,python,python-pip,vamp-plugin-sdk}

    $pybin -m pip install --user vamp

    have_beatroot || {
        printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n'
        (dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/885/beatroot-vamp-v1.0.tar.gz)
        sha512sum -c <(
            echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -"
        ) <beatroot-vamp-v1.0.tar.gz
        tar -xf beatroot-vamp-v1.0.tar.gz
        cd beatroot-vamp-v1.0
        make -f Makefile.linux -j4
        # /home/ed/vamp /home/ed/.vamp /usr/local/lib/vamp
        mkdir ~/vamp
        cp -pv beatroot-vamp.* ~/vamp/
    }

    have_beatroot &&
        printf '\033[32mfound the vamp beatroot plugin, nice\033[0m\n' ||
        printf '\033[31mWARNING: could not find the vamp beatroot plugin, please install it for optimal results\033[0m\n'
}


# not in use because it kinda segfaults, also no windows support
install_soundtouch() {
    gitlab_tarball https://gitlab.com/api/v4/projects/soundtouch%2Fsoundtouch/releases

    tar -xvf soundtouch-*
    rm -- *.tar.gz
    cd soundtouch-*

    # https://github.com/jrising/pysoundtouch
    ./bootstrap
    ./configure --enable-integer-samples CXXFLAGS="-fPIC" --prefix="$HOME/pe/soundtouch"
    make -j$(nproc || echo 4)
    make install

    CFLAGS=-I$HOME/pe/soundtouch/include/ \
    LDFLAGS=-L$HOME/pe/soundtouch/lib \
    $pybin -m pip install --user git+https://github.com/snowxmas/pysoundtouch.git

    pypath="$($pybin -c 'import importlib; print(importlib.util.find_spec("soundtouch").origin)')"
    libpath="$(echo "$HOME/pe/soundtouch/lib/")"
    patchelf --set-rpath "$libpath" "$pypath"

    echo
    echo soundtouch successfully installed to the following locations:
    echo "  $libpath"
    echo "  $pypath"
}


[ "$1" = keyfinder ] && { install_keyfinder; exit $?; }
[ "$1" = soundtouch ] && { install_soundtouch; exit $?; }
[ "$1" = vamp ] && { install_vamp; exit $?; }

echo no args provided, installing keyfinder and vamp
install_keyfinder
install_vamp

bin/mtag/sleep.py (new file, 8 lines)

#!/usr/bin/env python

import time
import random

v = random.random() * 6
time.sleep(v)
print(f"{v:.2f}")

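
sleep.py looks like a dummy parser for exercising the `mtp` machinery: it waits a random amount of time (up to six seconds) and prints the delay as its tag value. Following the syntax from the mtag README above, a hypothetical test hookup would be something like `copyparty -e2dsa -e2ts -mtp .delay=f,bin/mtag/sleep.py` (the `.delay` tag name is invented here purely for illustration).
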
@@ -12,7 +12,7 @@
 Description=copyparty file server

 [Service]
-ExecStart=/usr/bin/python /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
+ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
 ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'

 [Install]

@@ -12,16 +12,19 @@ import re
 import os
 import sys
 import time
+import signal
 import shutil
 import filecmp
 import locale
 import argparse
+import threading
+import traceback
 from textwrap import dedent

-from .__init__ import E, WINDOWS, VT100
+from .__init__ import E, WINDOWS, VT100, PY2
 from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
 from .svchub import SvcHub
-from .util import py_desc, align_tab
+from .util import py_desc, align_tab, IMPLICATIONS

 HAVE_SSL = True
 try:

@@ -53,6 +56,10 @@ class RiceFormatter(argparse.HelpFormatter):
 return "".join(indent + line + "\n" for line in text.splitlines())


+def warn(msg):
+    print("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
+
+
 def ensure_locale():
 for x in [
 "en_US.UTF-8",

@@ -160,11 +167,24 @@ def configure_ssl_ciphers(al):
 sys.exit(0)


-def main():
+def sighandler(signal=None, frame=None):
+    msg = [""] * 5
+    for th in threading.enumerate():
+        msg.append(str(th))
+        msg.extend(traceback.format_stack(sys._current_frames()[th.ident]))
+
+    msg.append("\n")
+    print("\n".join(msg))
+
+
+def main(argv=None):
 time.strptime("19970815", "%Y%m%d") # python#7980
 if WINDOWS:
 os.system("rem") # enables colors
+
+    if argv is None:
+        argv = sys.argv
+
 desc = py_desc().replace("[", "\033[1;30m[")

 f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0m\n'

@@ -177,13 +197,13 @@ def main():
 deprecated = [["-e2s", "-e2ds"]]
 for dk, nk in deprecated:
 try:
-idx = sys.argv.index(dk)
+idx = argv.index(dk)
 except:
 continue

 msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
 print(msg.format(dk, nk))
-sys.argv[idx] = nk
+argv[idx] = nk
 time.sleep(2)

 ap = argparse.ArgumentParser(

@@ -198,7 +218,7 @@ def main():
 and "cflag" is config flags to set on this volume

 list of cflags:
-cnodupe rejects existing files (instead of symlinking them)
+"cnodupe" rejects existing files (instead of symlinking them)

 example:\033[35m
 -a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m

@@ -237,34 +257,49 @@ def main():
 ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
 ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
 ap.add_argument("-q", action="store_true", help="quiet")
+ap.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
 ap.add_argument("-ed", action="store_true", help="enable ?dots")
 ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
-ap.add_argument("-e2d", action="store_true", help="enable up2k database")
-ap.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
-ap.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
 ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
 ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
 ap.add_argument("-nih", action="store_true", help="no info hostname")
 ap.add_argument("-nid", action="store_true", help="no info disk-usage")
-ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
-ap.add_argument("--urlform", type=str, default="print,get", help="how to handle url-forms")
+ap.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
+ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile (for debugging)")
+ap.add_argument("--no-scandir", action="store_true", help="disable scandir (for debugging)")
+ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms")
 ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
+
+ap2 = ap.add_argument_group('database options')
+ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
+ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
+ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
+ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
+ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
+ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
+ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead")
+ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
+ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
+ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
+    default="circle,album,.tn,artist,title,.bpm,key,.dur,.q")
+ap2.add_argument("-mtp", metavar="M=[f,]bin", action="append", type=str, help="read tag M using bin")
+ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
+
 ap2 = ap.add_argument_group('SSL/TLS options')
 ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
 ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
-ap2.add_argument("--ssl-ver", type=str, help="ssl/tls versions to allow")
+ap2.add_argument("--ssl-ver", metavar="LIST", type=str, help="ssl/tls versions to allow")
 ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
 ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
 ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
-al = ap.parse_args()
+
+al = ap.parse_args(args=argv[1:])
 # fmt: on

-if al.e2dsa:
-    al.e2ds = True
-
-if al.e2ds:
-    al.e2d = True
+# propagate implications
+for k1, k2 in IMPLICATIONS:
+    if getattr(al, k1):
+        setattr(al, k2, True)

 al.i = al.i.split(",")
 try:

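
`IMPLICATIONS` is imported from `copyparty/util.py`, which is not part of this compare view; judging from the flag relationships spelled out in the README above (`-e2dsa` sets `-e2ds`, `-e2ds` sets `-e2d`, `-e2tsr` sets `-e2ts`, `-e2ts` sets `-e2t`), a plausible sketch of the table this loop expects would be:

```
# hypothetical shape of IMPLICATIONS (the real table lives in copyparty/util.py,
# outside this diff); each pair means "if the first flag is set, also set the second"
IMPLICATIONS = [
    ("e2dsa", "e2ds"),
    ("e2ds", "e2d"),
    ("e2tsr", "e2ts"),
    ("e2ts", "e2t"),
]
```
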
@@ -283,7 +318,15 @@ def main():
 if al.ciphers:
 configure_ssl_ciphers(al)
 else:
-print("\033[33m ssl module does not exist; cannot enable https\033[0m\n")
+warn("ssl module does not exist; cannot enable https")
+
+if PY2 and WINDOWS and al.e2d:
+    warn(
+        "windows py2 cannot do unicode filenames with -e2d\n"
+        + " (if you crash with codec errors then that is why)"
+    )
+
+# signal.signal(signal.SIGINT, sighandler)

 SvcHub(al).run()

@@ -1,8 +1,8 @@
 # coding: utf-8

-VERSION = (0, 8, 1)
-CODENAME = "keeping track"
-BUILD_DT = (2021, 2, 22)
+VERSION = (0, 10, 5)
+CODENAME = "zip it"
+BUILD_DT = (2021, 3, 31)

 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

@@ -1,12 +1,14 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals

-import os
 import re
+import os
+import sys
+import stat
 import threading

 from .__init__ import PY2, WINDOWS
-from .util import undot, Pebkac, fsdec, fsenc
+from .util import IMPLICATIONS, undot, Pebkac, fsdec, fsenc, statdir, nuprint


 class VFS(object):

@@ -21,6 +23,14 @@ class VFS(object):
 self.nodes = {} # child nodes
 self.all_vols = {vpath: self} # flattened recursive

+    def __repr__(self):
+        return "VFS({})".format(
+            ", ".join(
+                "{}={!r}".format(k, self.__dict__[k])
+                for k in "realpath vpath uread uwrite flags".split()
+            )
+        )
+
 def _trk(self, vol):
 self.all_vols[vol.vpath] = vol
 return vol

@@ -44,6 +54,7 @@ class VFS(object):
 self.uwrite,
 self.flags,
 )
+self._trk(vn)
 self.nodes[name] = vn
 return self._trk(vn.add(src, dst))

@@ -102,12 +113,11 @@ class VFS(object):
 return fsdec(os.path.realpath(fsenc(rp)))

-def ls(self, rem, uname):
+def ls(self, rem, uname, scandir, lstat=False):
 """return user-readable [fsdir,real,virt] items at vpath"""
 virt_vis = {} # nodes readable by user
 abspath = self.canonical(rem)
-items = os.listdir(fsenc(abspath))
-real = [fsdec(x) for x in items]
+real = list(statdir(nuprint, scandir, lstat, abspath))
 real.sort()
 if not rem:
 for name, vn2 in sorted(self.nodes.items()):

@@ -115,10 +125,77 @@ class VFS(object):
 virt_vis[name] = vn2

 # no vfs nodes in the list of real inodes
-real = [x for x in real if x not in self.nodes]
+real = [x for x in real if x[0] not in self.nodes]

 return [abspath, real, virt_vis]

+    def walk(self, rel, rem, uname, dots, scandir, lstat=False):
+        """
+        recursively yields from ./rem;
+        rel is a unix-style user-defined vpath (not vfs-related)
+        """
+
+        fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, lstat)
+        rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
+        rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
+
+        rfiles.sort()
+        rdirs.sort()
+
+        yield rel, fsroot, rfiles, rdirs, vfs_virt
+
+        for rdir, _ in rdirs:
+            if not dots and rdir.startswith("."):
+                continue
+
+            wrel = (rel + "/" + rdir).lstrip("/")
+            wrem = (rem + "/" + rdir).lstrip("/")
+            for x in self.walk(wrel, wrem, uname, scandir, lstat):
+                yield x
+
+        for n, vfs in sorted(vfs_virt.items()):
+            if not dots and n.startswith("."):
+                continue
+
+            wrel = (rel + "/" + n).lstrip("/")
+            for x in vfs.walk(wrel, "", uname, scandir, lstat):
+                yield x
+
+    def zipgen(self, vrem, flt, uname, dots, scandir):
+        if flt:
+            flt = {k: True for k in flt}
+
+        for vpath, apath, files, rd, vd in self.walk("", vrem, uname, dots, scandir):
+            if flt:
+                files = [x for x in files if x[0] in flt]
+
+                rd = [x for x in rd if x[0] in flt]
+                vd = {x: y for x, y in vd.items() if x in flt}
+                flt = None
+
+            # print(repr([vpath, apath, [x[0] for x in files]]))
+            fnames = [n[0] for n in files]
+            vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
+            apaths = [os.path.join(apath, n) for n in fnames]
+            files = list(zip(vpaths, apaths, files))
+
+            if not dots:
+                # dotfile filtering based on vpath (intended visibility)
+                files = [x for x in files if "/." not in "/" + x[0]]
+
+                rm = [x for x in rd if x[0].startswith(".")]
+                for x in rm:
+                    rd.remove(x)
+
+                rm = [k for k in vd.keys() if k.startswith(".")]
+                for x in rm:
+                    del vd[x]
+
+            # up2k filetring based on actual abspath
+            files = [x for x in files if "{0}.hist{0}up2k.".format(os.sep) not in x[1]]
+
+            for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
+                yield f
+
 def user_tree(self, uname, readable=False, writable=False):
 ret = []
 opt1 = readable and (uname in self.uread or "*" in self.uread)

@@ -148,8 +225,8 @@ class AuthSrv(object):
 self.mutex = threading.Lock()
 self.reload()

-def log(self, msg):
-    self.log_func("auth", msg)
+def log(self, msg, c=0):
+    self.log_func("auth", msg, c)

 def invert(self, orig):
 if PY2:

@@ -201,13 +278,39 @@ class AuthSrv(object):
 continue

 lvl, uname = ln.split(" ")
-if lvl in "ra":
-    mread[vol_dst].append(uname)
-if lvl in "wa":
-    mwrite[vol_dst].append(uname)
-if lvl == "c":
-    # config option, currently switches only
-    mflags[vol_dst][uname] = True
+self._read_vol_str(
+    lvl, uname, mread[vol_dst], mwrite[vol_dst], mflags[vol_dst]
+)
+
+    def _read_vol_str(self, lvl, uname, mr, mw, mf):
+        if lvl == "c":
+            cval = True
+            if "=" in uname:
+                uname, cval = uname.split("=", 1)
+
+            self._read_volflag(mf, uname, cval, False)
+            return
+
+        if uname == "":
+            uname = "*"
+
+        if lvl in "ra":
+            mr.append(uname)
+
+        if lvl in "wa":
+            mw.append(uname)
+
+    def _read_volflag(self, flags, name, value, is_list):
+        if name not in ["mtp"]:
+            flags[name] = value
+            return
+
+        if not is_list:
+            value = [value]
+        elif not value:
+            return
+
+        flags[name] = flags.get(name, []) + value
+
 def reload(self):
 """

@@ -230,7 +333,7 @@ class AuthSrv(object):

 if self.args.v:
 # list of src:dst:permset:permset:...
-# permset is [rwa]username
+# permset is [rwa]username or [c]flag
 for v_str in self.args.v:
 m = self.re_vol.match(v_str)
 if not m:

@@ -247,15 +350,7 @@ class AuthSrv(object):

 perms = perms.split(":")
 for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
-    if lvl == "c":
-        # config option, currently switches only
-        mflags[dst][uname] = True
-
-    if uname == "":
-        uname = "*"
-
-    if lvl in "ra":
-        mread[dst].append(uname)
-
-    if lvl in "wa":
-        mwrite[dst].append(uname)
+    self._read_vol_str(lvl, uname, mread[dst], mwrite[dst], mflags[dst])

 if self.args.c:
 for cfg_fn in self.args.c:

@@ -268,6 +363,7 @@ class AuthSrv(object):
 elif "" not in mount:
 # there's volumes but no root; make root inaccessible
 vfs = VFS(os.path.abspath("."), "")
+vfs.flags["d2d"] = True

 maxdepth = 0
 for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):

@@ -294,21 +390,100 @@ class AuthSrv(object):
|
|||||||
|
|
||||||
if missing_users:
|
if missing_users:
|
||||||
self.log(
|
self.log(
|
||||||
"\033[31myou must -a the following users: "
|
"you must -a the following users: "
|
||||||
+ ", ".join(k for k in sorted(missing_users))
|
+ ", ".join(k for k in sorted(missing_users)),
|
||||||
+ "\033[0m"
|
c=1,
|
||||||
)
|
)
|
||||||
raise Exception("invalid config")
|
raise Exception("invalid config")
|
||||||
|
|
||||||
|
all_mte = {}
|
||||||
|
errors = False
|
||||||
|
for vol in vfs.all_vols.values():
|
||||||
|
if (self.args.e2ds and vol.uwrite) or self.args.e2dsa:
|
||||||
|
vol.flags["e2ds"] = True
|
||||||
|
|
||||||
|
if self.args.e2d or "e2ds" in vol.flags:
|
||||||
|
vol.flags["e2d"] = True
|
||||||
|
|
||||||
|
for k in ["e2t", "e2ts", "e2tsr"]:
|
||||||
|
if getattr(self.args, k):
|
||||||
|
vol.flags[k] = True
|
||||||
|
|
||||||
|
for k1, k2 in IMPLICATIONS:
|
||||||
|
if k1 in vol.flags:
|
||||||
|
vol.flags[k2] = True
|
||||||
|
|
||||||
|
# default tag-list if unset
|
||||||
|
if "mte" not in vol.flags:
|
||||||
|
vol.flags["mte"] = self.args.mte
|
||||||
|
|
||||||
|
# append parsers from argv to volume-flags
|
||||||
|
self._read_volflag(vol.flags, "mtp", self.args.mtp, True)
|
||||||
|
|
||||||
|
# d2d drops all database features for a volume
|
||||||
|
for grp, rm in [["d2d", "e2d"], ["d2t", "e2t"]]:
|
||||||
|
if not vol.flags.get(grp, False):
|
||||||
|
continue
|
||||||
|
|
||||||
|
vol.flags["d2t"] = True
|
||||||
|
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
|
||||||
|
|
||||||
|
# mt* needs e2t so drop those too
|
||||||
|
for grp, rm in [["e2t", "mt"]]:
|
||||||
|
if vol.flags.get(grp, False):
|
||||||
|
continue
|
||||||
|
|
||||||
|
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
|
||||||
|
|
||||||
|
# verify tags mentioned by -mt[mp] are used by -mte
|
||||||
|
local_mtp = {}
|
||||||
|
local_only_mtp = {}
|
||||||
|
for a in vol.flags.get("mtp", []) + vol.flags.get("mtm", []):
|
||||||
|
a = a.split("=")[0]
|
||||||
|
local_mtp[a] = True
|
||||||
|
local = True
|
||||||
|
for b in self.args.mtp or []:
|
||||||
|
b = b.split("=")[0]
|
||||||
|
if a == b:
|
||||||
|
local = False
|
||||||
|
|
||||||
|
if local:
|
||||||
|
local_only_mtp[a] = True
|
||||||
|
|
||||||
|
local_mte = {}
|
||||||
|
for a in vol.flags.get("mte", "").split(","):
|
||||||
|
local = True
|
||||||
|
all_mte[a] = True
|
||||||
|
local_mte[a] = True
|
||||||
|
for b in self.args.mte.split(","):
|
||||||
|
if not a or not b:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if a == b:
|
||||||
|
local = False
|
||||||
|
|
||||||
|
for mtp in local_only_mtp.keys():
|
||||||
|
if mtp not in local_mte:
|
||||||
|
m = 'volume "/{}" defines metadata tag "{}", but doesnt use it in "-mte" (or with "cmte" in its volume-flags)'
|
||||||
|
self.log(m.format(vol.vpath, mtp), 1)
|
||||||
|
errors = True
|
||||||
|
|
||||||
|
for mtp in self.args.mtp or []:
|
||||||
|
mtp = mtp.split("=")[0]
|
||||||
|
if mtp not in all_mte:
|
||||||
|
m = 'metadata tag "{}" is defined by "-mtm" or "-mtp", but is not used by "-mte" (or by any "cmte" volume-flag)'
|
||||||
|
self.log(m.format(mtp), 1)
|
||||||
|
errors = True
|
||||||
|
|
||||||
|
if errors:
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
v, _ = vfs.get("/", "*", False, True)
|
v, _ = vfs.get("/", "*", False, True)
|
||||||
if self.warn_anonwrite and os.getcwd() == v.realpath:
|
if self.warn_anonwrite and os.getcwd() == v.realpath:
|
||||||
self.warn_anonwrite = False
|
self.warn_anonwrite = False
|
||||||
self.log(
|
msg = "anyone can read/write the current directory: {}"
|
||||||
"\033[31manyone can read/write the current directory: {}\033[0m".format(
|
self.log(msg.format(v.realpath), c=1)
|
||||||
v.realpath
|
|
||||||
)
|
|
||||||
)
|
|
||||||
except Pebkac:
|
except Pebkac:
|
||||||
self.warn_anonwrite = True
|
self.warn_anonwrite = True
|
||||||
|
|
||||||
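The block above expands per-volume flags: -e2ds/-e2dsa imply database indexing, the IMPLICATIONS table (defined elsewhere in authsrv.py, not shown in this diff) cascades related flags, and d2d/d2t strip the corresponding features again. A minimal standalone sketch of that cascade-then-strip pattern; the IMPLICATIONS contents here are an assumption for illustration, not the actual table:

IMPLICATIONS = [["e2dsa", "e2ds"], ["e2ds", "e2d"], ["e2tsr", "e2ts"], ["e2ts", "e2t"]]

def expand_flags(flags):
    # cascade implied flags (list is ordered so one pass is enough)
    for k1, k2 in IMPLICATIONS:
        if k1 in flags:
            flags[k2] = True

    # the d2d kill-switch removes every e2d* flag no matter what implied it
    if flags.get("d2d"):
        flags = {k: v for k, v in flags.items() if not k.startswith("e2d")}
    return flags

print(expand_flags({"e2dsa": True}))                  # e2dsa implies e2ds and e2d
print(expand_flags({"e2dsa": True, "d2d": True}))     # only {'d2d': True} survives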
@@ -51,7 +51,7 @@ class BrokerMp(object):
             self.procs.append(proc)
             proc.start()

-        if True:
+        if not self.args.q:
             thr = threading.Thread(target=self.debug_load_balancer)
             thr.daemon = True
             thr.start()
@@ -49,11 +49,11 @@ class MpWorker(object):
             # print('k')
             pass

-    def log(self, src, msg):
-        self.q_yield.put([0, "log", [src, msg]])
+    def log(self, src, msg, c=0):
+        self.q_yield.put([0, "log", [src, msg, c]])

-    def logw(self, msg):
-        self.log("mp{}".format(self.n), msg)
+    def logw(self, msg, c=0):
+        self.log("mp{}".format(self.n), msg, c)

    def httpdrop(self, addr):
        self.q_yield.put([0, "httpdrop", [addr]])
@@ -73,7 +73,9 @@ class MpWorker(object):
                if PY2:
                    sck = pickle.loads(sck)  # nosec

-                self.log("%s %s" % addr, "\033[1;30m|%sC-qpop\033[0m" % ("-" * 4,))
+                if self.args.log_conn:
+                    self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
+
                self.httpsrv.accept(sck, addr)

            with self.mutex:
@@ -28,7 +28,9 @@ class BrokerThr(object):
    def put(self, want_retval, dest, *args):
        if dest == "httpconn":
            sck, addr = args
-            self.log("%s %s" % addr, "\033[1;30m|%sC-qpop\033[0m" % ("-" * 4,))
+            if self.args.log_conn:
+                self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
+
            self.httpsrv.accept(sck, addr)

        else:
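In the multiprocessing broker above, a log call inside a worker is not printed locally; it is serialized onto q_yield as [0, "log", [src, msg, c]] and printed by the hub process. A minimal sketch of that forwarding pattern (the queue name and message shape mirror the diff; the rest is illustrative, and the worker would normally run in a child process):

from multiprocessing import Queue

def worker(q_yield, n):
    def log(src, msg, c=0):
        # same wire format as MpWorker.log; leading 0 means "no retval expected"
        q_yield.put([0, "log", [src, msg, c]])

    log("mp{}".format(n), "worker ready", c="1;30")

def hub_drain(q_yield):
    retval, cmd, args = q_yield.get()
    if cmd == "log":
        src, msg, c = args
        print("[{}] {} (color={})".format(src, msg, c))

if __name__ == "__main__":
    q = Queue()
    worker(q, 0)
    hub_drain(q)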
@@ -7,6 +7,7 @@ import gzip
 import time
 import copy
 import json
+import string
 import socket
 import ctypes
 from datetime import datetime
@@ -14,6 +15,8 @@ import calendar

 from .__init__ import E, PY2, WINDOWS
 from .util import *  # noqa # pylint: disable=unused-wildcard-import
+from .szip import StreamZip
+from .star import StreamTar

 if not PY2:
     unicode = str
@@ -41,17 +44,21 @@ class HttpCli(object):
         self.absolute_urls = False
         self.out_headers = {"Access-Control-Allow-Origin": "*"}

-    def log(self, msg):
-        self.log_func(self.log_src, msg)
+    def log(self, msg, c=0):
+        self.log_func(self.log_src, msg, c)

     def _check_nonfatal(self, ex):
-        return ex.code < 400 or ex.code == 404
+        return ex.code < 400 or ex.code in [404, 429]

     def _assert_safe_rem(self, rem):
         # sanity check to prevent any disasters
         if rem.startswith("/") or rem.startswith("../") or "/../" in rem:
             raise Exception("that was close")

+    def j2(self, name, **kwargs):
+        tpl = self.conn.hsrv.j2[name]
+        return tpl.render(**kwargs) if kwargs else tpl
+
     def run(self):
         """returns true if connection can be reused"""
         self.keepalive = False
@@ -63,7 +70,7 @@ class HttpCli(object):

         if not headerlines[0]:
             # seen after login with IE6.0.2900.5512.xpsp.080413-2111 (xp-sp3)
-            self.log("\033[1;31mBUG: trailing newline from previous request\033[0m")
+            self.log("BUG: trailing newline from previous request", c="1;31")
             headerlines.pop(0)

         try:
@@ -74,7 +81,7 @@ class HttpCli(object):
         except Pebkac as ex:
             # self.log("pebkac at httpcli.run #1: " + repr(ex))
             self.keepalive = self._check_nonfatal(ex)
-            self.loud_reply(str(ex), status=ex.code)
+            self.loud_reply(unicode(ex), status=ex.code)
             return self.keepalive

         # time.sleep(0.4)
@@ -154,7 +161,9 @@ class HttpCli(object):
             try:
                 # self.log("pebkac at httpcli.run #2: " + repr(ex))
                 self.keepalive = self._check_nonfatal(ex)
-                self.loud_reply("{}: {}".format(str(ex), self.vpath), status=ex.code)
+                self.log("{}\033[0m, {}".format(str(ex), self.vpath), 3)
+                msg = "<pre>{}\r\nURL: {}\r\n".format(str(ex), self.vpath)
+                self.reply(msg.encode("utf-8", "replace"), status=ex.code)
                 return self.keepalive
             except Pebkac:
                 return False
@@ -163,7 +172,7 @@ class HttpCli(object):
         response = ["HTTP/1.1 {} {}".format(status, HTTPCODE[status])]

         if length is not None:
-            response.append("Content-Length: " + str(length))
+            response.append("Content-Length: " + unicode(length))

         # close if unknown length, otherwise take client's preference
         response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close"))
@@ -222,6 +231,9 @@ class HttpCli(object):
             static_path = os.path.join(E.mod, "web/", self.vpath[5:])
             return self.tx_file(static_path)

+        if "tree" in self.uparam:
+            return self.tx_tree()
+
         # conditional redirect to single volumes
         if self.vpath == "" and not self.uparam:
             nread = len(self.rvol)
@@ -246,9 +258,6 @@ class HttpCli(object):
             self.vpath = None
             return self.tx_mounts()

-        if "tree" in self.uparam:
-            return self.tx_tree()
-
         return self.tx_browser()

     def handle_options(self):
@@ -323,8 +332,11 @@ class HttpCli(object):
         raise Pebkac(405, "don't know how to handle POST({})".format(ctype))

     def get_body_reader(self):
-        remains = int(self.headers.get("content-length", None))
-        if remains is None:
+        chunked = "chunked" in self.headers.get("transfer-encoding", "").lower()
+        remains = int(self.headers.get("content-length", -1))
+        if chunked:
+            return read_socket_chunked(self.sr), remains
+        elif remains == -1:
             self.keepalive = False
             return read_socket_unbounded(self.sr), remains
         else:
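get_body_reader() above now hands the body off to read_socket_chunked(self.sr) when the client sends Transfer-Encoding: chunked; that helper lives in util.py and is not part of this diff. A minimal sketch of the same idea over a file-like object, ignoring trailer headers:

import io

def read_chunked(f):
    # generator: yields the decoded body of a "Transfer-Encoding: chunked" stream
    while True:
        size = int(f.readline().split(b";")[0].strip(), 16)  # "SIZE[;ext]\r\n"
        if size == 0:
            f.readline()  # CRLF after the terminating 0-size chunk
            return

        yield f.read(size)
        f.readline()  # CRLF terminating this chunk

body = b"5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n"
print(b"".join(read_chunked(io.BytesIO(body))))  # b'hello world'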
@@ -342,6 +354,10 @@ class HttpCli(object):
         with open(path, "wb", 512 * 1024) as f:
             post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)

+        self.conn.hsrv.broker.put(
+            False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fn
+        )
+
         return post_sz, sha_b64, remains, path

     def handle_stash(self):
@@ -381,8 +397,30 @@ class HttpCli(object):
         if act == "tput":
             return self.handle_text_upload()

+        if act == "zip":
+            return self.handle_zip_post()
+
         raise Pebkac(422, 'invalid action "{}"'.format(act))

+    def handle_zip_post(self):
+        for k in ["zip", "tar"]:
+            v = self.uparam.get(k)
+            if v is not None:
+                break
+
+        if v is None:
+            raise Pebkac(422, "need zip or tar keyword")
+
+        vn, rem = self.auth.vfs.get(self.vpath, self.uname, True, False)
+        items = self.parser.require("files", 1024 * 1024)
+        if not items:
+            raise Pebkac(422, "need files list")
+
+        items = items.replace("\r", "").split("\n")
+        items = [unquotep(x) for x in items if items]
+
+        return self.tx_zip(k, v, vn, rem, items, self.args.ed)
+
     def handle_post_json(self):
         try:
             remains = int(self.headers["content-length"])
@@ -410,29 +448,42 @@ class HttpCli(object):
         if "srch" in self.uparam or "srch" in body:
             return self.handle_search(body)

-        # prefer this over undot; no reason to allow traversion
-        if "/" in body["name"]:
-            raise Pebkac(400, "folders verboten")
-
         # up2k-php compat
         for k in "chunkpit.php", "handshake.php":
             if self.vpath.endswith(k):
                 self.vpath = self.vpath[: -len(k)]

+        sub = None
+        name = undot(body["name"])
+        if "/" in name:
+            sub, name = name.rsplit("/", 1)
+            self.vpath = "/".join([self.vpath, sub]).strip("/")
+            body["name"] = name
+
         vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)

         body["vtop"] = vfs.vpath
         body["ptop"] = vfs.realpath
         body["prel"] = rem
         body["addr"] = self.ip
-        body["flag"] = vfs.flags
+        body["vcfg"] = vfs.flags
+
+        if sub:
+            try:
+                dst = os.path.join(vfs.realpath, rem)
+                os.makedirs(dst)
+            except:
+                if not os.path.isdir(dst):
+                    raise Pebkac(400, "some file got your folder name")

         x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
-        response = x.get()
-        response = json.dumps(response)
+        ret = x.get()
+        if sub:
+            ret["name"] = "/".join([sub, ret["name"]])

-        self.log(response)
-        self.reply(response.encode("utf-8"), mime="application/json")
+        ret = json.dumps(ret)
+        self.log(ret)
+        self.reply(ret.encode("utf-8"), mime="application/json")
         return True

     def handle_search(self, body):
@@ -442,20 +493,42 @@ class HttpCli(object):
             vols.append([vfs.vpath, vfs.realpath, vfs.flags])

         idx = self.conn.get_u2idx()
+        t0 = time.time()
+        if idx.p_end:
+            penalty = 0.7
+            t_idle = t0 - idx.p_end
+            if idx.p_dur > 0.7 and t_idle < penalty:
+                m = "rate-limit ({:.1f} sec), cost {:.2f}, idle {:.2f}"
+                raise Pebkac(429, m.format(penalty, idx.p_dur, t_idle))
+
         if "srch" in body:
             # search by up2k hashlist
             vbody = copy.deepcopy(body)
             vbody["hash"] = len(vbody["hash"])
             self.log("qj: " + repr(vbody))
             hits = idx.fsearch(vols, body)
-            self.log("q#: " + repr(hits))
+            msg = repr(hits)
+            taglist = []
         else:
             # search by query params
             self.log("qj: " + repr(body))
-            hits = idx.search(vols, body)
-            self.log("q#: " + str(len(hits)))
+            hits, taglist = idx.search(vols, body)
+            msg = len(hits)

-        r = json.dumps(hits).encode("utf-8")
+        idx.p_end = time.time()
+        idx.p_dur = idx.p_end - t0
+        self.log("q#: {} ({:.2f}s)".format(msg, idx.p_dur))
+
+        order = []
+        cfg = self.args.mte.split(",")
+        for t in cfg:
+            if t in taglist:
+                order.append(t)
+        for t in taglist:
+            if t not in order:
+                order.append(t)
+
+        r = json.dumps({"hits": hits, "tag_order": order}).encode("utf-8")
         self.reply(r, mime="application/json")
         return True

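The rate-limit added to handle_search() only throttles when the previous query was expensive: with penalty = 0.7, a new query is rejected with HTTP 429 if the last one took more than 0.7 s and finished less than 0.7 s ago; cheap queries are never throttled. A standalone sketch of that check (U2IdxStub stands in for the real u2idx object, everything else mirrors the diff):

import time

class U2IdxStub(object):
    p_end = 0.0  # when the previous query finished
    p_dur = 0.0  # how long it took

def allow_query(idx, now=None):
    now = now or time.time()
    if idx.p_end:
        penalty = 0.7
        t_idle = now - idx.p_end
        if idx.p_dur > 0.7 and t_idle < penalty:
            m = "rate-limit ({:.1f} sec), cost {:.2f}, idle {:.2f}"
            return False, m.format(penalty, idx.p_dur, t_idle)
    return True, ""

idx = U2IdxStub()
idx.p_end, idx.p_dur = time.time(), 1.3  # last query was slow and just finished
print(allow_query(idx))                  # (False, 'rate-limit ...')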
@@ -503,7 +576,7 @@ class HttpCli(object):
|
|||||||
if len(cstart) > 1 and path != os.devnull:
|
if len(cstart) > 1 and path != os.devnull:
|
||||||
self.log(
|
self.log(
|
||||||
"clone {} to {}".format(
|
"clone {} to {}".format(
|
||||||
cstart[0], " & ".join(str(x) for x in cstart[1:])
|
cstart[0], " & ".join(unicode(x) for x in cstart[1:])
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
ofs = 0
|
ofs = 0
|
||||||
@@ -551,7 +624,7 @@ class HttpCli(object):
|
|||||||
pwd = "x" # nosec
|
pwd = "x" # nosec
|
||||||
|
|
||||||
h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)}
|
h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)}
|
||||||
html = self.conn.tpl_msg.render(h1=msg, h2='<a href="/">ack</a>', redir="/")
|
html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
|
||||||
self.reply(html.encode("utf-8"), headers=h)
|
self.reply(html.encode("utf-8"), headers=h)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -582,7 +655,8 @@ class HttpCli(object):
|
|||||||
|
|
||||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||||
esc_paths = [quotep(vpath), html_escape(vpath)]
|
esc_paths = [quotep(vpath), html_escape(vpath)]
|
||||||
html = self.conn.tpl_msg.render(
|
html = self.j2(
|
||||||
|
"msg",
|
||||||
h2='<a href="/{}">go to /{}</a>'.format(*esc_paths),
|
h2='<a href="/{}">go to /{}</a>'.format(*esc_paths),
|
||||||
pre="aight",
|
pre="aight",
|
||||||
click=True,
|
click=True,
|
||||||
@@ -614,7 +688,8 @@ class HttpCli(object):
|
|||||||
f.write(b"`GRUNNUR`\n")
|
f.write(b"`GRUNNUR`\n")
|
||||||
|
|
||||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||||
html = self.conn.tpl_msg.render(
|
html = self.j2(
|
||||||
|
"msg",
|
||||||
h2='<a href="/{}?edit">go to /{}?edit</a>'.format(
|
h2='<a href="/{}?edit">go to /{}?edit</a>'.format(
|
||||||
quotep(vpath), html_escape(vpath)
|
quotep(vpath), html_escape(vpath)
|
||||||
),
|
),
|
||||||
@@ -661,6 +736,9 @@ class HttpCli(object):
|
|||||||
raise Pebkac(400, "empty files in post")
|
raise Pebkac(400, "empty files in post")
|
||||||
|
|
||||||
files.append([sz, sha512_hex])
|
files.append([sz, sha512_hex])
|
||||||
|
self.conn.hsrv.broker.put(
|
||||||
|
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fname
|
||||||
|
)
|
||||||
self.conn.nbyte += sz
|
self.conn.nbyte += sz
|
||||||
|
|
||||||
except Pebkac:
|
except Pebkac:
|
||||||
@@ -676,7 +754,7 @@ class HttpCli(object):
|
|||||||
raise
|
raise
|
||||||
|
|
||||||
except Pebkac as ex:
|
except Pebkac as ex:
|
||||||
errmsg = str(ex)
|
errmsg = unicode(ex)
|
||||||
|
|
||||||
td = max(0.1, time.time() - t0)
|
td = max(0.1, time.time() - t0)
|
||||||
sz_total = sum(x[0] for x in files)
|
sz_total = sum(x[0] for x in files)
|
||||||
@@ -717,7 +795,8 @@ class HttpCli(object):
|
|||||||
).encode("utf-8")
|
).encode("utf-8")
|
||||||
)
|
)
|
||||||
|
|
||||||
html = self.conn.tpl_msg.render(
|
html = self.j2(
|
||||||
|
"msg",
|
||||||
h2='<a href="/{}">return to /{}</a>'.format(
|
h2='<a href="/{}">return to /{}</a>'.format(
|
||||||
quotep(self.vpath), html_escape(self.vpath)
|
quotep(self.vpath), html_escape(self.vpath)
|
||||||
),
|
),
|
||||||
@@ -985,7 +1064,7 @@ class HttpCli(object):
|
|||||||
mime=guess_mime(req_path)[0] or "application/octet-stream",
|
mime=guess_mime(req_path)[0] or "application/octet-stream",
|
||||||
)
|
)
|
||||||
|
|
||||||
logmsg += str(status) + logtail
|
logmsg += unicode(status) + logtail
|
||||||
|
|
||||||
if self.mode == "HEAD" or not do_send:
|
if self.mode == "HEAD" or not do_send:
|
||||||
self.log(logmsg)
|
self.log(logmsg)
|
||||||
@@ -999,22 +1078,81 @@ class HttpCli(object):
|
|||||||
remains = sendfile_py(lower, upper, f, self.s)
|
remains = sendfile_py(lower, upper, f, self.s)
|
||||||
|
|
||||||
if remains > 0:
|
if remains > 0:
|
||||||
logmsg += " \033[31m" + str(upper - remains) + "\033[0m"
|
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
|
||||||
|
|
||||||
spd = self._spd((upper - lower) - remains)
|
spd = self._spd((upper - lower) - remains)
|
||||||
self.log("{}, {}".format(logmsg, spd))
|
self.log("{}, {}".format(logmsg, spd))
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
def tx_zip(self, fmt, uarg, vn, rem, items, dots):
|
||||||
|
if self.args.no_zip:
|
||||||
|
raise Pebkac(400, "not enabled")
|
||||||
|
|
||||||
|
logmsg = "{:4} {} ".format("", self.req)
|
||||||
|
self.keepalive = False
|
||||||
|
|
||||||
|
if not uarg:
|
||||||
|
uarg = ""
|
||||||
|
|
||||||
|
if fmt == "tar":
|
||||||
|
mime = "application/x-tar"
|
||||||
|
packer = StreamTar
|
||||||
|
else:
|
||||||
|
mime = "application/zip"
|
||||||
|
packer = StreamZip
|
||||||
|
|
||||||
|
fn = items[0] if items and items[0] else self.vpath
|
||||||
|
if fn:
|
||||||
|
fn = fn.rstrip("/").split("/")[-1]
|
||||||
|
else:
|
||||||
|
fn = self.headers.get("host", "hey")
|
||||||
|
|
||||||
|
afn = "".join(
|
||||||
|
[x if x in (string.ascii_letters + string.digits) else "_" for x in fn]
|
||||||
|
)
|
||||||
|
|
||||||
|
bascii = unicode(string.ascii_letters + string.digits).encode("utf-8")
|
||||||
|
ufn = fn.encode("utf-8", "xmlcharrefreplace")
|
||||||
|
if PY2:
|
||||||
|
ufn = [unicode(x) if x in bascii else "%{:02x}".format(ord(x)) for x in ufn]
|
||||||
|
else:
|
||||||
|
ufn = [
|
||||||
|
chr(x).encode("utf-8")
|
||||||
|
if x in bascii
|
||||||
|
else "%{:02x}".format(x).encode("ascii")
|
||||||
|
for x in ufn
|
||||||
|
]
|
||||||
|
ufn = b"".join(ufn).decode("ascii")
|
||||||
|
|
||||||
|
cdis = "attachment; filename=\"{}.{}\"; filename*=UTF-8''{}.{}"
|
||||||
|
cdis = cdis.format(afn, fmt, ufn, fmt)
|
||||||
|
self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis})
|
||||||
|
|
||||||
|
fgen = vn.zipgen(rem, items, self.uname, dots, not self.args.no_scandir)
|
||||||
|
# for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]}))
|
||||||
|
bgen = packer(fgen, utf8="utf" in uarg, pre_crc="crc" in uarg)
|
||||||
|
bsent = 0
|
||||||
|
for buf in bgen.gen():
|
||||||
|
if not buf:
|
||||||
|
break
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.s.sendall(buf)
|
||||||
|
bsent += len(buf)
|
||||||
|
except:
|
||||||
|
logmsg += " \033[31m" + unicode(bsent) + "\033[0m"
|
||||||
|
break
|
||||||
|
|
||||||
|
spd = self._spd(bsent)
|
||||||
|
self.log("{}, {}".format(logmsg, spd))
|
||||||
|
return True
|
||||||
|
|
||||||
def tx_md(self, fs_path):
|
def tx_md(self, fs_path):
|
||||||
logmsg = "{:4} {} ".format("", self.req)
|
logmsg = "{:4} {} ".format("", self.req)
|
||||||
if "edit2" in self.uparam:
|
|
||||||
html_path = "web/mde.html"
|
|
||||||
template = self.conn.tpl_mde
|
|
||||||
else:
|
|
||||||
html_path = "web/md.html"
|
|
||||||
template = self.conn.tpl_md
|
|
||||||
|
|
||||||
html_path = os.path.join(E.mod, html_path)
|
tpl = "mde" if "edit2" in self.uparam else "md"
|
||||||
|
html_path = os.path.join(E.mod, "web", "{}.html".format(tpl))
|
||||||
|
template = self.j2(tpl)
|
||||||
|
|
||||||
st = os.stat(fsenc(fs_path))
|
st = os.stat(fsenc(fs_path))
|
||||||
# sz_md = st.st_size
|
# sz_md = st.st_size
|
||||||
@@ -1046,7 +1184,7 @@ class HttpCli(object):
|
|||||||
sz_html = len(template.render(**targs).encode("utf-8"))
|
sz_html = len(template.render(**targs).encode("utf-8"))
|
||||||
self.send_headers(sz_html + sz_md, status)
|
self.send_headers(sz_html + sz_md, status)
|
||||||
|
|
||||||
logmsg += str(status)
|
logmsg += unicode(status)
|
||||||
if self.mode == "HEAD" or not do_send:
|
if self.mode == "HEAD" or not do_send:
|
||||||
self.log(logmsg)
|
self.log(logmsg)
|
||||||
return True
|
return True
|
||||||
@@ -1060,13 +1198,13 @@ class HttpCli(object):
|
|||||||
self.log(logmsg + " \033[31md/c\033[0m")
|
self.log(logmsg + " \033[31md/c\033[0m")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
self.log(logmsg + " " + str(len(html)))
|
self.log(logmsg + " " + unicode(len(html)))
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def tx_mounts(self):
|
def tx_mounts(self):
|
||||||
rvol = [x + "/" if x else x for x in self.rvol]
|
rvol = [x + "/" if x else x for x in self.rvol]
|
||||||
wvol = [x + "/" if x else x for x in self.wvol]
|
wvol = [x + "/" if x else x for x in self.wvol]
|
||||||
html = self.conn.tpl_mounts.render(this=self, rvol=rvol, wvol=wvol)
|
html = self.j2("splash", this=self, rvol=rvol, wvol=wvol)
|
||||||
self.reply(html.encode("utf-8"))
|
self.reply(html.encode("utf-8"))
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -1094,11 +1232,12 @@ class HttpCli(object):
|
|||||||
excl = None
|
excl = None
|
||||||
if target:
|
if target:
|
||||||
excl, target = (target.split("/", 1) + [""])[:2]
|
excl, target = (target.split("/", 1) + [""])[:2]
|
||||||
ret["k" + excl] = self.gen_tree("/".join([top, excl]).strip("/"), target)
|
sub = self.gen_tree("/".join([top, excl]).strip("/"), target)
|
||||||
|
ret["k" + quotep(excl)] = sub
|
||||||
|
|
||||||
try:
|
try:
|
||||||
vn, rem = self.auth.vfs.get(top, self.uname, True, False)
|
vn, rem = self.auth.vfs.get(top, self.uname, True, False)
|
||||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname)
|
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
|
||||||
except:
|
except:
|
||||||
vfs_ls = []
|
vfs_ls = []
|
||||||
vfs_virt = {}
|
vfs_virt = {}
|
||||||
@@ -1109,13 +1248,13 @@ class HttpCli(object):
|
|||||||
|
|
||||||
dirs = []
|
dirs = []
|
||||||
|
|
||||||
|
vfs_ls = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
||||||
|
|
||||||
if not self.args.ed or "dots" not in self.uparam:
|
if not self.args.ed or "dots" not in self.uparam:
|
||||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||||
|
|
||||||
for fn in [x for x in vfs_ls if x != excl]:
|
for fn in [x for x in vfs_ls if x != excl]:
|
||||||
abspath = os.path.join(fsroot, fn)
|
dirs.append(quotep(fn))
|
||||||
if os.path.isdir(abspath):
|
|
||||||
dirs.append(fn)
|
|
||||||
|
|
||||||
for x in vfs_virt.keys():
|
for x in vfs_virt.keys():
|
||||||
if x != excl:
|
if x != excl:
|
||||||
@@ -1154,7 +1293,14 @@ class HttpCli(object):
|
|||||||
|
|
||||||
return self.tx_file(abspath)
|
return self.tx_file(abspath)
|
||||||
|
|
||||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname)
|
for k in ["zip", "tar"]:
|
||||||
|
v = self.uparam.get(k)
|
||||||
|
if v is not None:
|
||||||
|
return self.tx_zip(k, v, vn, rem, [], self.args.ed)
|
||||||
|
|
||||||
|
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
|
||||||
|
stats = {k: v for k, v in vfs_ls}
|
||||||
|
vfs_ls = [x[0] for x in vfs_ls]
|
||||||
vfs_ls.extend(vfs_virt.keys())
|
vfs_ls.extend(vfs_virt.keys())
|
||||||
|
|
||||||
# check for old versions of files,
|
# check for old versions of files,
|
||||||
@@ -1183,6 +1329,11 @@ class HttpCli(object):
|
|||||||
|
|
||||||
is_ls = "ls" in self.uparam
|
is_ls = "ls" in self.uparam
|
||||||
|
|
||||||
|
icur = None
|
||||||
|
if "e2t" in vn.flags:
|
||||||
|
idx = self.conn.get_u2idx()
|
||||||
|
icur = idx.get_cur(vn.realpath)
|
||||||
|
|
||||||
dirs = []
|
dirs = []
|
||||||
files = []
|
files = []
|
||||||
for fn in vfs_ls:
|
for fn in vfs_ls:
|
||||||
@@ -1200,15 +1351,18 @@ class HttpCli(object):
|
|||||||
fspath = fsroot + "/" + fn
|
fspath = fsroot + "/" + fn
|
||||||
|
|
||||||
try:
|
try:
|
||||||
inf = os.stat(fsenc(fspath))
|
inf = stats.get(fn) or os.stat(fsenc(fspath))
|
||||||
except:
|
except:
|
||||||
self.log("broken symlink: {}".format(repr(fspath)))
|
self.log("broken symlink: {}".format(repr(fspath)))
|
||||||
continue
|
continue
|
||||||
|
|
||||||
is_dir = stat.S_ISDIR(inf.st_mode)
|
is_dir = stat.S_ISDIR(inf.st_mode)
|
||||||
if is_dir:
|
if is_dir:
|
||||||
margin = "DIR"
|
|
||||||
href += "/"
|
href += "/"
|
||||||
|
if self.args.no_zip:
|
||||||
|
margin = "DIR"
|
||||||
|
else:
|
||||||
|
margin = '<a href="{}?zip">zip</a>'.format(quotep(href))
|
||||||
elif fn in hist:
|
elif fn in hist:
|
||||||
margin = '<a href="{}.hist/{}">#{}</a>'.format(
|
margin = '<a href="{}.hist/{}">#{}</a>'.format(
|
||||||
base, html_escape(hist[fn][2], quote=True), hist[fn][0]
|
base, html_escape(hist[fn][2], quote=True), hist[fn][0]
|
||||||
@@ -1232,18 +1386,49 @@ class HttpCli(object):
|
|||||||
"sz": sz,
|
"sz": sz,
|
||||||
"ext": ext,
|
"ext": ext,
|
||||||
"dt": dt,
|
"dt": dt,
|
||||||
"ts": inf.st_mtime,
|
"ts": int(inf.st_mtime),
|
||||||
}
|
}
|
||||||
if is_dir:
|
if is_dir:
|
||||||
dirs.append(item)
|
dirs.append(item)
|
||||||
else:
|
else:
|
||||||
files.append(item)
|
files.append(item)
|
||||||
|
item["rd"] = rem
|
||||||
|
|
||||||
|
taglist = {}
|
||||||
|
for f in files:
|
||||||
|
fn = f["name"]
|
||||||
|
rd = f["rd"]
|
||||||
|
del f["rd"]
|
||||||
|
if icur:
|
||||||
|
q = "select w from up where rd = ? and fn = ?"
|
||||||
|
try:
|
||||||
|
r = icur.execute(q, (rd, fn)).fetchone()
|
||||||
|
except:
|
||||||
|
args = s3enc(idx.mem_cur, rd, fn)
|
||||||
|
r = icur.execute(q, args).fetchone()
|
||||||
|
|
||||||
|
tags = {}
|
||||||
|
f["tags"] = tags
|
||||||
|
|
||||||
|
if not r:
|
||||||
|
continue
|
||||||
|
|
||||||
|
w = r[0][:16]
|
||||||
|
q = "select k, v from mt where w = ? and k != 'x'"
|
||||||
|
for k, v in icur.execute(q, (w,)):
|
||||||
|
taglist[k] = True
|
||||||
|
tags[k] = v
|
||||||
|
|
||||||
|
if icur:
|
||||||
|
taglist = [k for k in vn.flags.get("mte", "").split(",") if k in taglist]
|
||||||
|
for f in dirs:
|
||||||
|
f["tags"] = {}
|
||||||
|
|
||||||
srv_info = []
|
srv_info = []
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if not self.args.nih:
|
if not self.args.nih:
|
||||||
srv_info.append(str(socket.gethostname()).split(".")[0])
|
srv_info.append(unicode(socket.gethostname()).split(".")[0])
|
||||||
except:
|
except:
|
||||||
self.log("#wow #whoa")
|
self.log("#wow #whoa")
|
||||||
pass
|
pass
|
||||||
@@ -1275,34 +1460,47 @@ class HttpCli(object):
|
|||||||
if self.writable:
|
if self.writable:
|
||||||
perms.append("write")
|
perms.append("write")
|
||||||
|
|
||||||
if is_ls:
|
logues = ["", ""]
|
||||||
[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
|
|
||||||
ret = {"dirs": dirs, "files": files, "srvinf": srv_info, "perms": perms}
|
|
||||||
ret = json.dumps(ret)
|
|
||||||
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
|
|
||||||
return True
|
|
||||||
|
|
||||||
logues = [None, None]
|
|
||||||
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
|
for n, fn in enumerate([".prologue.html", ".epilogue.html"]):
|
||||||
fn = os.path.join(abspath, fn)
|
fn = os.path.join(abspath, fn)
|
||||||
if os.path.exists(fsenc(fn)):
|
if os.path.exists(fsenc(fn)):
|
||||||
with open(fsenc(fn), "rb") as f:
|
with open(fsenc(fn), "rb") as f:
|
||||||
logues[n] = f.read().decode("utf-8")
|
logues[n] = f.read().decode("utf-8")
|
||||||
|
|
||||||
|
if is_ls:
|
||||||
|
[x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y]
|
||||||
|
ret = {
|
||||||
|
"dirs": dirs,
|
||||||
|
"files": files,
|
||||||
|
"srvinf": srv_info,
|
||||||
|
"perms": perms,
|
||||||
|
"logues": logues,
|
||||||
|
"taglist": taglist,
|
||||||
|
}
|
||||||
|
ret = json.dumps(ret)
|
||||||
|
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
|
||||||
|
return True
|
||||||
|
|
||||||
ts = ""
|
ts = ""
|
||||||
# ts = "?{}".format(time.time())
|
# ts = "?{}".format(time.time())
|
||||||
|
|
||||||
dirs.extend(files)
|
dirs.extend(files)
|
||||||
|
|
||||||
html = self.conn.tpl_browser.render(
|
html = self.j2(
|
||||||
|
"browser",
|
||||||
vdir=quotep(self.vpath),
|
vdir=quotep(self.vpath),
|
||||||
vpnodes=vpnodes,
|
vpnodes=vpnodes,
|
||||||
files=dirs,
|
files=dirs,
|
||||||
ts=ts,
|
ts=ts,
|
||||||
perms=json.dumps(perms),
|
perms=json.dumps(perms),
|
||||||
have_up2k_idx=self.args.e2d,
|
taglist=taglist,
|
||||||
prologue=logues[0],
|
tag_order=json.dumps(
|
||||||
epilogue=logues[1],
|
vn.flags["mte"].split(",") if "mte" in vn.flags else []
|
||||||
|
),
|
||||||
|
have_up2k_idx=("e2d" in vn.flags),
|
||||||
|
have_tags_idx=("e2t" in vn.flags),
|
||||||
|
have_zip=(not self.args.no_zip),
|
||||||
|
logues=logues,
|
||||||
title=html_escape(self.vpath),
|
title=html_escape(self.vpath),
|
||||||
srv_info=srv_info,
|
srv_info=srv_info,
|
||||||
)
|
)
|
||||||
@@ -12,21 +12,6 @@ try:
 except:
     HAVE_SSL = False

-try:
-    import jinja2
-except ImportError:
-    print(
-        """\033[1;31m
-you do not have jinja2 installed,\033[33m
-choose one of these:\033[0m
-  * apt install python-jinja2
-  * python3 -m pip install --user jinja2
-  * (try another python version, if you have one)
-  * (try copyparty.sfx instead)
-"""
-    )
-    sys.exit(1)
-
 from .__init__ import E
 from .util import Unrecv
 from .httpcli import HttpCli
@@ -55,14 +40,6 @@ class HttpConn(object):
         self.log_func = hsrv.log
         self.set_rproxy()

-        env = jinja2.Environment()
-        env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
-        self.tpl_mounts = env.get_template("splash.html")
-        self.tpl_browser = env.get_template("browser.html")
-        self.tpl_msg = env.get_template("msg.html")
-        self.tpl_md = env.get_template("md.html")
-        self.tpl_mde = env.get_template("mde.html")
-
     def set_rproxy(self, ip=None):
         if ip is None:
             color = 36
@@ -79,8 +56,8 @@ class HttpConn(object):
     def respath(self, res_name):
         return os.path.join(E.mod, "web", res_name)

-    def log(self, msg):
-        self.log_func(self.log_src, msg)
+    def log(self, msg, c=0):
+        self.log_func(self.log_src, msg, c)

     def get_u2idx(self):
         if not self.u2idx:
@@ -110,7 +87,9 @@ class HttpConn(object):
             err = "need at least 4 bytes in the first packet; got {}".format(
                 len(method)
             )
-            self.log(err)
+            if method:
+                self.log(err)
+
             self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
             return

@@ -127,7 +106,7 @@ class HttpConn(object):

         if is_https:
             if self.sr:
-                self.log("\033[1;31mTODO: cannot do https in jython\033[0m")
+                self.log("TODO: cannot do https in jython", c="1;31")
                 return

             self.log_src = self.log_src.replace("[36m", "[35m")
@@ -178,7 +157,7 @@ class HttpConn(object):
                 pass

             else:
-                self.log("\033[35mhandshake\033[0m " + em)
+                self.log("handshake\033[0m " + em, c=5)

                 return

@@ -2,10 +2,28 @@
 from __future__ import print_function, unicode_literals

 import os
+import sys
 import time
 import socket
 import threading

+try:
+    import jinja2
+except ImportError:
+    print(
+        """\033[1;31m
+you do not have jinja2 installed,\033[33m
+choose one of these:\033[0m
+  * apt install python-jinja2
+  * {} -m pip install --user jinja2
+  * (try another python version, if you have one)
+  * (try copyparty.sfx instead)
+""".format(
+            os.path.basename(sys.executable)
+        )
+    )
+    sys.exit(1)
+
 from .__init__ import E, MACOS
 from .httpconn import HttpConn
 from .authsrv import AuthSrv
@@ -30,6 +48,13 @@ class HttpSrv(object):
         self.workload_thr_alive = False
         self.auth = AuthSrv(self.args, self.log)

+        env = jinja2.Environment()
+        env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
+        self.j2 = {
+            x: env.get_template(x + ".html")
+            for x in ["splash", "browser", "msg", "md", "mde"]
+        }
+
         cert_path = os.path.join(E.cfg, "cert.pem")
         if os.path.exists(cert_path):
             self.cert_path = cert_path
@@ -38,7 +63,9 @@ class HttpSrv(object):

     def accept(self, sck, addr):
         """takes an incoming tcp connection and creates a thread to handle it"""
-        self.log("%s %s" % addr, "\033[1;30m|%sC-cthr\033[0m" % ("-" * 5,))
+        if self.args.log_conn:
+            self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30")
+
         thr = threading.Thread(target=self.thr_client, args=(sck, addr))
         thr.daemon = True
         thr.start()
@@ -66,11 +93,15 @@ class HttpSrv(object):
             thr.start()

         try:
-            self.log("%s %s" % addr, "\033[1;30m|%sC-crun\033[0m" % ("-" * 6,))
+            if self.args.log_conn:
+                self.log("%s %s" % addr, "|%sC-crun" % ("-" * 6,), c="1;30")
+
             cli.run()

         finally:
-            self.log("%s %s" % addr, "\033[1;30m|%sC-cdone\033[0m" % ("-" * 7,))
+            if self.args.log_conn:
+                self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30")
+
             try:
                 sck.shutdown(socket.SHUT_RDWR)
                 sck.close()
@@ -78,7 +109,8 @@ class HttpSrv(object):
                 if not MACOS:
                     self.log(
                         "%s %s" % addr,
-                        "\033[1;30mshut({}): {}\033[0m".format(sck.fileno(), ex),
+                        "shut({}): {}".format(sck.fileno(), ex),
+                        c="1;30",
                     )
                 if ex.errno not in [10038, 10054, 107, 57, 9]:
                     # 10038 No longer considered a socket
copyparty/mtag.py (new file, 347 lines):
|
# coding: utf-8
|
||||||
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
|
import re
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import shutil
|
||||||
|
import subprocess as sp
|
||||||
|
|
||||||
|
from .__init__ import PY2, WINDOWS
|
||||||
|
from .util import fsenc, fsdec, REKOBO_LKEY
|
||||||
|
|
||||||
|
if not PY2:
|
||||||
|
unicode = str
|
||||||
|
|
||||||
|
|
||||||
|
class MTag(object):
|
||||||
|
def __init__(self, log_func, args):
|
||||||
|
self.log_func = log_func
|
||||||
|
self.usable = True
|
||||||
|
self.prefer_mt = False
|
||||||
|
mappings = args.mtm
|
||||||
|
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
|
||||||
|
or_ffprobe = " or ffprobe"
|
||||||
|
|
||||||
|
if self.backend == "mutagen":
|
||||||
|
self.get = self.get_mutagen
|
||||||
|
try:
|
||||||
|
import mutagen
|
||||||
|
except:
|
||||||
|
self.log("could not load mutagen, trying ffprobe instead", c=3)
|
||||||
|
self.backend = "ffprobe"
|
||||||
|
|
||||||
|
if self.backend == "ffprobe":
|
||||||
|
self.get = self.get_ffprobe
|
||||||
|
self.prefer_mt = True
|
||||||
|
# about 20x slower
|
||||||
|
if PY2:
|
||||||
|
cmd = [b"ffprobe", b"-version"]
|
||||||
|
try:
|
||||||
|
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||||
|
except:
|
||||||
|
self.usable = False
|
||||||
|
else:
|
||||||
|
if not shutil.which("ffprobe"):
|
||||||
|
self.usable = False
|
||||||
|
|
||||||
|
if self.usable and WINDOWS and sys.version_info < (3, 8):
|
||||||
|
self.usable = False
|
||||||
|
or_ffprobe = " or python >= 3.8"
|
||||||
|
msg = "found ffprobe but your python is too old; need 3.8 or newer"
|
||||||
|
self.log(msg, c=1)
|
||||||
|
|
||||||
|
if not self.usable:
|
||||||
|
msg = "need mutagen{} to read media tags so please run this:\n {} -m pip install --user mutagen"
|
||||||
|
self.log(msg.format(or_ffprobe, os.path.basename(sys.executable)), c=1)
|
||||||
|
return
|
||||||
|
|
||||||
|
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||||
|
tagmap = {
|
||||||
|
"album": ["album", "talb", "\u00a9alb", "original-album", "toal"],
|
||||||
|
"artist": [
|
||||||
|
"artist",
|
||||||
|
"tpe1",
|
||||||
|
"\u00a9art",
|
||||||
|
"composer",
|
||||||
|
"performer",
|
||||||
|
"arranger",
|
||||||
|
"\u00a9wrt",
|
||||||
|
"tcom",
|
||||||
|
"tpe3",
|
||||||
|
"original-artist",
|
||||||
|
"tope",
|
||||||
|
],
|
||||||
|
"title": ["title", "tit2", "\u00a9nam"],
|
||||||
|
"circle": [
|
||||||
|
"album-artist",
|
||||||
|
"tpe2",
|
||||||
|
"aart",
|
||||||
|
"conductor",
|
||||||
|
"organization",
|
||||||
|
"band",
|
||||||
|
],
|
||||||
|
".tn": ["tracknumber", "trck", "trkn", "track"],
|
||||||
|
"genre": ["genre", "tcon", "\u00a9gen"],
|
||||||
|
"date": [
|
||||||
|
"original-release-date",
|
||||||
|
"release-date",
|
||||||
|
"date",
|
||||||
|
"tdrc",
|
||||||
|
"\u00a9day",
|
||||||
|
"original-date",
|
||||||
|
"original-year",
|
||||||
|
"tyer",
|
||||||
|
"tdor",
|
||||||
|
"tory",
|
||||||
|
"year",
|
||||||
|
"creation-time",
|
||||||
|
],
|
||||||
|
".bpm": ["bpm", "tbpm", "tmpo", "tbp"],
|
||||||
|
"key": ["initial-key", "tkey", "key"],
|
||||||
|
"comment": ["comment", "comm", "\u00a9cmt", "comments", "description"],
|
||||||
|
}
|
||||||
|
|
||||||
|
if mappings:
|
||||||
|
for k, v in [x.split("=") for x in mappings]:
|
||||||
|
tagmap[k] = v.split(",")
|
||||||
|
|
||||||
|
self.tagmap = {}
|
||||||
|
for k, vs in tagmap.items():
|
||||||
|
vs2 = []
|
||||||
|
for v in vs:
|
||||||
|
if "-" not in v:
|
||||||
|
vs2.append(v)
|
||||||
|
continue
|
||||||
|
|
||||||
|
vs2.append(v.replace("-", " "))
|
||||||
|
vs2.append(v.replace("-", "_"))
|
||||||
|
vs2.append(v.replace("-", ""))
|
||||||
|
|
||||||
|
self.tagmap[k] = vs2
|
||||||
|
|
||||||
|
self.rmap = {
|
||||||
|
v: [n, k] for k, vs in self.tagmap.items() for n, v in enumerate(vs)
|
||||||
|
}
|
||||||
|
# self.get = self.compare
|
||||||
|
|
||||||
|
def log(self, msg, c=0):
|
||||||
|
self.log_func("mtag", msg, c)
|
||||||
|
|
||||||
|
def normalize_tags(self, ret, md):
|
||||||
|
for k, v in dict(md).items():
|
||||||
|
if not v:
|
||||||
|
continue
|
||||||
|
|
||||||
|
k = k.lower().split("::")[0].strip()
|
||||||
|
mk = self.rmap.get(k)
|
||||||
|
if not mk:
|
||||||
|
continue
|
||||||
|
|
||||||
|
pref, mk = mk
|
||||||
|
if mk not in ret or ret[mk][0] > pref:
|
||||||
|
ret[mk] = [pref, v[0]]
|
||||||
|
|
||||||
|
# take first value
|
||||||
|
ret = {k: unicode(v[1]).strip() for k, v in ret.items()}
|
||||||
|
|
||||||
|
# track 3/7 => track 3
|
||||||
|
for k, v in ret.items():
|
||||||
|
if k[0] == ".":
|
||||||
|
v = v.split("/")[0].strip().lstrip("0")
|
||||||
|
ret[k] = v or 0
|
||||||
|
|
||||||
|
# normalize key notation to rkeobo
|
||||||
|
okey = ret.get("key")
|
||||||
|
if okey:
|
||||||
|
key = okey.replace(" ", "").replace("maj", "").replace("min", "m")
|
||||||
|
ret["key"] = REKOBO_LKEY.get(key.lower(), okey)
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def compare(self, abspath):
|
||||||
|
if abspath.endswith(".au"):
|
||||||
|
return {}
|
||||||
|
|
||||||
|
print("\n" + abspath)
|
||||||
|
r1 = self.get_mutagen(abspath)
|
||||||
|
r2 = self.get_ffprobe(abspath)
|
||||||
|
|
||||||
|
keys = {}
|
||||||
|
for d in [r1, r2]:
|
||||||
|
for k in d.keys():
|
||||||
|
keys[k] = True
|
||||||
|
|
||||||
|
diffs = []
|
||||||
|
l1 = []
|
||||||
|
l2 = []
|
||||||
|
for k in sorted(keys.keys()):
|
||||||
|
if k in [".q", ".dur"]:
|
||||||
|
continue # lenient
|
||||||
|
|
||||||
|
v1 = r1.get(k)
|
||||||
|
v2 = r2.get(k)
|
||||||
|
if v1 == v2:
|
||||||
|
print(" ", k, v1)
|
||||||
|
elif v1 != "0000": # ffprobe date=0
|
||||||
|
diffs.append(k)
|
||||||
|
print(" 1", k, v1)
|
||||||
|
print(" 2", k, v2)
|
||||||
|
if v1:
|
||||||
|
l1.append(k)
|
||||||
|
if v2:
|
||||||
|
l2.append(k)
|
||||||
|
|
||||||
|
if diffs:
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
|
return r1
|
||||||
|
|
||||||
|
def get_mutagen(self, abspath):
|
||||||
|
import mutagen
|
||||||
|
|
||||||
|
try:
|
||||||
|
md = mutagen.File(abspath, easy=True)
|
||||||
|
x = md.info.length
|
||||||
|
except Exception as ex:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
ret = {}
|
||||||
|
try:
|
||||||
|
dur = int(md.info.length)
|
||||||
|
try:
|
||||||
|
q = int(md.info.bitrate / 1024)
|
||||||
|
except:
|
||||||
|
q = int((os.path.getsize(abspath) / dur) / 128)
|
||||||
|
|
||||||
|
ret[".dur"] = [0, dur]
|
||||||
|
ret[".q"] = [0, q]
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return self.normalize_tags(ret, md)
|
||||||
|
|
||||||
|
def get_ffprobe(self, abspath):
|
||||||
|
cmd = [b"ffprobe", b"-hide_banner", b"--", fsenc(abspath)]
|
||||||
|
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||||
|
r = p.communicate()
|
||||||
|
txt = r[1].decode("utf-8", "replace")
|
||||||
|
txt = [x.rstrip("\r") for x in txt.split("\n")]
|
||||||
|
|
||||||
|
"""
|
||||||
|
note:
|
||||||
|
tags which contain newline will be truncated on first \n,
|
||||||
|
ffprobe emits \n and spacepads the : to align visually
|
||||||
|
note:
|
||||||
|
the Stream ln always mentions Audio: if audio
|
||||||
|
the Stream ln usually has kb/s, is more accurate
|
||||||
|
the Duration ln always has kb/s
|
||||||
|
the Metadata: after Chapter may contain BPM info,
|
||||||
|
title : Tempo: 126.0
|
||||||
|
|
||||||
|
Input #0, wav,
|
||||||
|
Metadata:
|
||||||
|
date : <OK>
|
||||||
|
Duration:
|
||||||
|
Chapter #
|
||||||
|
Metadata:
|
||||||
|
title : <NG>
|
||||||
|
|
||||||
|
Input #0, mp3,
|
||||||
|
Metadata:
|
||||||
|
album : <OK>
|
||||||
|
Duration:
|
||||||
|
Stream #0:0: Audio:
|
||||||
|
Stream #0:1: Video:
|
||||||
|
Metadata:
|
||||||
|
comment : <NG>
|
||||||
|
"""
|
||||||
|
|
||||||
|
ptn_md_beg = re.compile("^( +)Metadata:$")
|
||||||
|
ptn_md_kv = re.compile("^( +)([^:]+) *: (.*)")
|
||||||
|
ptn_dur = re.compile("^ *Duration: ([^ ]+)(, |$)")
|
||||||
|
ptn_br1 = re.compile("^ *Duration: .*, bitrate: ([0-9]+) kb/s(, |$)")
|
||||||
|
ptn_br2 = re.compile("^ *Stream.*: Audio:.* ([0-9]+) kb/s(, |$)")
|
||||||
|
ptn_audio = re.compile("^ *Stream .*: Audio: ")
|
||||||
|
ptn_au_parent = re.compile("^ *(Input #|Stream .*: Audio: )")
|
||||||
|
|
||||||
|
ret = {}
|
||||||
|
md = {}
|
||||||
|
in_md = False
|
||||||
|
is_audio = False
|
||||||
|
au_parent = False
|
||||||
|
for ln in txt:
|
||||||
|
m = ptn_md_kv.match(ln)
|
||||||
|
if m and in_md and len(m.group(1)) == in_md:
|
||||||
|
_, k, v = [x.strip() for x in m.groups()]
|
||||||
|
if k != "" and v != "":
|
||||||
|
md[k] = [v]
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
in_md = False
|
||||||
|
|
||||||
|
m = ptn_md_beg.match(ln)
|
||||||
|
if m and au_parent:
|
||||||
|
in_md = len(m.group(1)) + 2
|
||||||
|
continue
|
||||||
|
|
||||||
|
au_parent = bool(ptn_au_parent.search(ln))
|
||||||
|
|
||||||
|
if ptn_audio.search(ln):
|
||||||
|
is_audio = True
|
||||||
|
|
||||||
|
m = ptn_dur.search(ln)
|
||||||
|
if m:
|
||||||
|
sec = 0
|
||||||
|
tstr = m.group(1)
|
||||||
|
if tstr.lower() != "n/a":
|
||||||
|
try:
|
||||||
|
tf = tstr.split(",")[0].split(".")[0].split(":")
|
||||||
|
for f in tf:
|
||||||
|
sec *= 60
|
||||||
|
sec += int(f)
|
||||||
|
except:
|
||||||
|
self.log("invalid timestr from ffprobe: [{}]".format(tstr), c=3)
|
||||||
|
|
||||||
|
ret[".dur"] = sec
|
||||||
|
m = ptn_br1.search(ln)
|
||||||
|
if m:
|
||||||
|
ret[".q"] = m.group(1)
|
||||||
|
|
||||||
|
m = ptn_br2.search(ln)
|
||||||
|
if m:
|
||||||
|
ret[".q"] = m.group(1)
|
||||||
|
|
||||||
|
if not is_audio:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
ret = {k: [0, v] for k, v in ret.items()}
|
||||||
|
|
||||||
|
return self.normalize_tags(ret, md)
|
||||||
|
|
||||||
|
def get_bin(self, parsers, abspath):
|
||||||
|
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||||
|
pypath = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||||
|
pypath = str(os.pathsep.join(pypath))
|
||||||
|
env = os.environ.copy()
|
||||||
|
env["PYTHONPATH"] = pypath
|
||||||
|
|
||||||
|
ret = {}
|
||||||
|
for tagname, (binpath, timeout) in parsers.items():
|
||||||
|
try:
|
||||||
|
cmd = [sys.executable, binpath, abspath]
|
||||||
|
args = {"env": env, "timeout": timeout}
|
||||||
|
|
||||||
|
if WINDOWS:
|
||||||
|
args["creationflags"] = 0x4000
|
||||||
|
else:
|
||||||
|
cmd = ["nice"] + cmd
|
||||||
|
|
||||||
|
cmd = [fsenc(x) for x in cmd]
|
||||||
|
v = sp.check_output(cmd, **args).strip()
|
||||||
|
if v:
|
||||||
|
ret[tagname] = v.decode("utf-8")
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return ret
|
||||||
copyparty/star.py (new file, 95 lines):

import os
import tarfile
import threading

from .sutil import errdesc
from .util import Queue, fsenc


class QFile(object):
    """file-like object which buffers writes into a queue"""

    def __init__(self):
        self.q = Queue(64)
        self.bq = []
        self.nq = 0

    def write(self, buf):
        if buf is None or self.nq >= 240 * 1024:
            self.q.put(b"".join(self.bq))
            self.bq = []
            self.nq = 0

        if buf is None:
            self.q.put(None)
        else:
            self.bq.append(buf)
            self.nq += len(buf)


class StreamTar(object):
    """construct in-memory tar file from the given path"""

    def __init__(self, fgen, **kwargs):
        self.ci = 0
        self.co = 0
        self.qfile = QFile()
        self.fgen = fgen
        self.errf = None

        # python 3.8 changed to PAX_FORMAT as default,
        # waste of space and don't care about the new features
        fmt = tarfile.GNU_FORMAT
        self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt)

        w = threading.Thread(target=self._gen)
        w.daemon = True
        w.start()

    def gen(self):
        while True:
            buf = self.qfile.q.get()
            if not buf:
                break

            self.co += len(buf)
            yield buf

        yield None
        if self.errf:
            os.unlink(self.errf["ap"])

    def ser(self, f):
        name = f["vp"]
        src = f["ap"]
        fsi = f["st"]

        inf = tarfile.TarInfo(name=name)
        inf.mode = fsi.st_mode
        inf.size = fsi.st_size
        inf.mtime = fsi.st_mtime
        inf.uid = 0
        inf.gid = 0

        self.ci += inf.size
        with open(fsenc(src), "rb", 512 * 1024) as f:
            self.tar.addfile(inf, f)

    def _gen(self):
        errors = []
        for f in self.fgen:
            if "err" in f:
                errors.append([f["vp"], f["err"]])
                continue

            try:
                self.ser(f)
            except Exception as ex:
                errors.append([f["vp"], repr(ex)])

        if errors:
            self.errf = errdesc(errors)
            self.ser(self.errf)

        self.tar.close()
        self.qfile.write(None)
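A usage sketch for StreamTar, assuming copyparty is importable: fgen yields dicts with "vp" (path inside the archive), "ap" (absolute path on disk) and "st" (an os.stat result), the same shape vn.zipgen() produces for tx_zip() in this changeset; the temp file and output path below are illustrative.

import os
import tempfile

from copyparty.star import StreamTar

# create a throwaway file to archive
with tempfile.NamedTemporaryFile(delete=False) as tf:
    tf.write(b"hello")

fgen = [{"vp": "greeting.txt", "ap": tf.name, "st": os.stat(tf.name)}]
tar = StreamTar(iter(fgen))

# consume the generator; a falsy buffer marks end of stream
with open("out.tar", "wb") as f:
    for buf in tar.gen():
        if not buf:
            break
        f.write(buf)

os.unlink(tf.name)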
copyparty/sutil.py (new file, 25 lines):

import os
import time
import tempfile
from datetime import datetime


def errdesc(errors):
    report = ["copyparty failed to add the following files to the archive:", ""]

    for fn, err in errors:
        report.extend([" file: {}".format(fn), "error: {}".format(err), ""])

    with tempfile.NamedTemporaryFile(prefix="copyparty-", delete=False) as tf:
        tf_path = tf.name
        tf.write("\r\n".join(report).encode("utf-8", "replace"))

    dt = datetime.utcfromtimestamp(time.time())
    dt = dt.strftime("%Y-%m%d-%H%M%S")

    os.chmod(tf_path, 0o444)
    return {
        "vp": "archive-errors-{}.txt".format(dt),
        "ap": tf_path,
        "st": os.stat(tf_path),
    }
@@ -9,7 +9,6 @@ from datetime import datetime, timedelta
 import calendar

 from .__init__ import PY2, WINDOWS, MACOS, VT100
-from .authsrv import AuthSrv
 from .tcpsrv import TcpSrv
 from .up2k import Up2k
 from .util import mp
@@ -39,14 +38,6 @@ class SvcHub(object):
         self.tcpsrv = TcpSrv(self)
         self.up2k = Up2k(self)

-        if self.args.e2ds:
-            auth = AuthSrv(self.args, self.log, False)
-            vols = auth.vfs.all_vols.values()
-            if not self.args.e2dsa:
-                vols = [x for x in vols if x.uwrite]
-
-            self.up2k.build_indexes(vols)
-
         # decide which worker impl to use
         if self.check_mp_enable():
             from .broker_mp import BrokerMp as Broker
@@ -74,10 +65,10 @@ class SvcHub(object):
         self.broker.shutdown()
         print("nailed it")

-    def _log_disabled(self, src, msg):
+    def _log_disabled(self, src, msg, c=0):
         pass

-    def _log_enabled(self, src, msg):
+    def _log_enabled(self, src, msg, c=0):
         """handles logging from all components"""
         with self.log_mutex:
             now = time.time()
@@ -95,11 +86,18 @@ class SvcHub(object):

             fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n"
             if not VT100:
-                fmt = "{} {:21} {}"
+                fmt = "{} {:21} {}\n"
                 if "\033" in msg:
                     msg = self.ansi_re.sub("", msg)
                 if "\033" in src:
                     src = self.ansi_re.sub("", src)
+            elif c:
+                if isinstance(c, int):
+                    msg = "\033[3{}m{}".format(c, msg)
+                elif "\033" not in c:
+                    msg = "\033[{}m{}\033[0m".format(c, msg)
+                else:
+                    msg = "{}{}\033[0m".format(c, msg)

             ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3]
             msg = fmt.format(ts, src, msg)
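The elif c: branch above is what turns the new c= log argument into ANSI colors: an int picks one of the standard foreground colors (30-37), a plain string such as "1;30" becomes a full SGR sequence with a reset appended, and a string that already contains an escape is used as a raw prefix. A standalone sketch of that mapping, mirroring the diff logic:

def colorize(msg, c=0):
    if not c:
        return msg
    if isinstance(c, int):
        return "\033[3{}m{}".format(c, msg)        # e.g. c=1 -> red
    if "\033" not in c:
        return "\033[{}m{}\033[0m".format(c, msg)  # e.g. c="1;30" -> bold black
    return "{}{}\033[0m".format(c, msg)            # caller supplied the escape

print(repr(colorize("warning", 3)))
print(repr(colorize("handshake failed", "1;31")))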
copyparty/szip.py (new file, 271 lines):
|
import os
|
||||||
|
import time
|
||||||
|
import zlib
|
||||||
|
import struct
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from .sutil import errdesc
|
||||||
|
from .util import yieldfile, sanitize_fn
|
||||||
|
|
||||||
|
|
||||||
|
def dostime2unix(buf):
|
||||||
|
t, d = struct.unpack("<HH", buf)
|
||||||
|
|
||||||
|
ts = (t & 0x1F) * 2
|
||||||
|
tm = (t >> 5) & 0x3F
|
||||||
|
th = t >> 11
|
||||||
|
|
||||||
|
dd = d & 0x1F
|
||||||
|
dm = (d >> 5) & 0xF
|
||||||
|
dy = (d >> 9) + 1980
|
||||||
|
|
||||||
|
tt = (dy, dm, dd, th, tm, ts)
|
||||||
|
tf = "{:04d}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}"
|
||||||
|
iso = tf.format(*tt)
|
||||||
|
|
||||||
|
dt = datetime.strptime(iso, "%Y-%m-%d %H:%M:%S")
|
||||||
|
return int(dt.timestamp())
|
||||||
|
|
||||||
|
|
||||||
|
def unixtime2dos(ts):
|
||||||
|
tt = time.gmtime(ts)
|
||||||
|
dy, dm, dd, th, tm, ts = list(tt)[:6]
|
||||||
|
|
||||||
|
bd = ((dy - 1980) << 9) + (dm << 5) + dd
|
||||||
|
bt = (th << 11) + (tm << 5) + ts // 2
|
||||||
|
return struct.pack("<HH", bt, bd)
|
||||||
|
|
||||||
|
|
||||||
|
def gen_fdesc(sz, crc32, z64):
|
||||||
|
ret = b"\x50\x4b\x07\x08"
|
||||||
|
fmt = "<LQQ" if z64 else "<LLL"
|
||||||
|
ret += struct.pack(fmt, crc32, sz, sz)
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
|
||||||
|
"""
|
||||||
|
does regular file headers
|
||||||
|
and the central directory meme if h_pos is set
|
||||||
|
(h_pos = absolute position of the regular header)
|
||||||
|
"""
|
||||||
|
|
||||||
|
# appnote 4.5 / zip 3.0 (2008) / unzip 6.0 (2009) says to add z64
|
||||||
|
# extinfo for values which exceed H, but that becomes an off-by-one
|
||||||
|
# (can't tell if it was clamped or exactly maxval), make it obvious
|
||||||
|
z64 = sz >= 0xFFFFFFFF
|
||||||
|
z64v = [sz, sz] if z64 else []
|
||||||
|
if h_pos and h_pos >= 0xFFFFFFFF:
|
||||||
|
# central, also consider ptr to original header
|
||||||
|
z64v.append(h_pos)
|
||||||
|
|
||||||
|
# confusingly this doesn't bump if h_pos
|
||||||
|
req_ver = b"\x2d\x00" if z64 else b"\x0a\x00"
|
||||||
|
|
||||||
|
if crc32:
|
||||||
|
crc32 = struct.pack("<L", crc32)
|
||||||
|
else:
|
||||||
|
crc32 = b"\x00" * 4
|
||||||
|
|
||||||
|
if h_pos is None:
|
||||||
|
# 4b magic, 2b min-ver
|
||||||
|
ret = b"\x50\x4b\x03\x04" + req_ver
|
||||||
|
else:
|
||||||
|
# 4b magic, 2b spec-ver, 2b min-ver
|
||||||
|
ret = b"\x50\x4b\x01\x02\x1e\x03" + req_ver
|
||||||
|
|
||||||
|
ret += b"\x00" if pre_crc else b"\x08" # streaming
|
||||||
|
ret += b"\x08" if utf8 else b"\x00" # appnote 6.3.2 (2007)
|
||||||
|
|
||||||
|
# 2b compression, 4b time, 4b crc
|
||||||
|
ret += b"\x00\x00" + unixtime2dos(lastmod) + crc32
|
||||||
|
|
||||||
|
# spec says to put zeros when !crc if bit3 (streaming)
|
||||||
|
# however infozip does actual sz and it even works on winxp
|
||||||
|
# (same reasning for z64 extradata later)
|
||||||
|
vsz = 0xFFFFFFFF if z64 else sz
|
||||||
|
ret += struct.pack("<LL", vsz, vsz)
|
||||||
|
|
||||||
|
# windows support (the "?" replace below too)
|
||||||
|
fn = sanitize_fn(fn, "/")
|
||||||
|
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
|
||||||
|
|
||||||
|
z64_len = len(z64v) * 8 + 4 if z64v else 0
|
||||||
|
ret += struct.pack("<HH", len(bfn), z64_len)
|
||||||
|
|
||||||
|
if h_pos is not None:
|
||||||
|
# 2b comment, 2b diskno
|
||||||
|
ret += b"\x00" * 4
|
||||||
|
|
||||||
|
# 2b internal.attr, 4b external.attr
|
||||||
|
# infozip-macos: 0100 0000 a481 file:644
|
||||||
|
# infozip-macos: 0100 0100 0080 file:000
|
||||||
|
ret += b"\x01\x00\x00\x00\xa4\x81"
|
||||||
|
|
||||||
|
# 4b local-header-ofs
|
||||||
|
ret += struct.pack("<L", min(h_pos, 0xFFFFFFFF))
|
||||||
|
|
||||||
|
ret += bfn
|
||||||
|
|
||||||
|
if z64v:
|
||||||
|
ret += struct.pack("<HH" + "Q" * len(z64v), 1, len(z64v) * 8, *z64v)
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
def gen_ecdr(items, cdir_pos, cdir_end):
|
||||||
|
"""
|
||||||
|
summary of all file headers,
|
||||||
|
usually the zipfile footer unless something clamps
|
||||||
|
"""
|
||||||
|
|
||||||
|
ret = b"\x50\x4b\x05\x06"
|
||||||
|
|
||||||
|
# 2b ndisk, 2b disk0
|
||||||
|
ret += b"\x00" * 4
|
||||||
|
|
||||||
|
cdir_sz = cdir_end - cdir_pos
|
||||||
|
|
||||||
|
nitems = min(0xFFFF, len(items))
|
||||||
|
csz = min(0xFFFFFFFF, cdir_sz)
|
||||||
|
cpos = min(0xFFFFFFFF, cdir_pos)
|
||||||
|
|
||||||
|
need_64 = nitems == 0xFFFF or 0xFFFFFFFF in [csz, cpos]
|
||||||
|
|
||||||
|
# 2b tnfiles, 2b dnfiles, 4b dir sz, 4b dir pos
|
||||||
|
ret += struct.pack("<HHLL", nitems, nitems, csz, cpos)
|
||||||
|
|
||||||
|
# 2b comment length
|
||||||
|
ret += b"\x00\x00"
|
||||||
|
|
||||||
|
return [ret, need_64]
|
||||||
|
|
||||||
|
|
||||||
|
def gen_ecdr64(items, cdir_pos, cdir_end):
|
||||||
|
"""
|
||||||
|
z64 end of central directory
|
||||||
|
added when numfiles or a headerptr clamps
|
||||||
|
"""
|
||||||
|
|
||||||
|
ret = b"\x50\x4b\x06\x06"
|
||||||
|
|
||||||
|
# 8b own length from hereon
|
||||||
|
ret += b"\x2c" + b"\x00" * 7
|
||||||
|
|
||||||
|
# 2b spec-ver, 2b min-ver
|
||||||
|
ret += b"\x1e\x03\x2d\x00"
|
||||||
|
|
||||||
|
# 4b ndisk, 4b disk0
|
||||||
|
ret += b"\x00" * 8
|
||||||
|
|
||||||
|
# 8b tnfiles, 8b dnfiles, 8b dir sz, 8b dir pos
|
||||||
|
cdir_sz = cdir_end - cdir_pos
|
||||||
|
ret += struct.pack("<QQQQ", len(items), len(items), cdir_sz, cdir_pos)
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
def gen_ecdr64_loc(ecdr64_pos):
|
||||||
|
"""
|
||||||
|
z64 end of central directory locator
|
||||||
|
points to ecdr64
|
||||||
|
why
|
||||||
|
"""
|
||||||
|
|
||||||
|
ret = b"\x50\x4b\x06\x07"
|
||||||
|
|
||||||
|
# 4b cdisk, 8b start of ecdr64, 4b ndisks
|
||||||
|
ret += struct.pack("<LQL", 0, ecdr64_pos, 1)
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
class StreamZip(object):
|
||||||
|
def __init__(self, fgen, utf8=False, pre_crc=False):
|
||||||
|
self.fgen = fgen
|
||||||
|
self.utf8 = utf8
|
||||||
|
self.pre_crc = pre_crc
|
||||||
|
|
||||||
|
self.pos = 0
|
||||||
|
self.items = []
|
||||||
|
|
||||||
|
def _ct(self, buf):
|
||||||
|
self.pos += len(buf)
|
||||||
|
return buf
|
||||||
|
|
||||||
|
def ser(self, f):
|
||||||
|
name = f["vp"]
|
||||||
|
src = f["ap"]
|
||||||
|
st = f["st"]
|
||||||
|
|
||||||
|
sz = st.st_size
|
||||||
|
ts = st.st_mtime + 1
|
||||||
|
|
||||||
|
crc = None
|
||||||
|
if self.pre_crc:
|
||||||
|
crc = 0
|
||||||
|
for buf in yieldfile(src):
|
||||||
|
crc = zlib.crc32(buf, crc)
|
||||||
|
|
||||||
|
crc &= 0xFFFFFFFF
|
||||||
|
|
||||||
|
h_pos = self.pos
|
||||||
|
buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc)
|
||||||
|
yield self._ct(buf)
|
||||||
|
|
||||||
|
crc = crc or 0
|
||||||
|
for buf in yieldfile(src):
|
||||||
|
if not self.pre_crc:
|
||||||
|
crc = zlib.crc32(buf, crc)
|
||||||
|
|
||||||
|
yield self._ct(buf)
|
||||||
|
|
||||||
|
crc &= 0xFFFFFFFF
|
||||||
|
|
||||||
|
self.items.append([name, sz, ts, crc, h_pos])
|
||||||
|
|
||||||
|
z64 = sz >= 4 * 1024 * 1024 * 1024
|
||||||
|
|
||||||
|
if z64 or not self.pre_crc:
|
||||||
|
buf = gen_fdesc(sz, crc, z64)
|
||||||
|
yield self._ct(buf)
|
||||||
|
|
||||||
|
def gen(self):
|
||||||
|
errors = []
|
||||||
|
for f in self.fgen:
|
||||||
|
if "err" in f:
|
||||||
|
errors.append([f["vp"], f["err"]])
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
for x in self.ser(f):
|
||||||
|
yield x
|
||||||
|
except Exception as ex:
|
||||||
|
errors.append([f["vp"], repr(ex)])
|
||||||
|
|
||||||
|
if errors:
|
||||||
|
errf = errdesc(errors)
|
||||||
|
print(repr(errf))
|
||||||
|
for x in self.ser(errf):
|
||||||
|
yield x
|
||||||
|
|
||||||
|
cdir_pos = self.pos
|
||||||
|
for name, sz, ts, crc, h_pos in self.items:
|
||||||
|
buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
|
||||||
|
yield self._ct(buf)
|
||||||
|
cdir_end = self.pos
|
||||||
|
|
||||||
|
_, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||||
|
if need_64:
|
||||||
|
ecdir64_pos = self.pos
|
||||||
|
buf = gen_ecdr64(self.items, cdir_pos, cdir_end)
|
||||||
|
yield self._ct(buf)
|
||||||
|
|
||||||
|
buf = gen_ecdr64_loc(ecdir64_pos)
|
||||||
|
yield self._ct(buf)
|
||||||
|
|
||||||
|
ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||||
|
yield self._ct(ecdr)
|
||||||
|
|
||||||
|
if errors:
|
||||||
|
os.unlink(errf["ap"])
|
||||||
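The module above streams the archive in one pass: ser() writes a local file header, the file body, and (unless pre_crc already computed the checksum) a trailing data descriptor, while gen() closes with the central directory plus the zip64 records once sizes, offsets, or the entry count hit the 32-bit / 16-bit limits. DOS timestamps have 2-second resolution, so a unixtime2dos/dostime2unix round trip can differ from the original by a second. A rough usage sketch (the vp/ap/st keys follow what ser() reads; the file names here are made up):

import os
from copyparty.szip import StreamZip

def fgen(paths):
    # one dict per file: virtual path, absolute path, stat result
    for ap in paths:
        yield {"vp": os.path.basename(ap), "ap": ap, "st": os.stat(ap)}

with open("bundle.zip", "wb") as zf:
    for buf in StreamZip(fgen(["a.txt", "b.txt"]), utf8=True).gen():
        zf.write(buf)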
@@ -68,22 +68,29 @@ class TcpSrv(object):
             self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port))
 
         while True:
-            self.log("tcpsrv", "\033[1;30m|%sC-ncli\033[0m" % ("-" * 1,))
+            if self.args.log_conn:
+                self.log("tcpsrv", "|%sC-ncli" % ("-" * 1,), c="1;30")
+
             if self.num_clients.v >= self.args.nc:
                 time.sleep(0.1)
                 continue
 
-            self.log("tcpsrv", "\033[1;30m|%sC-acc1\033[0m" % ("-" * 2,))
+            if self.args.log_conn:
+                self.log("tcpsrv", "|%sC-acc1" % ("-" * 2,), c="1;30")
+
             ready, _, _ = select.select(self.srv, [], [])
             for srv in ready:
                 sck, addr = srv.accept()
                 sip, sport = srv.getsockname()
-                self.log(
-                    "%s %s" % addr,
-                    "\033[1;30m|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
-                        "-" * 3, sip, sport % 8, sport
-                    ),
-                )
+                if self.args.log_conn:
+                    self.log(
+                        "%s %s" % addr,
+                        "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
+                            "-" * 3, sip, sport % 8, sport
+                        ),
+                        c="1;30",
+                    )
+
                 self.num_clients.add()
                 self.hub.broker.put(False, "httpconn", sck, addr)
 
@@ -1,10 +1,13 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals
 
+import re
 import os
+import time
+import threading
 from datetime import datetime
 
-from .util import u8safe
+from .util import u8safe, s3dec, html_escape, Pebkac
 from .up2k import up2k_wark_from_hashlist
 
 
@@ -19,15 +22,21 @@ class U2idx(object):
     def __init__(self, args, log_func):
         self.args = args
         self.log_func = log_func
+        self.timeout = args.srch_time
 
         if not HAVE_SQLITE3:
            self.log("could not load sqlite3; searchign wqill be disabled")
            return
 
-        self.dbs = {}
+        self.cur = {}
+        self.mem_cur = sqlite3.connect(":memory:")
+        self.mem_cur.execute(r"create table a (b text)")
 
-    def log(self, msg):
-        self.log_func("u2idx", msg)
+        self.p_end = None
+        self.p_dur = 0
+
+    def log(self, msg, c=0):
+        self.log_func("u2idx", msg, c)
 
     def fsearch(self, vols, body):
         """search by up2k hashlist"""
@@ -37,7 +46,26 @@ class U2idx(object):
         fsize = body["size"]
         fhash = body["hash"]
         wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
-        return self.run_query(vols, "select * from up where w = ?", [wark])
+
+        uq = "substr(w,1,16) = ? and w = ?"
+        uv = [wark[:16], wark]
+
+        try:
+            return self.run_query(vols, uq, uv, {})[0]
+        except Exception as ex:
+            raise Pebkac(500, repr(ex))
+
+    def get_cur(self, ptop):
+        cur = self.cur.get(ptop)
+        if cur:
+            return cur
+
+        cur = _open(ptop)
+        if not cur:
+            return None
+
+        self.cur[ptop] = cur
+        return cur
 
     def search(self, vols, body):
         """search by query params"""
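fsearch now filters on substr(w,1,16) = ? before the full-wark comparison, presumably so sqlite can satisfy the first term from an index on the 16-character prefix while the second term rules out collisions. A toy illustration of that query shape (table and index layout are assumed here, not taken from the diff):

import sqlite3

db = sqlite3.connect(":memory:")
db.execute("create table up (w text, mt int, sz int, rd text, fn text)")
db.execute("create index up_w on up (substr(w,1,16))")  # assumed index shape
db.execute("insert into up values ('deadbeefdeadbeefcafe', 0, 3, '', 'x.bin')")

wark = "deadbeefdeadbeefcafe"
q = "select * from up where substr(w,1,16) = ? and w = ?"
print(db.execute(q, (wark[:16], wark)).fetchall())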
@@ -45,59 +73,137 @@ class U2idx(object):
             return []
 
         qobj = {}
-        _conv_sz(qobj, body, "sz_min", "sz >= ?")
-        _conv_sz(qobj, body, "sz_max", "sz <= ?")
-        _conv_dt(qobj, body, "dt_min", "mt >= ?")
-        _conv_dt(qobj, body, "dt_max", "mt <= ?")
-        for seg, dk in [["path", "rd"], ["name", "fn"]]:
+        _conv_sz(qobj, body, "sz_min", "up.sz >= ?")
+        _conv_sz(qobj, body, "sz_max", "up.sz <= ?")
+        _conv_dt(qobj, body, "dt_min", "up.mt >= ?")
+        _conv_dt(qobj, body, "dt_max", "up.mt <= ?")
+        for seg, dk in [["path", "up.rd"], ["name", "up.fn"]]:
             if seg in body:
                 _conv_txt(qobj, body, seg, dk)
 
-        qstr = "select * from up"
-        qv = []
-        if qobj:
-            qk = []
-            for k, v in sorted(qobj.items()):
-                qk.append(k.split("\n")[0])
-                qv.append(v)
-
-            qstr = " and ".join(qk)
-            qstr = "select * from up where " + qstr
-
-        return self.run_query(vols, qstr, qv)
-
-    def run_query(self, vols, qstr, qv):
-        qv = tuple(qv)
-        self.log("qs: {} {}".format(qstr, repr(qv)))
+        uq, uv = _sqlize(qobj)
+
+        qobj = {}
+        if "tags" in body:
+            _conv_txt(qobj, body, "tags", "mt.v")
+
+        if "adv" in body:
+            _conv_adv(qobj, body, "adv")
+
+        try:
+            return self.run_query(vols, uq, uv, qobj)
+        except Exception as ex:
+            raise Pebkac(500, repr(ex))
+
+    def run_query(self, vols, uq, uv, targs):
+        self.log("qs: {} {} , {}".format(uq, repr(uv), repr(targs)))
+
+        done_flag = []
+        self.active_id = "{:.6f}_{}".format(
+            time.time(), threading.current_thread().ident
+        )
+        thr = threading.Thread(
+            target=self.terminator,
+            args=(
+                self.active_id,
+                done_flag,
+            ),
+        )
+        thr.daemon = True
+        thr.start()
+
+        if not targs:
+            if not uq:
+                q = "select * from up"
+                v = ()
+            else:
+                q = "select * from up where " + uq
+                v = tuple(uv)
+        else:
+            q = "select up.* from up"
+            keycmp = "substr(up.w,1,16)"
+            where = []
+            v = []
+            ctr = 0
+            for tq, tv in sorted(targs.items()):
+                ctr += 1
+                tq = tq.split("\n")[0]
+                keycmp2 = "mt{}.w".format(ctr)
+                q += " inner join mt mt{} on {} = {}".format(ctr, keycmp, keycmp2)
+                keycmp = keycmp2
+                where.append(tq.replace("mt.", keycmp[:-1]))
+                v.append(tv)
+
+            if uq:
+                where.append(uq)
+                v.extend(uv)
+
+            q += " where " + (" and ".join(where))
+
+        # self.log("q2: {} {}".format(q, repr(v)))
 
         ret = []
-        lim = 100
+        lim = 1000
+        taglist = {}
         for (vtop, ptop, flags) in vols:
-            db = self.dbs.get(ptop)
-            if not db:
-                db = _open(ptop)
-                if not db:
-                    continue
-
-                self.dbs[ptop] = db
-                # self.log("idx /{} @ {} {}".format(vtop, ptop, flags))
-
-            c = db.execute(qstr, qv)
-            for _, ts, sz, rd, fn in c:
+            cur = self.get_cur(ptop)
+            if not cur:
+                continue
+
+            self.active_cur = cur
+
+            sret = []
+            c = cur.execute(q, v)
+            for hit in c:
+                w, ts, sz, rd, fn = hit
                 lim -= 1
                 if lim <= 0:
                     break
 
+                if rd.startswith("//") or fn.startswith("//"):
+                    rd, fn = s3dec(rd, fn)
+
                 rp = os.path.join(vtop, rd, fn).replace("\\", "/")
-                ret.append({"ts": int(ts), "sz": sz, "rp": rp})
-
-        return ret
+                sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
+
+            for hit in sret:
+                w = hit["w"]
+                del hit["w"]
+                tags = {}
+                q2 = "select k, v from mt where w = ? and k != 'x'"
+                for k, v2 in cur.execute(q2, (w,)):
+                    taglist[k] = True
+                    tags[k] = v2
+
+                hit["tags"] = tags
+
+            ret.extend(sret)
+
+        done_flag.append(True)
+        self.active_id = None
+
+        # undupe hits from multiple metadata keys
+        if len(ret) > 1:
+            ret = [ret[0]] + [
+                y for x, y in zip(ret[:-1], ret[1:]) if x["rp"] != y["rp"]
+            ]
+
+        return ret, list(taglist.keys())
+
+    def terminator(self, identifier, done_flag):
+        for _ in range(self.timeout):
+            time.sleep(1)
+            if done_flag:
+                return
+
+        if identifier == self.active_id:
+            self.active_cur.connection.interrupt()
 
 
 def _open(ptop):
     db_path = os.path.join(ptop, ".hist", "up2k.db")
     if os.path.exists(db_path):
-        return sqlite3.connect(db_path)
+        return sqlite3.connect(db_path).cursor()
 
 
 def _conv_sz(q, body, k, sql):
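For metadata filters, run_query builds the statement by chaining one inner join on mt per criterion, each join keyed on the previous alias, then rewrites the "mt." prefix in every condition to the alias it belongs to. Re-running just that loop with two made-up criteria shows the resulting shape (the dict keys imitate what _conv_txt/_conv_adv produce; values are illustrative, not from a live run):

targs = {
    "mt.k = '.bpm' and mt.v >= ?\n165": "165",
    "mt.k = 'artist' and mt.v like ?\nyana%": "yana%",
}
q = "select up.* from up"
keycmp = "substr(up.w,1,16)"
where, v, ctr = [], [], 0
for tq, tv in sorted(targs.items()):
    ctr += 1
    tq = tq.split("\n")[0]
    keycmp2 = "mt{}.w".format(ctr)
    q += " inner join mt mt{} on {} = {}".format(ctr, keycmp, keycmp2)
    keycmp = keycmp2
    where.append(tq.replace("mt.", keycmp[:-1]))
    v.append(tv)

print(q + " where " + " and ".join(where))
# select up.* from up inner join mt mt1 on substr(up.w,1,16) = mt1.w
#   inner join mt mt2 on mt1.w = mt2.w
#   where mt1.k = '.bpm' and mt1.v >= ? and mt2.k = 'artist' and mt2.v like ?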
@@ -146,3 +252,30 @@ def _conv_txt(q, body, k, sql):
 
     qk = "{} {} like {}?{}".format(sql, inv, head, tail)
     q[qk + "\n" + v] = u8safe(v)
+
+
+def _conv_adv(q, body, k):
+    ptn = re.compile(r"^(\.?[a-z]+) *(==?|!=|<=?|>=?) *(.*)$")
+
+    parts = body[k].split(" ")
+    parts = [x.strip() for x in parts if x.strip()]
+
+    for part in parts:
+        m = ptn.match(part)
+        if not m:
+            p = html_escape(part)
+            raise Pebkac(400, "invalid argument [" + p + "]")
+
+        k, op, v = m.groups()
+        qk = "mt.k = '{}' and mt.v {} ?".format(k, op)
+        q[qk + "\n" + v] = u8safe(v)
+
+
+def _sqlize(qobj):
+    keys = []
+    values = []
+    for k, v in sorted(qobj.items()):
+        keys.append(k.split("\n")[0])
+        values.append(v)
+
+    return " and ".join(keys), values
(diff for one file suppressed because it is too large)
@@ -10,6 +10,7 @@ import select
 import struct
 import hashlib
 import platform
+import traceback
 import threading
 import mimetypes
 import contextlib
@@ -56,11 +57,58 @@ HTTPCODE = {
     413: "Payload Too Large",
     416: "Requested Range Not Satisfiable",
     422: "Unprocessable Entity",
+    429: "Too Many Requests",
     500: "Internal Server Error",
     501: "Not Implemented",
 }
 
 
+IMPLICATIONS = [
+    ["e2dsa", "e2ds"],
+    ["e2ds", "e2d"],
+    ["e2tsr", "e2ts"],
+    ["e2ts", "e2t"],
+    ["e2t", "e2d"],
+]
+
+
+REKOBO_KEY = {
+    v: ln.split(" ", 1)[0]
+    for ln in """
+1B 6d B
+2B 7d Gb F#
+3B 8d Db C#
+4B 9d Ab G#
+5B 10d Eb D#
+6B 11d Bb A#
+7B 12d F
+8B 1d C
+9B 2d G
+10B 3d D
+11B 4d A
+12B 5d E
+1A 6m Abm G#m
+2A 7m Ebm D#m
+3A 8m Bbm A#m
+4A 9m Fm
+5A 10m Cm
+6A 11m Gm
+7A 12m Dm
+8A 1m Am
+9A 2m Em
+10A 3m Bm
+11A 4m Gbm F#m
+12A 5m Dbm C#m
+""".strip().split(
+        "\n"
+    )
+    for v in ln.strip().split(" ")[1:]
+    if v
+}
+
+REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}
+
+
 class Counter(object):
     def __init__(self, v=0):
         self.v = v
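REKOBO_KEY maps every alternative spelling on a line (the open-key style "6d", and note names like "B" or "F#") back to the code in the first column, and REKOBO_LKEY is its lowercase twin for case-insensitive lookups. Reading a few values straight off the table above (assuming the comprehensions have been evaluated in this module):

print(REKOBO_LKEY["6d"])   # "1B"
print(REKOBO_LKEY["f#"])   # "2B"
print(REKOBO_LKEY["g#m"])  # "1A"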
@@ -119,19 +167,51 @@ class ProgressPrinter(threading.Thread):
                 continue
 
             msg = self.msg
-            m = " {}\033[K\r".format(msg)
-            try:
-                print(m, end="")
-            except UnicodeEncodeError:
-                try:
-                    print(m.encode("utf-8", "replace").decode(), end="")
-                except:
-                    print(m.encode("ascii", "replace").decode(), end="")
+            uprint(" {}\033[K\r".format(msg))
 
         print("\033[K", end="")
         sys.stdout.flush()  # necessary on win10 even w/ stderr btw
 
 
+def uprint(msg):
+    try:
+        print(msg, end="")
+    except UnicodeEncodeError:
+        try:
+            print(msg.encode("utf-8", "replace").decode(), end="")
+        except:
+            print(msg.encode("ascii", "replace").decode(), end="")
+
+
+def nuprint(msg):
+    uprint("{}\n".format(msg))
+
+
+def rice_tid():
+    tid = threading.current_thread().ident
+    c = struct.unpack(b"B" * 5, struct.pack(b">Q", tid)[-5:])
+    return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m"
+
+
+def trace(*args, **kwargs):
+    t = time.time()
+    stack = "".join(
+        "\033[36m{}\033[33m{}".format(x[0].split(os.sep)[-1][:-3], x[1])
+        for x in traceback.extract_stack()[3:-1]
+    )
+    parts = ["{:.6f}".format(t), rice_tid(), stack]
+
+    if args:
+        parts.append(repr(args))
+
+    if kwargs:
+        parts.append(repr(kwargs))
+
+    msg = "\033[0m ".join(parts)
+    # _tracebuf.append(msg)
+    nuprint(msg)
+
+
 @contextlib.contextmanager
 def ren_open(fname, *args, **kwargs):
     fdir = kwargs.pop("fdir", None)
@@ -470,6 +550,16 @@ def get_spd(nbyte, t0, t=None):
     return "{} \033[0m{}/s\033[0m".format(s1, s2)
 
 
+def s2hms(s, optional_h=False):
+    s = int(s)
+    h, s = divmod(s, 3600)
+    m, s = divmod(s, 60)
+    if not h and optional_h:
+        return "{}:{:02}".format(m, s)
+
+    return "{}:{:02}:{:02}".format(h, m, s)
+
+
 def undot(path):
     ret = []
     for node in path.split("/"):
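s2hms is plain divmod arithmetic; optional_h drops the hour field when it is zero. For example:

print(s2hms(4000))       # "1:06:40"  (4000 s = 1 h 6 min 40 s)
print(s2hms(125, True))  # "2:05"
print(s2hms(125))        # "0:02:05"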
@@ -486,11 +576,12 @@ def undot(path):
     return "/".join(ret)
 
 
-def sanitize_fn(fn):
-    fn = fn.replace("\\", "/").split("/")[-1]
+def sanitize_fn(fn, ok=""):
+    if "/" not in ok:
+        fn = fn.replace("\\", "/").split("/")[-1]
 
     if WINDOWS:
-        for bad, good in [
+        for bad, good in [x for x in [
             ["<", "＜"],
             [">", "＞"],
             [":", "："],
@@ -500,7 +591,7 @@ def sanitize_fn(fn):
             ["|", "｜"],
             ["?", "？"],
             ["*", "＊"],
-        ]:
+        ] if x[0] not in ok]:
             fn = fn.replace(bad, good)
 
     bad = ["con", "prn", "aux", "nul"]
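The new ok parameter lets callers whitelist characters: with the default, backslashes are normalized and only the final path segment survives, while sanitize_fn(fn, "/") (as used by szip above) keeps the directory structure and exempts "/" from the Windows replacement table. A quick illustration (non-Windows host assumed, so the replacement table is skipped, and later reserved-name handling is not triggered by these inputs):

print(sanitize_fn("music\\album/01 intro.opus"))         # "01 intro.opus"
print(sanitize_fn("music/album/01 intro.opus", ok="/"))  # "music/album/01 intro.opus"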
@@ -521,9 +612,7 @@ def u8safe(txt):
 
 
 def exclude_dotfiles(filepaths):
-    for fpath in filepaths:
-        if not fpath.split("/")[-1].startswith("."):
-            yield fpath
+    return [x for x in filepaths if not x.split("/")[-1].startswith(".")]
 
 
 def html_escape(s, quote=False):
@@ -599,6 +688,31 @@ else:
     fsdec = w8dec
 
 
+def s3enc(mem_cur, rd, fn):
+    ret = []
+    for v in [rd, fn]:
+        try:
+            mem_cur.execute("select * from a where b = ?", (v,))
+            ret.append(v)
+        except:
+            ret.append("//" + w8b64enc(v))
+            # self.log("mojien/{} [{}] {}".format(k, v, ret[-1][2:]))
+
+    return tuple(ret)
+
+
+def s3dec(rd, fn):
+    ret = []
+    for k, v in [["d", rd], ["f", fn]]:
+        if v.startswith("//"):
+            ret.append(w8b64dec(v[2:]))
+            # self.log("mojide/{} [{}] {}".format(k, ret[-1], v[2:]))
+        else:
+            ret.append(v)
+
+    return tuple(ret)
+
+
 def atomic_move(src, dst):
     if not PY2:
         os.replace(src, dst)
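s3enc probes each path component against the in-memory cursor and falls back to a "//"-prefixed base64 form whenever sqlite refuses the text (typically undecodable filesystem names); s3dec undoes it when rows are read back, which is why run_query above checks for the "//" prefix. A round-trip sketch (w8b64enc/w8b64dec are the existing helpers in this module; the paths are illustrative):

import sqlite3

mem_cur = sqlite3.connect(":memory:").cursor()
mem_cur.execute("create table a (b text)")

rd, fn = s3enc(mem_cur, "music/album", "01 intro.opus")
print(rd, fn)         # plain text passes through unchanged
print(s3dec(rd, fn))  # ("music/album", "01 intro.opus")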
@@ -633,6 +747,50 @@ def read_socket_unbounded(sr):
         yield buf
 
 
+def read_socket_chunked(sr, log=None):
+    err = "expected chunk length, got [{}] |{}| instead"
+    while True:
+        buf = b""
+        while b"\r" not in buf:
+            rbuf = sr.recv(2)
+            if not rbuf or len(buf) > 16:
+                err = err.format(buf.decode("utf-8", "replace"), len(buf))
+                raise Pebkac(400, err)
+
+            buf += rbuf
+
+        if not buf.endswith(b"\n"):
+            sr.recv(1)
+
+        try:
+            chunklen = int(buf.rstrip(b"\r\n"), 16)
+        except:
+            err = err.format(buf.decode("utf-8", "replace"), len(buf))
+            raise Pebkac(400, err)
+
+        if chunklen == 0:
+            sr.recv(2)  # \r\n after final chunk
+            return
+
+        if log:
+            log("receiving {} byte chunk".format(chunklen))
+
+        for chunk in read_socket(sr, chunklen):
+            yield chunk
+
+        sr.recv(2)  # \r\n after each chunk too
+
+
+def yieldfile(fn):
+    with open(fsenc(fn), "rb", 512 * 1024) as f:
+        while True:
+            buf = f.read(64 * 1024)
+            if not buf:
+                break
+
+            yield buf
+
+
 def hashcopy(actor, fin, fout):
     u32_lim = int((2 ** 31) * 0.9)
     hashobj = hashlib.sha512()
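read_socket_chunked consumes standard HTTP/1.1 chunked framing straight off the socket: a hex length line, that many payload bytes (delegated to the existing read_socket helper), a CRLF, repeated until the zero-length terminator. On the wire, two small chunks look like this (framing example only):

wire = b"5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n"
# fed through read_socket_chunked(sck), this yields b"hello", then b" world",
# and the final "0" chunk makes the generator return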
@@ -692,6 +850,33 @@ def sendfile_kern(lower, upper, f, s):
     return 0
 
 
+def statdir(logger, scandir, lstat, top):
+    try:
+        btop = fsenc(top)
+        if scandir and hasattr(os, "scandir"):
+            src = "scandir"
+            with os.scandir(btop) as dh:
+                for fh in dh:
+                    try:
+                        yield [fsdec(fh.name), fh.stat(follow_symlinks=not lstat)]
+                    except Exception as ex:
+                        msg = "scan-stat: \033[36m{} @ {}"
+                        logger(msg.format(repr(ex), fsdec(fh.path)))
+        else:
+            src = "listdir"
+            fun = os.lstat if lstat else os.stat
+            for name in os.listdir(btop):
+                abspath = os.path.join(btop, name)
+                try:
+                    yield [fsdec(name), fun(abspath)]
+                except Exception as ex:
+                    msg = "list-stat: \033[36m{} @ {}"
+                    logger(msg.format(repr(ex), fsdec(abspath)))
+
+    except Exception as ex:
+        logger("{}: \033[31m{} @ {}".format(src, repr(ex), top))
+
+
 def unescape_cookie(orig):
     # mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn  # qwe,rty;asd fgh+jkl%zxc&vbn
     ret = ""
@@ -746,7 +931,11 @@ def chkcmd(*argv):
 def gzip_orig_sz(fn):
     with open(fsenc(fn), "rb") as f:
         f.seek(-4, 2)
-        return struct.unpack(b"I", f.read(4))[0]
+        rv = f.read(4)
+        try:
+            return struct.unpack(b"I", rv)[0]
+        except:
+            return struct.unpack("I", rv)[0]
 
 
 def py_desc():
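gzip_orig_sz reads ISIZE, the last four bytes of the gzip trailer, which hold the uncompressed length modulo 2^32 in little-endian; the added try/except presumably covers interpreters that reject a bytes-typed struct format string. A quick check against a file written by the gzip module (the path is illustrative):

import gzip

with gzip.open("/tmp/x.gz", "wb") as f:
    f.write(b"a" * 12345)

print(gzip_orig_sz("/tmp/x.gz"))  # 12345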
@@ -756,7 +945,11 @@ def py_desc():
     if ofs > 0:
         py_ver = py_ver[:ofs]
 
-    bitness = struct.calcsize(b"P") * 8
+    try:
+        bitness = struct.calcsize(b"P") * 8
+    except:
+        bitness = struct.calcsize("P") * 8
+
     host_os = platform.system()
     compiler = platform.python_compiler()
 
@@ -42,12 +42,8 @@ body {
|
|||||||
#path #entree {
|
#path #entree {
|
||||||
margin-left: -.7em;
|
margin-left: -.7em;
|
||||||
}
|
}
|
||||||
#treetab {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
#files {
|
#files {
|
||||||
border-collapse: collapse;
|
border-spacing: 0;
|
||||||
margin-top: 2em;
|
|
||||||
z-index: 1;
|
z-index: 1;
|
||||||
position: relative;
|
position: relative;
|
||||||
}
|
}
|
||||||
@@ -55,11 +51,10 @@ body {
|
|||||||
display: block;
|
display: block;
|
||||||
padding: .3em 0;
|
padding: .3em 0;
|
||||||
}
|
}
|
||||||
#files[ts] tbody div a {
|
#files tbody div a {
|
||||||
color: #f5a;
|
color: #f5a;
|
||||||
}
|
}
|
||||||
a,
|
a, #files tbody div a:last-child {
|
||||||
#files[ts] tbody div a:last-child {
|
|
||||||
color: #fc5;
|
color: #fc5;
|
||||||
padding: .2em;
|
padding: .2em;
|
||||||
text-decoration: none;
|
text-decoration: none;
|
||||||
@@ -67,16 +62,18 @@ a,
|
|||||||
#files a:hover {
|
#files a:hover {
|
||||||
color: #fff;
|
color: #fff;
|
||||||
background: #161616;
|
background: #161616;
|
||||||
|
text-decoration: underline;
|
||||||
}
|
}
|
||||||
#files thead a {
|
#files thead a {
|
||||||
color: #999;
|
color: #999;
|
||||||
font-weight: normal;
|
font-weight: normal;
|
||||||
}
|
}
|
||||||
#files tr:hover {
|
#files tr+tr:hover {
|
||||||
background: #1c1c1c;
|
background: #1c1c1c;
|
||||||
}
|
}
|
||||||
#files thead th {
|
#files thead th {
|
||||||
padding: .5em 1.3em .3em 1.3em;
|
padding: .5em 1.3em .3em 1.3em;
|
||||||
|
cursor: pointer;
|
||||||
}
|
}
|
||||||
#files thead th:last-child {
|
#files thead th:last-child {
|
||||||
background: #444;
|
background: #444;
|
||||||
@@ -94,6 +91,16 @@ a,
|
|||||||
margin: 0;
|
margin: 0;
|
||||||
padding: 0 .5em;
|
padding: 0 .5em;
|
||||||
}
|
}
|
||||||
|
#files td {
|
||||||
|
border-bottom: 1px solid #111;
|
||||||
|
}
|
||||||
|
#files td+td+td {
|
||||||
|
max-width: 30em;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
#files tr+tr td {
|
||||||
|
border-top: 1px solid #383838;
|
||||||
|
}
|
||||||
#files tbody td:nth-child(3) {
|
#files tbody td:nth-child(3) {
|
||||||
font-family: monospace;
|
font-family: monospace;
|
||||||
font-size: 1.3em;
|
font-size: 1.3em;
|
||||||
@@ -112,6 +119,9 @@ a,
|
|||||||
padding-bottom: 1.3em;
|
padding-bottom: 1.3em;
|
||||||
border-bottom: .5em solid #444;
|
border-bottom: .5em solid #444;
|
||||||
}
|
}
|
||||||
|
#files tbody tr td:last-child {
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
#files thead th[style] {
|
#files thead th[style] {
|
||||||
width: auto !important;
|
width: auto !important;
|
||||||
}
|
}
|
||||||
@@ -143,6 +153,15 @@ a,
|
|||||||
.logue {
|
.logue {
|
||||||
padding: .2em 1.5em;
|
padding: .2em 1.5em;
|
||||||
}
|
}
|
||||||
|
.logue:empty {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
#pro.logue {
|
||||||
|
margin-bottom: .8em;
|
||||||
|
}
|
||||||
|
#epi.logue {
|
||||||
|
margin: .8em 0;
|
||||||
|
}
|
||||||
#srv_info {
|
#srv_info {
|
||||||
opacity: .5;
|
opacity: .5;
|
||||||
font-size: .8em;
|
font-size: .8em;
|
||||||
@@ -160,7 +179,13 @@ a,
|
|||||||
margin: -.2em;
|
margin: -.2em;
|
||||||
}
|
}
|
||||||
#files tbody a.play.act {
|
#files tbody a.play.act {
|
||||||
color: #af0;
|
color: #840;
|
||||||
|
text-shadow: 0 0 .3em #b80;
|
||||||
|
}
|
||||||
|
#files tbody tr.sel td {
|
||||||
|
background: #80b;
|
||||||
|
color: #fff;
|
||||||
|
border-color: #a3d;
|
||||||
}
|
}
|
||||||
#blocked {
|
#blocked {
|
||||||
position: fixed;
|
position: fixed;
|
||||||
@@ -248,6 +273,25 @@ a,
|
|||||||
padding: .2em 0 0 .07em;
|
padding: .2em 0 0 .07em;
|
||||||
color: #fff;
|
color: #fff;
|
||||||
}
|
}
|
||||||
|
#wtoggle>span {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
#wtoggle.sel {
|
||||||
|
width: 4.27em;
|
||||||
|
}
|
||||||
|
#wtoggle.sel>span {
|
||||||
|
display: inline-block;
|
||||||
|
line-height: 0;
|
||||||
|
}
|
||||||
|
#wtoggle.sel>span a {
|
||||||
|
font-size: .4em;
|
||||||
|
margin: -.3em 0;
|
||||||
|
position: relative;
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
#wtoggle.sel>span #selzip {
|
||||||
|
top: -.6em;
|
||||||
|
}
|
||||||
#barpos,
|
#barpos,
|
||||||
#barbuf {
|
#barbuf {
|
||||||
position: absolute;
|
position: absolute;
|
||||||
@@ -291,6 +335,20 @@ a,
|
|||||||
width: calc(100% - 10.5em);
|
width: calc(100% - 10.5em);
|
||||||
background: rgba(0,0,0,0.2);
|
background: rgba(0,0,0,0.2);
|
||||||
}
|
}
|
||||||
|
@media (min-width: 90em) {
|
||||||
|
#barpos,
|
||||||
|
#barbuf {
|
||||||
|
width: calc(100% - 24em);
|
||||||
|
left: 9.8em;
|
||||||
|
top: .7em;
|
||||||
|
height: 1.6em;
|
||||||
|
bottom: auto;
|
||||||
|
}
|
||||||
|
#widget {
|
||||||
|
bottom: -3.2em;
|
||||||
|
height: 3.2em;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -371,7 +429,7 @@ input[type="checkbox"]:checked+label {
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
#op_search table {
|
#srch_form {
|
||||||
border: 1px solid #3a3a3a;
|
border: 1px solid #3a3a3a;
|
||||||
box-shadow: 0 0 1em #222 inset;
|
box-shadow: 0 0 1em #222 inset;
|
||||||
background: #2d2d2d;
|
background: #2d2d2d;
|
||||||
@@ -380,14 +438,25 @@ input[type="checkbox"]:checked+label {
|
|||||||
margin-bottom: 0;
|
margin-bottom: 0;
|
||||||
padding: 0 .5em .5em 0;
|
padding: 0 .5em .5em 0;
|
||||||
}
|
}
|
||||||
|
#srch_form table {
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
#srch_form td {
|
#srch_form td {
|
||||||
padding: .6em .6em;
|
padding: .6em .6em;
|
||||||
}
|
}
|
||||||
|
#srch_form td:first-child {
|
||||||
|
width: 3em;
|
||||||
|
padding-right: .2em;
|
||||||
|
text-align: right;
|
||||||
|
}
|
||||||
#op_search input {
|
#op_search input {
|
||||||
margin: 0;
|
margin: 0;
|
||||||
}
|
}
|
||||||
#srch_q {
|
#srch_q {
|
||||||
white-space: pre;
|
white-space: pre;
|
||||||
|
color: #f80;
|
||||||
|
height: 1em;
|
||||||
|
margin: .2em 0 -1em 1.6em;
|
||||||
}
|
}
|
||||||
#files td div span {
|
#files td div span {
|
||||||
color: #fff;
|
color: #fff;
|
||||||
@@ -400,39 +469,78 @@ input[type="checkbox"]:checked+label {
|
|||||||
color: #fff;
|
color: #fff;
|
||||||
}
|
}
|
||||||
#files td div a {
|
#files td div a {
|
||||||
display: table-cell;
|
display: inline-block;
|
||||||
white-space: nowrap;
|
white-space: nowrap;
|
||||||
}
|
}
|
||||||
#files td div a:last-child {
|
#files td div a:last-child {
|
||||||
width: 100%;
|
width: 100%;
|
||||||
}
|
}
|
||||||
#files td div {
|
#files td div {
|
||||||
display: table;
|
|
||||||
border-collapse: collapse;
|
border-collapse: collapse;
|
||||||
width: 100%;
|
width: 100%;
|
||||||
}
|
}
|
||||||
#files td div a:last-child {
|
#files td div a:last-child {
|
||||||
width: 100%;
|
width: 100%;
|
||||||
}
|
}
|
||||||
#tree,
|
#wrap {
|
||||||
#treefiles {
|
margin-top: 2em;
|
||||||
vertical-align: top;
|
|
||||||
}
|
}
|
||||||
#tree {
|
#tree {
|
||||||
padding-top: 2em;
|
display: none;
|
||||||
|
position: fixed;
|
||||||
|
left: 0;
|
||||||
|
bottom: 0;
|
||||||
|
top: 7em;
|
||||||
|
padding-top: .2em;
|
||||||
|
overflow-y: auto;
|
||||||
|
-ms-scroll-chaining: none;
|
||||||
|
overscroll-behavior-y: none;
|
||||||
|
scrollbar-color: #eb0 #333;
|
||||||
|
}
|
||||||
|
#thx_ff {
|
||||||
|
padding: 5em 0;
|
||||||
|
}
|
||||||
|
#tree::-webkit-scrollbar-track {
|
||||||
|
background: #333;
|
||||||
|
}
|
||||||
|
#tree::-webkit-scrollbar {
|
||||||
|
background: #333;
|
||||||
|
}
|
||||||
|
#tree::-webkit-scrollbar-thumb {
|
||||||
|
background: #eb0;
|
||||||
|
}
|
||||||
|
#tree:hover {
|
||||||
|
z-index: 2;
|
||||||
|
}
|
||||||
|
#treeul {
|
||||||
|
position: relative;
|
||||||
|
left: -1.7em;
|
||||||
|
width: calc(100% + 1.3em);
|
||||||
|
}
|
||||||
|
.tglbtn,
|
||||||
|
#tree>a+a {
|
||||||
|
padding: .2em .4em;
|
||||||
|
font-size: 1.2em;
|
||||||
|
background: #2a2a2a;
|
||||||
|
box-shadow: 0 .1em .2em #222 inset;
|
||||||
|
border-radius: .3em;
|
||||||
|
margin: .2em;
|
||||||
|
position: relative;
|
||||||
|
top: -.2em;
|
||||||
|
}
|
||||||
|
.tglbtn:hover,
|
||||||
|
#tree>a+a:hover {
|
||||||
|
background: #805;
|
||||||
|
}
|
||||||
|
.tglbtn.on,
|
||||||
|
#tree>a+a.on {
|
||||||
|
background: #fc4;
|
||||||
|
color: #400;
|
||||||
|
text-shadow: none;
|
||||||
}
|
}
|
||||||
#detree {
|
#detree {
|
||||||
padding: .3em .5em;
|
padding: .3em .5em;
|
||||||
font-size: 1.5em;
|
font-size: 1.5em;
|
||||||
display: inline-block;
|
|
||||||
min-width: 12em;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
#treefiles #files tbody {
|
|
||||||
border-radius: 0 .7em 0 .7em;
|
|
||||||
}
|
|
||||||
#treefiles #files thead th:nth-child(1) {
|
|
||||||
border-radius: .7em 0 0 0;
|
|
||||||
}
|
}
|
||||||
#tree ul,
|
#tree ul,
|
||||||
#tree li {
|
#tree li {
|
||||||
@@ -440,53 +548,38 @@ input[type="checkbox"]:checked+label {
|
|||||||
margin: 0;
|
margin: 0;
|
||||||
}
|
}
|
||||||
#tree ul {
|
#tree ul {
|
||||||
border-left: .2em solid #444;
|
border-left: .2em solid #555;
|
||||||
}
|
}
|
||||||
#tree li {
|
#tree li {
|
||||||
margin-left: 1em;
|
margin-left: 1em;
|
||||||
list-style: none;
|
list-style: none;
|
||||||
white-space: nowrap;
|
border-top: 1px solid #4c4c4c;
|
||||||
|
border-bottom: 1px solid #222;
|
||||||
}
|
}
|
||||||
#tree a.hl {
|
#tree li:last-child {
|
||||||
|
border-bottom: none;
|
||||||
|
}
|
||||||
|
#treeul a.hl {
|
||||||
color: #400;
|
color: #400;
|
||||||
background: #fc4;
|
background: #fc4;
|
||||||
border-radius: .3em;
|
border-radius: .3em;
|
||||||
text-shadow: none;
|
text-shadow: none;
|
||||||
}
|
}
|
||||||
#tree a {
|
#treeul a {
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
}
|
}
|
||||||
#tree a+a {
|
#treeul a+a {
|
||||||
width: calc(100% - 2em);
|
width: calc(100% - 2em);
|
||||||
background: #333;
|
background: #333;
|
||||||
|
line-height: 1em;
|
||||||
}
|
}
|
||||||
#tree a+a:hover {
|
#treeul a+a:hover {
|
||||||
background: #222;
|
background: #222;
|
||||||
color: #fff;
|
color: #fff;
|
||||||
}
|
}
|
||||||
#treeul {
|
|
||||||
position: relative;
|
|
||||||
overflow: hidden;
|
|
||||||
left: -1.7em;
|
|
||||||
}
|
|
||||||
#treeul:hover {
|
|
||||||
z-index: 2;
|
|
||||||
overflow: visible;
|
|
||||||
}
|
|
||||||
#treeul:hover a+a {
|
|
||||||
width: auto;
|
|
||||||
min-width: calc(100% - 2em);
|
|
||||||
}
|
|
||||||
#treeul a:first-child {
|
#treeul a:first-child {
|
||||||
font-family: monospace, monospace;
|
font-family: monospace, monospace;
|
||||||
}
|
}
|
||||||
#treefiles {
|
|
||||||
opacity: 1;
|
|
||||||
transition: opacity 0.2s ease-in-out;
|
|
||||||
}
|
|
||||||
#tree:hover+#treefiles {
|
|
||||||
opacity: .8;
|
|
||||||
}
|
|
||||||
.dumb_loader_thing {
|
.dumb_loader_thing {
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
margin: 1em .3em 1em 1em;
|
margin: 1em .3em 1em 1em;
|
||||||
@@ -496,3 +589,90 @@ input[type="checkbox"]:checked+label {
|
|||||||
position: absolute;
|
position: absolute;
|
||||||
z-index: 9;
|
z-index: 9;
|
||||||
}
|
}
|
||||||
|
#files .cfg {
|
||||||
|
display: none;
|
||||||
|
font-size: 2em;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
#files th:hover .cfg,
|
||||||
|
#files th.min .cfg {
|
||||||
|
display: block;
|
||||||
|
width: 1em;
|
||||||
|
border-radius: .2em;
|
||||||
|
margin: -1.3em auto 0 auto;
|
||||||
|
background: #444;
|
||||||
|
}
|
||||||
|
#files th.min .cfg {
|
||||||
|
margin: -.6em;
|
||||||
|
}
|
||||||
|
#files>thead>tr>th.min span {
|
||||||
|
position: absolute;
|
||||||
|
transform: rotate(270deg);
|
||||||
|
background: linear-gradient(90deg, rgba(68,68,68,0), rgba(68,68,68,0.5) 70%, #444);
|
||||||
|
margin-left: -4.6em;
|
||||||
|
padding: .4em;
|
||||||
|
top: 5.4em;
|
||||||
|
width: 8em;
|
||||||
|
text-align: right;
|
||||||
|
letter-spacing: .04em;
|
||||||
|
}
|
||||||
|
#files td:nth-child(2n) {
|
||||||
|
color: #f5a;
|
||||||
|
}
|
||||||
|
#files td.min a {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
#files tr.play td {
|
||||||
|
background: #fc4;
|
||||||
|
border-color: transparent;
|
||||||
|
color: #400;
|
||||||
|
text-shadow: none;
|
||||||
|
}
|
||||||
|
#files tr.play a {
|
||||||
|
color: inherit;
|
||||||
|
}
|
||||||
|
#files tr.play a:hover {
|
||||||
|
color: #300;
|
||||||
|
background: #fea;
|
||||||
|
}
|
||||||
|
#op_cfg {
|
||||||
|
max-width: none;
|
||||||
|
margin-right: 1.5em;
|
||||||
|
}
|
||||||
|
#op_cfg>div>a {
|
||||||
|
line-height: 2em;
|
||||||
|
}
|
||||||
|
#op_cfg>div>span {
|
||||||
|
display: inline-block;
|
||||||
|
padding: .2em .4em;
|
||||||
|
}
|
||||||
|
#op_cfg h3 {
|
||||||
|
margin: .8em 0 0 .6em;
|
||||||
|
padding: 0;
|
||||||
|
border-bottom: 1px solid #555;
|
||||||
|
}
|
||||||
|
#opdesc {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
#ops:hover #opdesc {
|
||||||
|
display: block;
|
||||||
|
background: linear-gradient(0deg,#555, #4c4c4c 80%, #444);
|
||||||
|
box-shadow: 0 .3em 1em #222;
|
||||||
|
padding: 1em;
|
||||||
|
border-radius: .3em;
|
||||||
|
position: absolute;
|
||||||
|
z-index: 3;
|
||||||
|
top: 6em;
|
||||||
|
right: 1.5em;
|
||||||
|
}
|
||||||
|
#ops:hover #opdesc.off {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
#opdesc code {
|
||||||
|
background: #3c3c3c;
|
||||||
|
padding: .2em .3em;
|
||||||
|
border-top: 1px solid #777;
|
||||||
|
border-radius: .3em;
|
||||||
|
font-family: monospace, monospace;
|
||||||
|
line-height: 2em;
|
||||||
|
}
|
||||||
|
|||||||
@@ -12,25 +12,43 @@
|
|||||||
|
|
||||||
<body>
|
<body>
|
||||||
<div id="ops">
|
<div id="ops">
|
||||||
<a href="#" data-dest="">---</a>
|
<a href="#" data-dest="" data-desc="close submenu">---</a>
|
||||||
<a href="#" data-perm="read" data-dest="search">🔎</a>
|
<a href="#" data-perm="read" data-dest="search" data-desc="search for files by attributes, path/name, music tags, or any combination of those.<br /><br /><code>foo bar</code> = must contain both foo and bar,<br /><code>foo -bar</code> = must contain foo but not bar,<br /><code>^yana .opus$</code> = must start with yana and have the opus extension">🔎</a>
|
||||||
{%- if have_up2k_idx %}
|
{%- if have_up2k_idx %}
|
||||||
<a href="#" data-dest="up2k">🚀</a>
|
<a href="#" data-dest="up2k" data-desc="up2k: upload files (if you have write-access) or toggle into the search-mode and drag files onto the search button to see if they exist somewhere on the server">🚀</a>
|
||||||
{%- else %}
|
{%- else %}
|
||||||
<a href="#" data-perm="write" data-dest="up2k">🚀</a>
|
<a href="#" data-perm="write" data-dest="up2k" data-desc="up2k: upload files with resume support (close your browser and drop the same files in later)">🚀</a>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
<a href="#" data-perm="write" data-dest="bup">🎈</a>
|
<a href="#" data-perm="write" data-dest="bup" data-desc="bup: basic uploader, even supports netscape 4.0">🎈</a>
|
||||||
<a href="#" data-perm="write" data-dest="mkdir">📂</a>
|
<a href="#" data-perm="write" data-dest="mkdir" data-desc="mkdir: create a new directory">📂</a>
|
||||||
<a href="#" data-perm="write" data-dest="new_md">📝</a>
|
<a href="#" data-perm="write" data-dest="new_md" data-desc="new-md: create a new markdown document">📝</a>
|
||||||
<a href="#" data-perm="write" data-dest="msg">📟</a>
|
<a href="#" data-perm="write" data-dest="msg" data-desc="msg: send a message to the server log">📟</a>
|
||||||
|
<a href="#" data-dest="cfg" data-desc="configuration options">⚙️</a>
|
||||||
|
<div id="opdesc"></div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div id="op_search" class="opview">
|
<div id="op_search" class="opview">
|
||||||
<table id="srch_form"></table>
|
{%- if have_tags_idx %}
|
||||||
|
<div id="srch_form" class="tags"></div>
|
||||||
|
{%- else %}
|
||||||
|
<div id="srch_form"></div>
|
||||||
|
{%- endif %}
|
||||||
<div id="srch_q"></div>
|
<div id="srch_q"></div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{%- include 'upload.html' %}
|
{%- include 'upload.html' %}
|
||||||
|
|
||||||
|
<div id="op_cfg" class="opview opbox">
|
||||||
|
<h3>key notation</h3>
|
||||||
|
<div id="key_notation"></div>
|
||||||
|
{%- if have_zip %}
|
||||||
|
<h3>folder download</h3>
|
||||||
|
<div id="arc_fmt"></div>
|
||||||
|
{%- endif %}
|
||||||
|
<h3>tooltips</h3>
|
||||||
|
<div><a id="tooltips" class="tglbtn" href="#">enable</a></div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<h1 id="path">
|
<h1 id="path">
|
||||||
<a href="#" id="entree">🌲</a>
|
<a href="#" id="entree">🌲</a>
|
||||||
{%- for n in vpnodes %}
|
{%- for n in vpnodes %}
|
||||||
@@ -38,51 +56,70 @@
|
|||||||
{%- endfor %}
|
{%- endfor %}
|
||||||
</h1>
|
</h1>
|
||||||
|
|
||||||
{%- if prologue %}
|
<div id="tree">
|
||||||
<div id="pro" class="logue">{{ prologue }}</div>
|
<a href="#" id="detree">🍞...</a>
|
||||||
{%- endif %}
|
<a href="#" step="2" id="twobytwo">+</a>
|
||||||
|
<a href="#" step="-2" id="twig">–</a>
|
||||||
|
<a href="#" class="tglbtn" id="dyntree">a</a>
|
||||||
|
<ul id="treeul"></ul>
|
||||||
|
<div id="thx_ff"> </div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<table id="treetab">
|
<div id="wrap">
|
||||||
<tr>
|
|
||||||
<td id="tree">
|
<div id="pro" class="logue">{{ logues[0] }}</div>
|
||||||
<a href="#" id="detree">🍞...</a>
|
|
||||||
<ul id="treeul"></ul>
|
|
||||||
</td>
|
|
||||||
<td id="treefiles"></td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
|
|
||||||
<table id="files">
|
<table id="files">
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
<th></th>
|
<th name="lead"><span>c</span></th>
|
||||||
<th>File Name</th>
|
<th name="href"><span>File Name</span></th>
|
||||||
<th sort="int">File Size</th>
|
<th name="sz" sort="int"><span>Size</span></th>
|
||||||
<th>T</th>
|
{%- for k in taglist %}
|
||||||
<th>Date</th>
|
{%- if k.startswith('.') %}
|
||||||
|
<th name="tags/{{ k }}" sort="int"><span>{{ k[1:] }}</span></th>
|
||||||
|
{%- else %}
|
||||||
|
<th name="tags/{{ k }}"><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
|
||||||
|
{%- endif %}
|
||||||
|
{%- endfor %}
|
||||||
|
<th name="ext"><span>T</span></th>
|
||||||
|
<th name="ts"><span>Date</span></th>
|
||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody>
|
<tbody>
|
||||||
|
|
||||||
{%- for f in files %}
|
{%- for f in files %}
|
||||||
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
|
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
|
||||||
|
{%- if f.tags is defined %}
|
||||||
|
{%- for k in taglist %}
|
||||||
|
<td>{{ f.tags[k] }}</td>
|
||||||
|
{%- endfor %}
|
||||||
|
{%- endif %}
|
||||||
|
<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
|
||||||
{%- endfor %}
|
{%- endfor %}
|
||||||
|
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
{%- if epilogue %}
|
<div id="epi" class="logue">{{ logues[1] }}</div>
|
||||||
<div id="epi" class="logue">{{ epilogue }}</div>
|
|
||||||
{%- endif %}
|
|
||||||
|
|
||||||
<h2><a href="?h">control-panel</a></h2>
|
<h2><a href="?h">control-panel</a></h2>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
{%- if srv_info %}
|
{%- if srv_info %}
|
||||||
<div id="srv_info"><span>{{ srv_info }}</span></div>
|
<div id="srv_info"><span>{{ srv_info }}</span></div>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|
||||||
<div id="widget">
|
<div id="widget">
|
||||||
<div id="wtoggle">♫</div>
|
<div id="wtoggle">
|
||||||
|
<span>
|
||||||
|
<a href="#" id="selall">sel.<br />all</a>
|
||||||
|
<a href="#" id="selinv">sel.<br />inv.</a>
|
||||||
|
<a href="#" id="selzip">zip</a>
|
||||||
|
</span>
|
||||||
|
♫
|
||||||
|
</div>
|
||||||
<div id="widgeti">
|
<div id="widgeti">
|
||||||
<div id="pctl"><a href="#" id="bprev">⏮</a><a href="#" id="bplay">▶</a><a href="#" id="bnext">⏭</a></div>
|
<div id="pctl"><a href="#" id="bprev">⏮</a><a href="#" id="bplay">▶</a><a href="#" id="bnext">⏭</a></div>
|
||||||
<canvas id="pvol" width="288" height="38"></canvas>
|
<canvas id="pvol" width="288" height="38"></canvas>
|
||||||
@@ -91,6 +128,9 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
var tag_order_cfg = {{ tag_order }};
|
||||||
|
</script>
|
||||||
<script src="/.cpr/util.js{{ ts }}"></script>
|
<script src="/.cpr/util.js{{ ts }}"></script>
|
||||||
<script src="/.cpr/browser.js{{ ts }}"></script>
|
<script src="/.cpr/browser.js{{ ts }}"></script>
|
||||||
<script src="/.cpr/up2k.js{{ ts }}"></script>
|
<script src="/.cpr/up2k.js{{ ts }}"></script>
|
||||||
|
(diff for one file suppressed because it is too large)
@@ -147,7 +147,7 @@ var md_opt = {
 
 </script>
 <script src="/.cpr/util.js"></script>
-<script src="/.cpr/deps/marked.full.js"></script>
+<script src="/.cpr/deps/marked.js"></script>
 <script src="/.cpr/md.js"></script>
 {%- if edit %}
 <script src="/.cpr/md2.js"></script>
|
|||||||
@@ -65,7 +65,7 @@ function statify(obj) {
|
|||||||
if (a > 0)
|
if (a > 0)
|
||||||
loc.push(n[a]);
|
loc.push(n[a]);
|
||||||
|
|
||||||
var dec = hesc(decodeURIComponent(n[a]));
|
var dec = hesc(uricom_dec(n[a])[0]);
|
||||||
|
|
||||||
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
||||||
}
|
}
|
||||||
@@ -524,11 +524,9 @@ dom_navtgl.onclick = function () {
|
|||||||
dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav';
|
dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav';
|
||||||
dom_nav.style.display = hidden ? 'none' : 'block';
|
dom_nav.style.display = hidden ? 'none' : 'block';
|
||||||
|
|
||||||
if (window.localStorage)
|
swrite('hidenav', hidden ? 1 : 0);
|
||||||
localStorage.setItem('hidenav', hidden ? 1 : 0);
|
|
||||||
|
|
||||||
redraw();
|
redraw();
|
||||||
};
|
};
|
||||||
|
|
||||||
if (window.localStorage && localStorage.getItem('hidenav') == 1)
|
if (sread('hidenav') == 1)
|
||||||
dom_navtgl.onclick();
|
dom_navtgl.onclick();
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ var dom_md = ebi('mt');
|
|||||||
if (a > 0)
|
if (a > 0)
|
||||||
loc.push(n[a]);
|
loc.push(n[a]);
|
||||||
|
|
||||||
var dec = decodeURIComponent(n[a]).replace(/&/g, "&").replace(/</g, "<").replace(/>/g, ">");
|
var dec = uricom_dec(n[a])[0].replace(/&/g, "&").replace(/</g, "<").replace(/>/g, ">");
|
||||||
|
|
||||||
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -46,9 +46,9 @@ function up2k_flagbus() {
|
|||||||
var dbg = function (who, msg) {
|
var dbg = function (who, msg) {
|
||||||
console.log('flagbus(' + flag.id + '): [' + who + '] ' + msg);
|
console.log('flagbus(' + flag.id + '): [' + who + '] ' + msg);
|
||||||
};
|
};
|
||||||
flag.ch.onmessage = function (ev) {
|
flag.ch.onmessage = function (e) {
|
||||||
var who = ev.data[0],
|
var who = e.data[0],
|
||||||
what = ev.data[1];
|
what = e.data[1];
|
||||||
|
|
||||||
if (who == flag.id) {
|
if (who == flag.id) {
|
||||||
dbg(who, 'hi me (??)');
|
dbg(who, 'hi me (??)');
|
||||||
@@ -83,7 +83,7 @@ function up2k_flagbus() {
|
|||||||
flag.ch.postMessage([flag.id, "hey"]);
|
flag.ch.postMessage([flag.id, "hey"]);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
dbg('?', ev.data);
|
dbg('?', e.data);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
var tx = function (now, msg) {
|
var tx = function (now, msg) {
|
||||||
@@ -194,7 +194,7 @@ function up2k_init(have_crypto) {
|
|||||||
|
|
||||||
// handle user intent to use the basic uploader instead
|
// handle user intent to use the basic uploader instead
|
||||||
ebi('u2nope').onclick = function (e) {
|
ebi('u2nope').onclick = function (e) {
|
||||||
e.preventDefault();
|
ev(e);
|
||||||
setmsg();
|
setmsg();
|
||||||
goto('bup');
|
goto('bup');
|
||||||
};
|
};
|
||||||
@@ -209,42 +209,7 @@ function up2k_init(have_crypto) {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
function cfg_get(name) {
|
var parallel_uploads = icfg_get('nthread');
|
||||||
var val = localStorage.getItem(name);
|
|
||||||
if (val === null)
|
|
||||||
return parseInt(ebi(name).value);
|
|
||||||
|
|
||||||
ebi(name).value = val;
|
|
||||||
return val;
|
|
||||||
}
|
|
||||||
|
|
||||||
function bcfg_get(name, defval) {
|
|
||||||
var o = ebi(name);
|
|
||||||
if (!o)
|
|
||||||
return defval;
|
|
||||||
|
|
||||||
var val = localStorage.getItem(name);
|
|
||||||
if (val === null)
|
|
||||||
val = defval;
|
|
||||||
else
|
|
||||||
val = (val == '1');
|
|
||||||
|
|
||||||
o.checked = val;
|
|
||||||
return val;
|
|
||||||
}
|
|
||||||
|
|
||||||
function bcfg_set(name, val) {
|
|
||||||
localStorage.setItem(
|
|
||||||
name, val ? '1' : '0');
|
|
||||||
|
|
||||||
var o = ebi(name);
|
|
||||||
if (o)
|
|
||||||
o.checked = val;
|
|
||||||
|
|
||||||
return val;
|
|
||||||
}
|
|
||||||
|
|
||||||
var parallel_uploads = cfg_get('nthread');
|
|
||||||
var multitask = bcfg_get('multitask', true);
|
var multitask = bcfg_get('multitask', true);
|
||||||
var ask_up = bcfg_get('ask_up', true);
|
var ask_up = bcfg_get('ask_up', true);
|
||||||
var flag_en = bcfg_get('flag_en', false);
|
var flag_en = bcfg_get('flag_en', false);
|
||||||
@@ -282,49 +247,58 @@ function up2k_init(have_crypto) {
|
|||||||
|
|
||||||
var flag = false;
|
var flag = false;
|
||||||
apply_flag_cfg();
|
apply_flag_cfg();
|
||||||
apply_fsearch_cfg();
|
set_fsearch();
|
||||||
|
|
||||||
function nav() {
|
function nav() {
|
||||||
ebi('file' + fdom_ctr).click();
|
ebi('file' + fdom_ctr).click();
|
||||||
}
|
}
|
||||||
ebi('u2btn').addEventListener('click', nav, false);
|
ebi('u2btn').addEventListener('click', nav, false);
|
||||||
|
|
||||||
function ondrag(ev) {
|
function ondrag(e) {
|
||||||
ev.stopPropagation();
|
e.stopPropagation();
|
||||||
ev.preventDefault();
|
e.preventDefault();
|
||||||
ev.dataTransfer.dropEffect = 'copy';
|
e.dataTransfer.dropEffect = 'copy';
|
||||||
ev.dataTransfer.effectAllowed = 'copy';
|
e.dataTransfer.effectAllowed = 'copy';
|
||||||
}
|
}
|
||||||
ebi('u2btn').addEventListener('dragover', ondrag, false);
|
ebi('u2btn').addEventListener('dragover', ondrag, false);
|
||||||
ebi('u2btn').addEventListener('dragenter', ondrag, false);
|
ebi('u2btn').addEventListener('dragenter', ondrag, false);
|
||||||
|
|
||||||
function gotfile(ev) {
|
function gotfile(e) {
|
||||||
ev.stopPropagation();
|
e.stopPropagation();
|
||||||
ev.preventDefault();
|
e.preventDefault();
|
||||||
|
|
||||||
var files;
|
var files;
|
||||||
var is_itemlist = false;
|
var is_itemlist = false;
|
||||||
if (ev.dataTransfer) {
|
if (e.dataTransfer) {
|
||||||
if (ev.dataTransfer.items) {
|
if (e.dataTransfer.items) {
|
||||||
files = ev.dataTransfer.items; // DataTransferItemList
|
files = e.dataTransfer.items; // DataTransferItemList
|
||||||
is_itemlist = true;
|
is_itemlist = true;
|
||||||
}
|
}
|
||||||
else files = ev.dataTransfer.files; // FileList
|
else files = e.dataTransfer.files; // FileList
|
||||||
}
|
}
|
||||||
else files = ev.target.files;
|
else files = e.target.files;
|
||||||
|
|
||||||
if (files.length == 0)
|
if (!files || files.length == 0)
|
||||||
return alert('no files selected??');
|
return alert('no files selected??');
|
||||||
|
|
||||||
more_one_file();
|
more_one_file();
|
||||||
var bad_files = [];
|
var bad_files = [];
|
||||||
var good_files = [];
|
var good_files = [];
|
||||||
|
var dirs = [];
|
||||||
for (var a = 0; a < files.length; a++) {
|
for (var a = 0; a < files.length; a++) {
|
||||||
var fobj = files[a];
|
var fobj = files[a];
|
||||||
if (is_itemlist) {
|
if (is_itemlist) {
|
||||||
if (fobj.kind !== 'file')
|
if (fobj.kind !== 'file')
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
|
try {
|
||||||
|
var wi = fobj.webkitGetAsEntry();
|
||||||
|
if (wi.isDirectory) {
|
||||||
|
dirs.push(wi);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (ex) { }
|
||||||
fobj = fobj.getAsFile();
|
fobj = fobj.getAsFile();
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
@@ -335,12 +309,69 @@ function up2k_init(have_crypto) {

 bad_files.push(fobj.name);
 continue;
 }
-good_files.push(fobj);
+good_files.push([fobj, fobj.name]);
+}
+if (dirs) {
+return read_dirs(null, [], dirs, good_files, bad_files);
+}
+}
+
+function read_dirs(rd, pf, dirs, good, bad) {
+if (!dirs.length) {
+if (!pf.length)
+return gotallfiles(good, bad);
+
+console.log("retry pf, " + pf.length);
+setTimeout(function () {
+read_dirs(rd, pf, dirs, good, bad);
+}, 50);
+return;
 }
+
+if (!rd)
+rd = dirs[0].createReader();
+
+rd.readEntries(function (ents) {
+var ngot = 0;
+ents.forEach(function (dn) {
+if (dn.isDirectory) {
+dirs.push(dn);
+}
+else {
+var name = dn.fullPath;
+if (name.indexOf('/') === 0)
+name = name.slice(1);
+
+pf.push(name);
+dn.file(function (fobj) {
+var idx = pf.indexOf(name);
+pf.splice(idx, 1);
+try {
+if (fobj.size > 0) {
+good.push([fobj, name]);
+return;
+}
+}
+catch (ex) { }
+bad.push(name);
+});
+}
+ngot += 1;
+});
+// console.log("ngot: " + ngot);
+if (!ngot) {
+dirs.shift();
+rd = null;
+}
+return read_dirs(rd, pf, dirs, good, bad);
+});
+}
+
+function gotallfiles(good_files, bad_files) {
 if (bad_files.length > 0) {
-var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length);
-for (var a = 0; a < bad_files.length; a++)
+var ntot = bad_files.length + good_files.length;
+var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, ntot);
+for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++)
 msg += '-- ' + bad_files[a] + '\n';

 if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))

@@ -350,24 +381,24 @@ function up2k_init(have_crypto) {

 }

 var msg = ['upload these ' + good_files.length + ' files?'];
-for (var a = 0; a < good_files.length; a++)
-msg.push(good_files[a].name);
+for (var a = 0, aa = Math.min(20, good_files.length); a < aa; a++)
+msg.push(good_files[a][1]);

 if (ask_up && !fsearch && !confirm(msg.join('\n')))
 return;

 for (var a = 0; a < good_files.length; a++) {
-var fobj = good_files[a];
+var fobj = good_files[a][0];
 var now = new Date().getTime();
 var lmod = fobj.lastModified || now;
 var entry = {
 "n": parseInt(st.files.length.toString()),
-"t0": now, // TODO remove probably
+"t0": now,
 "fobj": fobj,
-"name": fobj.name,
+"name": good_files[a][1],
 "size": fobj.size,
 "lmod": lmod / 1000,
-"purl": get_vpath(),
+"purl": get_evpath(),
 "done": false,
 "hash": []
 };
@@ -690,8 +721,8 @@ function up2k_init(have_crypto) {

 prog(t.n, nchunk, col_hashing);
 };

-var segm_load = function (ev) {
-cache_buf = ev.target.result;
+var segm_load = function (e) {
+cache_buf = e.target.result;
 cache_ofs = 0;
 hash_calc();
 };

@@ -765,20 +796,20 @@ function up2k_init(have_crypto) {

 st.busy.handshake.push(t);

 var xhr = new XMLHttpRequest();
-xhr.onload = function (ev) {
+xhr.onload = function (e) {
 if (xhr.status == 200) {
 var response = JSON.parse(xhr.responseText);

 if (!response.name) {
 var msg = '';
 var smsg = '';
-if (!response || !response.length) {
+if (!response || !response.hits || !response.hits.length) {
 msg = 'not found on server';
 smsg = '404';
 }
 else {
 smsg = 'found';
-var hit = response[0],
+var hit = response.hits[0],
 msg = linksplit(hit.rp).join(''),
 tr = unix2iso(hit.ts),
 tu = unix2iso(t.lmod),

@@ -916,7 +947,7 @@ function up2k_init(have_crypto) {

 alert('y o u b r o k e i t\n\n(was that a folder? just files please)');
 };

-reader.onload = function (ev) {
+reader.onload = function (e) {
 var xhr = new XMLHttpRequest();
 xhr.upload.onprogress = function (xev) {
 var perc = xev.loaded / (cdr - car) * 100;

@@ -950,7 +981,7 @@ function up2k_init(have_crypto) {

 xhr.setRequestHeader('Content-Type', 'application/octet-stream');
 xhr.overrideMimeType('Content-Type', 'application/octet-stream');
 xhr.responseType = 'text';
-xhr.send(ev.target.result);
+xhr.send(e.target.result);
 };

 reader.readAsArrayBuffer(bobslice.call(t.fobj, car, cdr));

@@ -979,7 +1010,7 @@ function up2k_init(have_crypto) {

 /// config ui
 //

-function onresize(ev) {
+function onresize(e) {
 var bar = ebi('ops'),
 wpx = innerWidth,
 fpx = parseInt(getComputedStyle(bar)['font-size']),

@@ -994,17 +1025,17 @@ function up2k_init(have_crypto) {

 ebi('u2conf').setAttribute('class', wide ? 'has_btn' : '');
 }
 }
-window.onresize = onresize;
+window.addEventListener('resize', onresize);
 onresize();

-function desc_show(ev) {
+function desc_show(e) {
 var msg = this.getAttribute('alt');
 msg = msg.replace(/\$N/g, "<br />");
 var cdesc = ebi('u2cdesc');
 cdesc.innerHTML = msg;
 cdesc.setAttribute('class', 'show');
 }
-function desc_hide(ev) {
+function desc_hide(e) {
 ebi('u2cdesc').setAttribute('class', '');
 }
 var o = document.querySelectorAll('#u2conf *[alt]');
@@ -1033,7 +1064,7 @@ function up2k_init(have_crypto) {

 return;

 parallel_uploads = v;
-localStorage.setItem('nthread', v);
+swrite('nthread', v);
 obj.style.background = '#444';
 return;
 }

@@ -1061,12 +1092,31 @@ function up2k_init(have_crypto) {

 }

 function tgl_fsearch() {
-fsearch = !fsearch;
-bcfg_set('fsearch', fsearch);
-apply_fsearch_cfg();
+set_fsearch(!fsearch);
 }

-function apply_fsearch_cfg() {
+function set_fsearch(new_state) {
+var perms = document.body.getAttribute('perms');
+var read_only = false;
+
+if (!ebi('fsearch')) {
+new_state = false;
+}
+else if (perms && perms.indexOf('write') === -1) {
+new_state = true;
+read_only = true;
+}
+
+if (new_state !== undefined) {
+fsearch = new_state;
+bcfg_set('fsearch', fsearch);
+}
+
+try {
+document.querySelector('label[for="fsearch"]').style.opacity = read_only ? '0' : '1';
+}
+catch (ex) { }
+
 try {
 var fun = fsearch ? 'add' : 'remove';
 ebi('op_up2k').classList[fun]('srch');

@@ -1078,11 +1128,6 @@ function up2k_init(have_crypto) {

 catch (ex) { }
 }

-function set_fsearch() {
-if (!fsearch)
-tgl_fsearch();
-}
-
 function tgl_flag_en() {
 flag_en = !flag_en;
 bcfg_set('flag_en', flag_en);

@@ -1105,17 +1150,17 @@ function up2k_init(have_crypto) {

 }
 }

-function nop(ev) {
-ev.preventDefault();
+function nop(e) {
+ev(e);
 this.click();
 }

-ebi('nthread_add').onclick = function (ev) {
-ev.preventDefault();
+ebi('nthread_add').onclick = function (e) {
+ev(e);
 bumpthread(1);
 };
-ebi('nthread_sub').onclick = function (ev) {
-ev.preventDefault();
+ebi('nthread_sub').onclick = function (e) {
+ev(e);
 bumpthread(-1);
 };

@@ -1131,12 +1176,8 @@ function up2k_init(have_crypto) {

 for (var a = nodes.length - 1; a >= 0; a--)
 nodes[a].addEventListener('touchend', nop, false);

-var perms = document.body.getAttribute('perms');
-if (perms && perms.indexOf('write') === -1)
-set_fsearch();
+set_fsearch();

 bumpthread({ "target": 1 })

 return { "init_deps": init_deps, "set_fsearch": set_fsearch }
 }

@@ -62,7 +62,7 @@

 width: calc(100% - 2em);
 max-width: 100em;
 }
-#u2form.srch #u2tab {
+#op_up2k.srch #u2tab {
 max-width: none;
 }
 #u2tab td {

@@ -76,7 +76,7 @@

 #u2tab td:nth-child(3) {
 width: 40%;
 }
-#u2form.srch #u2tab td:nth-child(3) {
+#op_up2k.srch #u2tab td:nth-child(3) {
 font-family: sans-serif;
 width: auto;
 }

@@ -88,7 +88,7 @@

 width: 30em;
 }
 #u2conf.has_btn {
-width: 46em;
+width: 48em;
 }
 #u2conf * {
 text-align: center;

@@ -73,7 +73,8 @@

 <div id="u2btn_ct">
 <div id="u2btn">
 <span id="u2bm"></span><br />
-drop files here<br />
+drag/drop files<br />
+and folders here<br />
 (or click me)
 </div>
 </div>

@@ -23,6 +23,7 @@ function esc(txt) {

 }
 function vis_exh(msg, url, lineNo, columnNo, error) {
 window.onerror = undefined;
+window['vis_exh'] = null;
 var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
 esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];

@@ -43,6 +44,21 @@ function ebi(id) {

 return document.getElementById(id);
 }
+
+function ev(e) {
+e = e || window.event;
+if (!e)
+return;
+
+if (e.preventDefault)
+e.preventDefault()
+
+if (e.stopPropagation)
+e.stopPropagation();
+
+e.returnValue = false;
+return e;
+}
+
+
 // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
 if (!String.prototype.endsWith) {
@@ -75,46 +91,170 @@ function import_js(url, cb) {

 }


-function sortTable(table, col) {
-var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows
+var crctab = (function () {
+var c, tab = [];
+for (var n = 0; n < 256; n++) {
+c = n;
+for (var k = 0; k < 8; k++) {
+c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1));
+}
+tab[n] = c;
+}
+return tab;
+})();
+
+
+function crc32(str) {
+var crc = 0 ^ (-1);
+for (var i = 0; i < str.length; i++) {
+crc = (crc >>> 8) ^ crctab[(crc ^ str.charCodeAt(i)) & 0xFF];
+}
+return ((crc ^ (-1)) >>> 0).toString(16);
+};
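(aside, not part of the commit: the crctab/crc32 pair added above is the standard table-driven IEEE CRC-32, returned as lowercase hex with no zero-padding; assuming both definitions are loaded, a quick console sanity-check looks like this)

crc32("123456789");   // "cbf43926" -- the well-known CRC-32 check value
crc32("");            // "0"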
+
+
+function sortfiles(nodes) {
+var sopts = jread('fsort', [["lead", -1, ""], ["href", 1, ""]]);
+
+try {
+var is_srch = false;
+if (nodes[0]['rp']) {
+is_srch = true;
+for (var b = 0, bb = nodes.length; b < bb; b++)
+nodes[b].ext = nodes[b].rp.split('.').pop();
+for (var b = 0; b < sopts.length; b++)
+if (sopts[b][0] == 'href')
+sopts[b][0] = 'rp';
+}
+for (var a = sopts.length - 1; a >= 0; a--) {
+var name = sopts[a][0], rev = sopts[a][1], typ = sopts[a][2];
+if (!name)
+continue;
+
+if (name.indexOf('tags/') === 0) {
+name = name.slice(5);
+for (var b = 0, bb = nodes.length; b < bb; b++)
+nodes[b]._sv = nodes[b].tags[name];
+}
+else {
+for (var b = 0, bb = nodes.length; b < bb; b++) {
+var v = nodes[b][name];
+
+if ((v + '').indexOf('<a ') === 0)
+v = v.split('>')[1];
+else if (name == "href" && v)
+v = uricom_dec(v)[0]
+
+nodes[b]._sv = v;
+}
+}
+
+var onodes = nodes.map((x) => x);
+nodes.sort(function (n1, n2) {
+var v1 = n1._sv,
+v2 = n2._sv;
+
+if (v1 === undefined) {
+if (v2 === undefined) {
+return onodes.indexOf(n1) - onodes.indexOf(n2);
+}
+return -1 * rev;
+}
+if (v2 === undefined) return 1 * rev;
+
+var ret = rev * (typ == 'int' ? (v1 - v2) : (v1.localeCompare(v2)));
+if (ret === 0)
+ret = onodes.indexOf(n1) - onodes.indexOf(n2);
+
+return ret;
+});
+}
+for (var b = 0, bb = nodes.length; b < bb; b++) {
+delete nodes[b]._sv;
+if (is_srch)
+delete nodes[b].ext;
+}
+}
+catch (ex) {
+console.log("failed to apply sort config: " + ex);
+}
+return nodes;
+}
+
+
+function sortTable(table, col, cb) {
+var tb = table.tBodies[0],
 th = table.tHead.rows[0].cells,
 tr = Array.prototype.slice.call(tb.rows, 0),
-i, reverse = th[col].className == 'sort1' ? -1 : 1;
+i, reverse = th[col].className.indexOf('sort1') !== -1 ? -1 : 1;
 for (var a = 0, thl = th.length; a < thl; a++)
-th[a].className = '';
-th[col].className = 'sort' + reverse;
+th[a].className = th[a].className.replace(/ *sort-?1 */, " ");
+th[col].className += ' sort' + reverse;
 var stype = th[col].getAttribute('sort');
-tr = tr.sort(function (a, b) {
-if (!a.cells[col])
+try {
+var nrules = [], rules = jread("fsort", []);
+rules.unshift([th[col].getAttribute('name'), reverse, stype || '']);
+for (var a = 0; a < rules.length; a++) {
+var add = true;
+for (var b = 0; b < a; b++)
+if (rules[a][0] == rules[b][0])
+add = false;
+
+if (add)
+nrules.push(rules[a]);
+
+if (nrules.length >= 10)
+break;
+}
+jwrite("fsort", nrules);
+}
+catch (ex) {
+console.log("failed to persist sort rules, resetting: " + ex);
+jwrite("fsort", null);
+}
+var vl = [];
+for (var a = 0; a < tr.length; a++) {
+var cell = tr[a].cells[col];
+if (!cell) {
+vl.push([null, a]);
+continue;
+}
+var v = cell.getAttribute('sortv') || cell.textContent.trim();
+if (stype == 'int') {
+v = parseInt(v.replace(/[, ]/g, '')) || 0;
+}
+vl.push([v, a]);
+}
+vl.sort(function (a, b) {
+a = a[0];
+b = b[0];
+if (a === null)
 return -1;
-if (!b.cells[col])
+if (b === null)
 return 1;

-var v1 = a.cells[col].textContent.trim();
-var v2 = b.cells[col].textContent.trim();
 if (stype == 'int') {
-v1 = parseInt(v1.replace(/,/g, ''));
-v2 = parseInt(v2.replace(/,/g, ''));
-return reverse * (v1 - v2);
+return reverse * (a - b);
 }
-return reverse * (v1.localeCompare(v2));
+return reverse * (a.localeCompare(b));
 });
-for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]);
+for (i = 0; i < tr.length; ++i) tb.appendChild(tr[vl[i][1]]);
+if (cb) cb();
 }
-function makeSortable(table) {
+function makeSortable(table, cb) {
 var th = table.tHead, i;
 th && (th = th.rows[0]) && (th = th.cells);
 if (th) i = th.length;
 else return; // if no `<thead>` then do nothing
 while (--i >= 0) (function (i) {
-th[i].onclick = function () {
-sortTable(table, i);
+th[i].onclick = function (e) {
+ev(e);
+sortTable(table, i, cb);
 };
 }(i));
 }


 (function () {
 var ops = document.querySelectorAll('#ops>a');
 for (var a = 0; a < ops.length; a++) {

@@ -123,16 +263,13 @@ function makeSortable(table) {

 })();


-function opclick(ev) {
-if (ev) //ie
-ev.preventDefault();
+function opclick(e) {
+ev(e);

 var dest = this.getAttribute('data-dest');
 goto(dest);

-// writing a blank value makes ie8 segfault w
-if (window.localStorage)
-localStorage.setItem('opmode', dest || '.');
+swrite('opmode', dest || null);

 var input = document.querySelector('.opview.act input:not([type="hidden"])')
 if (input)

@@ -149,10 +286,6 @@ function goto(dest) {

 for (var a = obj.length - 1; a >= 0; a--)
 obj[a].classList.remove('act');

-var others = ['path', 'files', 'widget'];
-for (var a = 0; a < others.length; a++)
-ebi(others[a]).classList.remove('hidden');
-
 if (dest) {
 var ui = ebi('op_' + dest);
 ui.classList.add('act');

@@ -162,16 +295,20 @@ function goto(dest) {

 if (fn)
 fn();
 }
+
+if (window['treectl'])
+treectl.onscroll();
 }


 (function () {
 goto();
-if (window.localStorage) {
-var op = localStorage.getItem('opmode');
-if (op !== null && op !== '.')
+var op = sread('opmode');
+if (op !== null && op !== '.')
+try {
 goto(op);
 }
+catch (ex) { }
 })();

@@ -202,6 +339,31 @@ function linksplit(rp) {

 }

+
+function uricom_enc(txt, do_fb_enc) {
+try {
+return encodeURIComponent(txt);
+}
+catch (ex) {
+console.log("uce-err [" + txt + "]");
+if (do_fb_enc)
+return esc(txt);
+
+return txt;
+}
+}
+
+
+function uricom_dec(txt) {
+try {
+return [decodeURIComponent(txt), true];
+}
+catch (ex) {
+console.log("ucd-err [" + txt + "]");
+return [txt, false];
+}
+}
+
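(aside, not part of the commit: uricom_dec() returns a [value, ok] pair instead of throwing on malformed percent-encoding, so callers can always take index 0; illustrative usage assuming the definitions above)

uricom_dec("%E2%9C%94");   // ["✔", true] -- decoded normally
uricom_dec("%E0%A4");      // ["%E0%A4", false] -- malformed input returned unchanged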
 function get_evpath() {
 var ret = document.location.pathname;

@@ -216,7 +378,7 @@ function get_evpath() {


 function get_vpath() {
-return decodeURIComponent(get_evpath());
+return uricom_dec(get_evpath())[0];
 }

@@ -225,6 +387,13 @@ function unix2iso(ts) {

 }

+
+function s2ms(s) {
+s = Math.floor(s);
+var m = Math.floor(s / 60);
+return m + ":" + ("0" + (s - m * 60)).slice(-2);
+}
+
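(aside, not part of the commit: s2ms() formats a duration in seconds as minutes:seconds with zero-padded seconds; given the definition above)

s2ms(754);   // "12:34"
s2ms(7.9);   // "0:07"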
 function has(haystack, needle) {
 for (var a = 0; a < haystack.length; a++)
 if (haystack[a] == needle)
@@ -232,3 +401,93 @@ function has(haystack, needle) {

 return false;
 }
+
+
+function sread(key) {
+if (window.localStorage)
+return localStorage.getItem(key);
+
+return null;
+}
+
+function swrite(key, val) {
+if (window.localStorage) {
+if (val === undefined || val === null)
+localStorage.removeItem(key);
+else
+localStorage.setItem(key, val);
+}
+}
+
+function jread(key, fb) {
+var str = sread(key);
+if (!str)
+return fb;
+
+return JSON.parse(str);
+}
+
+function jwrite(key, val) {
+if (!val)
+swrite(key);
+else
+swrite(key, JSON.stringify(val));
+}
+
+function icfg_get(name, defval) {
+var o = ebi(name);
+
+var val = parseInt(sread(name));
+if (isNaN(val))
+return parseInt(o ? o.value : defval);
+
+if (o)
+o.value = val;
+
+return val;
+}
+
+function bcfg_get(name, defval) {
+var o = ebi(name);
+if (!o)
+return defval;
+
+var val = sread(name);
+if (val === null)
+val = defval;
+else
+val = (val == '1');
+
+bcfg_upd_ui(name, val);
+return val;
+}
+
+function bcfg_set(name, val) {
+swrite(name, val ? '1' : '0');
+bcfg_upd_ui(name, val);
+return val;
+}
+
+function bcfg_upd_ui(name, val) {
+var o = ebi(name);
+if (!o)
+return;
+
+if (o.getAttribute('type') == 'checkbox')
+o.checked = val;
+else if (o) {
+var fun = val ? 'add' : 'remove';
+o.classList[fun]('on');
+}
+}
+
+
+function hist_push(url) {
+console.log("h-push " + url);
+history.pushState(url, url, url);
+}
+
+function hist_replace(url) {
+console.log("h-repl " + url);
+history.replaceState(url, url, url);
+}
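(aside, not part of the commit: sread/swrite guard every localStorage access, jread/jwrite add JSON on top, and icfg_get / bcfg_get / bcfg_set / bcfg_upd_ui layer typed settings plus checkbox/button sync over that; an illustrative round-trip using keys that appear elsewhere in this diff)

jwrite('fsort', [['href', 1, '']]);     // stored as a JSON string
var rules = jread('fsort', []);         // parsed back, [] when unset
var on = bcfg_get('multitask', true);   // '1'/'0' in storage, boolean out, checkbox synced
bcfg_set('multitask', !on);             // persists the flip and updates the UI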

docs/music-analysis.sh (new file, 242 lines)
@@ -0,0 +1,242 @@
+#!/bin/bash
+echo please dont actually run this as a scriopt
+exit 1
+
+
+# dependency-heavy, not particularly good fit
+pacman -S llvm10
+python3 -m pip install --user librosa
+git clone https://github.com/librosa/librosa.git
+
+
+# correct bpm for tracks with bad tags
+br='
+/Trip Trip Trip\(Hardcore Edit\).mp3/ {v=176}
+/World!!.BIG_SOS/ {v=175}
+/\/08\..*\(BIG_SOS Bootleg\)\.mp3/ {v=175}
+/もってけ!セーラ服.Asterisk DnB/ {v=175}
+/Rondo\(Asterisk DnB Re.mp3/ {v=175}
+/Ray Nautica 175 Edit/ {v=175;x="thunk"}
+/TOKIMEKI Language.Jauz/ {v=174}
+/YUPPUN Hardcore Remix\).mp3/ {v=174;x="keeps drifting"}
+/(èâAâï.î╧ûδ|バーチャリアル.狐耶)J-Core Remix\).mp3/ {v=172;x="hard"}
+/lucky train..Freezer/ {v=170}
+/Alf zero Bootleg ReMix/ {v=170}
+/Prisoner of Love.Kacky/ {v=170}
+/火炎 .Qota/ {v=170}
+/\(hu-zin Bootleg\)\.mp3/ {v=170}
+/15. STRAIGHT BET\(Milynn Bootleg\)\.mp3/ {v=170}
+/\/13.*\(Milynn Bootleg\)\.mp3/ {v=167;x="way hard"}
+/COLOR PLANET .10SAI . nijikon Remix\)\.mp3/ {v=165}
+/11\. (朝はご飯派|Æ⌐é═é▓ö╤öh)\.mp3/ {v=162}
+/09\. Where.s the core/ {v=160}
+/PLANET\(Koushif Jersey Club Bootleg\)remaster.mp3/ {v=160;x="starts ez turns bs"}
+/kened Soul - Madeon x Angel Beats!.mp3/ {v=160}
+/Dear Moments\(Mother Harlot Bootleg\)\.mp3/ {v=150}
+/POWER.Ringos UKG/ {v=140}
+/ブルー・フィールド\(Ringos UKG Remix\).mp3/ {v=135}
+/プラチナジェット.Ringo Remix..mp3/ {v=131.2}
+/Mirrorball Love \(TKM Bootleg Mix\).mp3/ {v=130}
+/Photon Melodies \(TKM Bootleg Mix\).mp3/ {v=128}
+/Trap of Love \(TKM Bootleg Mix\).mp3/ {v=128}
+/One Step \(TKM Bootleg Mix\)\.mp3/ {v=126}
+/04 (トリカムイ岩|âgâèâJâÇâCèΓ).mp3/ {v=125}
+/Get your Wish \(NAWN REMIX\)\.mp3/ {v=95}
+/Flicker .Nitro Fun/ {v=92}
+/\/14\..*suicat Remix/ {v=85.5;x="tricky"}
+/Yanagi Nagi - Harumodoki \(EO Remix\)\.mp3/ {v=150}
+/Azure - Nicology\.mp3/ {v=128;x="off by 5 how"}
+'
+
+
+# afun host, collects/grades the results
+runfun() { cores=8; touch run; rm -f /dev/shm/mres.*; t00=$(date +%s); tbc() { bc | sed -r 's/(\.[0-9]{2}).*/\1/'; }; for ((core=0; core<$cores; core++)); do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db 'select dur.w, dur.v, bpm.v from mt bpm join mt dur on bpm.w = dur.w where bpm.k = ".bpm" and dur.k = ".dur" order by dur.w' | uniq -w16 | while IFS=\| read w dur bpm; do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db "select rd, fn from up where substr(w,1,16) = '$w'" | sed -r "s/^/$bpm /"; done | grep mir/cr | tr \| / | awk '{v=$1;sub(/[^ ]+ /,"")} '"$br"' {printf "%s %s\n",v,$0}' | while read bpm fn; do [ -e run ] || break; n=$((n+1)); ncore=$((n%cores)); [ $ncore -eq $core ] || continue; t0=$(date +%s.%N); (afun || exit 1; t=$(date +%s.%N); td=$(echo "scale=3; $t - $t0" | tbc); bd=$(echo "scale=3; $bpm / $py" | tbc); printf '%4s sec, %4s orig, %6s py, %4s div, %s\n' $td $bpm $py $bd "$fn") | tee -a /dev/shm/mres.$ncore; rv=${PIPESTATUS[0]}; [ $rv -eq 0 ] || { echo "FAULT($rv): $fn"; }; done & done; wait 2>/dev/null; cat /dev/shm/mres.* | awk 'function prt(c) {printf "\033[3%sm%s\033[0m\n",c,$0} $8!="div,"{next} $5!~/^[0-9\.]+/{next} {meta=$3;det=$5;div=meta/det} div<0.7{det/=2} div>1.3{det*=2} {idet=sprintf("%.0f",det)} {idiff=idet-meta} meta>idet{idiff=meta-idet} idiff==0{n0++;prt(6);next} idiff==1{n1++;prt(3);next} idiff>10{nx++;prt(1);next} {n10++;prt(5)} END {printf "ok: %d 1off: %2s (%3s) 10off: %2s (%3s) fail: %2s\n",n0,n1,n0+n1,n10,n0+n1+n10,nx}'; te=$(date +%s); echo $((te-t00)) sec spent; }
+
+
+# ok: 8 1off: 62 ( 70) 10off: 86 (156) fail: 25 # 105 sec, librosa @ 8c archvm on 3700x w10
+# ok: 4 1off: 59 ( 63) 10off: 65 (128) fail: 53 # using original tags (bad)
+afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -t 60 /dev/shm/$core.wav || return 1; py="$(/home/ed/src/librosa/examples/beat_tracker.py /dev/shm/$core.wav x 2>&1 | awk 'BEGIN {v=1} /^Estimated tempo: /{v=$3} END {print v}')"; } runfun
+
+
+# ok: 119 1off: 5 (124) 10off: 8 (132) fail: 49 # 51 sec, vamp-example-fixedtempo
+# ok: 109 1off: 4 (113) 10off: 9 (122) fail: 59 # bad-tags
+afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 22050 -f f32le /dev/shm/$core.pcm || return 1; py="$(python3 -c 'import vamp; import numpy as np; f = open("/dev/shm/'$core'.pcm", "rb"); d = np.fromfile(f, dtype=np.float32); c = vamp.collect(d, 22050, "vamp-example-plugins:fixedtempo", parameters={"maxdflen":40}); print(c["list"][0]["label"].split(" ")[0])')"; }; runfun
+
+
+# ok: 102 1off: 61 (163) 10off: 12 (175) fail: 6 # 61 sec, vamp-qm-tempotracker
+# ok: 80 1off: 48 (128) 10off: 11 (139) fail: 42 # bad-tags
+afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 22050 -f f32le /dev/shm/$core.pcm || return 1; py="$(python3 -c 'import vamp; import numpy as np; f = open("/dev/shm/'$core'.pcm", "rb"); d = np.fromfile(f, dtype=np.float32); c = vamp.collect(d, 22050, "qm-vamp-plugins:qm-tempotracker", parameters={"inputtempo":150}); v = [float(x["label"].split(" ")[0]) for x in c["list"] if x["label"]]; v = list(sorted(v))[len(v)//4:-len(v)//4]; print(round(sum(v) / len(v), 1))')"; }; runfun
+
+
+# ok: 133 1off: 32 (165) 10off: 12 (177) fail: 3 # 51 sec, vamp-beatroot
+# ok: 101 1off: 22 (123) 10off: 16 (139) fail: 39 # bad-tags
+# note: some tracks fully fail to analyze (unlike the others which always provide a guess)
+afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 22050 -f f32le /dev/shm/$core.pcm || return 1; py="$(python3 -c 'import vamp; import numpy as np; f = open("/dev/shm/'$core'.pcm", "rb"); d = np.fromfile(f, dtype=np.float32); c = vamp.collect(d, 22050, "beatroot-vamp:beatroot"); cl=c["list"]; print(round(60*((len(cl)-1)/(float(cl[-1]["timestamp"]-cl[1]["timestamp"]))), 2))')"; }; runfun
+
+
+# ok: 124 1off: 9 (133) 10off: 40 (173) fail: 8 # 231 sec, essentia/full
+# ok: 109 1off: 8 (117) 10off: 22 (139) fail: 42 # bad-tags
+afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 /dev/shm/$core.wav || return 1; py="$(python3 -c 'import essentia; import essentia.standard as es; fe, fef = es.MusicExtractor(lowlevelStats=["mean", "stdev"], rhythmStats=["mean", "stdev"], tonalStats=["mean", "stdev"])("/dev/shm/'$core'.wav"); print("{:.2f}".format(fe["rhythm.bpm"]))')"; }; runfun
+
+
+# ok: 113 1off: 18 (131) 10off: 46 (177) fail: 4 # 134 sec, essentia/re2013
+# ok: 101 1off: 15 (116) 10off: 26 (142) fail: 39 # bad-tags
+afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 /dev/shm/$core.wav || return 1; py="$(python3 -c 'from essentia.standard import *; a=MonoLoader(filename="/dev/shm/'$core'.wav")(); bpm,beats,confidence,_,intervals=RhythmExtractor2013(method="multifeature")(a); print("{:.2f}".format(bpm))')"; }; runfun
+
+
+########################################################################
+##
+## key detectyion
+##
+########################################################################
+
+
+# console scriptlet reusing keytabs from browser.js
+var m=''; for (var a=0; a<24; a++) m += 's/\\|(' + maps["traktor_sharps"][a].trim() + "|" + maps["rekobo_classic"][a].trim() + "|" + maps["traktor_musical"][a].trim() + "|" + maps["traktor_open"][a].trim() + ')$/|' + maps["rekobo_alnum"][a].trim() + '/;'; console.log(m);
+
+
+# translate to camelot
+re='s/\|(B|B|B|6d)$/|1B/;s/\|(F#|F#|Gb|7d)$/|2B/;s/\|(C#|Db|Db|8d)$/|3B/;s/\|(G#|Ab|Ab|9d)$/|4B/;s/\|(D#|Eb|Eb|10d)$/|5B/;s/\|(A#|Bb|Bb|11d)$/|6B/;s/\|(F|F|F|12d)$/|7B/;s/\|(C|C|C|1d)$/|8B/;s/\|(G|G|G|2d)$/|9B/;s/\|(D|D|D|3d)$/|10B/;s/\|(A|A|A|4d)$/|11B/;s/\|(E|E|E|5d)$/|12B/;s/\|(G#m|Abm|Abm|6m)$/|1A/;s/\|(D#m|Ebm|Ebm|7m)$/|2A/;s/\|(A#m|Bbm|Bbm|8m)$/|3A/;s/\|(Fm|Fm|Fm|9m)$/|4A/;s/\|(Cm|Cm|Cm|10m)$/|5A/;s/\|(Gm|Gm|Gm|11m)$/|6A/;s/\|(Dm|Dm|Dm|12m)$/|7A/;s/\|(Am|Am|Am|1m)$/|8A/;s/\|(Em|Em|Em|2m)$/|9A/;s/\|(Bm|Bm|Bm|3m)$/|10A/;s/\|(F#m|F#m|Gbm|4m)$/|11A/;s/\|(C#m|Dbm|Dbm|5m)$/|12A/;'
+
+
+# runner/wrapper
+runfun() { cores=8; touch run; tbc() { bc | sed -r 's/(\.[0-9]{2}).*/\1/'; }; for ((core=0; core<$cores; core++)); do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db 'select dur.w, dur.v, key.v from mt key join mt dur on key.w = dur.w where key.k = "key" and dur.k = ".dur" order by dur.w' | uniq -w16 | grep -vE '(Off-Key|None)$' | sed -r "s/ //g;$re" | uniq -w16 | while IFS=\| read w dur bpm; do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db "select rd, fn from up where substr(w,1,16) = '$w'" | sed -r "s/^/$bpm /"; done| grep mir/cr | tr \| / | while read key fn; do [ -e run ] || break; n=$((n+1)); ncore=$((n%cores)); [ $ncore -eq $core ] || continue; t0=$(date +%s.%N); (afun || exit 1; t=$(date +%s.%N); td=$(echo "scale=3; $t - $t0" | tbc); [ "$key" = "$py" ] && c=2 || c=5; printf '%4s sec, %4s orig, \033[3%dm%4s py,\033[0m %s\n' $td "$key" $c "$py" "$fn") || break; done & done; time wait 2>/dev/null; }
+
+
+# ok: 26 1off: 10 2off: 1 fail: 3 # 15 sec, keyfinder
+afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 -t 60 /dev/shm/$core.wav || break; py="$(python3 -c 'import sys; import keyfinder; print(keyfinder.key(sys.argv[1]).camelot())' "/dev/shm/$core.wav")"; }; runfun
+
+
+# https://github.com/MTG/essentia/raw/master/src/examples/tutorial/example_key_by_steps_streaming.py
+# https://essentia.upf.edu/reference/std_Key.html # edma edmm braw bgate
+sed -ri 's/^(key = Key\().*/\1profileType="bgate")/' example_key_by_steps_streaming.py
+afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 -t 60 /dev/shm/$core.wav || break; py="$(python3 example_key_by_steps_streaming.py /dev/shm/$core.{wav,yml} 2>/dev/null | sed -r "s/ major//;s/ minor/m/;s/^/|/;$re;s/.//")"; }; runfun
+
+
+########################################################################
+##
+## misc
+##
+########################################################################
+
+
+python3 -m pip install --user vamp
+
+import librosa
+d, r = librosa.load('/dev/shm/0.wav')
+d.dtype
+# dtype('float32')
+d.shape
+# (1323000,)
+d
+# array([-1.9614939e-08, 1.8037968e-08, -1.4106059e-08, ...,
+#  1.2024145e-01, 2.7462116e-01, 1.6202132e-01], dtype=float32)
+
+
+import vamp
+c = vamp.collect(d, r, "vamp-example-plugins:fixedtempo")
+c
+# {'list': [{'timestamp': 0.005804988, 'duration': 9.999092971, 'label': '110.0 bpm', 'values': array([109.98116], dtype=float32)}]}
+
+
+ffmpeg -ss 48 -i /mnt/Users/ed/Music/mir/cr-a/'I Beg You(ths Bootleg).wav' -ac 1 -ar 22050 -f f32le -t 60 /dev/shm/f32.pcm
+
+import numpy as np
+f = open('/dev/shm/f32.pcm', 'rb')
+d = np.fromfile(f, dtype=np.float32)
+d
+array([-0.17803933, -0.27206388, -0.41586545, ..., -0.04940119,
+ -0.0267825 , -0.03564296], dtype=float32)
+
+d = np.reshape(d, [1, -1])
+d
+array([[-0.17803933, -0.27206388, -0.41586545, ..., -0.04940119,
+ -0.0267825 , -0.03564296]], dtype=float32)
+
+
+import vampyhost
+print("\n".join(vampyhost.list_plugins()))
+
+mvamp:marsyas_bextract_centroid
+mvamp:marsyas_bextract_lpcc
+mvamp:marsyas_bextract_lsp
+mvamp:marsyas_bextract_mfcc
+mvamp:marsyas_bextract_rolloff
+mvamp:marsyas_bextract_scf
+mvamp:marsyas_bextract_sfm
+mvamp:marsyas_bextract_zero_crossings
+mvamp:marsyas_ibt
+mvamp:zerocrossing
+qm-vamp-plugins:qm-adaptivespectrogram
+qm-vamp-plugins:qm-barbeattracker
+qm-vamp-plugins:qm-chromagram
+qm-vamp-plugins:qm-constantq
+qm-vamp-plugins:qm-dwt
+qm-vamp-plugins:qm-keydetector
+qm-vamp-plugins:qm-mfcc
+qm-vamp-plugins:qm-onsetdetector
+qm-vamp-plugins:qm-segmenter
+qm-vamp-plugins:qm-similarity
+qm-vamp-plugins:qm-tempotracker
+qm-vamp-plugins:qm-tonalchange
+qm-vamp-plugins:qm-transcription
+vamp-aubio:aubiomelenergy
+vamp-aubio:aubiomfcc
+vamp-aubio:aubionotes
+vamp-aubio:aubioonset
+vamp-aubio:aubiopitch
+vamp-aubio:aubiosilence
+vamp-aubio:aubiospecdesc
+vamp-aubio:aubiotempo
+vamp-example-plugins:amplitudefollower
+vamp-example-plugins:fixedtempo
+vamp-example-plugins:percussiononsets
+vamp-example-plugins:powerspectrum
+vamp-example-plugins:spectralcentroid
+vamp-example-plugins:zerocrossing
+vamp-rubberband:rubberband
+
+
+plug = vampyhost.load_plugin("vamp-example-plugins:fixedtempo", 22050, 0)
+plug.info
+{'apiVersion': 2, 'pluginVersion': 1, 'identifier': 'fixedtempo', 'name': 'Simple Fixed Tempo Estimator', 'description': 'Study a short section of audio and estimate its tempo, assuming the tempo is constant', 'maker': 'Vamp SDK Example Plugins', 'copyright': 'Code copyright 2008 Queen Mary, University of London. Freely redistributable (BSD license)'}
+plug = vampyhost.load_plugin("qm-vamp-plugins:qm-tempotracker", 22050, 0)
+from pprint import pprint; pprint(plug.parameters)
+
+
+for c in plug.parameters: print("{} \033[36m{} [\033[33m{}\033[36m] = {}\033[0m".format(c["identifier"], c["name"], "\033[36m, \033[33m".join(c["valueNames"]), c["valueNames"][int(c["defaultValue"])])) if "valueNames" in c else print("{} \033[36m{} [\033[33m{}..{}\033[36m] = {}\033[0m".format(c["identifier"], c["name"], c["minValue"], c["maxValue"], c["defaultValue"]))
+
+
+beatroot-vamp:beatroot
+cl=c["list"]; 60*((len(cl)-1)/(float(cl[-1]["timestamp"]-cl[1]["timestamp"])))
+
+
+ffmpeg -ss 48 -i /mnt/Users/ed/Music/mir/cr-a/'I Beg You(ths Bootleg).wav' -ac 1 -ar 22050 -f f32le -t 60 /dev/shm/f32.pcm
+# 128 bpm, key 5A Cm
+
+import vamp
+import numpy as np
+f = open('/dev/shm/f32.pcm', 'rb')
+d = np.fromfile(f, dtype=np.float32)
+c = vamp.collect(d, 22050, "vamp-example-plugins:fixedtempo", parameters={"maxdflen":40})
+c["list"][0]["label"]
+# 127.6 bpm
+
+c = vamp.collect(d, 22050, "qm-vamp-plugins:qm-tempotracker", parameters={"inputtempo":150})
+print("\n".join([v["label"] for v in c["list"] if v["label"]]))
+v = [float(x["label"].split(' ')[0]) for x in c["list"] if x["label"]]
+v = list(sorted(v))[len(v)//4:-len(v)//4]
+v = sum(v) / len(v)
+# 128.1 bpm

@@ -11,6 +11,13 @@ gzip -d < .hist/up2k.snap | jq -r '.[].tnam' | while IFS= read -r f; do rm -f --

 gzip -d < .hist/up2k.snap | jq -r '.[].name' | while IFS= read -r f; do wc -c -- "$f" | grep -qiE '^[^0-9a-z]*0' && rm -f -- "$f"; done

+
+##
+## detect partial uploads based on file contents
+## (in case of context loss or old copyparties)
+
+echo; find -type f | while IFS= read -r x; do printf '\033[A\033[36m%s\033[K\033[0m\n' "$x"; tail -c$((1024*1024)) <"$x" | xxd -a | awk 'NR==1&&/^[0: ]+.{16}$/{next} NR==2&&/^\*$/{next} NR==3&&/^[0f]+: [0 ]+65 +.{16}$/{next} {e=1} END {exit e}' || continue; printf '\033[A\033[31msus:\033[33m %s \033[0m\n\n' "$x"; done
+
+
 ##
 ## create a test payload

@@ -60,6 +67,36 @@ wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:392

 shab64() { sp=$1; f="$2"; v=0; sz=$(stat -c%s "$f"); while true; do w=$((v+sp*1024*1024)); printf $(tail -c +$((v+1)) "$f" | head -c $((w-v)) | sha512sum | cut -c-64 | sed -r 's/ .*//;s/(..)/\\x\1/g') | base64 -w0 | cut -c-43 | tr '+/' '-_'; v=$w; [ $v -lt $sz ] || break; done; }

+
+##
+## poll url for performance issues
+
+command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (time wget http://127.0.0.1:3923/?ls -qO- | jq -C '.files[]|{sz:.sz,ta:.tags.artist,tb:.tags.".bpm"}|del(.[]|select(.==null))' | awk -F\" '/"/{t[$2]++} END {for (k in t){v=t[k];p=sprintf("%" (v+1) "s",v);gsub(/ /,"#",p);printf "\033[36m%s\033[33m%s ",k,p}}') 2>&1 | awk -v ts=$t 'NR==1{t1=$0} NR==2{sub(/.*0m/,"");sub(/s$/,"");t2=$0;c=2; if(t2>0.3){c=3} if(t2>0.8){c=1} } END{sub(/[0-9]{6}$/,"",ts);printf "%s \033[3%dm%s %s\033[0m\n",ts,c,t2,t1}'; sleep 0.1 || break; done
+
+
+##
+## sqlite3 stuff
+
+# find dupe metadata keys
+sqlite3 up2k.db 'select mt1.w, mt1.k, mt1.v, mt2.v from mt mt1 inner join mt mt2 on mt1.w = mt2.w where mt1.k = mt2.k and mt1.rowid != mt2.rowid'
+
+# partial reindex by deleting all tags for a list of files
+time sqlite3 up2k.db 'select mt1.w from mt mt1 inner join mt mt2 on mt1.w = mt2.w where mt1.k = +mt2.k and mt1.rowid != mt2.rowid' > warks
+cat warks | while IFS= read -r x; do sqlite3 up2k.db "delete from mt where w = '$x'"; done
+
+# dump all dbs
+find -iname up2k.db | while IFS= read -r x; do sqlite3 "$x" 'select substr(w,1,12), rd, fn from up' | sed -r 's/\|/ \| /g' | while IFS= read -r y; do printf '%s | %s\n' "$x" "$y"; done; done
+
+
+##
+## media
+
+# split track into test files
+e=6; s=10; d=~/dev/copyparty/srv/aus; n=1; p=0; e=$((e*60)); rm -rf $d; mkdir $d; while true; do ffmpeg -hide_banner -ss $p -i 'nervous_testpilot - office.mp3' -c copy -t $s $d/$(printf %04d $n).mp3; n=$((n+1)); p=$((p+s)); [ $p -gt $e ] && break; done
+
+-v srv/aus:aus:r:ce2dsa:ce2ts:cmtp=fgsfds=bin/mtag/sleep.py
+sqlite3 .hist/up2k.db 'select * from mt where k="fgsfds" or k="t:mtp"' | tee /dev/stderr | wc -l
+
+
 ##
 ## vscode

@@ -89,6 +126,18 @@ for d in /usr /var; do find $d -type f -size +30M 2>/dev/null; done | while IFS=

 brew install python@2
 pip install virtualenv
+
+# readme toc
+cat README.md | awk '!/^#/{next} {lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab)} {printf "%" ((lv-1)*4+1) "s [%s](#%s)\n", "*",$0,bab}'
+
+# fix firefox phantom breakpoints,
+# suggestions from bugtracker, doesnt work (debugger is not attachable)
+devtools settings >> advanced >> enable browser chrome debugging + enable remote debugging
+burger > developer >> browser toolbox (ctrl-alt-shift-i)
+iframe btn topright >> chrome://devtools/content/debugger/index.html
+dbg.asyncStore.pendingBreakpoints = {}
+
+# fix firefox phantom breakpoints
+about:config >> devtools.debugger.prefs-schema-version = -1
+
 ##
 ## http 206

@@ -20,6 +20,7 @@ set -e

 # -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py

+command -v gnutar && tar() { gnutar "$@"; }
 command -v gtar && tar() { gtar "$@"; }
 command -v gsed && sed() { gsed "$@"; }
 td="$(mktemp -d)"

@@ -29,11 +30,11 @@ pwd


 dl_text() {
-command -v curl && exec curl "$@"
+command -v curl >/dev/null && exec curl "$@"
 exec wget -O- "$@"
 }
 dl_files() {
-command -v curl && exec curl -L --remote-name-all "$@"
+command -v curl >/dev/null && exec curl -L --remote-name-all "$@"
 exec wget "$@"
 }
 export -f dl_files

@@ -28,6 +28,13 @@ gtar=$(command -v gtar || command -v gnutar) || true

 unexpand() { gunexpand "$@"; }
 command -v grealpath >/dev/null &&
 realpath() { grealpath "$@"; }
+
+[ -e /opt/local/bin/bzip2 ] &&
+bzip2() { /opt/local/bin/bzip2 "$@"; }
+}
+pybin=$(command -v python3 || command -v python) || {
+echo need python
+exit 1
 }

 [ -e copyparty/__main__.py ] || cd ..

@@ -38,11 +45,15 @@ gtar=$(command -v gtar || command -v gnutar) || true

 exit 1
 }

+do_sh=1
+do_py=1
 while [ ! -z "$1" ]; do
 [ "$1" = clean ] && clean=1 && shift && continue
 [ "$1" = re ] && repack=1 && shift && continue
 [ "$1" = no-ogv ] && no_ogv=1 && shift && continue
 [ "$1" = no-cm ] && no_cm=1 && shift && continue
+[ "$1" = no-sh ] && do_sh= && shift && continue
+[ "$1" = no-py ] && do_py= && shift && continue
 break
 done

@@ -122,7 +133,7 @@ git describe --tags >/dev/null 2>/dev/null && {

 exit 1
 }

-dt="$(git log -1 --format=%cd --date=format:'%Y,%m,%d' | sed -E 's/,0?/, /g')"
+dt="$(git log -1 --format=%cd --date=short | sed -E 's/-0?/, /g')"
 printf 'git %3s: \033[36m%s\033[0m\n' ver "$ver" dt "$dt"
 sed -ri '
 s/^(VERSION =)(.*)/#\1\2\n\1 ('"$t_ver"')/;

@@ -150,7 +161,7 @@ find .. -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete

 find .. -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done

 echo use smol web deps
-rm -f copyparty/web/deps/*.full.*
+rm -f copyparty/web/deps/*.full.* copyparty/web/{Makefile,splash.js}

 # it's fine dw
 grep -lE '\.full\.(js|css)' copyparty/web/* |

@@ -169,10 +180,11 @@ done

 sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f"
 }

+[ $repack ] ||
 find | grep -E '\.py$' |
 grep -vE '__version__' |
 tr '\n' '\0' |
-xargs -0 python ../scripts/uncomment.py
+xargs -0 $pybin ../scripts/uncomment.py

 f=dep-j2/jinja2/constants.py
 awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t

@@ -180,7 +192,7 @@ tmv "$f"

 # up2k goes from 28k to 22k laff
 echo entabbening
-find | grep -E '\.(js|css|html|py)$' | while IFS= read -r f; do
+find | grep -E '\.(js|css|html)$' | while IFS= read -r f; do
 unexpand -t 4 --first-only <"$f" >t
 tmv "$f"
 done

@@ -194,25 +206,36 @@ tar -cf tar "${args[@]}" --numeric-owner copyparty dep-j2

 echo compressing tar
 # detect best level; bzip2 -7 is usually better than -9
-for n in {2..9}; do cp tar t.$n; bzip2 -$n t.$n & done; wait; mv -v $(ls -1S t.*.bz2 | tail -n 1) tar.bz2
-for n in {2..9}; do cp tar t.$n; xz -ze$n t.$n & done; wait; mv -v $(ls -1S t.*.xz | tail -n 1) tar.xz
-rm t.*
+[ $do_py ] && { for n in {2..9}; do cp tar t.$n; bzip2 -$n t.$n & done; wait; mv -v $(ls -1S t.*.bz2 | tail -n 1) tar.bz2; }
+[ $do_sh ] && { for n in {2..9}; do cp tar t.$n; xz -ze$n t.$n & done; wait; mv -v $(ls -1S t.*.xz | tail -n 1) tar.xz; }
+rm t.* || true
+exts=()
+
+
+[ $do_sh ] && {
+exts+=(sh)
 echo creating unix sfx
 (
 sed "s/PACK_TS/$ts/; s/PACK_HTS/$hts/; s/CPP_VER/$ver/" <../scripts/sfx.sh |
 grep -E '^sfx_eof$' -B 9001;
 cat tar.xz
 ) >$sfx_out.sh
+}
+
+
+[ $do_py ] && {
+exts+=(py)
 echo creating generic sfx
-python ../scripts/sfx.py --sfx-make tar.bz2 $ver $ts
+$pybin ../scripts/sfx.py --sfx-make tar.bz2 $ver $ts
 mv sfx.out $sfx_out.py
 chmod 755 $sfx_out.*
+}
+
 printf "done:\n"
-printf " %s\n" "$(realpath $sfx_out)."{sh,py}
-# rm -rf *
+for ext in ${exts[@]}; do
+printf " %s\n" "$(realpath $sfx_out)."$ext
+done

-# tar -tvf ../sfx/tar | sed -r 's/(.* ....-..-.. ..:.. )(.*)/\2 `` \1/' | sort | sed -r 's/(.*) `` (.*)/\2 \1/'| less
-# for n in {1..9}; do tar -tf tar | grep -vE '/$' | sed -r 's/(.*)\.(.*)/\2.\1/' | sort | sed -r 's/([^\.]+)\.(.*)/\2.\1/' | tar -cT- | bzip2 -c$n | wc -c; done
+# apk add bash python3 tar xz bzip2
+# while true; do ./make-sfx.sh; for f in ..//dist/copyparty-sfx.{sh,py}; do mv $f $f.$(wc -c <$f | awk '{print$1}'); done; done

@@ -1,9 +1,8 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# coding: utf-8
|
# coding: latin-1
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import os, sys, time, shutil, signal, tarfile, hashlib, platform, tempfile
|
import os, sys, time, shutil, runpy, tarfile, hashlib, platform, tempfile, traceback
|
||||||
import subprocess as sp
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
run me with any version of python, i will unpack and run copyparty
|
run me with any version of python, i will unpack and run copyparty
|
||||||
@@ -344,20 +343,24 @@ def get_payload():
            break
 
 
-def confirm():
+def confirm(rv):
     msg()
+    msg(traceback.format_exc())
     msg("*** hit enter to exit ***")
     try:
        raw_input() if PY2 else input()
     except:
        pass
 
+    sys.exit(rv)
+
 
 def run(tmp, j2ver):
     global cpp
 
     msg("jinja2:", j2ver or "bundled")
     msg("sfxdir:", tmp)
+    msg()
 
     # "systemd-tmpfiles-clean.timer"?? HOW do you even come up with this shit
     try:
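The hunk above turns `confirm()` into `confirm(rv)`: it now prints the active traceback, waits for an enter keypress (so a console window opened by double-click does not vanish before the error can be read), and exits with the given code. A simplified sketch of that pause-before-exit pattern, with illustrative names only:

```python
# Simplified sketch of the pause-before-exit pattern; names are illustrative.
import sys
import traceback

def pause_and_exit(code):
    # print the traceback of whatever exception is being handled right now
    print(traceback.format_exc())
    print("*** hit enter to exit ***")
    try:
        input()
    except EOFError:
        pass
    sys.exit(code)

if __name__ == "__main__":
    try:
        raise RuntimeError("demo failure")
    except Exception:
        pause_and_exit(1)
```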
@@ -373,30 +376,16 @@ def run(tmp, j2ver):
     if j2ver:
         del ld[-1]
 
-    cmd = (
-        "import sys, runpy; "
-        + "".join(['sys.path.insert(0, r"' + x + '"); ' for x in ld])
-        + 'runpy.run_module("copyparty", run_name="__main__")'
-    )
-    cmd = [sys.executable, "-c", cmd] + list(sys.argv[1:])
-
-    cmd = [str(x) for x in cmd]
-    msg("\n", cmd, "\n")
-    cpp = sp.Popen(cmd)
+    for x in ld:
+        sys.path.insert(0, x)
+
     try:
-        cpp.wait()
+        runpy.run_module(str("copyparty"), run_name=str("__main__"))
+    except SystemExit as ex:
+        if ex.code:
+            confirm(ex.code)
     except:
-        cpp.wait()
-
-    if cpp.returncode != 0:
-        confirm()
-
-    sys.exit(cpp.returncode)
-
-
-def bye(sig, frame):
-    if cpp is not None:
-        cpp.terminate()
+        confirm(1)
 
 
 def main():
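The hunk above is the core change to the launcher: instead of building a `python -c "...runpy..."` command line and spawning a child interpreter with `subprocess`, the unpacked directories are pushed onto `sys.path` and copyparty is executed inside the current interpreter via `runpy.run_module`, with `SystemExit` and any other exception routed to `confirm()`. The remaining sfx.py hunks follow below. A hedged sketch of that in-process pattern (the path list is a placeholder, and the stdlib module `platform` stands in for the real package name):

```python
# Hedged sketch of running a module in-process with runpy instead of spawning
# a child interpreter; "/tmp/unpacked-src" and "platform" are stand-ins.
import runpy
import sys

def run_inprocess(search_paths, module_name):
    # make the unpacked source importable, highest priority first
    for p in search_paths:
        sys.path.insert(0, p)

    try:
        # roughly equivalent to "python -m <module_name>", but in this interpreter
        runpy.run_module(module_name, run_name="__main__")
    except SystemExit as ex:
        # the module called sys.exit(); report (or re-raise) as needed
        print("module exited with code", ex.code)

if __name__ == "__main__":
    run_inprocess(["/tmp/unpacked-src"], "platform")
```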
@@ -430,8 +419,6 @@ def main():
 
     # skip 0
 
-    signal.signal(signal.SIGTERM, bye)
-
     tmp = unpack()
 
     try:
@@ -439,7 +426,7 @@ def main():
     except:
         j2ver = None
 
-    return run(tmp, j2ver)
+    run(tmp, j2ver)
 
 
 if __name__ == "__main__":
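The remaining hunks appear to belong to the VFS unit tests. They introduce a `Cfg` helper that subclasses `argparse.Namespace` and pre-fills the indexing and metadata options (`e2d`, `e2ts`, `mtp`, `mte`, ...) with defaults, so each test case only spells out the accounts and volumes it cares about. A small sketch of the same pattern with hypothetical option names:

```python
# Sketch of the Namespace-subclass trick the next hunk introduces; the option
# names here are hypothetical, not copyparty's actual flags.
from argparse import Namespace

class FakeCfg(Namespace):
    def __init__(self, **overrides):
        defaults = {"verbose": False, "accounts": [], "volumes": []}
        defaults.update(overrides)
        super(FakeCfg, self).__init__(**defaults)

cfg = FakeCfg(volumes=["a/ab/::r"])
print(cfg.verbose, cfg.volumes)  # -> False ['a/ab/::r']
```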
@@ -16,6 +16,14 @@ from copyparty.authsrv import AuthSrv
 from copyparty import util
 
 
+class Cfg(Namespace):
+    def __init__(self, a=[], v=[], c=None):
+        ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
+        ex["mtp"] = []
+        ex["mte"] = "a"
+        super(Cfg, self).__init__(a=a, v=v, c=c, **ex)
+
+
 class TestVFS(unittest.TestCase):
     def dump(self, vfs):
         print(json.dumps(vfs, indent=4, sort_keys=True, default=lambda o: o.__dict__))
@@ -35,7 +43,13 @@ class TestVFS(unittest.TestCase):
     def ls(self, vfs, vpath, uname):
         """helper for resolving and listing a folder"""
         vn, rem = vfs.get(vpath, uname, True, False)
-        return vn.ls(rem, uname)
+        r1 = vn.ls(rem, uname, False)
+        r2 = vn.ls(rem, uname, False)
+        self.assertEqual(r1, r2)
+
+        fsdir, real, virt = r1
+        real = [x[0] for x in real]
+        return fsdir, real, virt
 
     def runcmd(self, *argv):
         p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
@@ -78,7 +92,7 @@ class TestVFS(unittest.TestCase):
         finally:
             return ret
 
-    def log(self, src, msg):
+    def log(self, src, msg, c=0):
         pass
 
     def test(self):
@@ -102,7 +116,7 @@ class TestVFS(unittest.TestCase):
                         f.write(fn)
 
         # defaults
-        vfs = AuthSrv(Namespace(c=None, a=[], v=[]), self.log).vfs
+        vfs = AuthSrv(Cfg(), self.log).vfs
         self.assertEqual(vfs.nodes, {})
         self.assertEqual(vfs.vpath, "")
         self.assertEqual(vfs.realpath, td)
@@ -110,7 +124,7 @@ class TestVFS(unittest.TestCase):
         self.assertEqual(vfs.uwrite, ["*"])
 
         # single read-only rootfs (relative path)
-        vfs = AuthSrv(Namespace(c=None, a=[], v=["a/ab/::r"]), self.log).vfs
+        vfs = AuthSrv(Cfg(v=["a/ab/::r"]), self.log).vfs
         self.assertEqual(vfs.nodes, {})
         self.assertEqual(vfs.vpath, "")
         self.assertEqual(vfs.realpath, os.path.join(td, "a", "ab"))
@@ -118,9 +132,7 @@ class TestVFS(unittest.TestCase):
         self.assertEqual(vfs.uwrite, [])
 
         # single read-only rootfs (absolute path)
-        vfs = AuthSrv(
-            Namespace(c=None, a=[], v=[td + "//a/ac/../aa//::r"]), self.log
-        ).vfs
+        vfs = AuthSrv(Cfg(v=[td + "//a/ac/../aa//::r"]), self.log).vfs
        self.assertEqual(vfs.nodes, {})
        self.assertEqual(vfs.vpath, "")
        self.assertEqual(vfs.realpath, os.path.join(td, "a", "aa"))
@@ -129,7 +141,7 @@ class TestVFS(unittest.TestCase):
 
         # read-only rootfs with write-only subdirectory (read-write for k)
         vfs = AuthSrv(
-            Namespace(c=None, a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]),
+            Cfg(a=["k:k"], v=[".::r:ak", "a/ac/acb:a/ac/acb:w:ak"]),
             self.log,
         ).vfs
         self.assertEqual(len(vfs.nodes), 1)
@@ -192,7 +204,10 @@ class TestVFS(unittest.TestCase):
         self.assertEqual(list(virt), [])
 
         # admin-only rootfs with all-read-only subfolder
-        vfs = AuthSrv(Namespace(c=None, a=["k:k"], v=[".::ak", "a:a:r"]), self.log,).vfs
+        vfs = AuthSrv(
+            Cfg(a=["k:k"], v=[".::ak", "a:a:r"]),
+            self.log,
+        ).vfs
         self.assertEqual(len(vfs.nodes), 1)
         self.assertEqual(vfs.vpath, "")
         self.assertEqual(vfs.realpath, td)
@@ -211,9 +226,7 @@ class TestVFS(unittest.TestCase):
 
         # breadth-first construction
         vfs = AuthSrv(
-            Namespace(
-                c=None,
-                a=[],
+            Cfg(
                 v=[
                     "a/ac/acb:a/ac/acb:w",
                     "a:a:w",
@@ -234,7 +247,7 @@ class TestVFS(unittest.TestCase):
         self.undot(vfs, "./.././foo/..", "")
 
         # shadowing
-        vfs = AuthSrv(Namespace(c=None, a=[], v=[".::r", "b:a/ac:r"]), self.log).vfs
+        vfs = AuthSrv(Cfg(v=[".::r", "b:a/ac:r"]), self.log).vfs
 
         fsp, r1, v1 = self.ls(vfs, "", "*")
         self.assertEqual(fsp, td)
@@ -271,7 +284,7 @@ class TestVFS(unittest.TestCase):
                ).encode("utf-8")
            )
 
-        au = AuthSrv(Namespace(c=[cfg_path], a=[], v=[]), self.log)
+        au = AuthSrv(Cfg(c=[cfg_path]), self.log)
         self.assertEqual(au.user["a"], "123")
         self.assertEqual(au.user["asd"], "fgh:jkl")
         n = au.vfs